repo_name stringlengths 6 97 | path stringlengths 3 341 | text stringlengths 8 1.02M |
|---|---|---|
rezakrimi/MaxoutTextSummarization | data/process_data.py | import sys
sys.path.insert(0, '../')
import config
from data_utils import (make_conll_format, make_embedding, make_vocab,
make_vocab_from_squad, process_file)
def make_sent_dataset():
    """Build the vocab and embedding pickles for the sentence-level dataset."""
    src_path = "./para-train.txt"
    trg_path = "./tgt-train.txt"
    glove_path = "./glove.840B.300d.txt"
    embedding_path = "./embedding.pkl"
    vocab_path = "./word2idx.pkl"
    # Vocabulary first, then the GloVe embedding matrix keyed by it.
    vocab = make_vocab(src_path, trg_path, vocab_path, config.vocab_size)
    make_embedding(glove_path, embedding_path, vocab)
def make_para_dataset():
    """Pre-process SQuAD into CoNLL-style files plus vocab/embedding pickles."""
    glove_path = "./glove.840B.300d.txt"
    embedding_path = "./embedding.pkl"
    vocab_path = "./word2idx.pkl"

    train_squad = "../squad/train-v1.1.json"
    dev_squad = "../squad/dev-v1.1.json"

    train_src_file = "../squad/para-train.txt"
    train_trg_file = "../squad/tgt-train.txt"
    dev_src_file = "../squad/para-dev.txt"
    dev_trg_file = "../squad/tgt-dev.txt"
    test_src_file = "../squad/para-test.txt"
    test_trg_file = "../squad/tgt-test.txt"

    # Training split: extract examples, write CoNLL files, build vocab + embeddings.
    train_examples, counter = process_file(train_squad)
    make_conll_format(train_examples, train_src_file, train_trg_file)
    word2idx = make_vocab_from_squad(vocab_path, counter, config.vocab_size)
    make_embedding(glove_path, embedding_path, word2idx)

    # The official dev set is split in half into new dev / test sets.
    dev_test_examples, _ = process_file(dev_squad)
    half = len(dev_test_examples) // 2
    make_conll_format(dev_test_examples[:half], dev_src_file, dev_trg_file)
    make_conll_format(dev_test_examples[half:], test_src_file, test_trg_file)
# Entry point: build the paragraph-level SQuAD dataset artifacts.
if __name__ == "__main__":
    # make_sent_dataset()
    make_para_dataset()
|
rezakrimi/MaxoutTextSummarization | data_utils.py | <reponame>rezakrimi/MaxoutTextSummarization<filename>data_utils.py
import json
import pickle
import time
from collections import defaultdict
from copy import deepcopy
import numpy as np
import torch
import torch.utils.data as data
from tqdm import tqdm
import nltk
import config
# Special vocabulary tokens and the fixed indices reserved for them
# (positions 0-3 of every word2idx built in this module).
PAD_TOKEN = "<PAD>"
UNK_TOKEN = "UNKNOWN"
START_TOKEN = "<s>"
END_TOKEN = "EOS"
PAD_ID = 0
UNK_ID = 1
START_ID = 2
END_ID = 3
class SQuadDataset(data.Dataset):
    """Source/target line dataset producing id tensors with copy-mechanism
    (OOV) extensions for a pointer network."""

    def __init__(self, src_file, trg_file, max_length, word2idx, debug=False):
        # `with` closes the handles promptly; the original left them open
        # until garbage collection.
        with open(src_file, "r") as f:
            self.src = f.readlines()
        with open(trg_file, "r") as f:
            self.trg = f.readlines()

        assert len(self.src) == len(self.trg), \
            "the number of source sequence {}" " and target sequence {} must be the same" \
            .format(len(self.src), len(self.trg))

        self.max_length = max_length
        self.word2idx = word2idx
        self.num_seqs = len(self.src)

        if debug:
            # Small fixed slice for quick iteration.
            self.src = self.src[:100]
            self.trg = self.trg[:100]
            self.num_seqs = 100

    def __getitem__(self, index):
        src_seq = self.src[index]
        trg_seq = self.trg[index]
        # ext_* sequences index OOV words past the end of the vocabulary.
        src_seq, ext_src_seq, oov_lst = self.context2ids(
            src_seq, self.word2idx)
        trg_seq, ext_trg_seq = self.question2ids(
            trg_seq, self.word2idx, oov_lst)
        return src_seq, ext_src_seq, trg_seq, ext_trg_seq, oov_lst

    def __len__(self):
        return self.num_seqs

    def context2ids(self, sequence, word2idx):
        """Convert a source line to (ids, extended ids, oov word list)."""
        ids = list()
        extended_ids = list()
        oov_lst = list()
        ids.append(word2idx[START_TOKEN])
        extended_ids.append(word2idx[START_TOKEN])
        tokens = sequence.strip().split(" ")
        for token in tokens:
            if token in word2idx:
                ids.append(word2idx[token])
                extended_ids.append(word2idx[token])
            else:
                ids.append(word2idx[UNK_TOKEN])
                if token not in oov_lst:
                    oov_lst.append(token)
                # OOV ids start at len(word2idx), one per distinct OOV word.
                extended_ids.append(len(word2idx) + oov_lst.index(token))
        ids.append(word2idx[END_TOKEN])
        extended_ids.append(word2idx[END_TOKEN])

        ids = torch.tensor(ids, dtype=torch.long)
        extended_ids = torch.tensor(extended_ids, dtype=torch.long)
        return ids, extended_ids, oov_lst

    def question2ids(self, sequence, word2idx, oov_lst):
        """Convert a target line to (ids, extended ids), reusing the source's
        OOV list so copied words share the same extended ids."""
        ids = list()
        extended_ids = list()
        ids.append(word2idx[START_TOKEN])
        extended_ids.append(word2idx[START_TOKEN])
        tokens = sequence.strip().split(" ")
        for token in tokens:
            if token in word2idx:
                ids.append(word2idx[token])
                extended_ids.append(word2idx[token])
            else:
                ids.append(word2idx[UNK_TOKEN])
                if token in oov_lst:
                    extended_ids.append(len(word2idx) + oov_lst.index(token))
                else:
                    extended_ids.append(word2idx[UNK_TOKEN])
        ids.append(word2idx[END_TOKEN])
        extended_ids.append(word2idx[END_TOKEN])

        ids = torch.Tensor(ids)
        extended_ids = torch.Tensor(extended_ids)
        return ids, extended_ids
def collate_fn(data):
    """Batchify: sort by source length (descending) and zero-pad each field."""
    def pad_batch(seqs):
        # Pad every sequence with zeros up to the longest one in the batch.
        seq_lens = [len(s) for s in seqs]
        batch = torch.zeros(len(seqs), max(seq_lens)).long()
        for row, s in enumerate(seqs):
            batch[row, :seq_lens[row]] = s[:seq_lens[row]]
        return batch

    data.sort(key=lambda item: len(item[0]), reverse=True)
    src_seqs, ext_src_seqs, trg_seqs, ext_trg_seqs, oov_lst = zip(*data)
    return (pad_batch(src_seqs), pad_batch(ext_src_seqs),
            pad_batch(trg_seqs), pad_batch(ext_trg_seqs), oov_lst)
class SQuadDatasetWithTag(data.Dataset):
    """CoNLL-style dataset: source files carry per-token BIO answer tags
    (blank line = sentence boundary); targets are raw question lines."""

    def __init__(self, src_file, trg_file, max_length, word2idx, debug=False):
        self.srcs = []
        self.tags = []
        # `with` closes the handles promptly; the original left them open
        # until garbage collection.
        with open(src_file, "r") as f:
            lines = f.readlines()
        sentence, tags = [], []
        self.entity2idx = {"O": 0, "B_ans": 1, "I_ans": 2}
        for line in lines:
            line = line.strip()
            if len(line) == 0:
                # Sentence boundary: wrap with START/END and matching "O" tags.
                sentence.insert(0, START_TOKEN)
                sentence.append(END_TOKEN)
                self.srcs.append(sentence)
                tags.insert(0, self.entity2idx["O"])
                tags.append(self.entity2idx["O"])
                self.tags.append(tags)
                assert len(sentence) == len(tags)
                sentence, tags = [], []
            else:
                tokens = line.split("\t")
                word, tag = tokens[0], tokens[1]
                sentence.append(word)
                tags.append(self.entity2idx[tag])
        # NOTE(review): a trailing sentence without a final blank line is
        # silently dropped — confirm the data files always end with one.
        with open(trg_file, "r") as f:
            self.trgs = f.readlines()

        assert len(self.srcs) == len(self.trgs), \
            "the number of source sequence {}" " and target sequence {} must be the same" \
            .format(len(self.srcs), len(self.trgs))

        self.max_length = max_length
        self.word2idx = word2idx
        self.num_seqs = len(self.srcs)

        if debug:
            # Small fixed slice for quick iteration.
            self.srcs = self.srcs[:100]
            self.trgs = self.trgs[:100]
            self.tags = self.tags[:100]
            self.num_seqs = 100

    def __getitem__(self, index):
        src_seq = self.srcs[index]
        trg_seq = self.trgs[index]
        tag_seq = self.tags[index]
        tag_seq = torch.Tensor(tag_seq[:self.max_length])
        src_seq, ext_src_seq, oov_lst = self.context2ids(
            src_seq, self.word2idx)
        trg_seq, ext_trg_seq = self.question2ids(
            trg_seq, self.word2idx, oov_lst)
        return src_seq, ext_src_seq, trg_seq, ext_trg_seq, oov_lst, tag_seq

    def __len__(self):
        return self.num_seqs

    def context2ids(self, tokens, word2idx):
        """Token list -> (ids, extended ids, oov list), truncated to max_length."""
        ids = list()
        extended_ids = list()
        oov_lst = list()
        # START and END token is already in tokens lst
        for token in tokens:
            if token in word2idx:
                ids.append(word2idx[token])
                extended_ids.append(word2idx[token])
            else:
                ids.append(word2idx[UNK_TOKEN])
                if token not in oov_lst:
                    oov_lst.append(token)
                # OOV ids start at len(word2idx), one per distinct OOV word.
                extended_ids.append(len(word2idx) + oov_lst.index(token))
            if len(ids) == self.max_length:
                break
        ids = torch.Tensor(ids)
        extended_ids = torch.Tensor(extended_ids)
        return ids, extended_ids, oov_lst

    def question2ids(self, sequence, word2idx, oov_lst):
        """Question line -> (ids, extended ids) sharing the source OOV ids."""
        ids = list()
        extended_ids = list()
        ids.append(word2idx[START_TOKEN])
        extended_ids.append(word2idx[START_TOKEN])
        tokens = sequence.strip().split(" ")
        for token in tokens:
            if token in word2idx:
                ids.append(word2idx[token])
                extended_ids.append(word2idx[token])
            else:
                ids.append(word2idx[UNK_TOKEN])
                if token in oov_lst:
                    extended_ids.append(len(word2idx) + oov_lst.index(token))
                else:
                    extended_ids.append(word2idx[UNK_TOKEN])
        ids.append(word2idx[END_TOKEN])
        extended_ids.append(word2idx[END_TOKEN])

        ids = torch.Tensor(ids)
        extended_ids = torch.Tensor(extended_ids)
        return ids, extended_ids
def collate_fn_tag(data):
    """Batchify tagged examples: sort by source length (descending), zero-pad,
    and check token/tag alignment."""
    def pad_batch(seqs):
        # Pad every sequence with zeros up to the longest one in the batch.
        seq_lens = [len(s) for s in seqs]
        batch = torch.zeros(len(seqs), max(seq_lens)).long()
        for row, s in enumerate(seqs):
            batch[row, :seq_lens[row]] = s[:seq_lens[row]]
        return batch

    data.sort(key=lambda item: len(item[0]), reverse=True)
    src_seqs, ext_src_seqs, trg_seqs, ext_trg_seqs, oov_lst, tag_seqs = zip(*data)
    src_seqs = pad_batch(src_seqs)
    ext_src_seqs = pad_batch(ext_src_seqs)
    trg_seqs = pad_batch(trg_seqs)
    ext_trg_seqs = pad_batch(ext_trg_seqs)
    tag_seqs = pad_batch(tag_seqs)
    assert src_seqs.size(1) == tag_seqs.size(
        1), "length of tokens and tags should be equal"
    return src_seqs, ext_src_seqs, trg_seqs, ext_trg_seqs, tag_seqs, oov_lst
def get_loader(src_file, trg_file, word2idx,
               batch_size, use_tag=False, debug=False, shuffle=False):
    """Build a DataLoader over CoNLL-formatted src/trg files.

    NOTE(review): `use_tag` is accepted but ignored — the tagged dataset and
    collate_fn_tag are always used, so every batch contains tag sequences
    regardless of the flag.  Confirm whether a plain SQuadDataset path was
    intended for use_tag=False (honoring it would change the batch tuple
    arity from 6 to 5 for existing callers).
    """
    dataset = SQuadDatasetWithTag(src_file, trg_file, config.max_len,
                                  word2idx, debug)
    dataloader = data.DataLoader(dataset=dataset,
                                 batch_size=batch_size,
                                 shuffle=shuffle,
                                 collate_fn=collate_fn_tag)
    return dataloader
def make_vocab(src_file, trg_file, output_file, max_vocab_size):
    """Count whitespace tokens in the source and target files and build a
    word -> index mapping.

    Indices 0-3 are reserved for the special tokens; the remaining slots are
    filled with the most frequent tokens (stable by first occurrence on ties)
    until the vocab holds `max_vocab_size` entries.  The mapping is pickled
    to `output_file` and returned.
    """
    word2idx = {
        PAD_TOKEN: 0,
        UNK_TOKEN: 1,
        START_TOKEN: 2,
        END_TOKEN: 3,
    }
    counter = dict()
    # The original duplicated the same counting loop for both files;
    # factored into one loop over the two paths.
    for path in (src_file, trg_file):
        with open(path, "r", encoding="utf-8") as f:
            for line in f:
                for token in line.split():
                    counter[token] = counter.get(token, 0) + 1
    sorted_vocab = sorted(counter.items(), key=lambda kv: kv[1], reverse=True)
    for i, (word, _) in enumerate(sorted_vocab, start=4):
        if i == max_vocab_size:
            break
        word2idx[word] = i
    with open(output_file, "wb") as f:
        pickle.dump(word2idx, f)
    return word2idx
def make_vocab_from_squad(output_file, counter, max_vocab_size):
    """Build word -> index from a token-frequency counter and pickle it."""
    specials = [PAD_TOKEN, UNK_TOKEN, START_TOKEN, END_TOKEN]
    word2idx = {tok: i for i, tok in enumerate(specials)}
    # Most frequent first; ties keep the counter's insertion order.
    by_freq = sorted(counter.items(), key=lambda kv: kv[1], reverse=True)
    for idx, (token, _) in enumerate(by_freq, start=4):
        if len(word2idx) == max_vocab_size:
            break
        word2idx[token] = idx
    with open(output_file, "wb") as f:
        pickle.dump(word2idx, f)
    return word2idx
def make_vocab_from_dm(output_file, counter, max_vocab_size):
    """Identical to make_vocab_from_squad: counter -> pickled word2idx dict."""
    word2idx = {PAD_TOKEN: 0, UNK_TOKEN: 1, START_TOKEN: 2, END_TOKEN: 3}
    ranked = sorted(counter.items(), key=lambda kv: kv[1], reverse=True)
    next_idx = 4
    for token, _ in ranked:
        if len(word2idx) == max_vocab_size:
            break
        word2idx[token] = next_idx
        next_idx += 1
    with open(output_file, "wb") as f:
        pickle.dump(word2idx, f)
    return word2idx
def make_embedding(embedding_file, output_file, word2idx):
    """Build a [vocab_size, 300] float32 embedding matrix from a GloVe file.

    Words missing from GloVe fall back to GloVe's UNK_TOKEN vector when it
    exists, otherwise their row stays all-zero (the original raised KeyError
    if UNK_TOKEN itself was absent from the embedding file).  The matrix is
    pickled to `output_file` and returned.
    """
    word2embedding = dict()
    # `with` ensures the (large) GloVe file handle is closed promptly.
    with open(embedding_file, "r", encoding="utf-8") as f:
        lines = f.readlines()
    for line in tqdm(lines):
        word_vec = line.split(" ")
        word = word_vec[0]
        vec = np.array(word_vec[1:], dtype=np.float32)
        word2embedding[word] = vec
    embedding = np.zeros((len(word2idx), 300), dtype=np.float32)
    unk_vec = word2embedding.get(UNK_TOKEN)
    num_oov = 0
    for word, idx in word2idx.items():
        if word in word2embedding:
            embedding[idx] = word2embedding[word]
        else:
            if unk_vec is not None:
                embedding[idx] = unk_vec
            # else: leave the all-zero row from np.zeros above.
            num_oov += 1
    print("num OOV : {}".format(num_oov))
    with open(output_file, "wb") as f:
        pickle.dump(embedding, f)
    return embedding
def make_conll_format2(examples, src_file, trg_file):
    """Write examples to CoNLL-style files with every token tagged 'O'.

    Unlike make_conll_format, no answer span is marked; tokens that already
    carry a tab-separated tag are written through unchanged.  Uses `with`
    so the output files are closed even if an example raises (the original
    only closed them on the success path).
    """
    with open(src_file, "w") as src_fw, open(trg_file, "w") as trg_fw:
        for example in tqdm(examples):
            c_tokens = example["context_tokens"]
            if "\n" in c_tokens:
                # Diagnostic: a raw newline token would corrupt the format.
                print(c_tokens)
                print("new line")
            copied_tokens = deepcopy(c_tokens)
            q_tokens = example["ques_tokens"]
            for token in copied_tokens:
                if "\t" in token:
                    src_fw.write(token + "\n")
                else:
                    src_fw.write(token + "\t" + "O" + "\n")
            src_fw.write("\n")
            question = " ".join(q_tokens)
            trg_fw.write(question + "\n")
def time_since(t):
    """Return the number of seconds elapsed since timestamp `t`."""
    now = time.time()
    return now - t
def progress_bar(completed, total, step=5):
    """Render a textual progress bar such as '[=====>         ]'."""
    percent = int((completed / total) * 100)
    out = '[='
    arrow_done = False
    for threshold in range(step, 101, step):
        if arrow_done:
            out += ' '
        elif percent // threshold != 0:
            out += '='
        else:
            # Replace the trailing '=' with the arrow head.
            out = out[:-1] + '>'
            arrow_done = True
    if percent == 100:
        # Full bar: swap the head back for a final '='.
        out = out[:-1] + '='
    return out + ']'
def user_friendly_time(s):
    """Format a duration in seconds as 's', 'm s', 'h m s' or 'd h m s'."""
    s = int(s)
    if s < 60:
        return "{}s".format(s)
    m, s = divmod(s, 60)
    if m < 60:
        return "{}m {}s".format(m, s)
    h, m = divmod(m, 60)
    if h < 24:
        return "{}h {}m {}s".format(h, m, s)
    d, h = divmod(h, 24)
    return "{}d {}h {}m {}s".format(d, h, m, s)
def eta(start, completed, total):
    """Estimate remaining time as a human-readable string."""
    # Average seconds per completed step, scaled by what is left.
    elapsed = time_since(start)
    per_step = elapsed / completed
    remaining = per_step * (total - completed)
    return user_friendly_time(remaining)
def outputids2words(id_list, idx2word, article_oovs=None):
    """Map model output ids back to words.

    :param id_list: list of int indices
    :param idx2word: dict mapping index -> word for the fixed vocabulary
    :param article_oovs: per-example list of OOV words; ids >= len(idx2word)
        index into this list (copy mechanism)
    :return: list of words
    :raises ValueError: if an id points past the end of `article_oovs`
    """
    words = []
    for idx in id_list:
        try:
            word = idx2word[idx]
        except KeyError:
            if article_oovs is not None:
                article_oov_idx = idx - len(idx2word)
                try:
                    word = article_oovs[article_oov_idx]
                except IndexError:
                    # Bug fix: the original only printed here, leaving `word`
                    # unbound (NameError) or stale from a previous iteration.
                    raise ValueError(
                        "there's no such a word in extended vocab: id {}".format(idx))
            else:
                word = idx2word[UNK_ID]
        words.append(word)
    return words
def convert_idx(text, tokens):
    """Return (start, end) character spans of `tokens` within `text`.

    Tokens are located left-to-right, so repeated tokens map to successive
    occurrences.  Raises ValueError when a token cannot be found (the
    original printed and raised a bare Exception; ValueError is a subclass
    of Exception so existing handlers still match).
    """
    current = 0
    spans = []
    for token in tokens:
        current = text.find(token, current)
        if current < 0:
            raise ValueError("Token {} cannot be found".format(token))
        spans.append((current, current + len(token)))
        current += len(token)
    return spans
def word_tokenize(tokens):
    """Tokenize raw text with NLTK, normalizing ``/'' quote pairs to '"'.

    `tokens` is actually a raw text string (the parameter name is
    misleading); nltk.word_tokenize emits LaTeX-style quote tokens, which
    are mapped back to plain double quotes here.
    """
    return [token.replace("''", '"').replace("``", '"') for token in nltk.word_tokenize(tokens)]
def get_truncated_context(context, answer_text, answer_end, parser):
    """Truncate `context` to the sentences up to and including the one
    containing the answer span, lower-cased and re-joined with spaces.

    `parser` is assumed to be a CoreNLP/stanza-style pipeline whose result
    exposes `.sentences`, each with `.tokens` carrying `.text` — TODO confirm.
    """
    # get sentences up to the sentence that contains answer span
    doc = parser(context)
    sentences = doc.sentences  # list of Sentence objects
    sents_text = []
    for sentence in sentences:
        sent = []
        for token in sentence.tokens:
            sent.append(token.text)
        sents_text.append(" ".join(sent))
    sentences = sents_text
    stop_idx = -1
    for idx, sentence in enumerate(sentences):
        # The stopping sentence must contain the answer text AND enough
        # characters must have accumulated to cover answer_end.
        if answer_text in sentence:
            chars = " ".join(sentences[:idx + 1])
            if len(chars) >= answer_end:
                stop_idx = idx
                break
    if stop_idx == -1:
        # NOTE(review): when the answer is never matched this only logs, then
        # sentences[:stop_idx + 1] below is sentences[:0] and an EMPTY string
        # is returned — confirm callers handle that case.
        print(answer_text)
        print(context)
    truncated_sentences = sentences[:stop_idx + 1]
    truncated_context = " ".join(truncated_sentences).lower()
    return truncated_context
def tokenize(doc, parser):
    """Return the lower-cased token texts of `doc`, using `parser` for
    sentence splitting."""
    return [token.text.lower()
            for sent in parser(doc).sentences
            for token in sent.tokens]
def process_file(file_name):
    """Parse a SQuAD-format JSON file into question-generation examples.

    Returns (examples, counter): each example holds the tokenized context and
    question, answer-span token indices (y1s/y2s, one pair per answer) and
    the raw answer texts; `counter` maps token -> frequency for vocabulary
    building.
    """
    counter = defaultdict(lambda: 0)
    examples = list()
    total = 0
    with open(file_name, "r") as f:
        source = json.load(f)
        articles = source["data"]
        for article in tqdm(articles):
            for para in article["paragraphs"]:
                # Normalize LaTeX-style quotes before tokenizing.
                context = para["context"].replace(
                    "''", '" ').replace("``", '" ').lower()
                context_tokens = word_tokenize(context)
                spans = convert_idx(context, context_tokens)
                for qa in para["qas"]:
                    total += 1
                    ques = qa["question"].replace(
                        "''", '" ').replace("``", '" ').lower()
                    ques_tokens = word_tokenize(ques)
                    for token in ques_tokens:
                        counter[token] += 1
                    y1s, y2s = [], []
                    answer_texts = []
                    for answer in qa["answers"]:
                        answer_text = answer["text"]
                        answer_start = answer["answer_start"]
                        answer_end = answer_start + len(answer_text)
                        answer_texts.append(answer_text)
                        answer_span = []
                        # NOTE(review): context tokens are incremented by
                        # len(qas) once per *answer* of every question; the
                        # usual scheme counts them once per paragraph —
                        # confirm the intended frequency weighting.
                        for token in context_tokens:
                            counter[token] += len(para["qas"])
                        for idx, span in enumerate(spans):
                            # Keep token indices whose character span overlaps
                            # the answer's character range.
                            if not (answer_end <= span[0] or answer_start >= span[1]):
                                answer_span.append(idx)
                        # Assumes the answer overlaps at least one token span;
                        # raises IndexError otherwise — TODO confirm inputs.
                        y1, y2 = answer_span[0], answer_span[-1]
                        y1s.append(y1)
                        y2s.append(y2)
                    example = {"context_tokens": context_tokens, "ques_tokens": ques_tokens,
                               "y1s": y1s, "y2s": y2s, "answers": answer_texts}
                    examples.append(example)
    return examples, counter
def make_conll_format(examples, src_file, trg_file):
    """Write examples as CoNLL-style (token<TAB>tag) source files plus raw
    question target lines; the first answer span is tagged B_ans/I_ans and
    every other token gets 'O'.

    Uses `with` so the output files are closed even if an example raises
    (the original only closed them on the success path).
    """
    with open(src_file, "w") as src_fw, open(trg_file, "w") as trg_fw:
        for example in tqdm(examples):
            c_tokens = example["context_tokens"]
            if "\n" in c_tokens:
                # Diagnostic: a raw newline token would corrupt the format.
                print(c_tokens)
                print("new line")
            copied_tokens = deepcopy(c_tokens)
            q_tokens = example["ques_tokens"]
            # always select the first candidate answer
            start = example["y1s"][0]
            end = example["y2s"][0]
            for idx in range(start, end + 1):
                token = copied_tokens[idx]
                tag = "B_ans" if idx == start else "I_ans"
                copied_tokens[idx] = token + "\t" + tag
            for token in copied_tokens:
                if "\t" in token:
                    src_fw.write(token + "\n")
                else:
                    src_fw.write(token + "\t" + "O" + "\n")
            src_fw.write("\n")
            question = " ".join(q_tokens)
            trg_fw.write(question + "\n")
def split_dev(input_file, dev_file, test_file):
    """Split the original SQuAD dev JSON 50/50 into new dev and test files."""
    with open(input_file) as f:
        source = json.load(f)
    articles = source["data"]
    # split the original SQuAD dev set into new dev / test set
    half = int(len(articles) * 0.5)
    with open(dev_file, "w") as f:
        json.dump({"data": articles[:half]}, f)
    with open(test_file, "w") as f:
        json.dump({"data": articles[half:]}, f)
|
rezakrimi/MaxoutTextSummarization | model.py | import config
import torch
import torch.nn as nn
import torch.nn.functional as F
from torch.nn.utils.rnn import pad_packed_sequence, pack_padded_sequence
from torch_scatter import scatter_max
from data_utils import UNK_ID
INF = 1e12
class Encoder(nn.Module):
    """BiLSTM encoder over word + BIO answer-tag embeddings with a gated
    self-attention layer over its outputs."""

    def __init__(self, embeddings, vocab_size, embedding_size, hidden_size, num_layers, dropout):
        super(Encoder, self).__init__()
        self.embedding = nn.Embedding(vocab_size, embedding_size)
        # 3 answer tags (O / B_ans / I_ans), each embedded into 3 dims and
        # concatenated onto the word embedding.
        self.tag_embedding = nn.Embedding(3, 3)
        lstm_input_size = embedding_size + 3
        if embeddings is not None:
            # Replace the random table with pretrained vectors.
            self.embedding = nn.Embedding(vocab_size, embedding_size). \
                from_pretrained(embeddings, freeze=config.freeze_embedding)
        self.num_layers = num_layers
        if self.num_layers == 1:
            # nn.LSTM warns if dropout is set with a single layer.
            dropout = 0.0
        self.lstm = nn.LSTM(lstm_input_size, hidden_size, dropout=dropout,
                            num_layers=num_layers, bidirectional=True, batch_first=True)
        self.linear_trans = nn.Linear(2 * hidden_size, 2 * hidden_size)
        self.update_layer = nn.Linear(
            4 * hidden_size, 2 * hidden_size, bias=False)
        self.gate = nn.Linear(4 * hidden_size, 2 * hidden_size, bias=False)

    def gated_self_attn(self, queries, memories, mask):
        """Self-attention over the sequence, folded back in via a sigmoid gate."""
        # queries: [b,t,d]
        # memories: [b,t,d]
        # mask: [b,t]
        energies = torch.matmul(queries, memories.transpose(1, 2))  # [b, t, t]
        mask = mask.unsqueeze(1)
        # Large negative fill so padded positions get ~0 attention weight.
        energies = energies.masked_fill(mask == 0, value=-1e12)
        scores = F.softmax(energies, dim=2)
        context = torch.matmul(scores, queries)
        inputs = torch.cat([queries, context], dim=2)
        # Gated update: g * tanh(W[x; ctx]) + (1 - g) * x
        f_t = torch.tanh(self.update_layer(inputs))
        g_t = torch.sigmoid(self.gate(inputs))
        updated_output = g_t * f_t + (1 - g_t) * queries
        return updated_output

    def forward(self, src_seq, src_len, tag_seq):
        """Encode a batch.

        src_seq: [b, t] token ids (0 = PAD); src_len: per-example lengths;
        tag_seq: [b, t] answer-tag ids.  Returns (outputs [b, t, 2h],
        (h, c) reshaped so both directions are concatenated per layer).
        """
        total_length = src_seq.size(1)
        embedded = self.embedding(src_seq)
        tag_embedded = self.tag_embedding(tag_seq)
        embedded = torch.cat((embedded, tag_embedded), dim=2)
        packed = pack_padded_sequence(embedded,
                                      src_len,
                                      batch_first=True,
                                      enforce_sorted=False)
        outputs, states = self.lstm(packed)  # states : tuple of [4, b, d]
        outputs, _ = pad_packed_sequence(outputs,
                                         batch_first=True,
                                         total_length=total_length)  # [b, t, d]
        h, c = states
        # self attention
        # Non-pad positions have nonzero ids, so sign() yields the 0/1 mask.
        mask = torch.sign(src_seq)
        memories = self.linear_trans(outputs)
        outputs = self.gated_self_attn(outputs, memories, mask)

        _, b, d = h.size()
        # NOTE(review): the view below hard-codes num_layers == 2
        # (2 layers x 2 directions) — confirm config.num_layers is always 2.
        h = h.view(2, 2, b, d)  # [n_layers, bi, b, d]
        h = torch.cat((h[:, 0, :, :], h[:, 1, :, :]), dim=-1)
        c = c.view(2, 2, b, d)
        c = torch.cat((c[:, 0, :, :], c[:, 1, :, :]), dim=-1)
        concat_states = (h, c)
        return outputs, concat_states
class Decoder(nn.Module):
    """Attention LSTM decoder with an optional maxout-pointer copy mechanism."""

    def __init__(self, embeddings, vocab_size,
                 embedding_size, hidden_size, num_layers, dropout):
        super(Decoder, self).__init__()
        self.vocab_size = vocab_size
        self.embedding = nn.Embedding(vocab_size, embedding_size)
        if embeddings is not None:
            # Replace the random table with pretrained vectors.
            self.embedding = nn.Embedding(vocab_size, embedding_size). \
                from_pretrained(embeddings, freeze=config.freeze_embedding)

        if num_layers == 1:
            # nn.LSTM warns if dropout is set with a single layer.
            dropout = 0.0
        self.encoder_trans = nn.Linear(hidden_size, hidden_size)
        self.reduce_layer = nn.Linear(
            embedding_size + hidden_size, embedding_size)
        self.lstm = nn.LSTM(embedding_size, hidden_size, batch_first=True,
                            num_layers=num_layers, bidirectional=False, dropout=dropout)
        self.concat_layer = nn.Linear(2 * hidden_size, hidden_size)
        self.logit_layer = nn.Linear(hidden_size, vocab_size)

    @staticmethod
    def attention(query, memories, mask):
        """Dot-product attention; returns (context [b,1,d], raw energy [b,t])."""
        # query : [b, 1, d]
        energy = torch.matmul(query, memories.transpose(1, 2))  # [b, 1, t]
        # Mask padded encoder positions with a large negative value.
        energy = energy.squeeze(1).masked_fill(mask == 0, value=-1e12)
        attn_dist = F.softmax(energy, dim=1).unsqueeze(dim=1)  # [b, 1, t]
        context_vector = torch.matmul(attn_dist, memories)  # [b, 1, d]
        return context_vector, energy

    def get_encoder_features(self, encoder_outputs):
        # Project encoder outputs once; reused as attention memories.
        return self.encoder_trans(encoder_outputs)

    def forward(self, trg_seq, ext_src_seq, init_states, encoder_outputs, encoder_mask):
        """Teacher-forced decoding: returns per-step logits [b, t, |V|(+oov)]."""
        # trg_seq : [b,t]
        # init_states : [2,b,d]
        # encoder_outputs : [b,t,d]
        # init_states : a tuple of [2, b, d]
        device = trg_seq.device
        batch_size, max_len = trg_seq.size()

        hidden_size = encoder_outputs.size(-1)
        memories = self.get_encoder_features(encoder_outputs)
        logits = []
        # init decoder hidden states and context vector
        prev_states = init_states
        prev_context = torch.zeros((batch_size, 1, hidden_size))
        prev_context = prev_context.to(device)
        for i in range(max_len):
            y_i = trg_seq[:, i].unsqueeze(1)  # [b, 1]
            embedded = self.embedding(y_i)  # [b, 1, d]
            # Input feeding: fold the previous attention context into the input.
            lstm_inputs = self.reduce_layer(
                torch.cat([embedded, prev_context], 2))
            output, states = self.lstm(lstm_inputs, prev_states)
            # encoder-decoder attention
            context, energy = self.attention(output, memories, encoder_mask)
            concat_input = torch.cat((output, context), dim=2).squeeze(dim=1)
            logit_input = torch.tanh(self.concat_layer(concat_input))
            logit = self.logit_layer(logit_input)  # [b, |V|]

            # maxout pointer network
            if config.use_pointer:
                # Extend the vocab logits with per-batch OOV slots, then take
                # the MAX attention energy per extended id (scatter_max), the
                # "maxout pointer" of Zhao et al.
                num_oov = max(torch.max(ext_src_seq - self.vocab_size + 1), 0)
                zeros = torch.zeros((batch_size, num_oov),
                                    device=config.device)
                extended_logit = torch.cat([logit, zeros], dim=1)
                out = torch.zeros_like(extended_logit) - INF
                out, _ = scatter_max(energy, ext_src_seq, out=out)
                out = out.masked_fill(out == -INF, 0)
                logit = extended_logit + out
                # NOTE(review): fills positions equal to 0 with -INF here,
                # while decode() below applies the inverse fill — confirm
                # which direction is intended.
                logit = logit.masked_fill(logit == 0, -INF)

            logits.append(logit)
            # update prev state and context
            prev_states = states
            prev_context = context
        logits = torch.stack(logits, dim=1)  # [b, t, |V|]
        return logits

    def decode(self, y, ext_x, prev_states, prev_context, encoder_features, encoder_mask):
        """Single inference step; returns (logit, new states, new context)."""
        # forward one step lstm
        # y : [b]
        embedded = self.embedding(y.unsqueeze(1))
        lstm_inputs = self.reduce_layer(torch.cat([embedded, prev_context], 2))
        output, states = self.lstm(lstm_inputs, prev_states)

        context, energy = self.attention(output,
                                         encoder_features,
                                         encoder_mask)
        concat_input = torch.cat((output, context), 2).squeeze(1)
        logit_input = torch.tanh(self.concat_layer(concat_input))
        logit = self.logit_layer(logit_input)  # [b, |V|]

        if config.use_pointer:
            batch_size = y.size(0)
            num_oov = max(torch.max(ext_x - self.vocab_size + 1), 0)
            zeros = torch.zeros((batch_size, num_oov), device=config.device)
            extended_logit = torch.cat([logit, zeros], dim=1)
            out = torch.zeros_like(extended_logit) - INF
            out, _ = scatter_max(energy, ext_x, out=out)
            out = out.masked_fill(out == -INF, 0)
            logit = extended_logit + out
            # NOTE(review): inverse of forward()'s final fill — see note there.
            logit = logit.masked_fill(logit == -INF, 0)
            # forcing UNK prob 0
            logit[:, UNK_ID] = -INF

        return logit, states, context
class Seq2seq(nn.Module):
    """Encoder-decoder wrapper wiring the config-driven Encoder and Decoder."""

    def __init__(self, embedding=None):
        super(Seq2seq, self).__init__()
        self.encoder = Encoder(embedding,
                               config.vocab_size,
                               config.embedding_size,
                               config.hidden_size,
                               config.num_layers,
                               config.dropout)
        # Decoder hidden size is 2*hidden_size to match the BiLSTM outputs.
        self.decoder = Decoder(embedding, config.vocab_size,
                               config.embedding_size,
                               2 * config.hidden_size,
                               config.num_layers,
                               config.dropout)

    def forward(self, src_seq, tag_seq, ext_src_seq, trg_seq):
        """Teacher-forced forward pass returning per-step logits."""
        # Non-pad positions have nonzero ids, so sign() yields the mask.
        enc_mask = torch.sign(src_seq)
        src_len = torch.sum(enc_mask, 1)
        enc_outputs, enc_states = self.encoder(src_seq, src_len, tag_seq)
        # Drop the last target token: the decoder predicts the next token
        # for each prefix (standard teacher-forcing shift).
        sos_trg = trg_seq[:, :-1].contiguous()

        logits = self.decoder(sos_trg, ext_src_seq,
                              enc_states, enc_outputs, enc_mask)
        return logits
|
rezakrimi/MaxoutTextSummarization | data/dm_process.py | <reponame>rezakrimi/MaxoutTextSummarization<filename>data/dm_process.py
import sys
import pickle
from collections import defaultdict
from random import shuffle
sys.path.insert(0, '../')
import config
from data_utils import (make_conll_format2, make_embedding, make_vocab,
make_vocab_from_dm, process_file)
def make_sent_dataset():
    """Build the vocab and embedding pickles for the sentence-level dataset."""
    src_path = "./para-train.txt"
    trg_path = "./tgt-train.txt"
    glove_path = "./glove.840B.300d.txt"
    embedding_path = "./embedding.pkl"
    vocab_path = "./word2idx.pkl"
    # Vocabulary first, then the GloVe embedding matrix keyed by it.
    vocab = make_vocab(src_path, trg_path, vocab_path, config.vocab_size)
    make_embedding(glove_path, embedding_path, vocab)
def make_para_dataset():
    """Build the paragraph-level dataset artifacts from pickled CNN examples.

    Loads pre-tokenized CNN examples, splits 92/8 into train vs dev+test,
    writes CoNLL-style files and builds the vocab + embedding pickles.
    NOTE(review): output paths still point into ../squad/ even though the
    data is CNN/DailyMail — confirm that is intentional.
    """
    embedding_file = "./glove.840B.300d.txt"
    embedding = "./embedding.pkl"
    src_word2idx_file = "./word2idx.pkl"

    # Unused leftovers from the SQuAD pipeline.
    train_squad = "../squad/train-v1.1.json"
    dev_squad = "../squad/dev-v1.1.json"

    train_src_file = "../squad/para-train.txt"
    train_trg_file = "../squad/tgt-train.txt"
    dev_src_file = "../squad/para-dev.txt"
    dev_trg_file = "../squad/tgt-dev.txt"
    test_src_file = "../squad/para-test.txt"
    test_trg_file = "../squad/tgt-test.txt"

    # pre-process training data
    # train_examples have passage question pairs, counter is the word frequency across all passages
    # question and passages are represented as a list of tokens
    with open('../cnn-dailymail/cnn_examples.pkl', 'rb') as f:
        cnn = pickle.load(f)
    print('loaded cnn')
    # with open('../cnn-dailymail/dm_examples.pkl','rb') as f:
    #     dm = pickle.load(f)
    # print('loaded dailymail')

    counter = defaultdict(int)
    examples = cnn
    shuffle(examples)
    # 92% train / 8% held out for dev+test.
    train_size = int(len(examples) * 0.92)
    train_examples = examples[:train_size]
    print(len(train_examples))
    dev_test_examples = examples[train_size:]
    print(len(train_examples), len(dev_test_examples))
    # Vocabulary frequencies come from training contexts only.
    for e in train_examples:
        for token in e['context_tokens']:
            counter[token] += 1
    make_conll_format2(train_examples, train_src_file, train_trg_file)
    # make a dict mapping word to unique index
    word2idx = make_vocab_from_dm(src_word2idx_file, counter, config.vocab_size)
    # makes a dict mapping words from all passages to embedding vectors
    make_embedding(embedding_file, embedding, word2idx)

    # split dev into dev and test
    # random.shuffle(dev_test_examples)
    num_dev = len(dev_test_examples) // 2
    dev_examples = dev_test_examples[:num_dev]
    test_examples = dev_test_examples[num_dev:]
    make_conll_format2(dev_examples, dev_src_file, dev_trg_file)
    make_conll_format2(test_examples, test_src_file, test_trg_file)
# Entry point: build the paragraph-level CNN/DailyMail dataset artifacts.
if __name__ == "__main__":
    # make_sent_dataset()
    make_para_dataset()
|
kaistshadow/shadow-plugin-bitcoin | tmp/tx-generator/generate_tx.py | <reponame>kaistshadow/shadow-plugin-bitcoin
#!/usr/bin/python
import ecdsa
import ecdsa.der
import ecdsa.util
import hashlib
import random
import utils
import keyUtils
import struct
import socket
import time
import binascii, hmac
from bitcoin import *
#### pybitcointools code start
# secp256k1 domain parameters from pybitcointools: field prime P, group
# order N, curve coefficient A (y^2 = x^3 + A*x + 7) and generator G.
P = 2**256-2**32-2**9-2**8-2**7-2**6-2**4-1
N = 115792089237316195423570985008687907852837564279074904382605163141518161494337
A = 0
Gx = 55066263022277343669578718895168534326250603453777594175500187360389116729240
Gy = 32670510020758816978083085130507043184471273380659243275938904335757337482424
G = (Gx,Gy)
# (0, 0) stands in for the point at infinity in this affine-coordinate code.
def isinf(p): return p[0] == 0 and p[1] == 0
def inv(a, n):
    """Modular inverse of `a` mod `n` via the extended Euclidean algorithm.

    Fixed to use floor division: the original `high/low` becomes float
    division under Python 3 and silently corrupts the arithmetic; `//`
    behaves identically under Python 2 for ints.
    """
    lm, hm = 1, 0
    low, high = a % n, n
    while low > 1:
        r = high // low
        nm, new = hm - lm * r, high - low * r
        lm, low, hm, high = nm, new, lm, low
    return lm % n
def get_code_string(base):
    """Return the digit alphabet for the given base (2/10/16/58/256)."""
    alphabets = {
        2: '01',
        10: '0123456789',
        16: "0123456789abcdef",
        58: "123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz",
        256: ''.join([chr(x) for x in range(256)]),
    }
    try:
        return alphabets[base]
    except KeyError:
        raise ValueError("Invalid base!")
def decode(string, base):
    """Interpret `string` as a number written in `base` and return the int."""
    base = int(base)
    alphabet = get_code_string(base)
    if base == 16:
        # Hex digits in the alphabet are lower-case.
        string = string.lower()
    result = 0
    for ch in string:
        result = result * base + alphabet.find(ch)
    return result
def encode(val, base, minlen=0):
    """Encode integer `val` in `base`, left-padded to at least `minlen` digits.

    Fixed to use floor division: the original `val /= base` yields a float
    under Python 3, breaking alphabet indexing and the loop; `//=` behaves
    identically under Python 2 for ints.
    """
    base, minlen = int(base), int(minlen)
    code_string = get_code_string(base)
    result = ""
    while val > 0:
        result = code_string[val % base] + result
        val //= base
    if len(result) < minlen:
        # Pad with the zero digit of this base.
        result = code_string[0] * (minlen - len(result)) + result
    return result
def changebase(string,frm,to,minlen=0):
    """Re-encode `string` from base `frm` to base `to` (optionally padded)."""
    return encode(decode(string,frm),to,minlen)
def hash_to_int(x):
    """Hash -> int: treat 40/64-char inputs as hex digests, else raw base-256."""
    if len(x) in [40,64]: return decode(x,16)
    else: return decode(x,256)
# https://tools.ietf.org/html/rfc6979#section-3.2
def deterministic_generate_k(msghash,priv):
    """Derive the deterministic ECDSA nonce k per RFC 6979 section 3.2.

    Python 2 only: the 32-byte literals below are byte strings there;
    Python 3's hmac.new rejects str arguments.
    """
    v = '\x01' * 32
    k = '\x00' * 32
    priv = encode_privkey(priv,'bin')
    msghash = encode(hash_to_int(msghash),256,32)
    # Two seeding HMAC rounds, then the final HMAC output is the nonce.
    k = hmac.new(k, v+'\x00'+priv+msghash, hashlib.sha256).digest()
    v = hmac.new(k, v, hashlib.sha256).digest()
    k = hmac.new(k, v+'\x01'+priv+msghash, hashlib.sha256).digest()
    v = hmac.new(k, v, hashlib.sha256).digest()
    return decode(hmac.new(k, v, hashlib.sha256).digest(),256)
def base10_add(a,b):
    """Add two secp256k1 points in affine coordinates ((0,0) = infinity)."""
    if isinf(a): return b[0],b[1]
    if isinf(b): return a[0],a[1]
    if a[0] == b[0]:
        if a[1] == b[1]:
            # Bug fix: base10_double takes a single point tuple; the original
            # passed two scalar arguments, a guaranteed TypeError at runtime.
            return base10_double((a[0], a[1]))
        else:
            # P + (-P) = point at infinity
            return (0,0)
    # Chord-slope addition formula modulo the field prime P.
    m = ((b[1]-a[1]) * inv(b[0]-a[0],P)) % P
    x = (m*m-a[0]-b[0]) % P
    y = (m*(a[0]-x)-a[1]) % P
    return (x,y)
def base10_double(a):
    """Double a secp256k1 point via the tangent-line formula ((0,0) = infinity)."""
    if isinf(a): return (0,0)
    m = ((3*a[0]*a[0]+A)*inv(2*a[1],P)) % P
    x = (m*m-2*a[0]) % P
    y = (m*(a[0]-x)-a[1]) % P
    return (x,y)
def base10_multiply(a,n):
    """Scalar-multiply point `a` by `n` (recursive double-and-add).

    Fixed to use floor division: the original `n/2` produces a float under
    Python 3 and breaks the recursion; `n//2` is identical under Python 2.
    """
    if isinf(a) or n == 0: return (0,0)
    if n == 1: return a
    # Reduce the scalar into [0, N) first.
    if n < 0 or n >= N: return base10_multiply(a,n%N)
    if (n%2) == 0: return base10_double(base10_multiply(a,n//2))
    if (n%2) == 1: return base10_add(base10_double(base10_multiply(a,n//2)),a)
def bin_to_b58check(inp,magicbyte=0):
    """Base58Check-encode binary payload `inp` with version byte `magicbyte`.

    Python 2 only: relies on `chr` producing byte strings; `re` comes from
    the wildcard `bitcoin` import.  Leading zero bytes become leading '1's
    per the Base58Check convention.
    """
    inp_fmtd = chr(int(magicbyte)) + inp
    leadingzbytes = len(re.match('^\x00*',inp_fmtd).group(0))
    # Checksum is the first 4 bytes of the double SHA-256.
    checksum = bin_dbl_sha256(inp_fmtd)[:4]
    return '1' * leadingzbytes + changebase(inp_fmtd+checksum,256,58)
def bin_dbl_sha256(string):
    """Double SHA-256 of the given byte string (Bitcoin's standard hash)."""
    first_round = hashlib.sha256(string).digest()
    return hashlib.sha256(first_round).digest()
def get_privkey_format(priv):
    """Detect a private key's encoding (decimal/bin/hex/WIF, +compressed).

    Python 2 only: uses the `long` builtin; b58check_to_bin comes from the
    wildcard `bitcoin` import.
    """
    if isinstance(priv,(int,long)): return 'decimal'
    elif len(priv) == 32: return 'bin'
    elif len(priv) == 33: return 'bin_compressed'
    elif len(priv) == 64: return 'hex'
    elif len(priv) == 66: return 'hex_compressed'
    else:
        bin_p = b58check_to_bin(priv)
        if len(bin_p) == 32: return 'wif'
        elif len(bin_p) == 33: return 'wif_compressed'
        else: raise Exception("WIF does not represent privkey")
def encode_privkey(priv,formt):
    """Encode a private key into the requested format name `formt`."""
    if not isinstance(priv,(int,long)):
        # Normalize any input encoding to an int first, then re-encode.
        return encode_privkey(decode_privkey(priv),formt)
    if formt == 'decimal': return priv
    elif formt == 'bin': return encode(priv,256,32)
    elif formt == 'bin_compressed': return encode(priv,256,32)+'\x01'
    elif formt == 'hex': return encode(priv,16,64)
    elif formt == 'hex_compressed': return encode(priv,16,64)+'01'
    elif formt == 'wif': return bin_to_b58check(encode(priv,256,32),128)
    elif formt == 'wif_compressed': return bin_to_b58check(encode(priv,256,32)+'\x01',128)
    else: raise Exception("Invalid format!")
def decode_privkey(priv,formt=None):
    """Decode a private key of any supported format to an int.

    When `formt` is omitted it is auto-detected via get_privkey_format.
    """
    if not formt: formt = get_privkey_format(priv)
    if formt == 'decimal': return priv
    elif formt == 'bin': return decode(priv,256)
    elif formt == 'bin_compressed': return decode(priv[:32],256)
    elif formt == 'hex': return decode(priv,16)
    elif formt == 'hex_compressed': return decode(priv[:64],16)
    else:
        bin_p = b58check_to_bin(priv)
        if len(bin_p) == 32: return decode(bin_p,256)
        elif len(bin_p) == 33: return decode(bin_p[:32],256)
        else: raise Exception("WIF does not represent privkey")
def ecdsa_raw_sign(msghash,priv):
z = hash_to_int(msghash)
k = deterministic_generate_k(msghash,priv)
r,y = base10_multiply(G,k)
s = inv(k,N) * (z + r*decode_privkey(priv)) % N
return 27+(y%2),r,s
def der_encode_sig(*args):
    """Takes ([vbyte], r, s) as ints and returns hex der encode sig"""
    # Accepts (v, r, s), (r, s), or a single tuple of either shape.
    # NOTE(review): a single non-tuple argument falls through with r and s
    # unbound and would raise NameError below -- confirm callers never do that.
    if len(args) == 3:
        v,r,s = args
    elif len(args) == 2:
        r,s = args
    elif len(args) == 1 and isinstance(args[0], tuple):
        return der_encode_sig(*args[0])
    b1, b2 = encode(r, 256), encode(s, 256)
    # Both branches below test whether the leading byte has its MSB set
    # (the value would otherwise parse as negative in DER); the r branch
    # checks the high hex nibble, the s branch checks the bit directly.
    if len(b1) and changebase(b1[0], 256, 16, 1) in "89abcdef":# add null bytes if interpreted as negative number
        b1 = b'\x00' + b1
    if len(b2) and ord(b2[0]) & 0x80:
        b2 = b'\x00' + b2
    # DER: 0x02 <len> <r>  and  0x02 <len> <s>, wrapped in 0x30 <total-len>.
    left = b'\x02' + encode(len(b1), 256, 1) + b1
    right = b'\x02' + encode(len(b2), 256, 1) + b2
    sighex = binascii.hexlify(b'\x30' + encode(len(left+right), 256, 1) + left + right)
    #assert is_bip66(sighex)
    return sighex
def ecdsa_tx_sign(txhash, priv):
    """Returns DER sig for rawtx w/ hashcode appended"""
    # Sign raw, then DER-encode the resulting (v, r, s) triple in one step.
    return der_encode_sig(*ecdsa_raw_sign(txhash, priv))
def pybitcointools_sig(txhash, sk):
    """Thin alias for ecdsa_tx_sign, kept for pybitcointools naming parity."""
    return ecdsa_tx_sign(txhash, sk)
def is_bip66(sig):
    """Check a hex-encoded DER signature for BIP66 strict-encoding compliance.

    Layout checked: 0x30 [total-len] 0x02 [R-len] [R] 0x02 [S-len] [S] [sighash]
    (https://raw.githubusercontent.com/bitcoin/bips/master/bip-0066.mediawiki)
    """
    buf = bytearray.fromhex(sig)
    # A signature lacking the trailing sighash byte gets one appended
    # so the length arithmetic below matches the BIP66 reference.
    if buf[1] == len(buf) - 2:
        buf.append(1)
    total = len(buf)
    if total < 9 or total > 73:
        return False
    if buf[0] != 0x30 or buf[1] != total - 3:
        return False
    rlen = buf[3]
    if 5 + rlen >= total:
        return False
    slen = buf[5 + rlen]
    if rlen + slen + 7 != total:
        return False
    # R: correct tag, non-empty, non-negative, minimally encoded.
    if buf[2] != 0x02 or rlen == 0 or (buf[4] & 0x80):
        return False
    if rlen > 1 and buf[4] == 0 and not (buf[5] & 0x80):
        return False
    # S: same constraints as R.
    if buf[4 + rlen] != 0x02 or slen == 0 or (buf[rlen + 6] & 0x80):
        return False
    if slen > 1 and buf[6 + rlen] == 0 and not (buf[7 + rlen] & 0x80):
        return False
    return True
#### pybitcointools code end
#### petertodd/python-bitcoinlib code start
def IsLowDERSignature(sig):
    """
    Loosely correlates with IsLowDERSignature() from script/interpreter.cpp
    Verifies that the S value in a DER signature is the lowest possible value.
    Used by BIP62 malleability fixes.
    """
    def _byte(b):
        # Python 2 indexing of a str yields a 1-char string; normalise to int.
        return int(struct.unpack('B', b)[0]) if isinstance(b, str) else b
    length_r = _byte(sig[3])
    length_s = _byte(sig[5 + length_r])
    s_start = 6 + length_r
    s_val = list(struct.unpack(str(length_s) + 'B', sig[s_start:s_start + length_s]))
    # secp256k1 group order N divided by two: an S above this value has a
    # complement mod N that is one byte shorter when encoded canonically.
    max_mod_half_order = [
        0x7f, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
        0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
        0x5d, 0x57, 0x6e, 0x73, 0x57, 0xa4, 0x50, 0x1d,
        0xdf, 0xe9, 0x2f, 0x46, 0x68, 0x1b, 0x20, 0xa0]
    positive = CompareBigEndian(s_val, [0]) > 0
    low = CompareBigEndian(s_val, max_mod_half_order) <= 0
    return positive and low
def CompareBigEndian(c1, c2):
    """
    Loosely matches CompareBigEndian() from eccryptoverify.cpp
    Compares two arrays of bytes, and returns a negative value if the first is
    less than the second, 0 if they're equal, and a positive value if the
    first is greater than the second.
    """
    a = list(c1)
    b = list(c2)
    # Trim excess leading bytes from the longer array; any nonzero byte
    # found there already decides the comparison.
    while len(a) != len(b):
        if len(a) > len(b):
            if a.pop(0) > 0:
                return 1
        else:
            if b.pop(0) > 0:
                return -1
    # Equal lengths: the first differing byte decides.
    for x, y in zip(a, b):
        if x != y:
            return x - y
    return 0
#### petertodd/python-bitcoinlib code end
# Returns [first, sig, pub, rest]
def parseTxn(txn):
    """Split a hex-encoded single-input raw transaction.

    Returns [first, sig, pub, rest] where *first* is the 41-byte prefix
    (version + input count + outpoint), *sig* and *pub* come out of the
    scriptSig, and *rest* is everything after the script.
    """
    # 41 bytes -> 82 hex characters of fixed prefix, then the script length.
    first = txn[:82]
    script_len = int(txn[82:84], 16)
    script_end = 84 + 2 * script_len
    script = txn[84:script_end]
    # scriptSig layout: <sig-len> <sig> <pub-len> <pub>
    sig_len = int(script[:2], 16)
    sig_end = 2 + 2 * sig_len
    sig = script[2:sig_end]
    pub_len = int(script[sig_end:sig_end + 2], 16)
    pub = script[sig_end + 2:]
    assert(len(pub) == pub_len * 2)
    rest = txn[script_end:]
    return [first, sig, pub, rest]
# Substitutes the scriptPubKey into the transaction, appends SIGN_ALL to make the version
# of the transaction that can be signed
def getSignableTxn(parsed):
    """Rebuild the signable form of a parsed txn (see parseTxn).

    "1976a914<hash160>88ac" is a length-prefixed P2PKH scriptPubKey for the
    input's own address; the trailing "01000000" is the SIGHASH_ALL code.
    Python 2 only (.encode('hex')).
    """
    first, sig, pub, rest = parsed
    inputAddr = utils.base58CheckDecode(keyUtils.pubKeyToAddr(pub))
    return first + "1976a914" + inputAddr.encode('hex') + "88ac" + rest + "01000000"
###############
# Makes a transaction from the inputs
# outputs is a list of [redemptionSatoshis, outputScript]
def makeRawTransaction(outputTransactionHash, sourceIndex, scriptSig, outputs):
    """Serialise a one-input raw transaction as a hex string.

    Python 2 only ('hex' codec).  Version is currently hard-coded to 2;
    the original version-1 and segwit marker lines are left commented out.
    """
    def makeOutput(data):
        # <8-byte LE amount> <script length> <script>
        redemptionSatoshis, outputScript = data
        return (struct.pack("<Q", redemptionSatoshis).encode('hex') +
            '%02x' % len(outputScript.decode('hex')) + outputScript)
    formattedOutputs = ''.join(map(makeOutput, outputs))
    return (
        # "01000000" + # 4 bytes version
        "02000000" + # 4 bytes version
        # "0001" + # segwit marker, flag
        "01" + # varint for number of inputs
        outputTransactionHash.decode('hex')[::-1].encode('hex') + # reverse outputTransactionHash
        struct.pack('<I', sourceIndex).encode('hex') +
        '%02x' % len(scriptSig.decode('hex')) + scriptSig +
        "ffffffff" + # sequence
        "%02x" % len(outputs) + # number of outputs
        formattedOutputs +
        # "0100" + # segwit witness
        "00000000" # lockTime
    )
def makeSignedTransaction(privateKey, outputTransactionHash, sourceIndex, scriptPubKey, outputs):
    """Build, sign (SIGHASH_ALL) and verify a one-input transaction.

    Re-signs in a loop until the DER signature has a low S value
    (BIP62 malleability rule, checked by IsLowDERSignature).
    Python 2 only; prints debug output along the way.
    """
    # The signable form substitutes the scriptPubKey and appends the hash code.
    myTxn_forSig = (makeRawTransaction(outputTransactionHash, sourceIndex, scriptPubKey, outputs)
        + "01000000") # hash code
    print "priv", privateKey
    print "tx", myTxn_forSig
    # signing with ecdsa module
    s256 = hashlib.sha256(hashlib.sha256(myTxn_forSig.decode('hex')).digest()).digest()
    sk = ecdsa.SigningKey.from_string(privateKey.decode('hex'), curve=ecdsa.SECP256k1)
    while True:
        sig = sk.sign_digest(s256, sigencode=ecdsa.util.sigencode_der)
        print is_bip66(binascii.hexlify(sig))
        if IsLowDERSignature(bytearray.fromhex(binascii.hexlify(sig))):
            break
    # Append the SIGHASH_ALL byte to the DER signature.
    sig = sig + '\01'
    # sig = pybitcointools_sig(s256, privateKey) + '01'
    # sig = sig.decode('hex')
    print "sig",len(sig),[binascii.hexlify(sig)]
    pubKey = keyUtils.privateKeyToPublicKey(privateKey, True)
    # scriptSig = <varstr sig> <varstr pubkey>, hex-encoded.
    scriptSig = utils.varstr(sig).encode('hex') + utils.varstr(pubKey.decode('hex')).encode('hex')
    signed_txn = makeRawTransaction(outputTransactionHash, sourceIndex, scriptSig, outputs)
    print "signed_txn", signed_txn
    verifyTxnSignature(signed_txn)
    return signed_txn
def verifyTxnSignature(txn):
    """Sanity-check the signature embedded in a signed one-input txn.

    Re-derives the signable hash, strips the SIGHASH byte, expands the
    compressed pubkey and verifies with the ecdsa package.  Raises
    AssertionError on failure.  Python 2 only.
    """
    parsed = parseTxn(txn)
    signableTxn = getSignableTxn(parsed)
    hashToSign = hashlib.sha256(hashlib.sha256(signableTxn.decode('hex')).digest()).digest().encode('hex')
    assert(parsed[1][-2:] == '01') # hashtype
    sig = keyUtils.derSigToHexSig(parsed[1][:-2])
    public_key = parsed[2]
    print "public_key:", public_key
    public_key = keyUtils.getFullPubKeyFromCompressed(public_key)
    print "uncompressed public_key:", public_key
    # Drop the '04' prefix; the ecdsa package wants raw X||Y.
    vk = ecdsa.VerifyingKey.from_string(public_key[2:].decode('hex'), curve=ecdsa.SECP256k1)
    assert(vk.verify_digest(sig.decode('hex'), hashToSign.decode('hex')))
# Warning: this random function is not cryptographically strong and is just for example
# (use os.urandom-backed randomness for real keys).
# Generates a throwaway 32-byte hex private key and prints its WIF and address.
private_key = ''.join(['%x' % random.randrange(16) for x in range(0, 64)])
print keyUtils.privateKeyToWif(private_key)
print keyUtils.keyToAddr(private_key)
# privateKey = keyUtils.wifToPrivateKey("<KEY>") #1MMMM
# signed_txn = makeSignedTransaction(privateKey,
# "<KEY>", # output (prev) transaction hash
# 0, # sourceIndex
# keyUtils.addrHashToScriptPubKey("<KEY>"),
# [[91234, #satoshis
# keyUtils.addrHashToScriptPubKey("<KEY>")]]
# )
# <KEY>
# <KEY>
# cU1UcuA7HRMNoJMamquHLpGMnkDQtwhSt8d3Z2UpGFdx1E4osE7D
# <KEY>fXU8nkonyWDuY8B9vq3LiqQ6vCKH7VpLib4f7hty42
# privateKey = keyUtils.wifToPrivateKey("<KEY>") #1MMMM
# signed_txn = makeSignedTransaction(privateKey,
# "d70cd6ad2c0a8211b2e22954b3e450a6eaffc4b22d4c35fd760805aae08269a3", # output (prev) transaction hash
# 0, # sourceIndex
# keyUtils.addrHashToScriptPubKey("<KEY>"),
# [[100, #satoshis
# keyUtils.addrHashToScriptPubKey("<KEY>")]]
# )
# privateKey = keyUtils.wifToPrivateKey("<KEY>") #1MMMM
# privateKey = keyUtils.wifToPrivateKey("<KEY>") #1MMMM
# NOTE(review): "<KEY>" is a redacted placeholder (source anonymisation);
# a real WIF string must be substituted before this script can run.
privateKey = keyUtils.wifToPrivateKey("<KEY>") #1MMMM
print "privateKey", [privateKey]
# signed_txn = makeSignedTransaction(privateKey,
# "c0e9e5a845cf222cc78220685fcc295db73608c64bde5d32689ef85e27c72fb5", # output (prev) transaction hash
# 0, # sourceIndex
# keyUtils.addrHashToScriptPubKey("<KEY>"),
# [[30000000, #satoshis
# keyUtils.addrHashToScriptPubKey("<KEY>")],
# [550000000,
# keyUtils.addrHashToScriptPubKey("<KEY>")]]
# )
# Build and sign a 1-input / 2-output transaction spending the outpoint below.
# NOTE(review): the "<KEY>" address arguments are redacted placeholders and
# must be replaced with real base58 addresses before running.
signed_txn = makeSignedTransaction(privateKey,
    "73cb8c38c3f0ea7a022bc3d93a78d2b45fc2ed0b44664c17c0225c59f06f490d", # output (prev) transaction hash
    0, # sourceIndex
    keyUtils.addrHashToScriptPubKey("<KEY>"),
    [[149000000, #satoshis
    keyUtils.addrHashToScriptPubKey("<KEY>")],
    [100000000,
    keyUtils.addrHashToScriptPubKey("<KEY>")]]
    )
print signed_txn
verifyTxnSignature(signed_txn)
print 'SIGNED TXN', signed_txn
########## Send to peer
# magic = 0xd9b4bef9 # mainnet
# NOTE(review): 0xdab5bffa together with port 18444 used below suggests
# regtest rather than testnet -- confirm the label.
magic = 0xdab5bffa # testnet
def makeMessage(magic, command, payload):
    """Wrap *payload* in a Bitcoin P2P message header.

    Header layout ('I12sI4s'): network magic, 12-byte NUL-padded command,
    payload length, first 4 bytes of the payload's double-SHA256.
    Python 2 only ('12s' packs a str here).
    """
    # print binascii.hexlify(hashlib.sha256(payload).digest()[0:4])
    checksum = hashlib.sha256(hashlib.sha256(payload).digest()).digest()[0:4]
    # print [checksum, payload]
    return struct.pack('I12sI4s', magic, command, len(payload), checksum) + payload
    # return struct.pack('I', magic) + "version" + 5 *"\00" + struct.pack('I', len(payload)) + checksum + payload + struct.pack('?', True)
def getTxMsg(payload):
    """Wrap a serialised transaction in a 'tx' P2P message."""
    return makeMessage(magic, 'tx', payload)
def getVersionMsg():
    """Build a 'version' handshake message for a local node on port 18444."""
    # version = 60002
    version = 70015
    services = 0
    timestamp = int(time.time())
    # Both address fields point at localhost:18444 (regtest default port).
    addr_me = utils.netaddr(socket.inet_aton("127.0.0.1"), 18444)
    addr_you = utils.netaddr(socket.inet_aton("127.0.0.1"), 18444)
    nonce = random.getrandbits(64)
    # sub_version_num = utils.varstr('')
    # Zero-length user agent: a single 0x00 varint byte via the 'B' field below.
    user_agent_bytes = 0
    start_height = 0
    relay = True
    # print ["addr_me", addr_me]
    payload = struct.pack('<iQQ26s26sQBi?', version, services, timestamp, addr_me,
        addr_you, nonce, user_agent_bytes, start_height, relay)
    return makeMessage(magic, 'version', payload)
# Connect to the node, perform the version/verack handshake, broadcast the
# signed transaction, then drain responses until a 5s receive timeout.
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
print sock.connect(("172.16.17.32", 18444))
print "connect?"
versionMsg = getVersionMsg()
print [versionMsg]
sock.send(versionMsg)
print [sock.recv(131072)] # receive version
verackMsg = makeMessage(magic, 'verack', "")
sock.send(verackMsg)
print [sock.recv(131072)] # receive version, verack
sock.send(getTxMsg(signed_txn.decode('hex')))
i = 0
try:
    while True:
        sock.settimeout(5)
        print [i, sock.recv(131072)]
        i += 1
except socket.timeout:
    print "sock timeout"
# NOTE(review): 'generate' is not a standard P2P command; presumably only a
# patched/shadow node responds to it -- confirm against the node build.
generateMsg = makeMessage(magic, 'generate', "")
sock.send(generateMsg)
i = 0
try:
    while True:
        sock.settimeout(5)
        print [i, sock.recv(131072)]
        i += 1
except socket.timeout:
    print "sock timeout"
# print ["aa", sock.recv(131072)] # receive
# print ["bb", sock.recv(131072)] # receive
# print ["cc", sock.recv(131072)] # receive
# print ["dd", sock.recv(131072)] # receive
# print ["ee", sock.recv(131072)] # receive
# print ["ff", sock.recv(131072)] # receive
# sock.recv(131072)
# Done talking to the node; release the socket.
sock.close()
|
kaistshadow/shadow-plugin-bitcoin | tmp/tx-generator/utils.py | <reponame>kaistshadow/shadow-plugin-bitcoin<filename>tmp/tx-generator/utils.py
import hashlib
import struct
# Bitcoin's base-58 alphabet (omits 0, O, I and l to avoid visual ambiguity).
b58 = '123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz'
def base58encode(n):
    """Encode a non-negative integer as a base-58 string.

    Returns '' for n == 0; leading zero bytes are handled by callers
    (see base58CheckEncode).
    """
    result = ''
    while n > 0:
        result = b58[n % 58] + result
        # Floor division: the original `n /= 58` only worked under
        # Python 2 integer division; `//=` is identical there and
        # correct on Python 3 as well.
        n //= 58
    return result
def base58decode(s):
    """Decode a base-58 string to an integer (inverse of base58encode)."""
    result = 0
    for ch in s:
        result = result * 58 + b58.index(ch)
    return result
def base256encode(n):
    """Encode a non-negative integer as a big-endian byte string.

    Returns '' for n == 0.
    """
    result = ''
    while n > 0:
        result = chr(n % 256) + result
        # Floor division: `/=` relied on Python 2 integer division and
        # silently breaks on Python 3; `//=` behaves identically on both.
        n //= 256
    return result
def base256decode(s):
    """Decode a big-endian byte string to an integer (inverse of base256encode)."""
    value = 0
    for ch in s:
        value = (value << 8) | ord(ch)
    return value
def countLeadingChars(s, ch):
    """Return how many consecutive copies of *ch* start the string *s*."""
    n = 0
    while n < len(s) and s[n] == ch:
        n += 1
    return n
# https://en.bitcoin.it/wiki/Base58Check_encoding
def base58CheckEncode(version, payload):
    """Base58Check-encode *payload* with a leading *version* byte.

    Appends a 4-byte double-SHA256 checksum; each leading NUL byte of
    the result becomes a literal '1'.  Python 2 only (chr/str mixing).
    """
    s = chr(version) + payload
    checksum = hashlib.sha256(hashlib.sha256(s).digest()).digest()[0:4]
    result = s + checksum
    leadingZeros = countLeadingChars(result, '\0')
    return '1' * leadingZeros + base58encode(base256decode(result))
def base58CheckDecode(s, pk_for_compressed = False):
    """Decode a Base58Check string, verify its checksum, return the payload.

    The version byte is stripped.  With pk_for_compressed=True the final
    byte (the 0x01 compression flag of a compressed WIF key) is dropped
    as well.  Raises AssertionError on a checksum mismatch.
    """
    leadingOnes = countLeadingChars(s, '1')
    s = base256encode(base58decode(s))
    result = '\0' * leadingOnes + s[:-4]
    chk = s[-4:]
    checksum = hashlib.sha256(hashlib.sha256(result).digest()).digest()[0:4]
    assert(chk == checksum)
    # Version byte is read but only used implicitly via the slices below.
    version = result[0]
    # print result.encode("hex")
    # print result[1:-1].encode("hex")
    if pk_for_compressed:
        return result[1:-1]
    else:
        return result[1:]
# Returns byte string value, not hex string
def varint(n):
    """Encode *n* as a Bitcoin CompactSize (variable-length) integer.

    Canonical ranges: n < 0xfd -> 1 byte; n <= 0xffff -> 0xfd + uint16;
    n <= 0xffffffff -> 0xfe + uint32; else 0xff + uint64.
    """
    if n < 0xfd:
        return struct.pack('<B', n)
    elif n <= 0xffff:
        # was `n < 0xffff`: the boundary values 0xffff / 0xffffffff were
        # pushed into the next-larger (non-canonical) encoding.
        return struct.pack('<cH', b'\xfd', n)
    elif n <= 0xffffffff:
        return struct.pack('<cL', b'\xfe', n)
    else:
        return struct.pack('<cQ', b'\xff', n)
# Takes and returns byte string value, not hex string
def varstr(s):
    """Length-prefix *s* with its CompactSize varint (Bitcoin var_str)."""
    return varint(len(s)) + s
# 60002
def netaddr(ipaddr, port):
    """Pack a 26-byte network-address field for a 'version' message.

    Layout: uint64 services (LE) + 12-byte IPv4-mapped-IPv6 prefix +
    4-byte IPv4 address + uint16 port (big-endian).  *ipaddr* is the
    packed form from socket.inet_aton.
    """
    services = 0
    # Bytes literal (identical to the original str on Python 2) keeps
    # struct.pack('12s', ...) working on Python 3 as well.
    prefix = b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff'
    return (struct.pack('<Q12s', services, prefix) +
            struct.pack('>4sH', ipaddr, port))
|
kaistshadow/shadow-plugin-bitcoin | tmp/tx-generator/keyUtils.py | <filename>tmp/tx-generator/keyUtils.py<gh_stars>1-10
# https://pypi.python.org/pypi/ecdsa/0.10
import ecdsa
import ecdsa.der
import ecdsa.util
import hashlib
import unittest
import random
import re
import struct
import utils
from binascii import hexlify
# https://en.bitcoin.it/wiki/Wallet_import_format
def privateKeyToWif(key_hex):
    """Convert a hex private key to mainnet WIF (version byte 0x80)."""
    return utils.base58CheckEncode(0x80, key_hex.decode('hex'))
def privateKeyToPublicKey(s, compressed=False):
    """Derive the secp256k1 public key from hex private key *s*.

    compressed=False returns hex '04' + X + Y; compressed=True returns
    '02'/'03' (by Y parity) + X.  Python 2 only ('hex' codec).
    """
    sk = ecdsa.SigningKey.from_string(s.decode('hex'), curve=ecdsa.SECP256k1)
    vk = sk.verifying_key
    # return ('\04' + sk.verifying_key.to_string()).encode('hex')
    if compressed:
        from ecdsa.util import number_to_string
        order = vk.pubkey.order
        # print "order", order
        x_str = number_to_string(vk.pubkey.point.x(), order).encode('hex')
        # print "x_str", x_str
        # Prefix encodes the parity of Y so Y can be recovered later.
        sign = '02' if vk.pubkey.point.y() % 2 == 0 else '03'
        # print "sign", sign
        return (sign+x_str)
    else:
        return ('\04' + vk.to_string()).encode('hex')
# order = ecdsa.SigningKey.from_string(s.decode('hex'), curve=ecdsa.SECP256k1).curve.generator.order()
# p = ecdsa.SigningKey.from_string(s.decode('hex'), curve=ecdsa.SECP256k1).verifying_key.pubkey.point
# x_str = ecdsa.util.number_to_string(p.x(), order)
# y_str = ecdsa.util.number_to_string(p.y(), order)
# compressed = hexlify(bytes(chr(2 + (p.y() & 1))) + x_str).decode('ascii')
# uncompressed = hexlify(bytes(chr(4)) + x_str + y_str).decode('ascii')
def pow_mod(x, y, z):
    """Calculate (x ** y) % z efficiently (y must be non-negative).

    Delegates to the built-in three-argument pow(), which performs the
    same square-and-multiply modular exponentiation in C; the manual
    loop it replaces was a re-implementation of that builtin.
    """
    return pow(x, y, z)
def getFullPubKeyFromCompressed(compressed_key):
    """Expand a compressed hex public key ('02'/'03' + X) to '04' + X + Y.

    Y is recovered by solving y^2 = x^3 + 7 over the secp256k1 field and
    picking the root whose parity matches the prefix.  A key that is
    already uncompressed (prefix '04') is returned unchanged.
    """
    y_parity = int(compressed_key[:2]) - 2
    # int('04') - 2 == 2 identifies an uncompressed key.  (The previous
    # comparison against 4 could never match, so '04' keys fell through
    # into the recovery math below.)
    if y_parity == 2:
        return compressed_key
    # secp256k1 field prime.
    p = 0xfffffffffffffffffffffffffffffffffffffffffffffffffffffffefffffc2f
    x = int(compressed_key[2:], 16)
    a = (pow(x, 3, p) + 7) % p
    # Square root via a^((p+1)/4): valid because p % 4 == 3.
    y = pow(a, (p + 1) // 4, p)
    if y % 2 != y_parity:
        y = -y % p
    # 04 || 32-byte X || 32-byte Y, zero-padded lowercase hex.
    return '04%064x%064x' % (x, y)
def pubKeyToAddr(s):
    """Hash a hex public key to a mainnet P2PKH address.

    Address = Base58Check(version 0, RIPEMD160(SHA256(pubkey_bytes))).
    Python 2 only ('hex' codec).
    """
    ripemd160 = hashlib.new('ripemd160')
    ripemd160.update(hashlib.sha256(s.decode('hex')).digest())
    return utils.base58CheckEncode(0, ripemd160.digest())
def keyToAddr(s):
    """Address for hex private key *s*, via its compressed public key."""
    return pubKeyToAddr(privateKeyToPublicKey(s, True))
def addrHashToScriptPubKey(b58str):
    """Build the hex P2PKH scriptPubKey for a 34-char base58 address.

    OP_DUP OP_HASH160 <20-byte hash160> OP_EQUALVERIFY OP_CHECKSIG.
    Prints the address length (debug leftover).  Python 2 only.
    """
    print len(b58str)
    assert(len(b58str) == 34)
    # 76 A9 14 (20 bytes) 88 AC
    return '76a914' + utils.base58CheckDecode(b58str).encode('hex') + '88ac'
def wifToPrivateKey(s):
    """Decode a WIF string back to a hex private key.

    First characters 'K'/'L' (mainnet compressed) and 'c' (testnet
    compressed) carry a trailing 0x01 flag, which base58CheckDecode
    strips when pk_for_compressed is set.  Python 2 only.
    """
    if s[0] == "K" or s[0] == "L" or s[0] == "c":
        b = utils.base58CheckDecode(s, pk_for_compressed=True)
        # Debug leftover.
        print "aa"
    else:
        b = utils.base58CheckDecode(s)
    return b.encode('hex')
# Input is a hex-encoded, DER-encoded signature
# Output is a 64-byte hex-encoded signature
def derSigToHexSig(s):
    """Unpack a hex DER signature into the raw 64-byte R||S hex form."""
    s, junk = ecdsa.der.remove_sequence(s.decode('hex'))
    # Any bytes after the DER sequence indicate a malformed signature.
    if junk != '':
        print 'JUNK', junk.encode('hex')
    assert(junk == '')
    x, s = ecdsa.der.remove_integer(s)
    y, s = ecdsa.der.remove_integer(s)
    # Zero-pad each component to 32 bytes (64 hex chars).
    return '%064x%064x' % (x, y)
|
nborggren/Aleph | Knots/KnotCalc.py | <reponame>nborggren/Aleph
from knots import *
from knot_analysis import Draw_Knot
from ROOT import TLine, TCanvas
# Calculator variable store: name -> knot value, populated by 'set'.
globalvars = {} # We will store the calculator's variables here
def lookup(map, name):
    """Resolve *name* against local let-bindings, then globals.

    *map* is a list of (name, value) pairs (note: shadows the builtin
    `map`).  Unknown names print a warning and default to 0.
    """
    for x,v in map:
        if x==name: return v
    if name not in globalvars.keys(): print 'Undefined:', name
    return globalvars.get(name, 0)
from string import *
import re
from yappsrt import *
class CalculatorScanner(Scanner):
    """Yapps-generated tokenizer for the knot-calculator grammar.

    Tokens: keywords (in, let, set), operators (+ - * / = parentheses),
    NUM (digits), VAR (identifiers); runs of whitespace are ignored.
    """
    # (token-name, compiled-pattern) pairs, tried in order.
    patterns = [
        ('"in"', re.compile('in')),
        ('"="', re.compile('=')),
        ('"let"', re.compile('let')),
        ('r"\\)"', re.compile('\\)')),
        ('"\\("', re.compile('\(')),
        ('"/"', re.compile('/')),
        ('"[*]"', re.compile('[*]')),
        ('"-"', re.compile('-')),
        ('"[+]"', re.compile('[+]')),
        ('"set"', re.compile('set')),
        ('[ \r\t\n]+', re.compile('[ \r\t\n]+')),
        ('END', re.compile('$')),
        ('NUM', re.compile('[0-9]+')),
        ('VAR', re.compile('[a-zA-Z_]+')),
    ]
    def __init__(self, str):
        # Note: parameter `str` shadows the builtin (generated code).
        # Whitespace is listed as an ignored pattern.
        Scanner.__init__(self,None,['[ \r\t\n]+'],str)
class Calculator(Parser):
    """Yapps-generated recursive-descent parser for knot expressions.

    Grammar sketch:
        goal   -> expr END | 'set' VAR expr END
        expr   -> factor (('+'|'-') factor)*     (+ is tangle addition)
        factor -> term (('*'|'/') term)*         (* is tangle multiplication)
        term   -> NUM | VAR | '(' expr ')' | 'let' VAR '=' expr 'in' expr
    *V* threads the list of local let-bindings through the rules.
    """
    def goal(self):
        """Top rule: evaluate-and-draw, or bind a global with 'set'."""
        _token_ = self._peek('"set"', 'NUM', 'VAR', '"\\("', '"let"')
        if _token_ != '"set"':
            expr = self.expr([])
            END = self._scan('END')
            # Close the tangle into a knot, shrink it, and render it.
            expr = tie(expr)
            expr = scale(expr,.85)
            knot = Draw_Knot(expr)
            print '=', expr
            return expr
        else:# == '"set"'
            self._scan('"set"')
            VAR = self._scan('VAR')
            expr = self.expr([])
            END = self._scan('END')
            globalvars[VAR] = expr
            print VAR, '=', expr
            return expr
    def expr(self, V):
        """expr: left-associative chain of +/- over factors."""
        factor = self.factor(V)
        n = factor
        while self._peek('"[+]"', '"-"', 'END', 'r"\\)"', '"in"', '"[*]"', '"/"') in ['"[+]"', '"-"']:
            _token_ = self._peek('"[+]"', '"-"')
            if _token_ == '"[+]"':
                self._scan('"[+]"')
                factor = self.factor(V)
                n = add(n,factor)
            else:# == '"-"'
                self._scan('"-"')
                factor = self.factor(V)
                # NOTE(review): '-' uses plain Python subtraction on knot
                # dicts, which would raise TypeError -- confirm intent.
                n = n-factor
        return n
    def factor(self, V):
        """factor: left-associative chain of */ over terms."""
        term = self.term(V)
        v = term
        while self._peek('"[*]"', '"/"', '"[+]"', '"-"', 'END', 'r"\\)"', '"in"') in ['"[*]"', '"/"']:
            _token_ = self._peek('"[*]"', '"/"')
            if _token_ == '"[*]"':
                self._scan('"[*]"')
                term = self.term(V)
                v = multiply(v,term)
            else:# == '"/"'
                self._scan('"/"')
                term = self.term(V)
                # NOTE(review): '/' is plain division on knot dicts; see '-'.
                v = v/term
        return v
    def term(self, V):
        """term: number (n-crossing tangle), variable, parens, or let-binding."""
        _token_ = self._peek('NUM', 'VAR', '"\\("', '"let"')
        if _token_ == 'NUM':
            NUM = self._scan('NUM')
            # The integer n denotes a row of n unit tangles summed together.
            if atoi(NUM)==1:
                return tangle(0,0)
            return sum([tangle(0,0) for i in range(atoi(NUM))])
        elif _token_ == 'VAR':
            VAR = self._scan('VAR')
            return lookup(V, VAR)
        elif _token_ == '"\\("':
            self._scan('"\\("')
            expr = self.expr(V)
            self._scan('r"\\)"')
            return expr
        else:# == '"let"'
            self._scan('"let"')
            VAR = self._scan('VAR')
            self._scan('"="')
            expr = self.expr(V)
            # Extend the local environment for the body after 'in'.
            V = [(VAR, expr)] + V
            self._scan('"in"')
            expr = self.expr(V)
            return expr
def parse(rule, text):
    """Parse *text* starting from grammar rule *rule*, reporting errors."""
    P = Calculator(CalculatorScanner(text))
    return wrap_error_reporter(P, rule)
if __name__=='__main__':
    # Interactive REPL banner + loop: read a line, parse/draw it, repeat.
    print '?!'*21
    print 'Welcome to the Knot Calculator'
    print '<NAME>'
    print 'SUNY Stony Brook, Physics'
    print 'June 2009'
    print '?!'*21
    # We could have put this loop into the parser, by making the
    # `goal' rule use (expr | set var expr)*, but by putting the
    # loop into Python code, we can make it interactive (i.e., enter
    # one expression, get the result, enter another expression, etc.)
    while 1:
        try: s = raw_input('>>> ')
        except EOFError: break
        # A blank line also exits.
        if not strip(s): break
        parse('goal', s)
    print 'Bye.'
|
nborggren/Aleph | Knots/knots.py | #???????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????
#???????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????
#
#knots.py
#<NAME> June 2009
#
#designs a simple geometrical realization of a knot topology so as to be embedded in a Cadence layout upon
#manipulation in accordance with Hypres design rules.
#
#???????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????]
#???????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????
# Metal layer names for the target (Hypres) layout process.
layers = ['m1', 'm2', 'm3']
#knots formed by products of tangles in conway notation, digits add to # of crossings.
#Here are the knots up to eight crossings that don't require more sophisticated notation
aknots = [3,22,5,32,42,312,2112,7,52,43,322,313,2212,21112,62,512,44,413,4112,332,3212,3113,31112,2312,2222,22112]
# Cache for constructed knots, keyed by notation -- not populated in this file.
dknots = {}
#the basic tangle unit cell. Centered at x,y it occupies a 1 by 1 area
def tangle(x, y):
    """Return the basic tangle unit cell: a 1-by-1 square centred at (x, y).

    The dict holds 'points' (the four corners, indexed 0..3 = ne,nw,sw,se),
    'corners' (same coordinates keyed by compass name), 'paths' (the two
    crossing diagonals, sharing list objects with 'points'), 'tangles'
    (the cell centre), and per-path 'elevators'/'players' layer flags.
    """
    offsets = [(.5, .5), (-.5, .5), (-.5, -.5), (.5, -.5)]
    names = ('ne', 'nw', 'sw', 'se')
    points = {}
    corners = {}
    for idx, ((dx, dy), label) in enumerate(zip(offsets, names)):
        # Separate list objects for points and corners, as downstream code
        # may mutate them independently.
        points[idx] = [x + dx, y + dy]
        corners[label] = [x + dx, y + dy]
    # Path endpoints alias the point lists (nw->se and ne->sw diagonals).
    paths = {0: [points[1], points[3]],
             1: [points[0], points[2]]}
    return {'points': points,
            'tangles': {0: [x, y]},
            'paths': paths,
            'elevators': {0: 0, 1: 1},
            'players': {0: 0, 1: 1},
            'corners': corners}
#planar translations
#planar translations
def translate(knot, x, y):
    """Return a copy of *knot* shifted by the vector (x, y).

    'points', 'corners' and 'tangles' keep their keys; 'paths' are
    renumbered 0..n-1 in iteration order; 'elevators' and 'players'
    (layer bookkeeping) are copied through unchanged.
    """
    def shift(pt):
        return [pt[0] + x, pt[1] + y]
    moved = {'points': {}, 'corners': {}, 'tangles': {},
             'paths': {}, 'elevators': {}, 'players': {}}
    for section in ('points', 'corners', 'tangles'):
        moved[section] = dict((key, shift(pt)) for key, pt in knot[section].items())
    for idx, (start, end) in enumerate(knot['paths'].values()):
        moved['paths'][idx] = [shift(start), shift(end)]
    for section in ('elevators', 'players'):
        moved[section] = dict(knot[section])
    return moved
#???????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????
#low budget reflections
#multiplication requires first a reflection of the first knot in the line y = -x (ymx)
def ymx(x):
    """Reflect knot *x* in the line y = -x: each point (px, py) -> (-py, -px).

    Used by multiply() to orient the first factor before joining.
    Elevators/players/tangles are copied through unchanged (see the
    commented-out parity flip below, left by the original author).
    """
    points = {}
    #print 'in spin', x['points']
    for i in x['points'].keys():
        points.update({i:[-x['points'][i][1],-x['points'][i][0]]})
    paths = {}
    for i in x['paths'].keys():
        paths.update({i:[[-x['paths'][i][0][1],-x['paths'][i][0][0]],[-x['paths'][i][1][1],-x['paths'][i][1][0]]]})
    # Corner relabelling: ne<->sw swap; nw and se map to themselves under y=-x.
    corners = {'ne':[-x['corners']['sw'][1],-x['corners']['sw'][0]],
        'nw':[-x['corners']['nw'][1],-x['corners']['nw'][0]],
        'se':[-x['corners']['se'][1],-x['corners']['se'][0]],
        'sw':[-x['corners']['ne'][1],-x['corners']['ne'][0]]}
    elevators = {}
    players = {}
    tangles = {}
    temp = {'points':points,
        'paths':paths,
        'elevators':elevators,
        'players':players,
        'tangles':tangles,
        'corners':corners}
    for i in ['elevators','players','tangles']:
        for j in x[i].keys():
            #print x[i][j], j,(1+x[i][j])%2, 'elevate'
            #temp[i].update({j:(1+x[i][j])%2}) #confused
            temp[i].update({j:x[i][j]}) #confused
    #print 'in spin', temp
    return temp
def ypx(x):
    """Reflect knot *x* in the line y = x: each point (px, py) -> (py, px).

    NOTE(review): the corner block assigns 'ne' from the old 'sw' (and
    vice versa) with swapped coordinates, which places the 'ne' label at
    the south-west position for a centred cell; under y = x the ne and sw
    corners map to themselves.  Confirm before relying on corner labels.
    """
    points = {}
    #print 'in spin', x['points']
    for i in x['points'].keys():
        points.update({i:[x['points'][i][1],x['points'][i][0]]})
    paths = {}
    for i in x['paths'].keys():
        paths.update({i:[[x['paths'][i][0][1],x['paths'][i][0][0]],[x['paths'][i][1][1],x['paths'][i][1][0]]]})
    corners = {'ne':[x['corners']['sw'][1],x['corners']['sw'][0]],
        'nw':[x['corners']['nw'][1],x['corners']['nw'][0]],
        'se':[x['corners']['se'][1],x['corners']['se'][0]],
        'sw':[x['corners']['ne'][1],x['corners']['ne'][0]]}
    elevators = {}
    players = {}
    tangles = {}
    temp = {'points':points,
        'paths':paths,
        'elevators':elevators,
        'players':players,
        'tangles':tangles,
        'corners':corners}
    # Layer bookkeeping passes through unchanged.
    for i in ['elevators','players','tangles']:
        for j in x[i].keys():
            temp[i].update({j:x[i][j]})
    #print 'in spin', temp
    return temp
#reflection about y is zero x
def yi0x(x):
    """Reflect knot *x* across the x-axis: each point (px, py) -> (px, -py).

    NOTE(review): unlike points and paths, the corner coordinates are
    copied WITHOUT negating y (only the ne/sw labels are swapped), so
    corners are not actually reflected -- confirm whether intentional.
    """
    points = {}
    #print 'in spin', x['points']
    for i in x['points'].keys():
        points.update({i:[x['points'][i][0],-x['points'][i][1]]})
    paths = {}
    for i in x['paths'].keys():
        paths.update({i:[[x['paths'][i][0][0],-x['paths'][i][0][1]],[x['paths'][i][1][0],-x['paths'][i][1][1]]]})
    corners = {'ne':[x['corners']['sw'][0],x['corners']['sw'][1]],
        'nw':[x['corners']['nw'][0],x['corners']['nw'][1]],
        'se':[x['corners']['se'][0],x['corners']['se'][1]],
        'sw':[x['corners']['ne'][0],x['corners']['ne'][1]]}
    elevators = {}
    players = {}
    tangles = {}
    temp = {'points':points,
        'paths':paths,
        'elevators':elevators,
        'players':players,
        'tangles':tangles,
        'corners':corners}
    # Layer bookkeeping passes through unchanged.
    for i in ['elevators','players','tangles']:
        for j in x[i].keys():
            temp[i].update({j:x[i][j]})
    return temp
#reflection about y is infinity (omega) x
def yiwx(x):
    """Reflect knot *x* across the y-axis: each point (px, py) -> (-px, py).

    NOTE(review): as in yi0x, the corner coordinates are not negated
    (only ne/sw labels are swapped) -- confirm whether intentional.
    """
    points = {}
    #print 'in spin', x['points']
    for i in x['points'].keys():
        points.update({i:[-x['points'][i][0],x['points'][i][1]]})
    paths = {}
    for i in x['paths'].keys():
        paths.update({i:[[-x['paths'][i][0][0],x['paths'][i][0][1]],[-x['paths'][i][1][0],x['paths'][i][1][1]]]})
    corners = {'ne':[-x['corners']['sw'][0],x['corners']['sw'][1]],
        'nw':[-x['corners']['nw'][0],x['corners']['nw'][1]],
        'se':[-x['corners']['se'][0],x['corners']['se'][1]],
        'sw':[-x['corners']['ne'][0],x['corners']['ne'][1]]}
    elevators = {}
    players = {}
    tangles = {}
    temp = {'points':points,
        'paths':paths,
        'elevators':elevators,
        'players':players,
        'tangles':tangles,
        'corners':corners}
    # Layer bookkeeping passes through unchanged.
    for i in ['elevators','players','tangles']:
        for j in x[i].keys():
            temp[i].update({j:x[i][j]})
    return temp
def piby2(x):
    """Rotate knot *x* by pi/2 (counter-clockwise): (px, py) -> (-py, px)."""
    points = {}
    #print 'in spin', x['points']
    for i in x['points'].keys():
        points.update({i:[-x['points'][i][1],x['points'][i][0]]})
    paths = {}
    for i in x['paths'].keys():
        paths.update({i:[[-x['paths'][i][0][1],x['paths'][i][0][0]],[-x['paths'][i][1][1],x['paths'][i][1][0]]]})
    corners = {'ne':[-x['corners']['sw'][1],x['corners']['sw'][0]],
        'nw':[-x['corners']['nw'][1],x['corners']['nw'][0]],
        'se':[-x['corners']['se'][1],x['corners']['se'][0]],
        'sw':[-x['corners']['ne'][1],x['corners']['ne'][0]]}
    elevators = {}
    players = {}
    tangles = {}
    temp = {'points':points,
        'paths':paths,
        'elevators':elevators,
        'players':players,
        'tangles':tangles,
        'corners':corners}
    # Layer bookkeeping passes through unchanged.
    for i in ['elevators','players','tangles']:
        for j in x[i].keys():
            temp[i].update({j:x[i][j]})
    return temp
#rotation by pi
def pi(x):
    """Rotate knot *x* by pi: each point (px, py) -> (-px, -py).

    Note: shadows the usual name for the constant pi; the trig constant
    is not used in this module.
    """
    points = {}
    #print 'in spin', x['points']
    for i in x['points'].keys():
        points.update({i:[-x['points'][i][0],-x['points'][i][1]]})
    paths = {}
    for i in x['paths'].keys():
        paths.update({i:[[-x['paths'][i][0][0],-x['paths'][i][0][1]],[-x['paths'][i][1][0],-x['paths'][i][1][1]]]})
    corners = {'ne':[-x['corners']['sw'][0],-x['corners']['sw'][1]],
        'nw':[-x['corners']['nw'][0],-x['corners']['nw'][1]],
        'se':[-x['corners']['se'][0],-x['corners']['se'][1]],
        'sw':[-x['corners']['ne'][0],-x['corners']['ne'][1]]}
    elevators = {}
    players = {}
    tangles = {}
    temp = {'points':points,
        'paths':paths,
        'elevators':elevators,
        'players':players,
        'tangles':tangles,
        'corners':corners}
    # Layer bookkeeping passes through unchanged.
    for i in ['elevators','players','tangles']:
        for j in x[i].keys():
            temp[i].update({j:x[i][j]})
    return temp
def pi3by2(x):
    """Rotate knot *x* by 3*pi/2.

    NOTE(review): the body is byte-for-byte identical to piby2 -- it
    applies (px, py) -> (-py, px), i.e. a pi/2 rotation, where a 3*pi/2
    rotation would be (px, py) -> (py, -px).  Suspected copy-paste bug;
    confirm against callers before changing.
    """
    points = {}
    #print 'in spin', x['points']
    for i in x['points'].keys():
        points.update({i:[-x['points'][i][1],x['points'][i][0]]})
    paths = {}
    for i in x['paths'].keys():
        paths.update({i:[[-x['paths'][i][0][1],x['paths'][i][0][0]],[-x['paths'][i][1][1],x['paths'][i][1][0]]]})
    corners = {'ne':[-x['corners']['sw'][1],x['corners']['sw'][0]],
        'nw':[-x['corners']['nw'][1],x['corners']['nw'][0]],
        'se':[-x['corners']['se'][1],x['corners']['se'][0]],
        'sw':[-x['corners']['ne'][1],x['corners']['ne'][0]]}
    elevators = {}
    players = {}
    tangles = {}
    temp = {'points':points,
        'paths':paths,
        'elevators':elevators,
        'players':players,
        'tangles':tangles,
        'corners':corners}
    # Layer bookkeeping passes through unchanged.
    for i in ['elevators','players','tangles']:
        for j in x[i].keys():
            temp[i].update({j:x[i][j]})
    return temp
# a certain sense of lack of completeness inspires the last:
def identity(x):
    """Identity transform: completes the D4 symmetry-group operations."""
    return x
#end D4 group
#???????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????
#operations to construct knots
def add(x,y):
    """Tangle addition: place *y* to the right of *x* and join the facing
    corners (x.ne->y.nw on layer 0, x.se->y.sw on layer 1).

    If the boxes would overlap, *y* is first shifted right by 1.25x the
    overlap.  Points and paths of both operands are merged with fresh
    integer keys; the result's corners are the outer corners of the pair.
    """
    t1x = widthandheight(x)
    t1y = widthandheight(y)
    t2x = findcenter(x)
    t2y = findcenter(y)
    # Gap between x's right edge and y's left edge (negative = overlap).
    rofx2lofy = (t2y[0]-t1y[0]/2.) - (t2x[0]+t1x[0]/2.)
    #tofx2bofy = (t2y[1]-t1y[1]/2.) - (t2x[1]+t1x[1]/2.)
    if rofx2lofy < 0:
        y = translate(y,-1.25*rofx2lofy,0)
    #if tofx2bofy < 0:
    # y = translate(y,0,-tofx2bofy)
    npoints = len(x['points'].keys())+len(y['points'].keys())
    #print npoints
    # Merge points of both operands under new sequential keys.
    points = {}
    i = 0
    for k in x['points'].values():
        points.update({i:k})
        i=i+1
    for k in y['points'].values():
        points.update({i:k})
        i=i+1
    tangles = {}
    i = 0
    # NOTE(review): this loop writes tangle centres into `points` (keys
    # 0..n are overwritten) and leaves `tangles` empty; it looks like it
    # should call tangles.update instead.  Downstream geometry may rely
    # on the current behaviour, so it is preserved -- confirm intent.
    for k in x['tangles'].values():
        points.update({i:k})
        i=i+1
    for k in y['tangles'].values():
        points.update({i:k})
        i=i+1
    npoints = len(x['paths'].keys())+len(y['paths'].keys())
    #print npoints
    # Merge paths (with their layer flags) under new sequential keys.
    paths = {}
    elevators = {}
    players = {}
    i = 0
    for k in x['paths'].keys():
        paths.update({i:x['paths'][k]})
        elevators.update({i:x['elevators'][k]})
        players.update({i:x['players'][k]})
        i=i+1
    for k in y['paths'].keys():
        paths.update({i:y['paths'][k]})
        elevators.update({i:y['elevators'][k]})
        players.update({i:y['players'][k]})
        i=i+1
    # Bridge the facing corners: top join on layer 0, bottom join on layer 1.
    paths.update({i:[x['corners']['ne'],y['corners']['nw']]})
    elevators.update({i:0})
    players.update({i:0})
    i=i+1
    elevators.update({i:1})
    players.update({i:1})
    paths.update({i:[x['corners']['se'],y['corners']['sw']]})
    corners = {'ne':y['corners']['ne'],
        'nw':x['corners']['nw'],
        'se':y['corners']['se'],
        'sw':x['corners']['sw']}
    temp = {'points':points,
        'paths':paths,
        'elevators':elevators,
        'tangles':tangles,
        'players':players,
        'corners':corners}
    return temp
#sum is to add an array of tangles left to right
def sum(tangs):
    """Fold add() over a list of tangles, left to right.

    Shadows the builtin sum().  Mutates *tangs* (pop from the front) and
    requires at least two elements.
    """
    temp = add(tangs.pop(0),tangs.pop(0))
    while len(tangs)>0:
        temp = add(temp, tangs.pop(0))
    return temp
def multiply(x,y):
    """Conway tangle multiplication: reflect *x* in y = -x (ymx), align
    *y* vertically with it, then join with add().
    """
    c = ymx(x)
    b = findcenter(c)
    d = findcenter(y)
    #print b
    # Shift y so both operands share the same vertical centre.
    temp = add(c,translate(y,0,b[1]-d[1]))
    return temp
def product(tangs):
    """Fold multiply() over a list of tangles, left to right.

    Mutates *tangs* (pop from the front); needs at least two elements.
    """
    temp = multiply(tangs.pop(0),tangs.pop(0))
    while len(tangs)>0:
        temp = multiply(temp, tangs.pop(0))
    return temp
#???????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????
#some circuit maintenance functions
def findcenter(x):
    """Return [cx, cy], the centre of the bounding box of x's points."""
    xs = [pt[0] for pt in x['points'].values()]
    ys = [pt[1] for pt in x['points'].values()]
    cx = min(xs) + (max(xs) - min(xs)) / 2.
    cy = min(ys) + (max(ys) - min(ys)) / 2.
    return [cx, cy]
def move2origin(x):
    """Return a copy of *x* translated so its centre sits at the origin."""
    zz = findcenter(x)
    #print 'how far to origin', zz
    return translate(x,-zz[0],-zz[1])
def widthandheight(x):
    """Return [width, height] of the bounding box spanned by x's paths."""
    xs = []
    ys = []
    # Both endpoints of every path contribute to the extents.
    for start, end in x['paths'].values():
        xs.extend((start[0], end[0]))
        ys.extend((start[1], end[1]))
    return [max(xs) - min(xs), max(ys) - min(ys)]
def scale(x,aaa):
    """Rescale knot *x* so its bounding box becomes aaa-by-aaa, centred,
    then shift it to be centred at (.5, .5).

    Each axis is scaled independently (aspect ratio is not preserved).
    Layer flags are copied through unchanged.
    """
    knot = move2origin(x)
    b = widthandheight(knot)
    # Unused locals (superseded by nknot below), kept as-is.
    points = {}
    corners = {}
    paths = {}
    nknot = {'paths':{},'points':{},'corners':{},'elevators':{},'players':{},'tangles':{}}
    for i in ['points','corners','tangles']:
        q=0
        for j in knot[i].keys():
            #print j[0]/b[0],j[1]/b[1]
            nknot[i].update({j:[knot[i][j][0]*aaa/b[0],aaa*knot[i][j][1]/b[1]]})
            q=q+1
    q=0
    for i in knot['paths'].values():
        nknot['paths'].update({q:[[aaa*i[0][0]/b[0],aaa*i[0][1]/b[1]],[aaa*i[1][0]/b[0],aaa*i[1][1]/b[1]]]})
        q=q+1
    for i in ['elevators','players']:
        for j in x[i].keys():
            nknot[i].update({j:x[i][j]})
    nknot = translate(nknot,.5,.5)
    return nknot
def tie(x):
    """Close tangle *x* into a knot by joining its open corner pairs.

    Adds four paths: ne->apex->nw above (layer 0) and se->nadir->sw below
    (layer 1), where apex/nadir sit 3/4 of the height above/below centre.
    Mutates *x* in place and returns it.
    """
    b = findcenter(x)
    c = widthandheight(x)
    d = [b[0],b[1]+3*c[1]/4.]
    f = [b[0],b[1]-3*c[1]/4.]
    n = len(x['paths'])
    x['paths'].update({n:[x['corners']['ne'],d]})
    x['paths'].update({n+1:[d,x['corners']['nw']]})
    x['paths'].update({n+2:[x['corners']['se'],f]})
    x['paths'].update({n+3:[f,x['corners']['sw']]})
    for i in ['elevators','players']:
        x[i].update({n:0})
        x[i].update({n+1:0})
        x[i].update({n+2:1})
        x[i].update({n+3:1})
    return x
#???????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????
# Demo: build four tangle rows (5, 5, 3 and 7 crossings), multiply them
# into a single tangle, close it into a knot, and shrink it to 0.9.
z = sum([tangle(2*i,0) for i in range(5)])
q = sum([tangle(10+2*i,0) for i in range(5)])
pp = sum([tangle(13+2*i,0) for i in range(3)])
zz = sum([tangle(20+2*i,0) for i in range(7)])
h = product([z,q,pp,zz])
h = tie(h)
h = scale(h,.9)
#f = open('5537.txt','w')
#f.write(str(h))
|
nborggren/Aleph | Knots/braids/Dissonance.py | <filename>Knots/braids/Dissonance.py
import os, sys
sys.path.append(os.path.join('/usr/local/bin/root/','lib'))
from Tkinter import *
from ROOT import *
class Dissonance:
    """Tkinter panel for assembling superconducting-circuit schematics.

    A drawing canvas sits in the left frame; a vertical palette of clickable
    component images (Josephson junction, resistor, inductor, capacitor,
    source, sink, ground) sits beside it.  Clicking a palette entry cycles
    its highlight colour: blue/red -> yellow -> green -> red.
    """

    # (attribute name, image basename) for each palette entry.  The handler
    # bound to each label is the method named "<attr>Click", preserving the
    # original public method names.
    _COMPONENTS = (
        ("bJos", "jos"),
        ("bRes", "res"),
        ("bInd", "inductor"),
        ("bCap", "cap"),
        ("bSrc", "source"),
        ("bSnk", "sink"),
        ("bGnd", "gnd"),
    )

    def __init__(self, root):
        self.myParent = root
        self.top_frame = Frame(root)
        self.top_frame.pack(side=RIGHT, fill=BOTH, expand=YES)
        self.left_frame = Frame(self.top_frame, background="grey",
                                borderwidth=5, relief=RIDGE,
                                height=350,
                                width=650,
                                )
        self.left_frame.pack(side=RIGHT,
                             fill=BOTH,
                             expand=YES,
                             )
        self.micros = Frame(root)
        self.micros.pack()
        self.rght = Frame(root)
        self.rght.pack()
        # Build the component palette.  (The original also created two
        # throwaway Button widgets that were immediately overwritten by
        # Labels — dead code, removed.)
        for attr, image_name in self._COMPONENTS:
            photo = PhotoImage(file="./lib/micros/" + image_name + ".GIF")
            label = Label(self.micros, image=photo)
            # Keep a reference on the widget so Tk doesn't garbage-collect
            # the image.
            label.photo = photo
            label["background"] = "blue"
            label.bind("<Button-1>", getattr(self, attr + "Click"))
            label.pack()
            setattr(self, attr, label)
        # Schematic drawing canvas with a demo dashed line.
        self.Img = Canvas(self.left_frame, width=1000, height=500)
        self.Img.pack()
        self.Img.create_line(0, 100, 470, 0, fill="red", dash=(4, 4))

    def _cycle_color(self, widget):
        """Advance a palette label: blue/red -> yellow -> green -> red."""
        bg = widget["background"]
        if bg in ("blue", "red"):
            widget["background"] = "yellow"
        elif bg == "yellow":
            widget["background"] = "green"
        else:
            widget["background"] = "red"

    # One thin handler per component keeps the original public interface.
    def bJosClick(self, event):
        self._cycle_color(self.bJos)

    def bResClick(self, event):
        self._cycle_color(self.bRes)

    def bIndClick(self, event):
        self._cycle_color(self.bInd)

    def bCapClick(self, event):
        self._cycle_color(self.bCap)

    def bSrcClick(self, event):
        self._cycle_color(self.bSrc)

    def bSnkClick(self, event):
        self._cycle_color(self.bSnk)

    def bGndClick(self, event):
        self._cycle_color(self.bGnd)
class Jos:
    """Physical parameters of a Josephson junction (class-level constants)."""
    IxRm = 26.0  # Subgap voltage Ic*Rm in mV.
    Jc = 4500.0  # Critical current density in A/cm2.
    RmRn = 20.0  # Dimensionless ratio of subgap (Rm) and normal (Rn) resistances.
    # Full junction current model (not implemented): I = Is(phi)+In(V)+Id(Vdot)+If(t)
#?????????????????????????????????????????????????????????????????????????????????????????????????????
#?Unit Conversion to Pscan
#?
#?
#?????????????????????????????????????????????????????????????????????????????????????????????????????
def Convert_to_Pscan(type, num):
    """Convert a physical value *num* into PSCAN simulator units.

    type: 'R' (Ohms, 1 unit = 2.38), 'L' (pH, 1 unit = 2.64) or
    'I' (mA, 1 unit = 0.125).  Any other type returns a usage string.
    Unit values per http://pavel.physics.sunysb.edu/RSFQ/Lib/units.html.
    (The parameter name 'type' shadows the builtin but is kept for
    interface compatibility.)
    """
    units_per_pscan = {'R': 2.38, 'L': 2.64, 'I': 0.125}
    if type in units_per_pscan:
        return num / units_per_pscan[type]
    return 'enter R for resistance, L for inductance, I for Current.'
def Convert_away_Pscan(type, num):
    """Convert *num* PSCAN simulator units back into physical values.

    type: 'R' -> Ohms (x2.38), 'L' -> pH (x2.64), 'I' -> mA (x0.125).
    Any other type returns a usage string.
    Unit values per http://pavel.physics.sunysb.edu/RSFQ/Lib/units.html.
    """
    units_per_pscan = {'R': 2.38, 'L': 2.64, 'I': 0.125}
    if type in units_per_pscan:
        return num * units_per_pscan[type]
    return 'enter R for resistance, L for inductance, I for Current.'
# Launch the GUI: build the panel on a fresh Tk root and enter the event loop.
root = Tk()
dis = Dissonance(root)
root.mainloop()
|
nborggren/Aleph | AlephSB/Aleph.py | <filename>AlephSB/Aleph.py<gh_stars>0
import sys
# With no command-line argument, run the default Lorenz system; otherwise
# import the module named on the command line and instantiate the class of
# the same name.  NOTE(review): exec on raw argv is unsafe for untrusted
# input; Python 2 syntax (print statement, exec string).
if len(sys.argv)<2:
    from lorenz import *
    tmp=lorenz()
else:
    print sys.argv[1]
    exec("from "+sys.argv[1]+" import *")
    exec("tmp = "+sys.argv[1]+"()")
tmp.Init()
tmp.Start()
# Block until the user presses return so windows stay open (Python 2 input).
zwom = input("sheat")
|
nborggren/Aleph | AlephSB/Calc.py | <reponame>nborggren/Aleph<gh_stars>0
from Tkinter import *
from ROOT import *
class Calc(Frame):
    """Tkinter control panel mixed into the dynamical-system classes.

    Builds entry fields for the system parameters (self.par), diffusion
    parameters (self.dpar), trajectory timing (self.tpar) and noise terms,
    plus two button grids whose labels ('traj', 'lang', ..., 'run', 'fpt')
    dispatch to the ev_* handlers below.  Relies on attributes supplied by
    the class it is mixed into: self.f, self.par, self.dpar, self.nexpr,
    self.dof, self.mon, self.rzero, self.Mons, self.Traj, self.Lang, etc.
    NOTE: Python 2 code (print statements, dict.keys() indexing, exec).
    """
    def Start(self, master=None):
        # Initialise the Frame, record the operator button names, lay out widgets.
        Frame.__init__(self,master)
        self.grid(sticky='nesw')
        self.sOps = ['traj','ntraj','hist','lang','nlang','mesh','meshtraj','myFFT','Reset']
        self.dOps = ['run','fpt']
        self.Widgets(label=False)
    ##Widgets for Gui
    def Widgets(self, label=False):
        """Create and grid every widget; label=True also shows each RHS text."""
        top=self.winfo_toplevel()
        top.rowconfigure(0, weight=1)
        top.columnconfigure(0, weight=1)
        self.rowconfigure(0, weight=1)
        self.columnconfigure(0, weight=1)
        #?????????????????????????????????????????????????????????????????
        #Algebra widgets
        #?????????????????????????????????????????????????????????????????
        #frame displaying velocity field
        self.wForce = Frame(self)
        self.wForce.grid(row=2,rowspan=4,column=1,columnspan=8,sticky='nesw')
        self.wF = []
        for i in range(self.dof):
            print self.f[i]
            if label==True:
                self.wF.append(Label(self.wForce,text="f_"+str(i)+"= "+self.f[i]))
                self.wF[-1].grid(row=2*i,rowspan=2,columnspan=8)
            else:
                self.wF.append(Label(self.wForce,text="f_"+str(i)))
                self.wF[-1].grid(row=2*i,rowspan=2,columnspan=8)
        #frame displaying parameters of velocity field
        self.wPar = Frame(self)
        self.wPar.grid(row=2, column=10, columnspan=8,rowspan=4,sticky='nesw')
        self.sPar = [StringVar() for i in range(len(self.par))]
        for i in range(len(self.par)):
            tmp = self.par.keys()[i]
            self.sPar[i].set(str(self.par[tmp]))
        self.parInits = []
        self.lInits = []
        nn = len(self.par.values())
        for i in range(nn):
            self.parInits.append(Entry(self.wPar,textvariable=self.sPar[i]))
            self.lInits.append(Label(self.wPar,text=self.par.keys()[i]))
            self.parInits[-1].grid(row =i/5+1, column=6+2*(i%5)+1)
            self.lInits[-1].grid(row =i/5+1, column=6+2*(i%5))
        #frame displaying parameters for trajectories and langevin trajectories
        self.tpar = {"t0":0.,
                     "t1":25000.,
                     "t2":5000,
                     "t3":1,
                     "t4":50}
        self.wtPar = Frame(self)
        self.wtPar.grid(row=25, column=10, columnspan=8,rowspan=4,sticky='nesw')
        self.stPar = [StringVar() for i in range(len(self.tpar))]
        for i in range(len(self.tpar)):
            tmp = self.tpar.keys()[i]
            self.stPar[i].set(str(self.tpar[tmp]))
        self.tparInits = []
        self.tlInits = []
        for i in range(len(self.tpar.values())):
            self.tparInits.append(Entry(self.wtPar,textvariable=self.stPar[i]))
            self.tparInits[-1].grid(row = 1, column =6+2*(i%5)+1)
            self.tlInits.append(Label(self.wtPar,text=self.tpar.keys()[i]))
            self.tlInits[-1].grid(row = 1, column=6+2*(i%5))
        #frame displaying parameters of diffusion tensor
        self.wdPar = Frame(self)
        self.wdPar.grid(row=15, column=10, columnspan=8,rowspan=2,sticky='nesw')
        self.sdPar = [StringVar() for i in range(len(self.dpar))]
        for i in range(len(self.dpar)):
            tmp = self.dpar.keys()[i]
            self.sdPar[i].set(str(self.dpar[tmp]))
        self.dparInits = []
        self.dlInits = []
        for i in range(len(self.dpar.values())):
            self.dparInits.append(Entry(self.wdPar,textvariable=self.sdPar[i]))
            self.dparInits[-1].grid(row = 1, column =6+2*(i%5)+1)
            self.dlInits.append(Label(self.wdPar,text=self.dpar.keys()[i]))
            self.dlInits[-1].grid(row = 1, column=6+2*(i%5))
        #frame displaying noise terms for langevin equations
        self.lngPar = Frame(self)
        self.lngPar.grid(row=17, column=10, columnspan=8,rowspan=2,sticky='nesw')
        self.slngPar = [StringVar() for i in range(self.dof)]
        self.slngInits = []
        for i in range(self.dof):
            self.slngPar[i].set(self.nexpr[i])
        for i in range(self.dof):
            self.slngInits.append(Entry(self.lngPar,textvariable=self.slngPar[i]))
            self.slngInits[i].grid(row = 1, column =6+i)
        ##collect images used for buttons
        self.phX = [PhotoImage(file="/home/nborggren/Aleph/lib/micros/x"+str(i)+".gif") for i in range(self.dof)]
        self.wInits = Frame(self)
        self.wInits.grid(row=8,rowspan=4,column=1,columnspan=2,sticky='nesw')
        self.xzero = [StringVar() for i in range(self.dof)]
        self.suzero = [StringVar() for i in range(self.mon)]
        # Random initial condition and its monomial image seed the entry fields.
        tmp = self.rzero()
        utmp = self.Mons(tmp)
        sx = ['x_'+str(i) for i in range(self.dof)]
        for i in range(self.mon):
            self.suzero[i].set(str(utmp[i])[:6])
        for i in range(self.dof):
            self.xzero[i].set(str(tmp[i]))
        self.sxInits = [Entry(self.wInits,textvariable=self.xzero[i]) for i in range(self.dof)]
        self.bxInits = [Button(self.wInits,image=self.phX[i]) for i in range(self.dof)]
        ##scale for initial conditions
        for i in range(self.dof):
            self.sxInits[i].grid(row=i,column=1)
            # Binds each button to the mixin handler self.ev_x<i> by name.
            exec("self.bxInits["+str(i)+"].bind(\"<Button-1>\", self.ev_x"+str(i)+')')
            ##buttons for form generators
            self.bxInits[i].grid(row=i,column=2)
        #soul operators
        self.wOps = Frame(self)
        self.wOps.grid(row=8, column=14, columnspan=3,rowspan=6,sticky='nesw')
        self.aOps = []
        for i in range(len(self.sOps)):
            print i
            # Buttons dispatch to the ev_<name> handlers defined below.
            exec("self.aOps.append(Button(self.wOps,text=\""+self.sOps[i]+"\",command=self.ev_"+self.sOps[i]+"))")
            self.aOps[i].grid(row=i/3,column=i%3)
        #soul operators
        self.wdOps = Frame(self)
        self.wdOps.grid(row=19, column=14, columnspan=5,rowspan=6,sticky='nesw')
        self.adOps = []
        for i in range(len(self.dOps)):
            print i
            exec("self.adOps.append(Button(self.wdOps,text=\""+self.dOps[i]+"\",command=self.ev_"+self.dOps[i]+"))")
            self.adOps[i].grid(row=i/3,column=i%3)
        lu0 = Label(self.wdOps,text='u0')
        lu0.grid(row=2,column=0)
        # Default initial density u0: an isotropic Gaussian centred on x0.
        self.su0 = StringVar()
        self.su0.set("1./pow(50*2.5066,2)*exp((-pow(x[0]-"+self.xzero[0].get()+",2)-pow(x[1]-"+self.xzero[1].get()+",2))/(2*pow(50,2)))")
        wu0 = Entry(self.wdOps,textvariable=self.su0)
        wu0.grid(row=2,column=1)
        lname = Label(self.wdOps,text='name')
        lname.grid(row=3,column=0)
        self.soutput = StringVar()
        self.soutput.set(self.output)
        wname = Entry(self.wdOps,textvariable=self.soutput)
        wname.grid(row=3,column=1)
        zouts = ['lies','maybe','truth']
        self.wExpr = Frame(self)
        self.wExpr.grid(row=16, column=1, columnspan=8,rowspan=3,sticky='nesw')
        self.var = [Label(self.wExpr,text=zouts[i]) for i in range(3)]
        self.remarks = [StringVar() for i in range(3)]
        self.aExpr = [Label(self.wExpr,textvariable=self.remarks[i]) for i in range(3)]
        for i in range(3):
            self.aExpr[i].grid(row=i,column=1)
            self.var[i].grid(row=i,column=10)
        self.phPar = [PhotoImage(file="/home/nborggren/Aleph/lib/micros/"+i+".gif") for i in ['sigma','rho','beta']]
        self.wPar = Frame(self)
        #self.wPar.grid(row=12,column=1,columnspan = 3)
        self.bPar = [Button(self.wPar,image=i) for i in self.phPar]
        #for i in range(3):self.bPar[i].grid(row=0,column=i)
    def ev_Reset(self):
        """Copy every entry-field value back into self.par/self.dpar, then reset."""
        for i in range(len(self.sPar)):
            print self.par.keys()[i]
            self.par[self.par.keys()[i]]=float(self.sPar[i].get())
        #no se porque
        for i in range(len(self.sPar)):
            print self.par.keys()[i]
            self.par[self.par.keys()[i]]=float(self.sPar[i].get())
        for i in range(len(self.sdPar)):
            self.dpar[self.dpar.keys()[i]]=float(self.sdPar[i].get())
        try:
            x = [float(self.xzero[i].get()) for i in range(self.dof)]
        except ValueError:
            # Non-float entries are interpreted as complex initial conditions.
            x = [complex(self.xzero[i].get()) for i in range(self.dof)]
            # NOTE(review): '==' is a no-op comparison; 'self.imagine = 1'
            # (assignment) was almost certainly intended.
            self.imagine==1
        #self.Traj(x,U=1,Plot=1)
        #return
        self.reset()
    def current(self,ohwell):
        """Refresh the cached monomial strings from the current x0 entries."""
        tmp = [self.xzero[i].get() for i in range(self.dof)]
        utmp = self.Mons(tmp)
        for i in range(self.mon):
            self.suzero[i] = str(utmp[i])[:6]
        pass
    def ev_traj(self):
        """Run and plot a single trajectory from the entered initial condition."""
        try:
            x = [float(self.xzero[i].get()) for i in range(self.dof)]
        except ValueError:
            x = [complex(self.xzero[i].get()) for i in range(self.dof)]
            # NOTE(review): no-op comparison, see ev_Reset.
            self.imagine==1
        self.Traj(x,U=1,Plot=1)
        return
    def ev_ntraj(self,Plot=1):
        """Run 50 trajectories from random initial conditions."""
        self.nTraj(50,U=1)
        pass
    def ev_lang(self):
        """Run a single Langevin (noisy) trajectory with the current parameters."""
        x = [float(self.xzero[i].get()) for i in range(self.dof)]
        print 'langevining'
        for i in range(len(self.sPar)):
            self.par[self.par.keys()[i]]=float(self.sPar[i].get())
        self.reset()
        try:
            #self.Lang(x,m=2,wt=float(self.sdPar[2].get()),Plot=1)
            self.Lang(x,m=2,wt=self.par['Dxx'],Plot=1) #buggy fixed weight
        except ValueError:
            self.Lang(x,m=2,wt=100.,Plot=1)
        pass
    def ev_nlang(self,Plot=1):
        """Run 50 Langevin trajectories after refreshing the parameters."""
        for i in range(self.dof):
            self.par[self.par.keys()[i]]=float(self.sPar[i].get())
        self.reset()
        self.nLang(50,Plot=1)
        pass
    def ev_hist(self):
        """Draw the accumulated occupation histogram on a fresh ROOT canvas."""
        c3 = TCanvas()
        self.hist.Draw("colz")
        # Block so the canvas stays visible (Python 2 input()).
        zwom=input("sheat")
        pass
    def ev_mesh(self):
        """Load/generate the FEM mesh from the named XML file."""
        self.Mesh(self.output+".xml")
        pass
    def ev_meshtraj(self):
        """Advect the mesh nodes along the flow for self.tsteps steps."""
        #for i in range(self.dof):
        #    print self.sPar[i].get()
        #    self.par[self.par.keys()[i]]=self.sPar[i].get()
        #self.ev_Reset()
        self.MeshTraj(int(self.tsteps),1,self.dt,sz=15,boundary=1)
        pass
    def ev_run(self):
        """Run the PDE solve for the current output name."""
        #self.output=self.soutput.get()
        #print self.output
        self.Run(self.output)
    def ev_fpt(self):
        """Launch the first-passage-time computation."""
        self.FirstPassage()
        pass
|
nborggren/Aleph | AlephSB/Algebra.py | <gh_stars>0
"""The Algebra class serves as a backbone for basic operations,
addition and multiplication,
a torch to the abyss of differential equations ahead.
"""
__author__ = "<NAME> (<EMAIL>)"
__date__ = "2011-01-04"
__copyright__ = "Copyright (C) 2011 <NAME>"
__license__ = "GNU LGPL Version 2.1"
from ROOT import TF3,TH1F,TFile, gDirectory
from numpy import *
from array import array
from swiginac import *
from os import environ
from os import system as MySys
from time import time, strftime, gmtime
#from datetime import date
from Analysis import *
class Algebra:
    """Quasimonomial (Lotka-Volterra style) algebra for a dynamical system.

    The system dot(x_i) = x_i * sum_j A_ij * prod_k x_k^B_jk is recast in
    the monomial variables u; M = B*A carries the structure constants of
    the product.  Also provides LaTeX/graphviz/ROOT reporting helpers.
    NOTE: Python 2 code (xrange, iterator .next(), dict.keys() indexing).
    """
    ## Initialization
    # NOTE(review): the default f/u/par/A/B arguments are mutable objects
    # shared across calls — safe only if callers never mutate them in place.
    def Init(self,dof=3,mon=5,S=False,system='Lorenz',
             f=["-sigma*x[0]+sigma*x[1]",
                "rho*x[0]-x[1]-x[0]*x[2]",
                "-beta*x[2]+x[0]*x[1]"],
             u=["1","y/x","x/y","x*z/y","x*y/z"],
             par={'sigma':10,'rho':28.,'beta':8./3.},
             spar = [symbol("\\sigma"),symbol("\\rho"),symbol("\\beta")],
             A=matrix([[-10.,10.,0,0,0],
                       [-1,0,28.,-1,0],
                       [-8./3.,0,0,0,1]]),
             B=matrix([[0,0,0],
                       [-1,1,0],
                       [1,-1,0],
                       [1,-1,1],
                       [1,1,-1]]),
             imagine=0,
             MyLog=1):
        self.dof = dof
        self.mon = mon
        self.S=S
        self.f=f
        self.u=u
        self.par=par
        self.spar=spar
        self.MyLog=MyLog
        # NOTE(review): indentation reconstructed from a whitespace-mangled
        # source.  The reading below assumes the numeric A,B arguments are
        # used whenever S is False, and symbolic spar-based matrices only in
        # the symbolic Lorenz case — verify against the upstream repository.
        if S==True:
            self.U = matrix([[symbol("u_{"+str(i)+"}")] for i in range(mon)])
            self.V = matrix([[symbol("v_{"+str(i)+"}")] for i in range(mon)])
            self.x = matrix([[symbol("x_{"+str(i)+"}")] for i in range(dof)])
            if system=='Lorenz':
                self.A=matrix([[-spar[0],spar[0],0,0,0],
                               [-1,0,spar[1],-1,0],
                               [-spar[2],0,0,0,1]])
                self.B=matrix([[0,0,0],
                               [-1,1,0],
                               [1,-1,0],
                               [1,-1,1],
                               [1,1,-1]])
                self.M = self.GetM()
        else:
            self.A = A
            self.B = B
            self.M = self.GetM()
        # Collected artifacts for Publish(): figures, tables, equations, ...
        self.media = {'image':[],
                      'table':[],
                      'eqn':[],
                      'video':[],
                      'dot':[]}
        self.logbook = {'notes':[],
                        'inits':[]}
        self.root = []
        self.dtime= str(time()).split('.')[0] #date.isoformat(date.today())
        self.current,self.current2 = cname(0),cname(0)
        self.name = strftime("%Y_%b_%d.%H_%M", gmtime())+"_"
        self.output = self.name+"_"
        self.scurrent,self.scurrent2 = self.current.next(), self.current2.next()
    ## Calulates matrix product \f$M=BA\f$
    def GetM(self):
        return self.B*self.A
    ## Calulates structure constants in \f$u_i*u_j=\frac{1}{2}(\delta_{ik}M_{kj}+\delta_{ij}M_{jk})\f$
    def Struct(self,i,j,k):
        return (delta(i,k)*self.M[k,j]+delta(i,j)*self.M[j,k])/2
    ## the product of the ith and jth basis vector in the algebra \f$u*v\f$
    def mult(self,i,j):
        return [self.Struct(k,i,j) for k in range(self.mon)]
    ## the product of vectors in the algebra \f$u*v\f$
    # Scalars (no len()) multiply componentwise; the RuntimeError fallback
    # handles column-matrix operands indexed as U[i][0].
    def Mult(self,U,V):
        try:
            len(U)
        except TypeError:
            return [U*V[i] for i in range(self.mon)]
        try:
            tmp = [0 for i in range(self.mon)]
            for i in range(self.mon):
                for j in range(self.mon):
                    for k in range(self.mon):
                        tmp[k] = tmp[k]+U[i]*V[j]*self.Struct(k,i,j)
            return tmp
        except RuntimeError:
            tmp = [0 for i in range(self.mon)]
            for i in range(self.mon):
                for j in range(self.mon):
                    for k in range(self.mon):
                        tmp[k] = tmp[k]+U[i][0]*V[j][0]*self.Struct(k,i,j)
            return tmp
    ## the sum of vectors in the algebra \f$u+v\f$
    def Sum(self,U,V):
        try:
            return [U[i]+V[i] for i in range(self.mon)]
        except RuntimeError:
            return [U[i][0]+V[i][0] for i in range(self.mon)]
    ## we can call it additon too. \f$u+v\f$
    def Add(self,U,V):
        try:
            return [U[i]+V[i] for i in range(self.mon)]
        except RuntimeError:
            return [U[i][0]+V[i][0] for i in range(self.mon)]
    ## the associativity relations: componentwise (UV)W - U(VW)
    def Assoc(self,U,V,W):
        return [self.Mult(self.Mult(U,V),W)[i]-self.Mult(U,self.Mult(V,W))[i] for i in range(self.mon)]
    # Jordan identity check: (UV)(UU) == U(V(UU)).
    def IsJordan(self,U,V):
        return self.Mult(self.Mult(U,V),self.Mult(U,U))==self.Mult(U,self.Mult(V,self.Mult(U,U)))
    ## numeric or symbolic vector of point in monomial space
    def Mons(self,x):
        return [1,x[1]/x[0],x[0]/x[1],x[0]*x[2]/x[1],x[0]*x[1]/x[2]]
    # Same monomials, mapped through atan for bounded plotting ranges.
    def tMons(self,x):
        tmp = [1,x[1]/x[0],x[0]/x[1],x[0]*x[2]/x[1],x[0]*x[1]/x[2]]
        return [atan(j) for j in tmp]
    # Builds a ROOT-style formula string "[0]*u0+[1]*u1+..." for fitting.
    def PlotMons(self,U):
        sProp = ''
        for i in self.Sum(['['+str(i)+']*' for i in range(self.mon)],self.u):
            sProp=sProp+i+"+"
        return sProp[:-1]
    ## The time derivative of U comes from \f$\dot{U_i}=U_iM_{ij}U_j\f$,\f$\dot{x_i}= \sum_{j=1}^mA_{ij}\prod_{k=1}^nx_k^{B_{jk}}\f$
    def udot(self,U,x=False):
        if x==True:
            U = self.Mons(U)
        tmp = [sum([self.M[j,i]*U[i] for j in range(self.mon)]) for i in range(self.mon)]
        return [self.Mult(U[i],tmp[i]) for i in range(self.mon)]
    def Publish(self,name,title,path="./"):
        """Render the collected self.media artifacts into a LaTeX template
        ($ALEPHPATH/lib/template.tex), then run latex and dvipdf on it."""
        f=open(environ["ALEPHPATH"]+'lib/template.tex')
        g=open(name+'.tex','w')
        img_beg = ["\\begin{figure}[h]",
                   "\\begin{center}"]
        img_end = ["\\end{center}",
                   "\\end{figure}"]
        tbl_beg = ["\\begin{table}[htp]",
                   "\\centering",
                   "\\begin{tabular}{"+"*{"+str(self.mon+1)+"}{|>{\centering}p{"+str(15./(self.mon+1))[:3]+"cm}}|} ",
                   "\\hline"]
        tbl_end = ["\\end{tabular}",
                   "\\end{table}"]
        eqn_beg = ["\\begin{equation}"]
        eqn_end = ["\\end{equation}"," "]
        vid_beg = ["\\begin{figure}[ht]",
                   "\includemovie[",
                   "poster,"]
        vid_end = ["\end{figure}"]
        IMAGES,TABLES,VIDEOS,EQNS,DOTS="","","","",""
        for i in self.media['image']:
            for j in img_beg:
                IMAGES=IMAGES+j+" \n "
            IMAGES = IMAGES+ "\epsfig{file="+i+",scale = 0.8} \n"
            for j in img_end:
                IMAGES=IMAGES+j+" \n "
        for i in self.media['table']:
            for j in tbl_beg:
                TABLES=TABLES+j+" \n "
            tmp = ["& $ U_"+str(q+1)+ " $ " for q in range(self.mon)]
            TABLES = TABLES + " * "
            for k in tmp:
                TABLES = TABLES+k
            TABLES = TABLES + " \n \\tabularnewline \n \\hline \n "
            TABLES = TABLES + i +" \n "
            for j in tbl_end:
                TABLES=TABLES+j+" \n "
        for i in self.media['dot']:
            # External ladot tool turns .ladot sources into TeX + PostScript.
            MySys("$DOTPATH./ladot "+i+".ladot")
            DOTS=DOTS+"\\input{"+i+".tex} \n"
            DOTS=DOTS+"\\includegraphics[width=6.5in,height=8in]{"+i+".ps} \n"
        for i in self.media['eqn']:
            for j in eqn_beg:
                EQNS=EQNS+j+" \n "
            EQNS = EQNS+i+" \n"
            for j in eqn_end:
                EQNS=EQNS+j+" \n "
        for i in self.media['video']:
            for j in vid_beg:
                VIDEOS=VIDEOS+j+" \n "
            VIDEOS = VIDEOS + "text={\\small("+i+")}]{10cm}{10cm}{"+i+"} \n "
            for j in vid_end:
                VIDEOS=VIDEOS+j+" \n "
        # Substitute the assembled fragments into the template placeholders.
        for line in f:
            line=line.replace('NOTES',title)
            line=line.replace('IMAGES',IMAGES)
            line=line.replace('TABLES',TABLES)
            line=line.replace('EQNS',EQNS)
            line=line.replace('DOTS',DOTS)
            line=line.replace('VIDEOS',VIDEOS)
            g.write(line)
        g.close()
        MySys("latex "+name+".tex")
        MySys("dvipdf "+name+".dvi")
    ## returns TeX for the multiplication table
    def mtable(self):
        labels = ["$U_"+str(i+1)+"$" for i in range(self.mon)]
        output = " "
        for i in range(self.mon):
            output = output+"$U_"+str(i+1)+"$ & $"
            for j in range(self.mon):
                tmp=self.mult(i,j)
                #print tmp
                for k in range(self.mon):
                    if tmp[k]!=0:
                        output = output + "("+str(tmp[k])+")U_"+str(k+1)+"+"
                if output[-1]=="+":
                    # Strip the trailing '+' left by the expansion above.
                    output=output[:-1]+ "$ &$"
                else:
                    output = output + " 0 $ &$"
            output = output[:-2] + "\n \\tabularnewline \n \hline \n"
        return output
    ##BasicGraph, default conneted graph of
    # NOTE(review): 'i' is never bound in this method — the inner loop over
    # a second index (e.g. 'for i in points:') appears to be missing, so
    # this raises NameError as written.  Also defined without 'self'.
    def BoseHubbard(q,name='demo',k = 0, conds = [],outs = [],ps=1):
        points = [i for i in range(q)]
        f = open(name+'.dot','w')
        f.write('digraph ' + name + '{ \n')
        f.write(' edge [dir=none] \n')
        for j in points:
            if i<j: #+1 line for double counting
                f.write(str(i)+'->'+str(j)+'; \n ')
        f.write('}')
        f.close()
        MySys("dot -Tps "+name+".dot > "+name+".ps")
        return
    ## makes a ladot file demonstrating the recursion relations
    def Ladot(self,name,D=False):
        f = open(name+".ladot",'w')
        # One letter per degree of freedom (i, j, ...) and per monomial (a, b, ...).
        ltr = [chr(i) for i in xrange(ord('i'), ord('i')+self.dof)]
        alp = [chr(i) for i in xrange(ord('a'), ord('a')+self.mon)]
        f.write("graph "+name+" { \n \n node [shape=box,width=2.5, color = \"blue\"] \n")
        sym = [symbol(i) for i in ltr]
        a=matrix([[zz+1 for zz in sym]])
        b=a*self.A
        for ii in range(self.mon):
            tmp = " "*4+alp[ii]+" [label = \"$p_{"
            tmp2 = "["+str(b[0,ii])+"]$(6)\"]"
            for jj in range(self.dof):
                if self.B[ii,jj]==0:
                    tmp = tmp+ltr[jj]
                elif self.B[ii,jj]>0:
                    tmp = tmp+ltr[jj]+str(-self.B[ii,jj])
                else:
                    tmp = tmp+ltr[jj]+"+"+str(-self.B[ii,jj])
            tmp = tmp+"}"
            f.write(tmp+tmp2+" \n")
        f.write(" edge [color = \"red\"] \n \n")
        # Fully connect the monomial nodes (each unordered pair once).
        for ii in alp:
            for jj in alp:
                if ii<jj:
                    f.write(" "+ii+" -- "+jj+"; \n")
        f.write("} \n")
        self.dot.append(name)
    ##Return a root TF2 object for plotting vector u
    # NOTE(review): concatenates self.U[i] into a string; self.U holds
    # symbols when S=True, so this likely needs str() — verify before use.
    def GetTF2(self,u):
        if self.dof!=2:
            return "sorry"
        fnc = ""
        for i in range(self.mon-1):
            fnc=fnc+str(u[i])+"*"+self.U[i]+"+"
        fnc=fnc+str(u[self.mon-1])+"*"+self.U[self.mon-1]
        return TF2("sys",fnc,0,10,0,10)
    ##Return a root TF2 object plotting vector (over monomials) u
    def GetTF3(self,u):
        if self.dof!=3:
            return "sorry"
        fnc = ""
        for i in range(self.mon-1):
            fnc=fnc+str(u[i])+"*"+self.u[i]+"+"
        fnc=fnc+str(u[self.mon-1])+"*"+self.u[self.mon-1]
        return TF3("lorenz",fnc,-30,30,-30,30,0,50)
    ##Newton solver to find zeroes of F, (Null Space of M)
    def GetZeroes(self):
        return
    def write(self,output):
        """Persist every collected ROOT object in self.root to <output>.root."""
        zwom = TFile(output+".root","recreate")
        for i in self.root:
            try:
                i.Write()
            except AttributeError:
                continue
        #
        #for i in self.hU:i.Write()
        #for i in self.tU:i.Write()
        #for i in self.hSect:i.Write()
## Kronecker delta: 1.0 when the two indices are equal, 0.0 otherwise.
def delta(i,j):
    return 1.0 if i == j else 0.0
def MyPow(Dyn, u, m):
    """Return the m-th power of *u* under the algebra product Dyn.Mult.

    Computed right-nested as u*(u*(...*u)), matching the original recursion
    (the product need not be associative).  The original recursed without a
    base case for m < 1 and blew the stack; this version validates m and
    iterates instead.

    Args:
        Dyn: object providing the binary product ``Mult(a, b)``.
        u: element to raise to a power.
        m: positive integer exponent.

    Raises:
        ValueError: if m is less than 1.
    """
    if m < 1:
        raise ValueError("m must be a positive integer")
    result = u
    for _ in range(m - 1):
        result = Dyn.Mult(u, result)
    return result
##The Algebra system class.
#A quasimonomial dynamical system can be written for i from 1 to n (dof) as
#\f$\dot{x_i}=x_i\sum_{j=1}^mA_{ij}\prod_{k=1}^nx_k^{B_{jk}}\f$ \cite{Figuerdo}
|
nborggren/Aleph | AlephSB/Dynamics.py | <gh_stars>0
##@package Dynamics
#Classes for Dynamical Systems
##@author <NAME>
from dolfin import *
from ROOT import TH3F,TH2F,TH1F, TF1, TGraph, TGraph2D, gStyle, TPolyLine3D, TCanvas, TF3, TPolyLine, TPostScript
from array import array as MyArray
from time import time, strftime, gmtime
from Algebra import *
from random import random
from numpy.fft import *
from Analysis import *
from Calc import *
gStyle.SetPalette(1)  # ROOT rainbow palette for 2-D "colz" histogram drawing.
##The Dynamical system class
class Dynamics(Algebra,Calc):
    def Init(self,S=False,Ti=[0.,15.,50000],nx=256,
             mins = [-30,-30,0],
             maxs = [30,30,50],
             doms = [[0.2,1.8],[-10,4.5],[-2.5,6],[-50,150],[-3.5,15]],
             ROOT = 1,dof=3,mon=5,system='Lorenz',
             f=["-sigma*x[0]+sigma*x[1]",
                "rho*x[0]-x[1]-x[0]*x[2]",
                "-beta*x[2]+x[0]*x[1]"],
             u=["1","y/x","x/y","x*z/y","x*y/z"],
             par={'sigma':10,'rho':28.,'beta':8./3.},
             spar = [symbol("\\sigma"),symbol("\\rho"),symbol("\\beta")],
             A=matrix([[10.,10.,0,0,0],
                       [-1,0,28.,-1,0],
                       [-8./3.,0,0,0,1]]),
             B=matrix([[0,0,0],
                       [-1,1,0],
                       [1,-1,0],
                       [1,-1,1],
                       [1,1,-1]]),
             imagine=0,
             hists=[],
             MyLog=1):
        # Histogram axis limits (mins/maxs for state space, doms for the
        # monomial variables); ROOT=1 uses the hard-coded Lorenz flow,
        # ROOT=0 would use a dolfin Expression (currently commented out).
        # NOTE(review): A[0][0] is +10. here but -10. in Algebra.Init's
        # default — one of the two signs is presumably wrong; verify.
        # NOTE(review): mutable default arguments are shared across calls.
        self.u = u
        self.nx=nx
        self.mins=mins
        self.maxs=maxs
        self.doms=doms
        self.ROOT=ROOT
        self.imagine=imagine
        Algebra.Init(self,S=S,dof=dof,mon=mon,system=system,f=f,u=u,par=par,spar=spar,A=A,B=B,imagine=imagine)
        # if ROOT == 0:
        #     try:
        #         self.velocity = Expression((self.f[i] for i in range(self.dof)),defaults=self.par)
        #     except TypeError:
        #         self.GetVelocity()
        self.npSect=0
        self.pSect=[]
        #self.T(Ti[0],Ti[1],Ti[2])
        self.nLines = []
##Flow field calculated at x.
    def Flow(self,x):
        """Velocity field at state x.

        ROOT==0 delegates to self.velocity (a dolfin Expression); otherwise
        the Lorenz right-hand side is evaluated with numeric parameters from
        self.par (S False) or symbolic ones from self.spar (S True).
        """
        if self.ROOT == 0:
            return [self.velocity(x)[i] for i in range(self.dof)]
        elif self.S == False:
            sigma=self.par['sigma']
            rho=self.par['rho']
            beta=self.par['beta']
            return [-sigma*x[0]+sigma*x[1],rho*x[0]-x[1]-x[0]*x[2],-beta*x[2]+x[0]*x[1]]
        else:
            sigma=self.spar[0]
            rho=self.spar[1]
            beta=self.spar[2]
            return [-sigma*x[0]+sigma*x[1],rho*x[0]-x[1]-x[0]*x[2],-beta*x[2]+x[0]*x[1]]
##Set time specifications for trajectories
    def T(self,ti,tf,tsteps):
        """Set the integration window [ti, tf] with tsteps steps and
        (re)allocate the monomial and state-space histograms to match."""
        self.ti=ti
        self.tf=tf
        self.dt=(tf-ti)/float(tsteps)
        self.tsteps=tsteps
        self.mons(l=[ti,tf,tsteps])
        self.space(l=[ti,tf,tsteps])
    def mons(self,l=[0,15,50000]):
        """Allocate occupation (hU*) and time-series (tU*) histograms for the
        monomial variables; separate real/imag sets when self.imagine==1.
        l = [t_start, t_end, n_steps] sizes the time-series axes."""
        if self.imagine==1:
            self.hUr = [TH1F("u_"+str(k)+"_r","u_"+str(k)+"_r",2500,self.doms[k][0],self.doms[k][1]) for k in range(self.mon)]
            self.tUr = [TH1F("tu_"+str(k)+"_r","tu_"+str(k)+"_r",int(l[2]),l[0],l[1]) for k in range(self.mon)]
            self.hUi = [TH1F("u_"+str(k)+"_i","u_"+str(k)+"_i",2500,self.doms[k][0],self.doms[k][1]) for k in range(self.mon)]
            self.tUi = [TH1F("tu_"+str(k)+"_i","tu_"+str(k)+"_i",int(l[2]),l[0],l[1]) for k in range(self.mon)]
        else:
            self.hU = [TH1F("u_"+str(k),"u_{"+str(k)+"}",2500,self.doms[k][0],self.doms[k][1]) for k in range(self.mon)]
            #print int(,l[0],l[1]
            self.tU = [TH1F("tu_"+str(k),"tu_{"+str(k)+"}",int(l[2]),l[0],l[1]) for k in range(self.mon)]
            if self.dof==3:
                # NOTE(review): named "histr" although it is the real-valued hist.
                self.hist = TH3F("histr","histr",100,self.mins[0],self.maxs[0],100,self.mins[1],self.maxs[1],100,self.mins[2],self.maxs[2])
    def space(self,l=[0,15,50000]):
        """Allocate state-space histograms: per-coordinate occupation (hx*)
        and time-series (tx*), plus a 3-D (or 2-D) joint histogram; separate
        real/imag versions when self.imagine==1."""
        if self.imagine==1:
            self.hxr = [TH1F("x_"+str(k)+"_r","Re(x_{"+str(k)+"})",2500,self.mins[k],self.maxs[k]) for k in range(self.dof)]
            self.txr = [TH1F("tx_"+str(k)+"_r","Re(tx_{"+str(k)+"})",int(l[2]),l[0],l[1]) for k in range(self.dof)]
            self.hxi = [TH1F("x_"+str(k)+"_i","Im(x_{"+str(k)+"})",2500,self.mins[k],self.maxs[k]) for k in range(self.dof)]
            self.txi = [TH1F("tx_"+str(k)+"_i","Im(tx_{"+str(k)+"})",int(l[2]),l[0],l[1]) for k in range(self.dof)]
            if self.dof==3:
                self.histr = TH3F("histr","histr",100,self.mins[0],self.maxs[0],100,self.mins[1],self.maxs[1],100,self.mins[2],self.maxs[2])
                self.histi = TH3F("histi","histi",100,-self.maxs[0],self.maxs[0],100,-self.maxs[1],self.maxs[1],100,-self.maxs[2],self.maxs[2])
            if self.dof==2:
                self.histr = TH2F("histr","histr",100,self.mins[0],self.maxs[0],100,self.mins[1],self.maxs[1])
                self.histi = TH2F("histi","histi",100,-self.maxs[0],self.maxs[0],100,-self.maxs[1],self.maxs[1])
        else:
            self.hx = [TH1F("x_"+str(k),"x_{"+str(k)+"}",100,self.mins[k],self.maxs[k]) for k in range(self.dof)]
            self.tx = [TH1F("tx_"+str(k),"tx_{"+str(k)+"}",int(l[2]),l[0],l[1]) for k in range(self.dof)]
            if self.dof==3:
                self.hist = TH3F("hist","hist",2500,self.mins[0],self.maxs[0],2500,self.mins[1],self.maxs[1],2500,self.mins[2],self.maxs[2])
##Time iteration, defaults to runge kutte
def Tstep(self, x, dt, fast=0, q = 1, t = 0):
#print time stepping
if fast == 1:
a = self.Flow(x)
#print a, 'a'
return [x[i]+(q*t-t+dt)*a[i] for i in range(self.dof)]
elif q==0 and fast == 0:
x1 = x
a1 = self.Flow(x1)
x2 = [x[i] + 0.5*a1[i]*dt for i in range(self.dof)]
a2 = self.Flow(x2)
x3 = [x[i] + 0.5*a2[i]*dt for i in range(self.dof)]
a3 = self.Flow(x3)
x4 = [x[i] + 0.5*a3[i]*dt for i in range(self.dof)]
a4 = self.Flow(x4)
return [x[i] + (dt/6.0)*(a1[i]+2*a2[i]+2*a3[i]+a4[i]) for i in range(self.dof)]
## Returns arrays of trajectories and perhpas monomials for initial condition x
    def Traj(self,x,Fill=None,U=1,Plot=0,fast=1,store=0,q=0,gen=False,noise=0):
        """Integrate one trajectory from initial condition x.

        Accumulates the path into the state/monomial histograms and returns
        (times, states[, monomials]).  With self.imagine==1 the state is
        complex and real/imag parts are tracked separately.  gen=True steps
        via an external qIntegrate generator; noise!=0 adds a NoiseStep
        after each deterministic step.
        NOTE: Python 2 (print statements, generator .next(), exec strings
        used to Fill the n-dimensional histograms).
        """
        print "Im trying 1"
        # Per-coordinate time series buffers (real/imag split when complex).
        if self.imagine==1:
            trajr=[array('d') for i in range(self.dof)]
            traji=[array('d') for i in range(self.dof)]
        else:
            traj=[array('d') for i in range(self.dof)]
        xraj=array('d')
        xraj.append(self.ti)
        # Pre-build the argument string for the exec'd hist.Fill calls.
        if self.imagine==0:
            ost = ""
            for j in range(self.dof):
                ost = ost+"x["+str(j)+"],"
            ost = ost[:-1]
        else:
            ostr, osti = "",""
            for j in range(self.dof):
                ostr,osti = ostr+"x["+str(j)+"].real,",osti+"x["+str(j)+"].imag,"
            ostr, osti = ostr[:-1],osti[:-1]
        print "Im trying 2"
        if U==1:
            tmp=self.Mons(x)
            if self.imagine==1:
                utrajr=[array('d') for i in range(self.mon)]
                utraji=[array('d') for i in range(self.mon)]
                for i in range(self.mon):
                    utrajr[i].append(tmp[i].real)
                    utraji[i].append(tmp[i].imag)
            else:
                utraj=[array('d') for i in range(self.mon)]
                for i in range(self.mon):
                    utraj[i].append(tmp[i])
        # Record the initial state before stepping.
        for i in range(self.dof):
            if self.imagine==1:
                trajr[i].append(x[i].real)
                traji[i].append(x[i].imag)
            else:
                traj[i].append(x[i])
        if gen==True:
            z=qIntegrate(x,self.dt,self.velocity)
        for j in range(self.tsteps):
            if gen == False:
                if noise==0:
                    x = self.Tstep(x,self.dt,fast,q=q)
                else:
                    xtmp = self.Tstep(x,self.dt,fast,q=q)
                    x = self.NoiseStep(xtmp,self.dt)
            else:
                x = z.next()
            xraj.append((j+1)*self.dt+self.ti)
            for i in range(self.dof):
                if self.imagine==1:
                    trajr[i].append(x[i].real)
                    traji[i].append(x[i].imag)
                else:
                    traj[i].append(x[i])
            if U==1:
                tmp=self.Mons(x)
                for i in range(self.mon):
                    if self.imagine==1:
                        utrajr[i].append(tmp[i].real)
                        utraji[i].append(tmp[i].imag)
                        self.hUr[i].Fill(tmp[i].real)
                        self.hUi[i].Fill(tmp[i].imag)
                        #self.tUr[i].Fill((j+1)*self.dt+self.ti,tmp[i].real)
                        self.tUr[i].AddBinContent(j+1,tmp[i].real)
                        #self.tUi[i].Fill((j+1)*self.dt+self.ti,tmp[i].imag)
                        self.tUi[i].AddBinContent(j+1,tmp[i].imag)
                    else:
                        utraj[i].append(tmp[i])
                        self.hU[i].Fill(tmp[i])
                        self.tU[i].Fill((j+1)*self.dt+self.ti,tmp[i])
            for i in range(self.dof):
                if self.imagine==0:
                    self.hx[i].Fill(x[i])
                    #self.tx[i].Fill((j+1)*self.dt+self.ti,x[i])
                    self.tx[i].AddBinContent(j+1,x[i])
                    exec("self.hist.Fill("+ost+")")
                else:
                    self.hxr[i].Fill(x[i].real)
                    #self.txr[i].Fill((j+1)*self.dt+self.ti,x[i].real)
                    self.txr[i].AddBinContent(j+1,x[i].real)
                    #self.txr[i].Fill(x[i].real)
                    self.hxi[i].Fill(x[i].imag)
                    #self.txi[i].Fill((j+1)*self.dt+self.ti,x[i].imag)
                    self.txi[i].AddBinContent(j+1,x[i].imag)
                    #self.txi[i].Fill(x[i].imag)
                    exec("self.histr.Fill("+ostr+")")
                    exec("self.histi.Fill("+osti+")")
            # for i in range(self.npSect):
            #     print self.npSect, 'npSect'
            #     distance = self.dist(self.surf[i],x)
            #     if distance < .1:
            #         fill = self.fill(i,x)
            #         exec("self.pSect[i].Fill("+str(fill[0])+','+str(fill[1])+")")
        self.last=x
        if Plot==1:
            if U==1:
                if self.imagine==1:
                    c1,c2,c4,c5=self.Prettify(xraj,[trajr,traji],uu=[utrajr,utraji])
                    c1.Draw()
                    c2.Draw()
                    c5.Draw()
                    c4.Draw()
                else:
                    c1,c2,c4=self.Prettify(xraj,traj,uu=utraj)
            else:
                c1,c2,c4=self.Prettify(xraj,traj)
            c1.Draw()
            #c2.Draw()
            #c5.Draw()
            c4.Draw()
            # Keep the canvases alive until the user presses return.
            zwom=input("sheat")
        if U==1:
            try:
                return xraj, traj, utraj
            except UnboundLocalError:
                # Complex mode: traj/utraj were never bound.
                return xraj,[trajr,traji],[utrajr,utraji]
        else:
            return xraj, traj
## run multiple trajectories
    def nTraj(self,n,fast=1,U=0,gen=False,imagine=0,Plot=1,RunList=[],noise=0):
        """Run n trajectories from random initial conditions (plus any explicit
        starting points in RunList), accumulating into the shared histograms,
        then optionally draw summary canvases.
        NOTE(review): RunList is a mutable default argument; Python 2 code.
        """
        try:
            self.hist
        except AttributeError:
            # Histograms not allocated yet — build them first.
            self.hReset()
        t=0
        before=time()
        if self.MyLog==1:
            MySys("echo "+strftime("%H_%M_%S")+" :Begin Trajectory >> "+self.output+".log")
        #x=self.rzero()
        #xx,yy,uu = self.Traj(x,Fill=1,U=1,fast=fast)
        #self.hU = [TH1F("U_"+str(i)) for i in range(self.mon)]
        for i in range(n):
            if i%10==0:
                # Progress report every 10 trajectories.
                t+=(time()-before)/60.
                before = time()
                print t, " minutes for ", i, "trajectories"
            if imagine==0:
                x=self.rzero()
                self.Traj(x,Fill=1,fast=fast,U=U,gen=gen,noise=noise)
            if imagine==1:
                x=self.rzero()
                tt,xx,uu = self.Traj(x,Fill=1,fast=fast,U=U,noise=noise)
            #if imagine==2:
            #    x=self.izero(1./2048.)
            #    tt,xx,uu = self.Traj(x,Fill=1,fast=fast,U=U)
            #    if i%10==0:
            #        self.nLines.append(TPolyLine3D(len(tt),xx[0][0],xx[0][1],xx[0][2]))
            #        self.nLines[-1].SetLineColor(4)
            #        self.nLines.append(TPolyLine3D(len(tt),xx[1][0],xx[1][1],xx[1][2]))
            #        self.nLines[-1].SetLineColor(2)
        for x in RunList:
            print x
            tt,xx,uu = self.Traj(x,Fill=1,fast=fast,U=U)
        if self.MyLog==1:
            MySys("echo "+strftime("%H_%M_%S")+" :End Trajectory >> "+self.output+".log")
        if U==1:
            if imagine==1:
                # Real parts drawn in blue (4), imaginary parts in red (2).
                c1 = TCanvas()
                c1.Divide(3,2)
                c3 = TCanvas()
                c3.Divide(3,2)
                c2 = TCanvas()
                c2.Divide(2,2)
                c4 = TCanvas()
                c4.Divide(2,2)
                for i in range(self.mon):
                    c1.cd(i+1)
                    self.hUr[i].SetLineColor(4)
                    self.hUr[i].Draw()
                    self.hUi[i].SetLineColor(2)
                    self.hUi[i].Draw("same")
                    c3.cd(i+1)
                    self.tUr[i].SetLineColor(4)
                    self.tUr[i].Draw()
                    self.tUi[i].SetLineColor(2)
                    self.tUi[i].Draw("same")
                for i in range(self.dof):
                    c2.cd(i+1)
                    self.hxr[i].SetLineColor(4)
                    self.hxr[i].Draw()
                    self.hxi[i].SetLineColor(2)
                    self.hxi[i].Draw("same")
                    c4.cd(i+1)
                    self.txr[i].SetLineColor(4)
                    self.txr[i].Draw()
                    self.txi[i].SetLineColor(2)
                    self.txi[i].Draw("same")
                for a,b in enumerate(self.nLines):
                    if a%2==0:
                        b.SetLineColor(4)
                    else:
                        b.SetLineColor(2)
                    # if a==0:
                    #     b.Draw("APES")
                    # else:
                    #     b.Draw("PS")
        if self.imagine==1:
            c5 = TCanvas()
            c5.Divide(2)
            c5.cd(1)
            self.histr.Draw()
            c5.cd(2)
            self.histi.Draw()
            #c5.Print(self.name+"zoom_"+self.current.next()+".ps")
        if Plot==1:
            # Keep canvases alive until the user presses return.
            zwom=input("sheat")
        return
    def FM_SYNTH(self,graph=None):
        """Placeholder for FM-synthesis sonification of a trajectory; not implemented."""
        pass
    def MeshTraj(self,tsteps,q,h,t=1,boundary=1,sz=12,smooth=0,inspect=1,shift=0):
        """Advect every vertex of self.mesh along the flow for tsteps steps of
        size h, filling the coordinate (hx) and monomial (hU) histograms with
        the values at the last-visited vertex of each sweep.

        t is updated geometrically as t = q*t + h each step.  smooth>0 applies
        DOLFIN mesh smoothing after each sweep.  NOTE(review): boundary, sz and
        shift are accepted but unused here.
        """
        for j in range(tsteps):
            #print j,t
            mesh = self.mesh
            for x in mesh.coordinates():
                #print x
                # Move the vertex in place by one integration step of the flow.
                xc = self.Tstep(x,h,fast=1)
                #print xc
                for k in range(self.dof):
                    x[k] = xc[k]
            # NOTE(review): x here is whatever vertex the loop ended on, so
            # only one sample per sweep is histogrammed -- confirm intended.
            u = self.Mons(x)
            for i in range(self.mon):
                if i<self.dof:
                    self.hx[i].Fill(x[i])
                self.hU[i].Fill(u[i])
            t=q*t+h
            if smooth>0:
                self.mesh.smooth(smooth)
            if inspect==1 and j%200==0:
                pass
            # NOTE(review): j%1000 can never equal 1001, so the interactive
            # plot branch below is dead code; every sweep takes the else path.
            if j%1000==1001:
                p=plot(mesh,interactive=True)
            else:
                p=plot(mesh,axes=True)
## Poincare sections to inspect
def pSect(self,hSect,cSect,fSect):
self.pSect=len(hSect)
self.cSect=cSect ## conditions to be satisfied for inclusion in histogram
self.hSect=[TH2F("h"+str(i),"h"+str(i),self.nx,hSect[i][0],hSect[i][1]) for i in range(pSect)]
self.fSect = fSect
## Reset histogram
    def hReset(self):
        """(Re)create the phase-space occupation histogram(s) over the
        configured [mins, maxs] box with self.nx bins per axis.
        """
        if self.dof==2:
            self.hist = TH2F(self.name,self.name,self.nx,self.mins[0],self.maxs[0],self.nx,self.mins[1],self.maxs[1])
        if self.dof==3:
            # NOTE(review): for dof==3 only the complex pair histr/histi is
            # created and self.hist stays unset, so a real-valued dof==3 run
            # that fills self.hist will raise AttributeError -- confirm that
            # dof==3 is always used with imagine==1.
            self.histr = TH3F(self.name+'r',self.name+'r',self.nx,self.mins[0],self.maxs[0],self.nx,self.mins[1],self.maxs[1],self.nx,self.mins[2],self.maxs[2])
            self.histi = TH3F(self.name+'i',self.name+'i',self.nx,-self.maxs[0],self.maxs[0],self.nx,-self.maxs[1],self.maxs[1],self.nx,-self.maxs[2],self.maxs[2])
## reset flow field if parameters changed
    def SetVelocity(self):
        """Rebuild the DOLFIN Expression for the flow field from the formula
        strings self.f and current parameters; call after changing self.par.
        """
        if self.dof==2:
            self.velocity = Expression((self.f[0],self.f[1]),defaults=self.par)
        if self.dof==3:
            self.velocity = Expression((self.f[0],self.f[1],self.f[2]),defaults=self.par)
##how about a report
    def reset(self):
        """Re-derive all state that depends on the parameters: diffusion,
        velocity field, and the initial density u0 projected from the formula
        in self.su0.
        """
        self.SetDiffusion()
        #self.SetDimerization()
        #self.SetPartition()
        self.SetVelocity()
        self.u0 = Function(self.Q)
        #print self.su0.get()
        # self.su0.get() -- presumably a Tk StringVar holding the u0 formula;
        # TODO confirm.
        self.u0 = project(Expression(self.su0.get()),self.Q)
    def rzero(self,force=''):
        """Draw a random initial point, uniform in the [mins, maxs] box.

        force='real' always returns a real point (and, note, skips the
        logbook).  Otherwise the point is real for imagine==0 and gets a
        random complex factor for imagine==1; the draw is appended to
        self.logbook['inits'].
        """
        if force=='real':
            tmp = [(self.maxs[i]-self.mins[i])*random()+self.mins[i] for i in range(self.dof)]
            return tmp
        if self.imagine==0:
            tmp = [(self.maxs[i]-self.mins[i])*random()+self.mins[i] for i in range(self.dof)]
        elif self.imagine==1:
            # NOTE(review): the complex factor multiplies the scaled draw but
            # not the +mins offset, so the imaginary part scales with the box
            # width only -- confirm this is the intended distribution.
            tmp = [(1+random()*1j)*(self.maxs[i]-self.mins[i])*random()+self.mins[i] for i in range(self.dof)]
        #tmp = [(1+random()*1j)*(self.maxs[i]-self.mins[i])*random()+self.mins[i] for i in range(self.dof)] #override
        self.logbook['inits'].append(tmp)
        return tmp
def izero(self,width):
tmp = [(self.maxs[i]-self.mins[i])*random()+self.mins[i] for i in range(self.dof)]
tmpi = [(random()-1/2.)*width*1j +tmp[i] for i in range(3)]
return tmpi
    def SetSpaces(self):
        """Build the rectangular mesh over [mins, maxs] and the scalar/vector
        Lagrange function spaces on it, then run the subclass space() hook.
        """
        self.mesh = Rectangle(self.mins[0],self.mins[1],self.maxs[0],self.maxs[1],self.nx,self.nx)
        self.Q = FunctionSpace(self.mesh, "Lagrange", 2)
        self.V = VectorFunctionSpace(self.mesh, "Lagrange", 2)
        self.space()
    def dzero(self):
        """Generator of initial points obtained by rotating a random draw in
        the (x1, x2) plane in eighth-turn increments; yields until
        self.refresh is set >= 1.

        NOTE(review): indexes tmp_d0[1] and tmp_d0[2], so this assumes
        dof >= 3 -- confirm.
        """
        tmp_d0 = self.rzero()
        dtmp = -1
        #self.logbook.append(str(tmp))
        while self.refresh<1:
            dtmp = dtmp+1
            tmp_d0 = [tmp_d0[0],tmp_d0[1]*sin(2*pi*dtmp/8.),tmp_d0[2]*cos(2*pi*dtmp/8.)]
            yield tmp_d0
    def myFFT(self,x):
        """Run a trajectory from x and display FFTs of the first five
        monomial series and the three coordinate series.

        NOTE(review): the loop bounds 5 and 3 are hard-coded -- presumably
        self.mon >= 5 and dof == 3 are assumed; verify for other systems.
        Blocks on input() at the end to keep the canvases open.
        """
        tt,xx,uu = self.Traj(x,U=1,fast=0)
        n,gn=len(tt),len(uu)
        grsU = [TGraph(n,tt,uu[i]) for i in range(5)]
        # Real / imaginary parts of the monomial spectra.
        ffts = [fft(uu[i]).real for i in range(5)]
        ffts2= [fft(uu[i]).imag for i in range(5)]
        fftUt = [array('d') for i in range(5)]
        fftUt2 = [array('d') for i in range(5)]
        for i in range(5):
            for j in range(n):
                # Power spectrum and raw real part, as ROOT-compatible arrays.
                fftUt[i].append(ffts[i][j]**2+ffts2[i][j]**2)
                fftUt2[i].append(ffts[i][j])
        fftUr = [TGraph(n,tt,fftUt[i]) for i in range(5)]
        fftUi = [TGraph(n,tt,fftUt2[i]) for i in range(5)]
        c1 = TCanvas()
        c1.Divide(3,2)
        for i in range(5):
            c1.cd(i+1)
            fftUi[i].Draw("APE")
        # Same treatment for the coordinate series (real part only).
        fftsx1 = [fft(xx[i]).real for i in range(3)]
        fftsx2= [fft(xx[i]).imag for i in range(3)]
        fftsxA = [array('d') for i in range(3)]
        for i in range(3):
            for j in range(n):
                fftsxA[i].append(fftsx1[i][j])
        fftxr = [TGraph(n,tt,fftsxA[i]) for i in range(3)]
        c2 = TCanvas()
        c2.Divide(3)
        for i in range(3):
            c2.cd(i+1)
            fftxr[i].Draw("APE")
        zwom = input("numbers continue")
    ## Make plots
    def Prettify(self,tt,xx,uu=None,store=0,more=0,hold=1,verbose=0):
        """Build ROOT canvases for a trajectory: per-coordinate time series
        (c2), the phase-space trace (c4), and, when uu is given, the
        monomial series (c1) and for imagine==1 the histogram pair (c5).

        tt -- time array; xx -- coordinate arrays (for imagine==1 a
        [real, imag] pair of such arrays); uu -- optional monomial arrays,
        same convention.  Returns the canvases it created; verbose==1 also
        prints them to .ps files.  NOTE(review): store/more/hold are unused.
        """
        n=len(tt)
        if self.imagine == 0:
            grsX = [TGraph(n,tt,xx[i]) for i in range(self.dof)]
        else:
            grsX = [TGraph(n,tt,xx[0][i]) for i in range(self.dof)]
            grsXi = [TGraph(n,tt,xx[1][i]) for i in range(self.dof)]
            self.current.next()
        c2 = TCanvas()
        if self.dof==2:
            c2.Divide(2)
        else:
            c2.Divide(2,2)
        gColors=[2,4,6,7,8,9,11,12,13,14,15,16,17,18]
        gShapes=[27,26,23,22,28,15,11]
        for i in range(self.dof):
            c2.cd(i+1)
            # Blue (4) for the real coordinate series...
            grsX[i].SetLineColor(4)
            grsX[i].SetLineWidth(1)
            grsX[i].SetMarkerColor(4)
            grsX[i].SetMarkerStyle(7)
            grsX[i].SetTitle("x_{"+str(i)+"}")
            grsX[i].GetYaxis().SetTitle("x_{"+str(i)+"}")
            grsX[i].GetXaxis().SetTitle("time")
            grsX[i].Draw("APES")
            grsX[i].SetName("gX"+str(i)+self.current.next())
            # Graphs are kept on self.root so ROOT does not garbage-collect them.
            self.root.append(grsX[i])
            if self.imagine==1:
                # ...red (2) for the imaginary part, overlaid on the same pad.
                grsXi[i].SetLineColor(2)
                grsXi[i].SetLineWidth(1)
                grsXi[i].SetMarkerColor(2)
                grsXi[i].SetMarkerStyle(7)
                #grsXi[i].SetTitle("x_{"+str(i)+"}")
                #grsXi[i].GetYaxis().SetTitle("x_{"+str(i)+"}")
                #grsXi[i].GetXaxis().SetTitle("time")
                grsXi[i].Draw("PS")
                grsX[i].SetName("gX"+str(i)+self.current.next())
                self.root.append(grsXi[i])
        if verbose==1:c2.Print(self.dtime+self.name+"x_"+self.current.next()+".ps")
        # Phase-space trace: 3D poly-line for dof==3, 2D graph otherwise.
        if self.dof==3:
            if self.imagine==0:
                yy = TPolyLine3D(n,xx[0],xx[1],xx[2])
            else:
                yy = TPolyLine3D(n,xx[0][0],xx[0][1],xx[0][2])
                yyi = TPolyLine3D(n,xx[1][0],xx[1][1],xx[1][2])
        elif self.dof==2:
            yy = TGraph(n,xx[0],xx[1])
            print 'yy is ', yy,len(xx[0]),n
        elif self.dof==1:
            yy = TGraph(n,tt,xx[0])
        c4 = TCanvas()
        yy.SetLineColor(4)
        yy.Draw("APES")
        #yy.SetName("t"+self.current.next())
        if self.imagine==1:
            yyi.SetLineColor(2)
            yyi.Draw("PS")
            self.root.append(yyi)
            #yy.SetName("ti"+self.current.next())
        #zwom = input("sheat")
        if verbose==1:c4.Print(self.dtime+self.name+"trace_"+self.current.next()+".ps")
        self.media['image'].append(self.dtime+self.name+"trace_"+self.current.next()+".ps")
        self.root.append(yy)
        if uu!=None:
            if self.imagine==0:
                gn = len(uu)
                grsU = [TGraph(n,tt,uu[i]) for i in range(gn)]
            else:
                gn = len(uu[0])
                grsU = [TGraph(n,tt,uu[0][i]) for i in range(gn)]
                grsUi = [TGraph(n,tt,uu[1][i]) for i in range(gn)]
            c1 = TCanvas()
            c1.Divide(self.mon/2+1,2)
            for j,i in enumerate(grsU):
                i.SetLineColor(4)
                i.SetLineWidth(2)
                i.SetMarkerColor(4)
                i.SetName("gU"+str(j)+self.current.next())
            if self.imagine==1:
                # NOTE(review): j is the stale index from the loop above, so
                # every imaginary graph gets the same name suffix -- confirm.
                for i in grsUi:
                    i.SetLineColor(2)
                    i.SetLineWidth(2)
                    i.SetMarkerColor(2)
                    i.SetName("gUi"+str(j)+self.current.next())
            for i in range(gn):
                c1.cd(i+1)
                grsU[i].SetTitle(self.u[i])
                grsU[i].GetYaxis().SetTitle(self.u[i])
                grsU[i].GetXaxis().SetTitle("time")
                self.root.append(grsU[i])
                grsU[i].Draw("APES")
                if self.imagine==1:
                    grsUi[i].Draw("PS")
                    self.root.append(grsUi[i])
            if verbose==1:c1.Print(self.name+"u_"+self.current.next()+".ps")
            if self.imagine==1:
                c5 = TCanvas()
                c5.Divide(2)
                c5.cd(1)
                self.histr.Draw()
                c5.cd(2)
                self.histi.Draw()
                if verbose==1:c5.Print(self.name+"zoom_"+self.current.next()+".ps")
                return c1, c2, c4, c5
            else:
                return c1,c2,c4
        return c2,c4
        # NOTE(review): the two lines below follow an unconditional return
        # and are unreachable dead code.
        if verbose==1:
            MySys("echo "+strftime("%H_%M_%S")+" :End Trajectory >> "+self.output+".log")
    def sZ(self):
        """Build the partition-function expression Z = g0*u0 + g1*u1 + ...
        from the monomial strings self.u, store it on self.partition, and
        return it as a DOLFIN Expression with the current parameter defaults.

        NOTE(review): when len(self.u) > len(self.par), Z stays the empty
        string and Expression("") is returned -- confirm intended.
        """
        Z=""
        if len(self.u)<=len(self.par):
            for i,j in enumerate(self.u):
                Z=Z+"g"+str(i)+"*"+j+"+"
            # Drop the trailing '+'.
            Z=Z[:-1]
        print Z
        self.partition=Z
        return Expression(Z,defaults=self.par)
# def sZx(self):
#b=Dynamics()
#b.Init(imagine=1)
#xi=b.rzero(force='real')
#print xi
#t,x,u=b.Traj(xi,U=1,Plot=1)
#b.nTraj(0,hold=1,U=1,imagine=1,RunList=[[xi[i]+1j*1./4096.*(random()-1/2) for i #in range(3)] for k in range(1)])
|
nborggren/Aleph | AlephSB/Diffusion.py | <filename>AlephSB/Diffusion.py
## @package Diffusion
# Class for solution of diffusion equations over Dynamical Systems in Dolfin.
#????????????????????????????????????????????????????????????????????????????
#
#
# We seek to solve
# \f$\partial_t u = \partial_i (D_{ij}\partial_j u - F_i u)\f$.
# The functional form is found by multiplying a test function v and integrating over the mesh.
# \f$\int_\Omega v \partial_t u dx =\int_\Omega v \partial_i (D_{ij}\partial_j u - F_i u) dx \f$
# Continuing with integration by parts yields \f$ \int_{\partial\Omega} v (D_{ij}\partial_j u - F_i u) n_i ds - \int_\Omega \partial_i v (D_{ij}\partial_j u - F_i u) dx\f$.
# Here \f$D_{ij}\f$ denotes the spatial dependance of the Diffusion, \f$F_{i}\f$, is the drift force, \f$n_i\f$, a normal vector to the mesh. The mesh is denoted \f$\Omega\f$ with boundary \f$\partial\Omega \f$.
#
# We approximate to find an expression for \f$u_{t+\delta t} \f$ given the solution at \f$u_t\f$.
#
# \f$\int_\Omega v (u_{t+\delta t} - u_t) dx \approx \delta t [\int_{\partial\Omega} v (D_{ij}\partial_j u - F_i u) n_i ds - \int_\Omega \partial_i v (D_{ij}\partial_j u - F_i u) dx ]\f$
# We take the average of the expression [...] over time \f$\delta t\f$ by asserting \f$u \approx \frac{u_{t+\delta t} + u_t}{2}\f$. Continuing gives
#
# \f$\frac{\delta t}{2} [\int_{\partial\Omega} v (D_{ij}\partial_j u_{t+\delta t} - F_i u_{t+\delta t}) n_i ds - \int_\Omega \partial_i v (D_{ij}\partial_j u_{t+\delta t} - F_i u_{t+\delta t}) dx + \f$ \f$ \int_{\partial\Omega} v (D_{ij}\partial_j u_t - F_i u_t) n_i ds - \int_\Omega \partial_i v (D_{ij}\partial_j u_t - F_i u_t) dx ]\f$
#
# Collecting terms with \f$u_{t+\delta t} \f$ on one side of equality and \f$u_t\f$ on the other gives the variational forms a and L that we will use.
#
# \f$ a = \int_\Omega (v u_{t+\delta t}+\frac{\delta t}{2}\partial_i v [D_{ij}\partial_j u_{t+\delta t} - F_i u_{t+\delta t}]) dx \f$ \f$- \frac{\delta t}{2} \int_{\partial\Omega} v (D_{ij}\partial_j u_{t+\delta t} - F_i u_{t+\delta t}) n_i ds \f$
#
# \f$ L = \int_\Omega (v u_{t}-\frac{\delta t}{2}\partial_i v [D_{ij}\partial_j u_{t} - F_i u_{t}]) dx + \frac{\delta t}{2} \int_{\partial\Omega} v (D_{ij}\partial_j u_{t} - F_i u_{t}) n_i ds \f$.
#
# Compare with the lines of python code:
#
# \f$a = v*u*dx + 0.5*k*(inner(grad(v),velocity*u)*dx + inner(grad(v), D*grad(u))*dx -v*inner(D*grad(u)-velocity*u,n)*ds)\f$
#
# \f$L = v*u0*dx - 0.5*k*(inner(grad(v),velocity*u0)*dx + inner(grad(v), D*grad(u0))*dx-v*inner(D*grad(u0)-velocity*u0,n)*ds)\f$
#
#@author <NAME>
#
#????????????????????????????????????????????????????????????????????????????
from Dynamics import *
from Noise import *
from numpy import abs as ABS
from numpy import log as LN
from random import choice
from Dissonance import *
from Analysis import cname
## The diffusion class for study of \f$\partial_t u = \partial_i (D_{ij}\partial_j u - F_i u)\f$.
class DirichletBoundary(SubDomain):
    # Marks the entire mesh boundary for Dirichlet conditions.
    def inside(self, x, on_boundary):
        # DOLFIN callback: x is the point, on_boundary is True on boundary facets.
        return on_boundary
def boundary(x):
    """Boundary marker: True where the first or second coordinate lies below
    DOLFIN_EPS - 1 (i.e. at the lower-left edges of the shifted domain)."""
    threshold = DOLFIN_EPS - 1
    return x[0] < threshold or x[1] < threshold
def bc_0(x):
    """True inside the disk of radius 10 around the first fixed point
    (0.14175347706, 462.572738749)."""
    center = (0.14175347706, 462.572738749)
    return pow(pow(x[0] - center[0], 2) + pow(x[1] - center[1], 2), 0.5) < 10
def bc_1(x):
    """True inside the disk of radius 10 around the second fixed point
    (644.051823392, 0.00611657521734)."""
    center = (644.051823392, 0.00611657521734)
    return pow(pow(x[0] - center[0], 2) + pow(x[1] - center[1], 2), 0.5) < 10
def bc_2(x):
    """True inside the disk of radius 10 around the third fixed point
    (29.9798485002, 178.229919659)."""
    center = (29.9798485002, 178.229919659)
    return pow(pow(x[0] - center[0], 2) + pow(x[1] - center[1], 2), 0.5) < 10
#class DirichletBoundary(SubDomain):
# def inside(self, x, on_boundary):
# return on_boundary
class Diffusion(Dynamics, Noise, Dissonance):
    """Advection-diffusion / Fokker-Planck solver over a Dynamics flow field,
    discretized with legacy-DOLFIN finite elements.  See the module header for
    the derivation of the variational forms a and L used in Run().
    """
    ## Initialize Defaults
    def Init(self):
        """Set Dynamics/Noise defaults and the norm-tracking arrays.

        NOTE(review): Noise defines no Init of its own, so Noise.Init
        resolves to the inherited Dynamics.Init and the defaults are applied
        twice -- confirm harmless.
        """
        Dynamics.Init(self)
        Noise.Init(self)
        self.normT = array('d')  # times at which the norm was recorded
        self.norm = array('d')   # corresponding normalization constants
        self.sMesh=[]            # sub-meshes created by Zoom()
    ##sets the necessary variables self.mesh and self.Q from a file input or generates a 2-d mesh from the set ranges.
    def Mesh(self,mesh_file,rewrite=False,dim = 1,vdim=2):
        """Load mesh_file, or on failure generate a unit square scaled to
        [mins, maxs] and save it under that name.

        NOTE(review): the result is only written to a local variable and to
        disk -- self.mesh/self.Q are NOT set here (see the commented lines
        below); dim/vdim are unused.
        """
        if rewrite==True:
            MySys("rm "+mesh_file)
        if self.Log==1:
            MySys("echo mesh used is named: "+mesh_file+" >> "+self.output+".log")
            MySys("echo mesh started at: " +strftime('%y.%m.%d -- %H.%M.%S ')+" >> "+self.output+".log")
        try:
            mesh = Mesh(mesh_file)
        except RuntimeError:
            # Fall back to generating the mesh: scale a unit square onto the box.
            mesh = UnitSquare(self.nx,self.nx)
            for k in mesh.coordinates():
                k[0] = (self.maxs[0]-self.mins[0])*k[0]+self.mins[0]
                k[1] = (self.maxs[1]-self.mins[1])*k[1]+self.mins[1]
            mesh_out = File(mesh_file)
            mesh_out << mesh
            print "generating the mesh then"
        if self.Log==1:
            MySys("echo mesh loaded at: " +strftime('%y.%m.%d -- %H.%M.%S ')+" >> "+self.output+".log")
        #qRectangle(100,100,.97,.97,mins=[0,0],maxs=[1000,1000])
        #self.mesh = mesh
        #self.Q = FunctionSpace(mesh, "Lagrange", 2)
        #self.V = VectorFunctionSpace(mesh, "Lagrange", 2)
        return
    ##solves the steady state equation for given parameters and writes to output and screen. (no it does not)
    def Steady(self,output,t0='hist'):
        """Attempt a steady-state solve via one Newton-style step from the
        initial density and write the result to output files.

        NOTE(review): passes t0 into GetInit's `smooth` slot (signature
        mismatch -- GetInit reads self.t0 instead), and D is overwritten
        with 0.005 below.
        """
        if self.Log==1:
            MySys("echo Simulation begun at: " +strftime('%y.%m.%d -- %H.%M.%S ')+" >> "+self.output+".log")
        mesh = self.mesh
        velocity = self.velocity
        D,velocity = self.dpar['D'], self.velocity
        g = Constant(0.0)
        bc = DirichletBC(self.Q, g, boundary)
        # Create FunctionSpaces
        Q = FunctionSpace(mesh, "CG", 1)
        V = VectorFunctionSpace(mesh, "CG", 2)
        n = FacetNormal(mesh)
        # Initialise source function and previous solution function
        u0 = self.GetInit(Q,t0)
        u0 = self.Normalize(u0)
        du,v,u1,u,zero = TrialFunction(Q), TestFunction(Q),Function(Q),Function(Q),Function(Q)
        lam1,lam2 = Constant(5000000.),Constant(50000000.0)
        nn = len(u1.vector()[:])
        # Seed u1 with the initial density.
        for i in range(nn):
            u1.vector()[i]=u0.vector()[i]
        D = 0.005
        # Residual form of the stationary equation; linearize and solve once.
        L = u*v*dx+inner(grad(v), D*grad(u)-velocity*u)*dx-v*inner(D*grad(u)-velocity*u,n)*ds- v*u1*dx
        a = derivative(L, u, du)
        A = assemble(a)
        b = assemble(L)
        bc.apply(A,b)
        solve(A, u.vector(), b)
        #uu = self.Zoom(u,[0,3],[1,5.2])
        # a = u*v*dx+inner(grad(v), D*grad(u)-velocity*u)*dx-v*inner(D*grad(u)-velocity*u,n)*ds
        #L = lam1*u*inner(-velocity,n)*ds+lam2*inner(D*grad(u)-velocity*u,n)*ds
        #L = lam1*u*dx
        #L=u*u*dx
        #L=lam2*inner(D*grad(v)-velocity*v,n)*ds+lam1*v*dx
        # L= v*u0*dx
        # A = assemble(a)
        # b = assemble(L)
        # solve(A, u1.vector(), b)
        #pp = plot(uu)
        p=plot(u)
        interactive()
        p.write_png(output+"_Steady.png")
        out_file = File(output+"s.pvd")
        out_file << u1
        self.SaveFunction(u1,output+"_steady")
        return
    ##runs a trajectory from a given initial probability distribution. Defaults to initial condition from histogram object, e.g. filled with langevin trajectories.
    def Run(self,output,t0='expr',dim=1,vdim=2,viddy=True,ROOT=False):
        """Time-step the diffusion equation with a Crank-Nicolson scheme
        (forms a and L from the module header, boundary terms dropped),
        plotting and/or dumping .xml snapshots along the way.

        Time runs from self.dpar['t'] to ['T'] in steps of ['k'].  viddy
        enables plotting; snapshots are written every self.xml steps.
        """
        print self.nx
        self.mesh = Rectangle(self.mins[0],self.mins[1],self.maxs[0],self.maxs[1],self.nx,self.nx) #why?
        meshtmp = Rectangle(self.mins[0],self.mins[1],self.maxs[0],self.maxs[1],int(self.nx/3),int(self.nx/3)) #why?
        mesh=self.mesh
        ggg = File(output+"_mesh.xml")
        ggg<<mesh
        if self.MyLog==1:
            print self.output
            print strftime('%y.%m.%d -- %H.%M.%S ')
            MySys("echo Simulation begun at: " +strftime('%y.%m.%d -- %H.%M.%S ')+" >> "+self.output+".log")
            MySys("echo diffusion parameters, t0,tf,dt are "+str(self.dpar)+" >> "+self.output+".log")
            MySys("echo parameters are "+str(self.par)+" >> "+self.output+".log")
            MySys("echo mesh used is "+output+"_mesh.xml >> "+self.output+".log")
        velocity = self.velocity
        D = self.D
        t,T,k = self.dpar['t'], self.dpar['T'],self.dpar['k']
        print self.dpar
        # Create FunctionSpaces
        Q = self.Q
        V = self.V
        # Set up boundary condition
        g = Constant(0.00001)
        bc = DirichletBC(Q, g, boundary)
        #bc = DirichletBC(Q, g, DirichletBoundary.inside)
        # Initialise source function and previous solution function
        #print Q,self.nu0
        u0=self.u0
        #u0 = self.GetInit(Q,t0)
        #plot(u0,interactive=True)
        #u0 = self.Normalize(u0)
        u,v,u1,u2 = TrialFunction(Q), TestFunction(Q), Function(Q), Function(Q)
        # a = v*u*dx + 0.5*k*(inner(grad(v),-velocity*u)*dx + inner(grad(v), D*grad(u))*dx-v*inner(D*grad(u)-velocity*u,n)*ds)
        # L = v*u0*dx - 0.5*k*(inner(grad(v),-velocity*u0)*dx + inner(grad(v), D*grad(u0))*dx-v*inner(D*grad(u0)-velocity*u0,n)*ds)
        # Crank-Nicolson forms without the surface (ds) terms.
        a = v*u*dx + 0.5*k*(inner(grad(v),-velocity*u)*dx + inner(grad(v), D*grad(u))*dx)
        L = v*u0*dx - 0.5*k*(inner(grad(v),-velocity*u0)*dx + inner(grad(v), D*grad(u0))*dx)
        A = assemble(a)
        j,jj=0,0
        counter=cname(1)
        #cN = TCanvas()
        #edensity=[]
        # Time-stepping
        while t < T:
            # Only L depends on u0, so A is assembled once above and b per step.
            b = assemble(L)
            bc.apply(A,b)
            solve(A, u1.vector(), b)
            #u1.vector()[:] /=
            print 'norm', assemble(u1*dx, mesh=self.mesh)
            #X = Function(Q,"x[0]") #for i in range(2)]
            #ops = [Expression("x[0]"),Expression("x[1]")]
            #for i,j in zip(fops,ops):
            #    i.interpolate(j)
            #observables = [dot(velocity,-D*grad(u1)+velocity*u1)/dot(-D*grad(u1)+velocity*u1,-D*grad(u1)+velocity*u1)*dx,X*dx,fops[1]*u1*dx]
            #print '<F.J>', assemble(dot(velocity,-D*grad(u1)+velocity*u1)*dot(-D*grad(u1)+velocity*u1,-D*grad(u1)+velocity*u1)*dx, mesh=self.mesh)
            #print assemble(o,mesh=self.mesh),"next"
            #u1=self.Normalize(u1,t=t)
            # Copy solution from previous interval
            u0.assign(u1)
            if viddy == True:
                if ROOT==False:
                    p=plot(u1)
                if jj%self.png==0:
                    j+=1
                    #use cname, fix this crap
                    tmp = (6-len(str(j)))*"0"+str(j)
                else:
                    pass
                    #edensity.append(energy_density(mesh,u0))
                    #tmpE=energy_density(meshtmp,u0)
                    #tmpE.Draw("surf2")
                    #u2=self.LR(u1,u2)
                    # NOTE(review): `uu` is undefined in this method, so the
                    # smesh branch below would raise NameError if taken.
                    if self.smesh==1:
                        uu.interpolate(u1)
                        r=plot(uu)
                        r.write_png(self.output+"_Z.png")
                    #p=plot(u1)
                    #q=plot(u2)
                    #p.write_png(output+"_"+tmp+"_P.png")
                    #q.write_png(output+"_"+tmp+"_S.png")
                if jj%self.xml==0:
                    tmp=counter.next()
                    #self.SaveFunction(u1,output+"_"+tmp)
                    ufile = File("./"+output+"_"+tmp+".xml")
                    ufile << u1
            t += k
            jj += 1
        #tmpE=Func2Graph(meshtmp,u0)
        #tmpE.Draw("surf2")
        zwom = input("sheat")
        #nn = len(self.normT)
        #self.renorm = TGraph(nn,self.normT,self.norm)
        if self.MyLog==1:
            MySys("echo everything short of "+output+"_"+counter.next()+".xml written >> "+self.output+".log")
            MySys("tar -cvf "+output+".tar "+output+".log "+output+"_*")
            MySys("rm "+output+"_*.gz "+output+"_*.png ")
            MySys("echo Simulation finished at: " +strftime('%y.%m.%d -- %H.%M.%S ')+" >> "+self.output+".log")
    def FirstPassage(self,bc="bc_0"):
        """Solve the mean-first-passage-time equation v.grad(u)*F - D*grad^2 u = -1
        with u = 0 on the target disk, and print sample values along y ~ 0.

        NOTE(review): the bc argument is ignored (bnd_123_0 is hard-coded),
        and the logging block at the bottom references undefined names X and
        Y, so it raises NameError when MyLog==1.
        """
        #mesh=self.mesh
        mesh = Rectangle(self.mins[0],self.mins[1],self.maxs[0],self.maxs[1],500,500)
        def bnd_123_0(x):
            # Absorbing disk of radius 10 around the target point.
            #return pow(pow(x[0]-0.14175347706,2)+pow(x[1]-462.572738749,2),0.5) < 10
            return pow(pow(x[0]-0.14175347706,2)+pow(x[1]-362.572738749,2),0.5) < 10
        #meshtmp = Rectangle(self.mins[0],self.mins[1],self.maxs[0],self.maxs[1],50,50) #why?
        u0 = Constant(0.0)
        velocity = self.velocity
        D = self.D
        #Q=self.Q
        Q=FunctionSpace(mesh,"Lagrange",2)
        # Define variational problem
        v = TestFunction(Q)
        u = TrialFunction(Q)
        a = v*inner(velocity,grad(u))*dx-inner(grad(v), D*grad(u))*dx
        L = -v*dx
        bc_tmp = DirichletBC(Q, u0, bnd_123_0)
        problem = VariationalProblem(a, L, bc_tmp)
        u = problem.solve()
        #plot(u,interactive=True)
        #tmpFPT=Func2Graph(mesh,u)
        #tmpFPT.Draw("surf2")
        for i in range(100):
            print u(10*i,.001)
        if self.MyLog==1:
            print self.output
            print strftime('%y.%m.%d -- %H.%M.%S ')
            MySys("echo Simulation begun at: " +strftime('%y.%m.%d -- %H.%M.%S ')+" >> "+self.output+".log")
            MySys("echo fpt from x to: "+str(X)+" "+str(Y)+" >> "+self.output+".log")
            MySys("echo parameters are "+str(self.par)+" >> "+self.output+".log")
            MySys("echo mesh used is "+self.output+"_mesh.xml >> "+self.output+".log")
    ##zooms in on a particular cranny or nook and appends submesh to sMesh.
    def Zoom(self,xrng,yrng,nx=250):
        """Create an nx-by-nx sub-mesh covering xrng x yrng and append it to
        self.sMesh for later inspection."""
        # Structure sub domain
        smesh = UnitSquare(nx,nx)
        dx = xrng[1]-xrng[0]
        dy = yrng[1]-yrng[0]
        for i in smesh.coordinates():
            i[0]=i[0]*dx+xrng[0]
            i[1]=i[1]*dy+yrng[0]
        self.sMesh.append(smesh)
        return
    def SaveFunction(self,anyF,name):
        """Write a DOLFIN function's coefficient vector to name.xml and gzip it."""
        zz=File(name+".xml")
        zz<<anyF.vector()[:]
        MySys("gzip "+name+".xml")
    def Normalize(self,u,t=-1):
        """Rescale u so its integral over the mesh is 1; when t > 0 also
        record (t, norm) in self.normT/self.norm for renormalization plots."""
        norm = assemble(u*dx, mesh=self.mesh)
        u.vector()[:] /= norm
        if t>0:
            self.normT.append(t)
            self.norm.append(norm)
        return u
    def GetInit(self,Q,smooth = 5, expr = "exp(-pow(x[0]-200.5,2)-pow(x[1]-200.5,2))", func = None,n=5):
        """Build the initial density on space Q according to self.t0:
        'hist' (from the ROOT histogram), 'traj' (gaussians along a
        trajectory), 'func' (from self.nu0), 'rand' (random gaussians) or
        'expr' (the formula in `expr`).

        NOTE(review): callers pass GetInit(Q, t0), so the mode string lands
        in `smooth` -- harmless only because smooth/func are unused and the
        mode is read from self.t0.
        """
        print "getting init"
        u0 = Function(Q)
        u1 = Function(Q)
        if self.t0 == 'hist':
            u0 = self.hist2u0(u0)
        if self.t0 == 'traj':
            u0 = self.traj2u0(n,u0,u1)
        if self.t0 == 'func':
            try:
                z = Function(Q,self.nu0)
                u0 = self.func2u0(z,u0)
            except TypeError:
                # self.nu0 is already a Function; use it directly.
                self.u0 = self.nu0
                return self.u0
        if self.t0 == 'rand':
            u0,u1 = Function(Q),Function(Q)
            u0 = self.rand2u0(u0,u1,n=self.rand)
        if self.t0 == 'expr':
            utmp = Expression(expr)
            u0 = Function(Q)
            u0.interpolate(utmp)
        return u0
    def func2u0(self,MyFunc,uu):
        """Interpolate an existing function/expression onto uu and return it."""
        uu.interpolate(MyFunc)
        return uu
    def expr2u0(self,Q,expr="exp(-pow(x[0]-2.5,2)-pow(x[1]-2.5,2))"):
        """Project a formula string onto Q and store it as self.u0."""
        self.u0 = project(Expression(expr),Q)
    def hist2u0(self,u0):
        """Fill u0's coefficients from the ROOT histogram self.hist sampled
        at the mesh vertex coordinates.

        NOTE(review): assumes vertex ordering matches dof ordering -- true
        only for degree-1 spaces; confirm for the Lagrange-2 spaces used here.
        """
        nn = len(self.mesh.coordinates()[:])
        for i in range(nn):
            tmp=self.mesh.coordinates()[i]
            mybin = self.hist.FindBin(tmp[0],tmp[1])
            val = self.hist.GetBinContent(mybin)
            u0.vector()[i]=val
        return u0
    def traj2u0(self,n,u0,u1):
        """Sum n narrow gaussians centred at random points of one trajectory
        into u0 (u1 is used as scratch for each interpolation)."""
        tt,xx = self.Traj(self.rzero(),fast=0)
        nn=len(tt)
        for i in range(n):
            tmp = choice(range(nn))
            # Clamp to stay strictly positive.
            a1, b1 = max([xx[0][tmp],0.001]),max([xx[1][tmp],0.001])
            it = dict(a = a1,b=b1)
            gaus = Expression("exp(-(pow(x[0]-a,2)+pow(x[1]-b,2))/0.001)",defaults=it)
            u1.interpolate(gaus)
            for j in range(len(u0.vector()[:])):
                u0.vector()[j]+=u1.vector()[j]
        return u0
    def rand2u0(self,u0,u1,n=7):
        """Sum n narrow gaussians centred at uniformly random points of the
        box into u0 (u1 is scratch)."""
        nn=len(u0.vector())
        for i in range(n):
            xx=self.rzero()
            it = dict(a = xx[0],b=xx[1])
            #gaus = Expression("exp(-(pow(x[0]-a,2)+pow(x[1]-b,2))/0.01)",defaults=it)
            gaus = Expression("exp(-(pow(x[0]-a,2)+pow(x[1]-b,2))/0.001)",defaults=it)
            u1.interpolate(gaus)
            for j in range(len(u0.vector()[:])):
                u0.vector()[j]+=u1.vector()[j]
        return u0
    #def GetCalc(self):
    #    self.MyCalc()
    #    self.mainloop()
    def Clean(self,output,dest):
        """Archive simulation output (data, pngs, xml), render the png series
        to mp4 with ffmpeg, and move everything to dest.  Shell-based; paths
        are interpolated unquoted, so keep output/dest shell-safe."""
        MySys("tar -czvf "+ output + "_dat.tar.gz " + output + ".pvd " + output+"*.vtu")
        MySys("mv "+ output + "_dat.tar.gz "+ output + ".pvd " + output+"*.vtu " + dest)
        #MySys("rm "+ self.name+".pvd " + self.name+"*.vtu")
        MySys("tar -czvf "+ output + "_P_png.tar.gz " + output + "*_P.png")
        MySys("rm "+ output + "_P.mp4")
        MySys("ffmpeg -qscale 5 -r 5 -b 9600 -i "+output+"_%06d_P.png "+output+"_P.mp4")
        MySys("mv "+ output + "_P_png.tar.gz "+ output + "*_P.png " + dest)
        #MySys("rm "+ output + "*.png")
        MySys("tar -czvf "+ output + "_S_png.tar.gz " + output + "*_S.png")
        MySys("rm "+ output + "_S.mp4")
        MySys("ffmpeg -qscale 5 -r 2 -b 9600 -i "+output+"_%06d_S.png "+output+"_S.mp4")
        MySys("mv "+ output + "_S_png.tar.gz "+ output + "*_S.png " + dest)
        MySys("tar -czvf "+ output + "_Z_png.tar.gz " + output + "*_Z.png")
        MySys("rm "+ output + "_Z.mp4")
        MySys("ffmpeg -qscale 5 -r 5 -b 9600 -i "+output+"_%06d_Z.png "+output+"_S.mp4")
        MySys("mv "+ output + "_Z_png.tar.gz "+ output + "*_Z.png " + dest)
        MySys("tar -czvf "+ output + "_xml.tar.gz " + output + "*.xml")
        MySys("mv "+ output + "_xml.tar.gz "+ output + "*.xml " + dest)
        #MySys("rm "+ output + "*.xml")
        if self.Log==1:
            MySys("echo wrote "+output+ " files to dir: "+dest+" >> "+self.output+".log")
        return
|
nborggren/Aleph | Knots/knot_analysis2.py | from array import array
from ROOT import TCanvas, TPad, TTree, TFormula, TF1, TPaveLabel, TH1F, TH2F, TFile
from ROOT import gROOT, gBenchmark, gDirectory,gStyle, TGraph, TLine
from math import sin,cos
from knots import findcenter
gROOT.Reset()
def getCWH(x):
    # NOTE(review): this helper looks unfinished/broken and is never called in
    # this module.  It presumably means to collect the x/y coordinates of each
    # entry of `x` into xx/yy (e.g. to compute a bounding width/height), but
    # as written it appends the whole dict `x` to nested lists and references
    # an undefined name `y`, so any call raises NameError.  xx/yy are also
    # never returned.  TODO: confirm intent before fixing.
    xx = []
    yy = []
    for i in x.values():
        i[0][0].append(x)
        i[1][0].append(x)
        i[0][1].append(y)
        i[1][1].append(y)
def Draw_Knot(knot):
    """Draw a knot diagram on a new ROOT canvas.

    knot['paths'] is a list of segments [[x0, y0], [x1, y1]];
    knot['elevators'] maps a segment index to 0 (under-strand, blue) or
    1 (over-strand, red).  Under-strands are drawn first so over-strands
    appear on top.  Blocks on input() to keep the canvas alive; returns the
    TLine objects so ROOT does not garbage-collect them.
    """
    n =len(knot['paths'])
    lines = [TLine(knot['paths'][i][0][0],knot['paths'][i][0][1],knot['paths'][i][1][0],knot['paths'][i][1][1]) for i in range(n)]
    c1 = TCanvas()
    c1.SetFillColor( 29 )
    # First pass: draw the under-strands (elevator 0) in blue.
    for i in knot['elevators'].keys():
        if knot['elevators'][i] == 0:
            lines[i].SetLineColor(4)
            lines[i].SetLineWidth(4)
            lines[i].Draw()
        else:
            lines[i].SetLineColor(2)
            lines[i].SetLineWidth(4)
    # Second pass: draw the over-strands (elevator 1, red) on top.
    for i in knot['elevators'].keys():
        if knot['elevators'][i] == 1:
            lines[i].Draw()
    zwom =input("type a number to continue")
    return lines
#print findcenter(h)
#c1 = TCanvas()
#knot = Draw_Knot(h,c1)
#for i in knot:
# i.Draw()
#zwom = input("sheat")
|
nborggren/Aleph | Knots/braids/MakeBraid.py | <reponame>nborggren/Aleph
def MakeBraid(name,ids,braids,ibraids):
    """Write a PostScript braid diagram to *name* from template.ps.

    template.ps is copied line by line; the line containing "REPLACE ME" is
    replaced by one "col row kind" drawing command per entry of *ids*
    ("ident"), *braids* ("braid") and *ibraids* ("ibraid"), in that order.
    Each entry is a [col, row] pair.
    """
    # Context managers close both files even on error: the original leaked
    # the template handle and relied on interpreter exit to flush the output.
    with open("template.ps") as template, open(name, 'w') as out:
        for line in template:
            if line.find("REPLACE ME") == -1:
                out.write(line)
            else:
                for j in ids:
                    out.write(str(j[0])+" "+str(j[1])+ " ident \n")
                for j in braids:
                    out.write(str(j[0])+" "+str(j[1])+ " braid \n")
                for j in ibraids:
                    out.write(str(j[0])+" "+str(j[1])+ " ibraid \n")
def MakeBraidM(name,Gens):
    """Write a braid diagram to *name* from template.ps, for arbitrary generators.

    Gens maps a generator name (a PostScript procedure defined in the
    template) to a list of [col, row] positions; each becomes a
    "col row genname" command substituted for the "REPLACE ME" line.
    """
    # Close both files deterministically (the original leaked the handles).
    with open("template.ps") as template, open(name, 'w') as out:
        for line in template:
            if line.find("REPLACE ME") == -1:
                out.write(line)
            else:
                for k in Gens.keys():
                    for j in Gens[k]:
                        out.write(str(j[0])+" "+str(j[1])+ " "+ k + '\n')
def GetIds(n, m, braids, ibraids):
    """Return the identity-cell positions of an n x m braid grid.

    Starts from every cell [col, row] (1-based) and removes the cells
    occupied by a crossing: each braid/ibraid at [col, row] covers that
    cell and the one directly above it, [col, row + 1].

    Fixes vs the original: a removal that refers to a cell already taken
    (duplicated or overlapping crossings) is skipped, matching GetIdsM's
    tolerant behaviour instead of raising ValueError; the per-entry
    quadratic duplicate rescan and the stray Python-2 debug print of every
    removal are gone.
    """
    ids = [[col + 1, row + 1] for col in range(n) for row in range(m)]
    for crossing in list(braids) + list(ibraids):
        for cell in (crossing, [crossing[0], crossing[1] + 1]):
            try:
                ids.remove(cell)
            except ValueError:
                # Cell outside the grid or already removed; report and go on.
                print(cell, 'not removed')
    return ids
def GetIdsM(n, m, Gens):
    """Return the identity cells of an n x m grid not covered by any generator.

    Gens maps a generator name to a list of [col, row] positions (1-based).
    Generators named 'magnot'/'maginot' occupy a single cell; every other
    generator also covers the cell directly above, [col, row + 1].
    Positions that cannot be removed (out of grid, duplicates) are reported
    and skipped, as in the original.

    Fixes vs the original: the quadratic duplicate rescan that reprocessed
    each repeated position is gone, and the Python-2 print statement is now
    a print() call.
    """
    ids = [[col + 1, row + 1] for col in range(n) for row in range(m)]
    single_cell = ('magnot', 'maginot')
    for elem, positions in Gens.items():
        for k in positions:
            try:
                ids.remove(k)
                if elem not in single_cell:
                    ids.remove([k[0], k[1] + 1])
            except ValueError:
                print(k, [k[0], k[1] + 1])
    return ids
def GetVIdsM(n, m, Gens):
    """Vertical variant of GetIdsM: each generator at [col, row] covers that
    cell and the one to its right, [col + 1, row].

    Positions that cannot be removed (out of grid, duplicates) are reported
    and skipped.  Fixes vs the original: the quadratic duplicate rescan is
    gone, the print is a Python-3 call, and the failure message now reports
    the horizontal neighbour actually attempted ([col + 1, row]) instead of
    the vertical one copy-pasted from GetIdsM.
    """
    ids = [[col + 1, row + 1] for col in range(n) for row in range(m)]
    for positions in Gens.values():
        for k in positions:
            try:
                ids.remove(k)
                ids.remove([k[0] + 1, k[1]])
            except ValueError:
                print(k, [k[0] + 1, k[1]])
    return ids
#braids = [[3,4],[2,7]]
#ibraids = [[6,5]]
#dbraids = [[1,9],[3,9],[1,5]]
#bbraids = [[11,3],[10,5]]
#cbraids = [[2,1],[5,1]]
# n = 4
# m = 5
# vmagbraids = []
# vmagibraids = []
# magbraids = []
# magibraids = []
# braids = []
# ibraids = []
# vbraids = []
# vibraids = []
# for i in range(n):
# braids.append([4*i+2,2*i+2])
# ibraids.append([4*i+4,2*i+3])
# braids.append([4*i+4,2*i+1])
# ibraids.append([4*i+6,2*i+2])
# vbraids.append([2*i+2,4*i+2])
# vibraids.append([2*i+3,4*i+4])
# vbraids.append([2*i+1,4*i+4])
# vibraids.append([2*i+2,4*i+6])
# Gens = {'bbraid':vbraids,
# 'cbraid':vibraids
# }
# Gens2 = {'magbraid':braids,
# 'magbbraid':ibraids
# }
# #ids = GetIds(19,19,braids,ibraids)
# print Gens
# ids2 = GetVIdsM(19,19,Gens)
# ids3 = GetIdsM(19,19,Gens2)
# #MakeBraid("test.ps",ids,braids,ibraids)
# Gens.update({'vmagident':ids2})
# for i in Gens2.keys():
# Gens.update({i:Gens2[i]})
# Gens.update({'magident':ids3})
# MakeBraidM("ring4.ps",Gens)
# #print 'ids', ids
# print 'ids2', ids2
# #zwom = input("sheat")
|
nborggren/Aleph | Knots/braids/mGrammar.py | <gh_stars>0
# Interactive front-end for the sBraid production line (Python 2 script:
# print statements and raw_input).  Prompts for the number of magnetic
# lines, prints the generator alphabet, then reads a braid word.
# NOTE(review): mlines and eBraid are collected but nothing further is done
# with them in this file -- presumably consumed by a later stage.
print '?!'*21
print 'Welcome to the sBraid production line'
print '<NAME>'
print 'August 2009'
print '?!'*21
z = raw_input("How many magnetic lines do you want?")
mlines = int(z)
print " "
print "The Rules of the Game:"
print "alphabet:"
print " \'mn\' crosses line n, n+1"
print " \'Mn\' is the inverse of mn, crossing line n+1 over n"
print " \'xn'\' makes a copy of line n and switches line n+1 to n XOR n+1"
print " \'Xn'\' makes a copy of line n+1 and switches line n to n XOR n+1"
print " "
eBraid = raw_input("how should I braid them?")
nborggren/Aleph | AlephSB/Noise.py | <reponame>nborggren/Aleph
#???????????????????????????????????????????????????????????????????????????????????
##@package Noise
#Classes for Dynamical Systems with random force from ROOT histograms.
#@author <NAME>
#
#???????????????????????????????????????????????????????????????????????????????????
from Dynamics import *
from random import choice
from cmath import sqrt as iSqrt
##Noise class studies the dynamics of a system with a random force (default gaussian)
class Noise(Dynamics):
def SetNoise(self,Expr="gaus(0)",Fmin=-75,Fmax=75,wt=1.,imagine=1):
self.wt = wt
Dynamics.Init(self,imagine=imagine)
self.noise = [TF1("noise",Expr,Fmin,Fmax) for i in range(self.dof)]
self.hnoise = [TH1F("hnoise_"+str(i),"hnoise_"+str(i),100,Fmin,Fmax) for i in range(self.dof)]
for i in self.noise:i.SetParameters(300.,0.,wt)
##Finite difference integration step calling a random number from the noise expression.
def NoiseStep(self,x,dt):
wt = self.wt
if self.imagine==1:
#print x
tmp = [self.noise[i].GetRandom()+self.noise[i].GetRandom()*iSqrt(-1) for i in range(self.dof)]
for i in range(self.dof):
self.hnoise[i].Fill(wt*tmp[i].real)
else:
tmp = [self.noise[i].GetRandom() for i in range(self.dof)]
#for i in range(self.dof):
# self.hnoise[i].Fill(wt*tmp[i])
xr = [x[i]+wt*tmp[i] for i in range(self.dof)]
a1 = self.Flow(xr)
return [x[i] + dt*a1[i] for i in range(self.dof)]
##Returns a trajectory of langevin dynamics from input initial conditions.
def Lang(self,x,m=50,Fill=None,U=None,isPos=0,wt=1.,Plot=0,fast=1):
traj=[array('d') for i in range(self.dof)]
xraj=array('d')
xraj.append(self.ti)
if Fill==1:
ost = ""
for j in range(self.dof):
ost = ost+"x["+str(j)+"],"
ost = ost[:-1]
if U==1:
utraj=[array('d') for i in range(self.mon)]
tmp=self.Mons(x)
for i in range(self.mon):
utraj[i].append(tmp[i])
for i in range(self.dof):traj[i].append(x[i])
for j in range(self.tsteps):
x = self.Noisestep(x,self.dt,wt=wt)
xraj.append((j+1)*self.dt+self.ti)
for i in range(self.dof):traj[i].append(x[i])
if U==1:
tmp=self.Mons(x)
for i in range(self.mon):utraj[i].append(tmp[i])
if Fill==1 and j>self.roc*self.tsteps:
exec("self.hist.Fill("+ost+")")
for i in range(len(self.pSect)):
exec(self.cSect[i])
if cond:
exec(self.fSect[i]) #must define fill array
exec("self.hSect[i].Fill("+str(fill[0])+','+str(fill[1])+")")
self.last=x
print len(xraj),len(traj),"here we are"
if Plot==1:
if U==1:
if self.imagine==1:
c1,c2,c4,c5=self.Prettify(xraj,traj,uu=utraj)
else:
c1,c2,c4=self.Prettify(xraj,traj,uu=utraj)
else:
c2,c4=self.Prettify(xraj,traj)
c2.Draw()
c4.Draw()
zwom = input("sheat")
if U ==1:
return xraj, traj, utraj
else:
return xraj, traj
##Does n such trajectories. Good for initial condition guess for diffusion equations.
def nLang(self,n,m=50,wt=1.):
try:
self.hist
except AttributeError:
self.hReset()
system("echo "+str(n)+" trajectories started at: " +strftime('%y.%m.%d -- %H.%M.%S ')+" >> "+self.output+".log")
t=0
before=time()
for i in range(n):
if i%50==0:
t+=(time()-before)/60.
before = time()
print t, " minutes for ", i, "trajectories"
x=self.rzero()
self.Lang(x,m,Fill=1,wt=wt)
system("echo "+str(n)+" trajectories finished at: " +strftime('%y.%m.%d -- %H.%M.%S ')+" >> "+self.output+".log")
return
|
nborggren/Aleph | Knots/knots_safe.py | <gh_stars>0
#???????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????
#???????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????
#
#knots.py
#<NAME> June 2009
#
#designs a simple geometrical realization of a knot topology so as to be embedded in a Cadence layout upon
#manipulation in accordance with Hypres design rules.
#
#???????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????]
#???????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????
# Metal layers available for routing the strands in the Cadence layout.
layers = ['m1', 'm2', 'm3']
#knots formed by products of tangles in conway notation, digits add to # of crossings.
#Here are the knots up to eight crossings that don't require more sophisticated notation
aknots = [3,22,5,32,42,312,2112,7,52,43,322,313,2212,21112,62,512,44,413,4112,332,3212,3113,31112,2312,2222,22112]
# Cache of constructed knots, keyed by their Conway code.
dknots = {}
#the basic tangle unit cell. Centered at x,y it occupies a 1 by 1 area
def tangle(x,y):
    """Build the basic 1x1 tangle (crossing) cell centred at (x, y).

    Returns a dict with 'points' (the four cell corners, keyed 0..3
    counter-clockwise from ne), 'paths' (the two crossing strands),
    'corners' (compass-labelled copies of the corner coordinates), and
    empty 'elevators'/'players' slots for later layer assignment.

    Note: the path endpoints alias the very same list objects stored in
    'points', matching the original construction.
    """
    def at(dx, dy):
        return [x + dx, y + dy]

    points = {0: at(.5, .5),
              1: at(-.5, .5),
              2: at(-.5, -.5),
              3: at(.5, -.5)}
    # corners are fresh lists, deliberately NOT shared with 'points'
    corners = {'ne': at(.5, .5),
               'nw': at(-.5, .5),
               'sw': at(-.5, -.5),
               'se': at(.5, -.5)}
    # strand 0: nw -> se, strand 1: ne -> sw (shares the point lists)
    paths = {0: [points[1], points[3]],
             1: [points[0], points[2]]}
    return {'points': points,
            'paths': paths,
            'elevators': {},
            'players': {},
            'corners': corners}
#planar translations
def translate(knot,x,y):
    """Shift a knot in the plane by (x, y), mutating it in place.

    Returns the same dict object that was passed in.

    NOTE(review): when path endpoints alias the point lists (as built by
    tangle()), those coordinates are shifted twice -- once via 'points'
    and once via 'paths'.  This mirrors the original behaviour; confirm
    whether it is intended.
    """
    for group in ('points', 'corners'):
        for coord in knot[group].values():
            coord[0] = coord[0] + x
            coord[1] = coord[1] + y
    for strand in knot['paths'].values():
        for endpoint in strand:
            endpoint[0] = endpoint[0] + x
            endpoint[1] = endpoint[1] + y
    return knot
#???????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????
#low budget reflections
#multiplication requires first a reflection of the first knot in the line y = -x (ymx)
def ymx(x):
    """Reflect a tangle through the line y = -x: (a, b) -> (-b, -a).

    Under this reflection the nw and se corners stay in place while
    ne and sw swap; the returned corner table is keyed by the NEW
    compass position of each transformed corner.
    """
    def flip(p):
        return [-p[1], -p[0]]

    points = dict((k, flip(v)) for k, v in x['points'].items())
    paths = dict((k, [flip(seg[0]), flip(seg[1])])
                 for k, seg in x['paths'].items())
    corners = {'ne': flip(x['corners']['sw']),
               'nw': flip(x['corners']['nw']),
               'se': flip(x['corners']['se']),
               'sw': flip(x['corners']['ne'])}
    return {'points': points,
            'paths': paths,
            'elevators': {},
            'players': {},
            'corners': corners}
def ypx(x):
    """Reflect a tangle through the line y = x: (a, b) -> (b, a).

    Bug fix: the corner table was a copy of the y = -x version (ymx)
    with the minus signs dropped, which filed the old sw corner under
    'ne' and vice versa.  Under reflection in y = x the ne and sw
    corners are fixed while nw and se swap.
    """
    def swap(p):
        return [p[1], p[0]]

    points = dict((k, swap(v)) for k, v in x['points'].items())
    paths = dict((k, [swap(seg[0]), swap(seg[1])])
                 for k, seg in x['paths'].items())
    corners = {'ne': swap(x['corners']['ne']),
               'nw': swap(x['corners']['se']),
               'se': swap(x['corners']['nw']),
               'sw': swap(x['corners']['sw'])}
    return {'points': points,
            'paths': paths,
            'elevators': {},
            'players': {},
            'corners': corners}
#reflection about y is zero x
def yi0x(x):
    """Reflect a tangle through the x-axis (y = 0): (a, b) -> (a, -b).

    Bug fix: the corner table previously copied the corner coordinates
    without negating y and without swapping north/south, so it did not
    match the transformation applied to points and paths.  Under y -> -y
    the ne/se corners swap, as do nw/sw, with y negated.
    """
    def refl(p):
        return [p[0], -p[1]]

    points = dict((k, refl(v)) for k, v in x['points'].items())
    paths = dict((k, [refl(seg[0]), refl(seg[1])])
                 for k, seg in x['paths'].items())
    corners = {'ne': refl(x['corners']['se']),
               'nw': refl(x['corners']['sw']),
               'se': refl(x['corners']['ne']),
               'sw': refl(x['corners']['nw'])}
    return {'points': points,
            'paths': paths,
            'elevators': {},
            'players': {},
            'corners': corners}
#reflection about y is infinity (omega) x
def yiwx(x):
    """Reflect a tangle through the y-axis (x = infinity map): (a, b) -> (-a, b).

    Bug fix: the corner table filed the negated sw corner under 'ne'
    (and ne under 'sw'), which does not match an x -> -x reflection.
    Under x -> -x the ne/nw corners swap, as do se/sw, with x negated.
    """
    def refl(p):
        return [-p[0], p[1]]

    points = dict((k, refl(v)) for k, v in x['points'].items())
    paths = dict((k, [refl(seg[0]), refl(seg[1])])
                 for k, seg in x['paths'].items())
    corners = {'ne': refl(x['corners']['nw']),
               'nw': refl(x['corners']['ne']),
               'se': refl(x['corners']['sw']),
               'sw': refl(x['corners']['se'])}
    return {'points': points,
            'paths': paths,
            'elevators': {},
            'players': {},
            'corners': corners}
def piby2(x):
    """Rotate a tangle by pi/2 counter-clockwise: (a, b) -> (-b, a).

    Bug fix: the corner table was copied from ymx, so the old sw corner
    landed under 'ne'.  Under a quarter turn CCW the corners cycle
    se -> ne -> nw -> sw -> se.
    """
    def rot(p):
        return [-p[1], p[0]]

    points = dict((k, rot(v)) for k, v in x['points'].items())
    paths = dict((k, [rot(seg[0]), rot(seg[1])])
                 for k, seg in x['paths'].items())
    corners = {'ne': rot(x['corners']['se']),
               'nw': rot(x['corners']['ne']),
               'se': rot(x['corners']['sw']),
               'sw': rot(x['corners']['nw'])}
    return {'points': points,
            'paths': paths,
            'elevators': {},
            'players': {},
            'corners': corners}
#rotation by pi
def pi(x):
    """Rotate a tangle by pi: (a, b) -> (-a, -b).

    Bug fix: the original corner table negated the nw corner into the
    'nw' slot (and se into 'se'); under a half turn every corner moves
    to the diagonally opposite slot, so nw/se swap as well as ne/sw.
    """
    def rot(p):
        return [-p[0], -p[1]]

    points = dict((k, rot(v)) for k, v in x['points'].items())
    paths = dict((k, [rot(seg[0]), rot(seg[1])])
                 for k, seg in x['paths'].items())
    corners = {'ne': rot(x['corners']['sw']),
               'nw': rot(x['corners']['se']),
               'se': rot(x['corners']['nw']),
               'sw': rot(x['corners']['ne'])}
    return {'points': points,
            'paths': paths,
            'elevators': {},
            'players': {},
            'corners': corners}
def pi3by2(x):
    """Rotate a tangle by 3*pi/2 (a quarter turn clockwise): (a, b) -> (b, -a).

    Bug fix: the function body was a verbatim duplicate of piby2 and
    applied the pi/2 map (-b, a) to points, paths and corners.  The
    3*pi/2 rotation is (b, -a), and the corners cycle the opposite way:
    nw -> ne -> se -> sw -> nw.
    """
    def rot(p):
        return [p[1], -p[0]]

    points = dict((k, rot(v)) for k, v in x['points'].items())
    paths = dict((k, [rot(seg[0]), rot(seg[1])])
                 for k, seg in x['paths'].items())
    corners = {'ne': rot(x['corners']['nw']),
               'nw': rot(x['corners']['sw']),
               'se': rot(x['corners']['ne']),
               'sw': rot(x['corners']['se'])}
    return {'points': points,
            'paths': paths,
            'elevators': {},
            'players': {},
            'corners': corners}
# a certain sense of lack of completeness inspires the last:
def identity(x):
    """Identity element of the D4 symmetry group: return the tangle unchanged."""
    return x
#end D4 group
#???????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????
#operations to construct knots
def add(x,y):
    """Conway tangle addition: place y to the right of x and join them.

    The two tangles' points and paths are merged under fresh integer
    keys, x's ne/se corners are wired to y's nw/sw corners, and the
    combined bounding corners are taken from the outer tangle on each
    side.  The original point/path list objects are reused (no copies).

    Bug fix: both connecting strands were stored under the same paths
    key, so the ne-nw strand was immediately overwritten by the se-sw
    strand; each connector now gets its own key.
    """
    points = {}
    idx = 0
    for p in x['points'].values():
        points[idx] = p
        idx += 1
    for p in y['points'].values():
        points[idx] = p
        idx += 1
    paths = {}
    idx = 0
    for seg in x['paths'].values():
        paths[idx] = seg
        idx += 1
    for seg in y['paths'].values():
        paths[idx] = seg
        idx += 1
    # two distinct keys for the two connecting strands
    paths[idx] = [x['corners']['ne'], y['corners']['nw']]
    paths[idx + 1] = [x['corners']['se'], y['corners']['sw']]
    corners = {'ne': y['corners']['ne'],
               'nw': x['corners']['nw'],
               'se': y['corners']['se'],
               'sw': x['corners']['sw']}
    return {'points': points,
            'paths': paths,
            'elevators': {},
            'players': {},
            'corners': corners}
#sum is to add an array of tangles left to right
def sum(tangs):
    """Left-to-right Conway sum of a list of tangles.

    Consumes the list via pop(0).  Note: shadows the builtin ``sum``
    at module level (kept for backward compatibility).
    """
    acc = add(tangs.pop(0), tangs.pop(0))
    while tangs:
        acc = add(acc, tangs.pop(0))
    return acc
def multiply(x,y):
    """Conway tangle product: reflect x through y = -x, then add y."""
    return add(ymx(x), y)
def product(tangs):
    """Left-to-right Conway product of a list of tangles (consumes the list)."""
    acc = multiply(tangs.pop(0), tangs.pop(0))
    while tangs:
        acc = multiply(acc, tangs.pop(0))
    return acc
#???????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????
#some circuit maintenance functions
def findcenter(x):
    """Return [cx, cy], the centre of the bounding box of a tangle's points.

    Bug fixes: the loop iterated the 'points' dict itself, yielding the
    integer keys rather than the coordinate lists (a TypeError at the
    first subscript); and the midpoint was computed as (max-min)/2 --
    the half-extent -- instead of (max+min)/2, which is what
    move2origin() needs to centre the tangle.
    """
    coords = x['points'].values()
    xs = [p[0] for p in coords]
    ys = [p[1] for p in coords]
    return [(max(xs) + min(xs)) / 2., (max(ys) + min(ys)) / 2.]
def move2origin(x):
    """Translate a tangle (in place) so its bounding-box centre sits at the origin."""
    zz = findcenter(x)
    return translate(x,-zz[0],-zz[1])
#???????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????
# --- smoke test / demo: build a few tangles and exercise the operations ---
a = tangle(0,0)
b = tangle(0,1)
c = tangle(0,2)
d = add(a,b)
e = add(d,c)
f = sum([a,b,c])
g = translate(d,2,1)
print 'd', d
print 'e', e
print 'f', f
print 'g', g
#print 'spin', spin(d)
#print 'multiply', multiply(a,b)
print len(aknots), 'aknots'
print 'product', product([a,b,c])
# the Conway product of three crossings ("32"?)
z = multiply(f,g)
print '32?'
for i in z['points'].values():
    print i
for i in z['paths'].values():
    print i
|
nborggren/Aleph | Knots/braids/BraidCalc.py | <gh_stars>0
from MakeBraid import *
import os
# Interactive front-end: ask for a line count and a braid word, translate
# the two-character tokens into generator lists, and render a PostScript
# braid diagram via MakeBraidM.
print '?!'*21
print 'Welcome to the sBraid production line'
print '<NAME>'
print 'August 2009'
print '?!'*21
z = raw_input("How many magnetic lines do you want? ")
mlines = int(z)
print " "
print "The Rules of the Game:"
print "alphabet:"
print " \'mn\' crosses line n, n+1"
print " \'Mn\' is the inverse of mn, crossing line n+1 over n"
print " \'xn'\' makes a copy of line n and switches line n+1 to n XOR n+1"
print " \'Xn'\' makes a copy of line n+1 and switches line n to n XOR n+1"
print " "
eBraid = raw_input("how should I braid them? ")
# the word is a sequence of two-character tokens: <letter><line digit>
l = len(eBraid)/2
allbraids = [eBraid[2*i:2*i+2] for i in range(l)]
x = 2
b = []
ib = []
bb = []
ibb = []
# braid element names, paired positionally with the single-letter alphabet
elem = ['braid','vbraid','magbraid','vmagbraid','ibraid','magibraid','vibraid','vmagibraid','bbraid','cbraid','magnot','maginot','magibbraid','dbraid','ident','magident']
gens = {}
gens2 = {}
alph = ['a','A','b','B','c','C','d','D','e','E','f','F','g','G','h','H','i','I']
for i in range(len(elem)):
    gens.update({elem[i]:[]})
    gens2.update({alph[i]:elem[i]})
# each token appends [column index, line number] to its generator list
for i in allbraids:
    print i
    gens[gens2[i[0]]].append([x,int(i[1])])
#    if i[0] == 'm':
#        b.append([x,int(i[1])])
#    elif i[0] == 'M':
#        ib.append([x,int(i[1])])
#    elif i[0] == 'x':
#        bb.append([x,int(i[1])])
#    elif i[0] == 'X':
#        ibb.append([x,int(i[1])])
    x = x+1
ids = GetIdsM(x,mlines,gens)
gens.update({'magident':ids})
MakeBraidM("./from_BraidCalc/"+eBraid+".ps",gens)
#os.system("gimp ./from_BraidCalc/"+eBraid+".ps")
|
nborggren/Aleph | AlephSB/Analysis.py | <filename>AlephSB/Analysis.py<gh_stars>0
from dolfin import *
from os import system as MySys
from ROOT import TFile, gStyle, gDirectory, TCanvas, TGraph2D, TGraph
from array import array as MyArray
from os import environ
##@package Analysis
#A class for investigating the dolfin solutions.
## the data count as a len d string with zeroes on the left followed by the stringed int for use in naming the indexed output files.
#
#e.g
#
# >>> from Analysis import *
#
# >>> count=cname(0)
#
# >>> for i in range(3):count.next()
#
# '0000000'
#
# '0000001'
#
# '0000002'
#
# >>> a=cname(0,D=14)
#
# >>> a.next()
#
# '00000000000000'
def cname(i, D=6):
    """Infinite generator of zero-padded index strings of width D, starting at i.

    Indices wider than D digits are yielded unpadded, matching the
    original string arithmetic.
    """
    while True:
        digits = str(i)
        yield '0' * (D - len(digits)) + digits
        i += 1
def PostAnalysis(name,rng,integrals = ["u1-u0"]):
    """Load rng consecutive saved solutions <name>_NNNNNN.xml.gz and plot them.

    Keeps the previous frame in u0 for differencing.
    NOTE(review): 'integrals', 'v' and 'u0' are currently unused here --
    presumably reserved for the named integral post-processing; confirm.
    """
    cnt = cname(1)
    mesh = Mesh(name+"_mesh.xml")
    Q = FunctionSpace(mesh,"Lagrange",2)
    u = Function(Q)
    V = VectorFunctionSpace(mesh,"Lagrange",2)
    v = Function(V)
    for i in range(rng):
        tmp = cnt.next()   # next zero-padded frame index
        if i == 0:
            u1 = Function(Q,name+"_"+tmp+".xml.gz")
        else:
            u0 = u1
            u1 = Function(Q,name+"_"+tmp+".xml.gz")
        u.interpolate(u1)
        p=plot(u)
## finds the difference between U0,U1, and returns in du
# (rather project(U0-U1)) homework: show this functions utility; rates of convergence
def Du(U0,U1):
    """Return the projected difference U0 - U1 of two dolfin Functions."""
    return project(U0-U1)
## the entropy density evaluates the entropy, \f$-u ln(u)\f$, of a function u at the mesh coordinates and puts into a ROOT TGraph2D object. homework: extend for D>2, analytically continue into complex domain
def entropy_density(mesh,u,name="test",D=2):
    """Sample the entropy density -|u| ln|u| at each mesh vertex into a TGraph2D."""
    x,y,z = MyArray('f'),MyArray('f'),MyArray('f')
    for i in mesh.coordinates()[:]:
        tmp = sqrt(pow(u(i),2))   # |u(i)|
        x.append(i[0])
        y.append(i[1])
        z.append(-tmp*ln(tmp))
    return TGraph2D(len(x),x,y,z)
## entropy along a parameterised curve (X[j], Y[j]); t supplies the abscissa
def entropy_density_array(X,Y,u,name="test",D=2,t=0):
    """Evaluate -|u| ln|u| along the points zip(X, Y); returns a TGraph over t."""
    n=len(X)
    if t==0:
        # default abscissa: uniform parameter in [0, 1)
        t=[float(i)/n for i in range(n)]
    jj=0
    x,z = MyArray('f'),MyArray('f')
    for i,j in zip(X,Y):
        tmp = sqrt(pow(u([i,j]),2))
        x.append(t[jj])
        z.append(-tmp*ln(tmp))
        jj+=1
    return TGraph(n,x,z)
## like entropy, but for the potential -ln|u|; outputs ROOT TGraph2D
def energy_density(mesh,u):
    """Sample the potential (energy density) -ln|u| at each mesh vertex into a TGraph2D."""
    x,y,z = MyArray('f'),MyArray('f'),MyArray('f')
    for i in mesh.coordinates()[:]:
        tmp = sqrt(pow(u(i),2))
        x.append(i[0])
        y.append(i[1])
        z.append(-ln(tmp))
    tmp = TGraph2D(len(x),x,y,z)
    return tmp
## like entropy, but for the potential -ln|u|; outputs ROOT TGraph
def energy_density_array(X,Y,u,name="test",D=2,t=0):
    """Evaluate -ln|u| along the points zip(X, Y); returns a TGraph over t."""
    n=len(X)
    if t==0:
        t=[float(i)/n for i in range(n)]
    jj=0
    x,z = MyArray('f'),MyArray('f')
    for i,j in zip(X,Y):
        tmp = sqrt(pow(u([i,j]),2))
        x.append(t[jj])
        z.append(-ln(tmp))
        jj+=1
    return TGraph(n,x,z)
## some day quality will be checked
def QualCheck(mesh,fname,D,vel):
    """Placeholder for a mesh/solution quality check (not yet implemented)."""
    pass
## quick load for evaluating, returns dolfin mesh, and function space
def Load_2D(n,d=2,x=[0,1],y=[0,1],elements = "Lagrange"):
    """Build an n-by-n rectangle mesh over [x0,x1]x[y0,y1] and a degree-d FunctionSpace."""
    mesh=Rectangle(x[0],y[0],x[1],y[1],n,n)
    Q=FunctionSpace(mesh,elements,d)
    return mesh,Q
## quick load for evaluating say, the lorenz poincare section
def Load_3D(name,mins=[-30,-30,26],maxs=[30,30,28],nx=30,ny=30,nz=4):
    """Build a box mesh, a quadratic Lagrange space, and load the Function saved in `name`."""
    mesh = Box(mins[0],mins[1],mins[2],maxs[0],maxs[1],maxs[2],nx,ny,nz)
    Q=FunctionSpace(mesh,"Lagrange",2)
    u = Function(Q,name)
    return mesh,Q,u
## quick loads the file named u as well for evaluating
def Load_Func(u,n,d=2,x=[0,1000],y=[0,1000],elements = "Lagrange"):
    """Like Load_2D, but also load the Function stored in file `u` on that space."""
    mesh,Q = Load_2D(n,d=d,x=x,y=y,elements = elements)
    return mesh,Q,Function(Q,u)
## the q derivative entails a higher density of grid points as one approaches zero for qx,qy < 1, in accordance with \f$x_out=q_x^{x_in*n_x}\f$. It approaches the ordinary derivative as qx,qy approach 1.
def qUnitSquare(nx,ny,qx,qy):
    """Unit-square mesh warped by x -> qx**(x*nx): grid points cluster near 0 for q < 1.

    Approaches the uniform mesh as qx, qy approach 1.
    """
    mesh = UnitSquare(nx,ny)
    for x in mesh.coordinates():
        x[0]=qx**(x[0]*nx)
        x[1]=qy**(x[1]*ny)
    return mesh
## returns a TGraph2D of the constant z cross section (z keyword) of a 3D function u given the mesh keyword.
def Lorenz_Psect(u,mesh = Rectangle(-30,-30,30,30,40,40),z=27):
    """Return a TGraph2D sampling the 3-D function u on the plane of height z.

    Bug fix: the local arrays were unpacked as ``x, y, z``, so the third
    array shadowed the ``z`` height parameter and the array object itself
    was passed to u; the arrays are now named xs/ys/zs.
    NOTE(review): mutable default mesh is evaluated once at import time.
    """
    xs, ys, zs = MyArray('f'), MyArray('f'), MyArray('f')
    for c in mesh.coordinates()[:]:
        xs.append(c[0])
        ys.append(c[1])
        zs.append(u([c[0], c[1], z]))
    return TGraph2D(len(xs), xs, ys, zs)
## the q derivative entails a higher density of grid points as one approaches zero for qx,qy < 1, in accordance with \f$x_out=q_x^{x_in*n_x}*(x_max-x_min)\f$. It approaches the ordinary derivative as qx,qy approach 1.
def qRectangle(nx,ny,qx,qy,mins=[0,0],maxs=[1,1]):
    """Like qUnitSquare, but scaled to the box extents maxs - mins.

    NOTE(review): the offset mins is used only for the scale, not added
    back to the coordinates -- confirm whether that is intended.
    """
    mesh = UnitSquare(nx,ny)
    for z in mesh.coordinates():
        z[0]=qx**(z[0]*nx)*(maxs[0]-mins[0])
        z[1]=qy**(z[1]*ny)*(maxs[1]-mins[1])
    return mesh
## the qs and hs will take over the world
def qhUnitSquare(hx,hy,qx,qy):
    """Unit-square mesh warped by the affine iteration x -> q*x + h (see qh)."""
    nx,ny=int(1/hx),int(1/hy)
    mesh = UnitSquare(nx,ny)
    for x in mesh.coordinates():
        ix = x[0]*nx   # iteration count proportional to the grid index
        iy = x[1]*ny
        x[0] = qh(1,ix,qx,hx)
        x[1] = qh(1,iy,qy,hy)
    return mesh
## shen?
def qh(x, i, q, h):
    """Apply the affine map x -> q*x + h repeatedly, ceil(i) times (i may be fractional).

    Iterative form of the original recursion; returns x unchanged for i <= 0.
    """
    while i > 0:
        x = q * x + h
        i = i - 1
    return x
## shen?
def qpUnitSquare(q2x,q2y,q1x,q1y,q0x,q0y):
    """Unit-square mesh warped by the quadratic iteration x -> q2*x^2 + q1*x + q0 (see qp)."""
    nx,ny=int(1/q0x),int(1/q0y)
    mesh = UnitSquare(nx,ny)
    for x in mesh.coordinates():
        ix = x[0]*nx
        iy = x[1]*ny
        x[0] = qp(1,ix,q2x,q1x,q0x)
        x[1] = qp(1,iy,q2y,q1y,q0y)
    return mesh
##you cant believe everything doxygen outputs
def qp(x, i, q2, q1, q0):
    """Apply the quadratic map x -> q2*x**2 + q1*x + q0 repeatedly, ceil(i) times.

    Iterative form of the original recursion; returns x unchanged for i <= 0.
    """
    while i > 0:
        x = q2 * x * x + q1 * x + q0
        i = i - 1
    return x
##Makes movies homework:get rid of token use mesh and cname
def MakeMovies(tok,rng,path="./",P=1,D=0,S=0):
    """Render saved dolfin frames <tok>_NNNN.xml as PNGs and stitch MP4s.

    P=1 plots the raw field, S=1 an LR-transformed field, D=1 the frame
    difference.  Bug fixes: the ffmpeg calls used the undefined name
    ``system`` (this module imports os.system as ``MySys``); the frame
    difference called Du() with three arguments although Du takes two;
    and the ffmpeg pattern said %06d while the PNGs are written with
    4-digit indices.
    """
    mesh = Mesh(path+tok+".xml")
    Q = FunctionSpace(mesh,"CG",1)
    tmp,du = Function(Q),Function(Q)
    for i in range(rng[0],rng[1]):
        z = Function(Q,path+tok+"_"+(4-len(str(i+1)))*'0'+str(i+1)+".xml")
        if P==1:
            tmp.assign(z)
            v = plot(tmp)
            v.write_png(tok+"_"+(4-len(str(i+1)))*'0'+str(i+1)+"_P.png")
        if S==1:
            du = LR(z,du)   # NOTE(review): LR is defined elsewhere in this module -- confirm
            tmp.assign(du)
            v = plot(tmp)
            v.write_png(tok+"_"+(4-len(str(i+1)))*'0'+str(i+1)+"_S.png")
        if D==1 and i>0:
            w = Function(Q,path+tok+"_"+(4-len(str(i)))*'0'+str(i)+".xml")
            du=Du(w,z)
            tmp.assign(du)
            v = plot(du)
            v.write_png(tok+"_"+(4-len(str(i+1)))*'0'+str(i+1)+"_D.png")
    if P==1:
        MySys("ffmpeg -qscale 5 -r 5 -b 9600 -i "+tok+"_%04d_P.png "+tok+"_P.mp4")
    if S==1:
        MySys("ffmpeg -qscale 5 -r 5 -b 9600 -i "+tok+"_%04d_S.png "+tok+"_S.mp4")
    if D==1:
        MySys("ffmpeg -qscale 5 -r 5 -b 9600 -i "+tok+"_%04d_D.png "+tok+"_D.mp4")
##A generator equivalent to newtons method
def NewtonsMethod(y,iyprime,x):
    """Generator yielding successive Newton iterates x -> x - iyprime(x)*y(x).

    Scalar case: y and iyprime are callables (iyprime being the inverse
    derivative).  If indexing raises IndexError, falls back to the vector
    form where y is a sequence of component functions and iyprime is
    indexed as iyprime[i, j] (inverse-Jacobian entries).

    Bug fix: the vector branch iterated ``for i in len(x)`` -- the
    missing range() call made it raise TypeError immediately.
    """
    try:
        while True:
            x = x - iyprime(x) * y(x)
            yield x
    except IndexError:
        while True:
            tmp = [x[i] for i in range(len(x))]
            for i in range(len(x)):
                for j in range(len(x)):
                    tmp[i] = tmp[i] - iyprime[i, j](x) * y[j](x)
                x[i] = tmp[i]
            yield x
##An order parameter that assigns to each coordinate in X the value it has at the
def NewtonsOrder2D(y,iyprime,X,nmax=100):
    """Map each 2-D seed in X to the fixed point Newton's method converges to.

    Returns two TGraph2Ds: seed -> converged x-component, and seed ->
    converged y-component.  Bug fixes: the coordinate arrays shadowed the
    ``y`` function argument (so the array, not the function, was handed to
    NewtonsMethod); the per-seed appends ran inside the iteration loop,
    adding nmax duplicate graph points per seed; and both returned graphs
    were built from z2, silently discarding z1.
    """
    xs, ys, z1, z2 = MyArray('d'), MyArray('d'), MyArray('d'), MyArray('d')
    for i in X:
        it = NewtonsMethod(y, iyprime, i)
        for j in range(nmax):
            out = it.next()
        xs.append(i[0])
        ys.append(i[1])
        z1.append(out[0])
        z2.append(out[1])
    return TGraph2D(len(xs), xs, ys, z1), TGraph2D(len(xs), xs, ys, z2)
##same complaints as MakeMovies, evaluate function instead of vector C-
def xml2oct(tok,rng,n):
    """Dump every frame i with i % n == 1 of <tok>_i.xml to a plain-text table.

    Each output line is "x   y   value" for one mesh vertex.  Bug fix:
    the output file handle was never closed (one leak per exported
    frame); a ``with`` block now guarantees flush and close.
    """
    mesh = Mesh(tok+".xml")
    Q=FunctionSpace(mesh,"CG",1)
    for i in range(rng):
        if i%n==1:
            w = Function(Q,tok+"_"+str(i)+".xml")
            with open(tok+"_"+str(i)+".dat",'w') as f:
                z=mesh.coordinates()[:]
                for j in range(len(z)):
                    f.write(str(z[j,0])+" "*3+str(z[j,1])+" "*3+str(w.vector()[j])+" \n")
def ShrinkImage(figure,ftype=".eps"):
    """Rasterise figure<ftype> to PNG via ghostscript, then re-wrap it as a compressed EPS."""
    MySys("gs -r300 -dEPSCrop -dTextAlphaBits=4 -sDEVICE=png16m -sOutputFile="+figure+".png -dBATCH -dNOPAUSE "+figure+ftype)
    MySys("convert "+figure+".png eps3:"+figure+"_c.eps")
def ShrinkImages(filelist,ftype=".eps"):
    """Run ShrinkImage on every figure path listed (one per line) in filelist.

    Bug fix: the list file was opened but never closed; a ``with`` block
    now releases the handle even on error.
    """
    with open(filelist) as f:
        for line in f:
            ShrinkImage(line.split(ftype)[0],ftype=ftype)
##generator? welp let us see what it does:
#
#>>> a=RecRelations(1,2,3,4,5)
#
# >>> a.next()
#
# 1
#
# >>> a.next()
#
# 1
#
# >>> a.next()
#
# 26
#
# >>> a.next()
#
# 201
#
# >>> a.next()
#
# 1901
#
# >>> a.next()
#
# 17126
#
# >>> a.next()
#
# 156001
#
#I dont know anything about that sequence but any number in the last spot after 0,1,0,1 is golden.
#
# >>> b=RecRelations(0,1,0,1,29301299400382)
#
# >>> b.next()
#
# 1
#
# >>> b.next()
#
# 1
#
# >>> b.next()
#
# 2
#
# >>> b.next()
#
# 3
#
# >>> b.next()
#
# 5
#
# >>> b.next()
#
# 8
#
# >>> b.next()
#
# 13
#
# >>> b.next()
#
# 21
#
# >>> b.next()
#
# 34
def RecRelations(a,b,c,d,x):
    """Generator for the recurrence f_n = (a*x+b)*f_{n-1} + (c*x+d)*f_{n-2}.

    Seeds f_{-1} = f_0 = 1 and yields f_{n-2} on each step, so the first
    two yields are 1, 1.  With a=c=0, b=d=1 this is the Fibonacci sequence.
    """
    p = a * x + b   # coefficient of f_{n-1}
    q = c * x + d   # coefficient of f_{n-2}
    prev, curr = 1, 1
    while True:
        yield prev
        prev, curr = curr, p * curr + q * prev
## Make plots from root outputs. where are these root outputs?
def Prettify_Root(MyFile, sHistos): #self,tt,xx,uu=None,store=0):
    """Open a ROOT file, draw the named histograms on a divided canvas, and
    optionally return them.

    NOTE(review): the trailing ``input("sheat")`` is a Python-2 eval'd
    prompt used as a pause; entering 1 returns the histogram list.
    """
    f = TFile(MyFile)
    gStyle.SetPalette(1)
    hist = [gDirectory.Get(i) for i in sHistos]
    c2=TCanvas()
    H = len(hist)
    # split the canvas into two rows, rounding the column count up
    if H%2==0:
        c2.Divide(H/2,2)
    else:
        c2.Divide((H+1)/2,2)
#    c2 = TCanvas()
    gColors=[2,4,6,7,8,9,11,12,13,14,15,16,17,18]
    gShapes=[27,26,23,22,28,15,11]
    for i in range(H):
        c2.cd(i+1)
#        hist[i].SetLineColor(4)
#        hist[i].SetLineWidth(1)
#        grsX[i].SetMarkerColor(4)
#        grsX[i].SetMarkerStyle(7)
#        grsX[i].SetTitle("x_{"+str(i)+"}")
#        grsX[i].GetYaxis().SetTitle("x_{"+str(i)+"}")
#        grsX[i].GetXaxis().SetTitle("time")
        hist[i].Draw()
    zwom = input("sheat")
    if zwom == 1:
        return hist
    else:
        return
## homework given a set of u variables, predict the most likely x variable.
def histUtoX(MyHists):
    """Placeholder: given a set of u variables, predict the most likely x (not implemented)."""
    pass
## let us forgive
def T(ti, q, nmax=100, s=False):
    """Evaluate the polynomial sum_i q[i] * ti**i.

    With s=True the terms are built symbolically and passed through
    expand() (symbolic backend expected in scope); nmax is accepted for
    interface compatibility but unused.
    """
    if s == True:
        dq = [q[i]*ti**i for i in range(len(q))]
        return expand(sum(dq))
    else:
        total = 0
        for power, coeff in enumerate(q):
            total = total + coeff * pow(ti, power)
        return total
##This gives a generator for a trajectory with initial condition x and time step dt. Here is an example use for the lorenz attractor.
#
#>>> from Dynamics import *
#
# >>> from Analysis import *
#
# >>> dyn=Dynamics()
#
# >>> dyn.Init(imagine=1)
#
# >>> x0=dyn.rzero()
#
# >>> xt=qIntegrate(x0,.001,dyn.Flow)
#
# >>> x0
#
# [(-20.698756946446188+2.5449184142103549j), (-27.741584663422792+2.1660923516583601j), (2.6240628528889474+2.1327704439181305j)]
#
# >>> xt.next()
#
# [(-20.769185223615956+2.541130153584835j), (-28.233665707279705+2.2726516460714548j), (3.1857691418927421+2.0116475673078296j)]
#
# >>> xt.next()
#
# [(-20.843830028452594+2.5384453685097013j), (-28.715691540154349+2.3752154656269879j), (3.7578888865040811+1.8873366315339923j)]
#
# >>> xt.next()
#
# [(-20.922548643569613+2.5368130694808739j), (-29.18748339126363+2.4737168487944792j), (4.3403834883879213+1.7599019322097629j)]
#
# >>> xt.next()
#
# [(-21.005197991046554+2.5361821072740098j), (-29.648850843002137+2.5680847901158188j), (4.9332103162237635+1.6294092099590032j)]
#
# >>> xt.next()
#
# [(-21.09163451956611+2.5365011341024277j), (-30.09959199800096+2.6582443476575728j), (5.5363219401834467+1.49592610424901j)]
#
# >>> xt.next()
#
# [(-21.181714094350458+2.5377185662379791j), (-30.539493675345845+2.7441167748438504j), (6.1496653688169625+1.3595226004875669j)]
#
# >>> xt.next()
# [(-21.27529189016041+2.5397825483240379j), (-30.968331636949625+2.8256196769694077j), (6.7731812886967369+1.2202714698165806j)]
#
# >>> xt.next()
# [(-21.372222287628304+2.5426409196104918j), (-31.385870845111913+2.9026671927174275j), (7.4068033073105033+1.0782487009233956j)]
#
def qIntegrate(x, dt, f, data=False):
    """Generator of explicit-Euler steps along the flow f from initial state x.

    Each yielded value is a fresh list; the 'data' flag is accepted for
    interface compatibility but unused.  Note: as in the original, f is
    re-evaluated once per component per step.
    """
    state = x
    while True:
        nxt = []
        for k, val in enumerate(state):
            nxt.append(val + f(state)[k] * dt)
        state = nxt
        yield state
#compress (gz) each file in a list
def comList(fname):
    """gzip every file listed (one path per line) in the list file fname.

    Bug fix: the list file was opened but never closed; a ``with`` block
    now releases the handle even on error.
    """
    with open(fname) as f:
        for line in f:
            MySys("gzip "+line.replace('\n',''))
    return
#the probability current \f$J_i=-D_{ij}\partial_ju+F_iu\f$
def J(V,D,velocity,u):
    """Probability current J_i = -D_ij d_j u + F_i u, projected onto V."""
    return project(-D*grad(u)+velocity*u,V)
# \f$J_i^2f$
def Jsquared(V,D,velocity,u):
    """Squared magnitude of the probability current, J . J (UFL expression)."""
    z = J(V,D,velocity,u)
    return dot(z,z)
# \f$\partial_iJ_i\f$
def DivJ(U,V,D,velocity,u):
    """Divergence of the probability current, projected onto the scalar space U."""
    z = J(V,D,velocity,u)
    return project(div(z),U)
def CurlJ(U,V,D,velocity,u):
    """Curl of the probability current, projected onto U."""
    #diagonal diffusion for starters
    z = J(V,D,velocity,u)
    return project(curl(z),U)
def CurlJ_2D(J, mesh, t=100, eps = 0.001,xdom=[0,1],ydom=[0,1]):
    """Estimate the curl of a 2-D vector field J at each mesh vertex.

    Approximates the circulation integral of J around a circle of radius
    eps (t sample points) and divides by 2*pi*eps.  Returns a TGraph2D.
    NOTE(review): xdom/ydom are only used by the commented-out boundary
    clamp, and the mutable list defaults are shared across calls.
    """
    x,y,z=MyArray('f'),MyArray('f'),MyArray('f')
    for i in mesh.coordinates()[:]:
        x.append(i[0])
        y.append(i[1])
        tmp = 0
        for j in range(t):
            # point on the circle of radius eps around vertex i
            xt = i[0]+eps*sin(2*pi*j/t)
            yt = i[1]+eps*cos(2*pi*j/t)
            #if xt < xdom[0] or xt > xdom[1] or yt < ydom[0] or yt > ydom[1]:
            #    jtmp = [0,0]
            #else:
            jtmp = J([xt,yt])
            #print jtmp
            # tangential component of J accumulated along the loop
            tmp = tmp+jtmp[0]*eps*cos(2*pi*j/t)-jtmp[1]*eps*sin(2*pi*j/t)
        print tmp
        z.append(tmp/(2*pi*eps))
    return TGraph2D(len(mesh.coordinates()[:]),x,y,z)
def entropy_prod(Q,head,n,dt):
    """Track an entropy measure over n saved frames <head>NNNNNN.xml.gz.

    Returns the last entropy field and a TGraph of entropy versus time.
    NOTE(review): relies on an ``entropy(g, v)`` helper defined elsewhere
    in this module -- confirm its signature.
    """
    t = MyArray('f')
    ent = MyArray('f')
    v = Function(Q)
    for i in range(n):
        ss = (6-len(str(i)))*"0"+str(i)   # zero-padded frame index
        g = Function(Q,head+ss+".xml.gz")
        t.append(i*dt)
        v,S = entropy(g,v)
        ent.append(S)
        print S, i
    return v, TGraph(n,t,ent)
def Func2Graph(mesh,func,Z=0,polar=0):
    """Sample a scalar or 2-component function over mesh points into ROOT graphs.

    mesh may be a dolfin mesh (coordinates are used) or a plain point list.
    For 2-D points: scalar func -> one TGraph2D; 2-component func -> two
    TGraph2Ds (cartesian components, or magnitude/angle when polar=1).
    Z=1 means func takes (x, y) separately instead of a point.  For
    non-2-D inputs, returns a TGraph of func over the point index.
    """
    try:
        MyRange = mesh.coordinates()[:]
    except AttributeError:
        MyRange=mesh
        pass
    n,d=len(MyRange),len(MyRange[0])
    print n,d
    if d==2:
        # probe the first point to see whether func is scalar or vector-valued
        try:
            tmp = len(func(MyRange[0]))
        except TypeError:
            tmp=1
        if tmp==1:
            x,y,z=MyArray('f'),MyArray('f'),MyArray('f')
            for i in MyRange:
                x.append(i[0])
                y.append(i[1])
                if Z==1:
                    z.append(func(i[0],i[1])) # who cares about units?
                else:
                    z.append(func(i))
            return TGraph2D(n,x,y,z)
        elif tmp==2 and polar==0:
            x,y,zx,zy=MyArray('f'),MyArray('f'),MyArray('f'),MyArray('f')
            for i in MyRange:
                x.append(i[0])
                y.append(i[1])
                if Z==1:
                    tmp2 = func(i[0],i[1])
                    zx.append(tmp2[0])
                    zy.append(tmp2[1])
                else:
                    tmp2 = func(i)
                    zx.append(tmp2[0])
                    zy.append(tmp2[1])
            return TGraph2D(n,x,y,zx),TGraph2D(n,x,y,zy)
        elif tmp==2 and polar==1:
            x,y,zx,zy=MyArray('f'),MyArray('f'),MyArray('f'),MyArray('f')
            for i in MyRange:
                x.append(i[0])
                y.append(i[1])
                if Z==1:
                    tmp2 = func(i[0],i[1])
                    # magnitude and polar angle of the vector value
                    zx.append(pow(pow(tmp2[0],2)+pow(tmp2[1],2),.5))
                    zy.append(atan(tmp2[1]/tmp2[0]))
                else:
                    tmp2 = func(i)
                    zx.append(pow(pow(tmp2[0],2)+pow(tmp2[1],2),.5))
                    zy.append(atan(tmp2[1]/tmp2[0]))
            return TGraph2D(n,x,y,zx),TGraph2D(n,x,y,zy)
    else:
        x,y=MyArray('f'),MyArray('f')
        for i,e in enumerate(MyRange):
            x.append(i)
            y.append(func(e))
        return TGraph(n,x,y)
def OrderParam(mesh,velocity):
    """Quadrant order parameter of a 2-D velocity field sampled at mesh points.

    Maps the sign pattern of (v0, v1) to the levels {1, 0.33, -0.33, -1}
    and returns a TGraph2D.  mesh may be a dolfin mesh or a point list.
    NOTE(review): the non-2-D fallback references an undefined name
    ``func`` (copied from Func2Graph) and would raise NameError -- confirm.
    """
    try:
        MyRange = mesh.coordinates()[:]
    except AttributeError:
        MyRange=mesh
        pass
    n,d=len(MyRange),len(MyRange[0])
    if d==2:
        x,y,z=MyArray('f'),MyArray('f'),MyArray('f')
        for i in MyRange:
            x.append(i[0])
            y.append(i[1])
            tmp = [j(i) for j in velocity]
            if tmp[0]>=0 and tmp[1]>=0:
                z.append(1)
            elif tmp[0]>=0 and tmp[1]<0:
                z.append(0.33)
            elif tmp[0]<0 and tmp[1]>=0:
                z.append(-0.33)
            else:
                z.append(-1)
        return TGraph2D(n,x,y,z)
    else:
        x,y=MyArray('f'),MyArray('f')
        for i,e in enumerate(MyRange):
            x.append(i)
            y.append(func(e))
        return TGraph(n,x,y)
#try newtons method dork.
def FindZeroes(mesh,func,Z=0,tol=0.001):
    """Collect mesh points where |func| falls below tol into a TGraph.

    Z=1 means func takes (x, y) separately instead of a point.
    NOTE(review): the Z==1 branch tests ``tmp < tol`` without abs(),
    unlike the other branch -- confirm whether that asymmetry is intended.
    """
    try:
        MyRange = mesh.coordinates()[:]
    except AttributeError:
        MyRange=mesh
        pass
    x,y=MyArray('f'),MyArray('f')
    for i in MyRange:
        if Z==1:
            tmp=func(i[0],i[1])
            if tmp < tol:
                x.append(i[0])
                y.append(i[1])
        else:
            tmp=func(i)
            print func(i),tmp
            if abs(tmp) < tol:
                x.append(i[0])
                y.append(i[1])
    return TGraph(len(x),x,y)
def Entropy2Graph(D,velocity,mesh,Q,FuncList,dt=1,T=310,n=1085):
    """Assemble an entropy-production time series from a list of saved Functions.

    For each file in FuncList the integrand inner(T*grad(LR(u,v)) - v,
    J(u)) is assembled over the mesh; returns a styled TGraph versus time.
    NOTE(review): LR is defined elsewhere in this module; the n keyword
    is immediately overwritten by len(FuncList).
    """
    n = len(FuncList)
    t,s=MyArray('f'),MyArray('f')
    for i,j in enumerate(FuncList):
        #j = (6-len(str(i*dn+1)))*"0"+str(i+1)
        u = Function(Q,j)
        v = Function(Q)
        #J = velocity*u-D*grad(u)
        ss = project(inner(T*grad(LR(u,v))-velocity,velocity*u-D*grad(u)))
        t.append(i*dt)
        s.append(assemble(ss*dx,mesh=mesh))
    gr = TGraph(n,t,s)
    gr.SetMarkerColor(4)
    gr.SetMarkerStyle(7)
    gr.SetTitle("entropy production")
    return gr
def NegEntropyReg():
    """Placeholder: locate regions of negative entropy production (not implemented)."""
    return #z = [i for i in mesh.coordinates()[:] if ss(i)<0]
def FunctionArray(Q,head,tail,n):
    """Load n saved Functions named <head>NNNNNN<tail> (6-digit zero-padded index)."""
    return [Function(Q,head+"0"*(6-len(str(i)))+str(i)+tail) for i in range(n)]
def Func2Table(Q,head,n,dn):
    """Plot every dn-th saved frame <head>NNNNNN.xml.gz and save a PNG of each."""
    g = Function(Q)
    for i in range(n):
        j = (6-len(str(i*dn+1)))*"0"+str(i*dn+1)   # zero-padded frame index
        f=Function(Q,head+j+".xml.gz")
        g.interpolate(f)
        p=plot(g,interactive=True)
        p.write_png(head+j+".png")
def FuncMax(mesh,func):
    """Return the mesh coordinate of the first vertex where func attains its maximum."""
    mx = max(func.vector()[:])
    tmp = [i for i, e in enumerate(func.vector()[:]) if e==mx]
    print tmp
    return mesh.coordinates()[tmp[0]]
def CutCroRegion(mesh,func):
    """Zero func (in place) at every vertex strictly above the diagonal y > x."""
    nn=len(mesh.coordinates()[:])
    for i in range(nn):
        if mesh.coordinates()[i][1]>mesh.coordinates()[i][0]:
            func.vector()[i]=0
    return func
def CutCiRegion(mesh,func):
    """Zero func (in place) at every vertex strictly below the diagonal y < x."""
    nn=len(mesh.coordinates()[:])
    for i in range(nn):
        if mesh.coordinates()[i][1]<mesh.coordinates()[i][0]:
            func.vector()[i]=0
    return func
def MyEval(Q,funcs,points):
    """Evaluate each saved Function (by filename) at each point.

    Returns a list of per-function value lists, printing each value.
    """
    tmp = []
    for i in funcs:
        tmp.append([])
        f=Function(Q,i)
        for j in points:
            z = f(j)
            tmp[-1].append(z)
            print z
    return tmp
def fpt(mesh,Q,velocity,D):
    """Placeholder: first-passage-time computation (not implemented)."""
    return
##partitions a given mesh into n sectors in accordance with a particular output distribution
#4
def Pools(n,fmesh,fname):
    """Partition the DOFs of a saved Function into n+1 value bands.

    Returns {band index: [dof indices]} where the bands are equal-width
    slices of [fmin, fmax]; each dof lands in the highest band whose
    lower edge it meets.
    """
    mesh = Mesh(fmesh)
    Q = FunctionSpace(mesh,"CG",1)
    f = Function(Q,fname)
    fmin = min(f.vector()[:])
    fmax = max(f.vector()[:])
    df = (fmax-fmin)/n   # band width
    pools = {}
    [pools.update({i:[]}) for i in range(n+1)]
    cds = [fmin+df*i for i in range(n+1)]   # band lower edges
    nn = len(f.vector()[:])
    print nn, "thelength"
    for j in range(nn):
        isin = [ i for i, e in enumerate(cds) if f.vector()[j]>=e]
        pools[max(isin)].append(j)
    return pools
##generates a langevinesque trajectory from reservoir exchange within pools. Equipartition states all states of particular probability equally probable, so we can switch to any state on a given probability contour.
#2
def resExch(nn,steps,pools):
    """Random walk over pool members: each step jumps to a random state in
    the current state's pool (equipartition-style reservoir exchange).

    NOTE(review): ``choice`` is presumably random.choice imported at module
    level (not visible in this chunk) -- confirm.
    """
    x0 = choice(range(nn))
    traj = []
    for i in range(steps):
        # find which pool currently contains x0
        where = [i for i, e in enumerate(pools.keys()) if x0 in pools[e]]
        print where
        x0 = choice(pools[where[0]])
        traj.append(x0)
    return traj
def Traj2Bin(X, rule):
    """Binarise a trajectory: 1 where eval(rule) is exactly True for the
    loop variable x, else 0.

    NOTE(review): rule is passed straight to eval() -- never feed it
    untrusted strings.
    """
    sym = []
    for x in X:
        sym.append(1 if eval(rule) == True else 0)
    return sym
def DirtyIntegrator(velocity, x, tf, dt):
    """Explicit-Euler integration of a 2-D flow from state x up to time tf.

    velocity is a pair of callables taking the full state.  Returns the
    coordinate histories X, Y and the time axis T (in units of t/60,
    with the initial entry 0).
    """
    X, Y, T = [x[0]], [x[1]], [0]
    t = 0
    while t < tf:
        # both components evaluated at the OLD state before updating
        x = [x[0] + velocity[0](x) * dt,
             x[1] + velocity[1](x) * dt]
        X.append(x[0])
        Y.append(x[1])
        t = t + dt
        T.append(t / 60.)
    return X, Y, T
def Arrays2Graph(X,Y):
    """Zip two Python sequences into float arrays and return a ROOT TGraph."""
    x=MyArray('f')
    y=MyArray('f')
    for i,j in zip(X,Y):
        x.append(i)
        y.append(j)
    return TGraph(len(x),x,y)
def PlotViewer(FileList,names,option="APL"):
    """Draw each named object from each ROOT file, pausing (Python-2 input) after each."""
    for i in FileList:
        f=TFile(i)
        for j in names:
            tmp=gDirectory.Get(j)
            tmp.Draw(option)
            zwom=input("sheat")
    return
def LamStates():
    """Return all 27 three-symbol words over {'1', '2', '3'}, in lexicographic order."""
    symbols = '123'
    return [a + b + c for a in symbols for b in symbols for c in symbols]
#seee TrajTable ~/Analysis/Diff/lambda
def TrajTable(mesh,Q,head,nx,dat=10,image=40,zeroes=[],entropy=1,energy=1):
    """Placeholder: trajectory summary table (not implemented; see ~/Analysis/Diff/lambda)."""
    return
def Publish(name,title,media,columns=5):
    """Assemble LaTeX fragments (figures, tables, graphviz inputs) from a media dict.

    NOTE(review): several apparent defects left untouched here --
    ``img_beg`` lacks a comma, so its two strings implicitly concatenate
    into one; the table loop references ``self.mon`` although this is a
    free function (NameError); and the template handle f and output
    handle g are opened but never written to or closed.  Confirm intent
    before relying on this function.
    """
    f=open(environ["ALEPHPATH"]+'lib/template.tex')
    g=open(name+'.tex','w')
    img_beg = ["\\begin{figure}[h]"
    "\\begin{center}"]
    img_end = ["\\end{center}",
    "\\end{figure}"]
    tbl_beg = ["\\begin{table}[htp]",
    "\\centering",
    "\\begin{tabular}{"+"*{"+str(columns+1)+"}{|>{\centering}p{"+str(15./(columns+1))[:3]+"cm}}|} ",
    "\\hline"]
    tbl_end = ["\\end{tabular}",
    "\\end{table}"]
    eqn_beg = ["\\begin{equation}"]
    eqn_end = ["\\end{equation}"," "]
    vid_beg = ["\\begin{figure}[ht]",
    "\includemovie[",
    "poster,"]
    vid_end = ["\end{figure}"]
    IMAGES,TABLES,VIDEOS,EQNS,DOTS="","","","",""
    qqq=0
    for i in media['image']:
        for j in img_beg:
            IMAGES=IMAGES+j+" \n "
        IMAGES = IMAGES+ "\epsfig{file="+i+",scale = 0.8} \n"
        for j in img_end:
            IMAGES=IMAGES+j+" \n "
        qqq=qqq+1
        # break the page every 15 figures
        if qqq%15==1:
            IMAGES = IMAGES + "\clearpage \n"
    for i in media['table']:
        for j in tbl_beg:
            TABLES=TABLES+j+" \n "
        tmp = ["& $ U_"+str(q+1)+ " $ " for q in range(self.mon)]
        TABLES = TABLES + " * "
        for k in tmp:
            TABLES = TABLES+k
        TABLES = TABLES + " \n \\tabularnewline \n \\hline \n "
        TABLES = TABLES + i +" \n "
        for j in tbl_end:
            TABLES=TABLES+j+" \n "
    for i in media['dot']:
        # run the ladot preprocessor, then include the generated TeX
        MySys("$DOTPATH./ladot "+i+".ladot")
        DOTS=DOTS+"\\input{"+i+".tex} \n"
def PublishImageList(name,title,filelist):
    """Publish() a document whose 'image' media list is read from filelist
    (one figure path per line).

    Bug fix: the list file was opened but never closed; a ``with`` block
    now releases the handle even on error.
    """
    media={'table': [], 'image': [], 'video': [], 'dot': [], 'eqn': []}
    with open(filelist) as f:
        for line in f:
            media['image'].append(line.replace('\n',''))
    Publish(name,title,media)
def MeshTraj(mesh,velocity,dt,tsteps,record=0,interactive=1):
    """Advect every mesh vertex along the velocity field, plotting each step.

    record, when truthy, is used as a filename prefix and a PNG is saved
    every 25 steps; every 4500th plot blocks interactively.
    """
    if record !=0:
        nm=cname(1)   # zero-padded frame-name generator
    for j in range(tsteps):
        for x in mesh.coordinates():
            xc = TStep(velocity,x,dt)
            # write the Euler step back into the mesh coordinate in place
            for k in range(len(x)):
                x[k] = xc[k]
        if j%4500==0 and interactive==1:
            p=plot(mesh,interactive=True,axes=True)
        else:
            p=plot(mesh,axes=True)
        if record !=0:
            if j%25==0:
                p.write_png(record+"_"+nm.next()+".png")
def TStep(velocity,x,dt):
    """Single explicit-Euler step: x + dt * velocity(x).

    velocity may be one callable returning a vector, or a sequence of
    per-component callables.  Bug fix: calling a plain list raises
    TypeError, not IndexError, so the per-component fallback was
    unreachable; both exception types are now handled.
    """
    try:
        a=velocity(x)
    except (TypeError, IndexError):
        a=[velocity[i](x) for i in range(len(velocity))]
    return [x[i]+dt*a[i] for i in range(len(x))]
def MakeInitialCondition(mesh,Q,MyFile,hist='u_2'):
    """Build a dolfin Function whose DOF values are read off a ROOT histogram.

    Each vertex (x, y, z) is mapped to the histogram abscissa x*y/z and
    assigned that bin's content.
    """
    f = TFile(MyFile)
    u4 = gDirectory.Get(hist)
    u4.Draw()
    u0 = Function(Q)
    for i,j in enumerate(mesh.coordinates()[:]):
        tmp = j[0]*j[1]/j[2]   # scalar abscissa for this vertex
        tmp = u4.FindBin(tmp)
        u0.vector()[i]=u4.GetBinContent(tmp)
    return u0
|
nborggren/Aleph | lib/Dyn.py | import ROOT as rt
import random
# Demo: fill a ROOT histogram with 10k uniform samples, draw it with error
# bars, then pause on a Python-2 input() prompt so the canvas stays open.
mem=rt.TH1F("mem","mem",100,0,1)
for i in range(10000):
    mem.Fill(random.random())
mem.Draw("E0")
zwom=input("sheat")
|
nborggren/Aleph | Knots/braids/braid.py | <filename>Knots/braids/braid.py
from MakeBraid import *
import os
globalvars = {} # We will store the calculator's variables here
def mult(x,y):
    """Record generator y ("name_line") at the next braid column and return "x*y".

    Side effect: appends [column, line] to the global MyBraid[name] list
    and advances MyBraid['idx'].
    """
    elems = y.split("_")
    MyBraid[elems[0]].append([MyBraid['idx'],int(elems[1])])
    MyBraid['idx']=MyBraid['idx']+1
    return x+"*"+y
def div(x,y):
    """Record the INVERSE of generator y at the next braid column and return "x/y".

    Side effect: appends [column, line] to the global MyBraid[name+'i']
    list and advances MyBraid['idx'].
    """
    elems = y.split("_")
    MyBraid[elems[0]+'i'].append([MyBraid['idx'],int(elems[1])])
    MyBraid['idx']=MyBraid['idx']+1
    return x+"/"+y
def lookup(map, name):
    """Resolve a variable: first in the local (name, value) pairs of `map`,
    then in globalvars (warning once if absent; defaults to 0)."""
    for x,v in map:
        if x==name: return v
    if name not in globalvars.keys(): print 'Undefined:', name
    return globalvars.get(name, 0)
from string import *
import re
from yappsrt import *
class sbraidScanner(Scanner):
    """Tokenizer for the sBraid calculator (yapps-generated).

    Token patterns are tried in order; whitespace is skipped, GEN matches
    the single-letter generators a/b/c, NUM a line number, VAR any other
    identifier.
    """
    patterns = [
        ('"in"', re.compile('in')),
        ('"="', re.compile('=')),
        ('"let"', re.compile('let')),
        ('r"\\)"', re.compile('\\)')),
        ('"\\("', re.compile('\(')),
        ('"_"', re.compile('_')),
        ('"/"', re.compile('/')),
        ('"[*]"', re.compile('[*]')),
        ('"set"', re.compile('set')),
        ('[ \r\t\n]+', re.compile('[ \r\t\n]+')),
        ('END', re.compile('$')),
        ('GEN', re.compile('[abc]')),
        ('NUM', re.compile('[0-9]+')),
        ('VAR', re.compile('[d-zA-Z_]+')),
    ]
    def __init__(self, str):
        # no default token, skip runs of whitespace
        Scanner.__init__(self,None,['[ \r\t\n]+'],str)
class sbraid(Parser):
    """Recursive-descent parser for braid expressions (yapps-generated).

    Grammar: goal := expr | 'set' VAR expr ; expr := elem (('*'|'/') elem)* ;
    elem := term ('_' term)* ; term := GEN | NUM | VAR | '(' expr ')' |
    'let' VAR '=' expr 'in' expr.  Multiplication/division record braid
    generators into the global MyBraid via mult()/div().
    """
    def goal(self):
        # top-level: plain expression, or 'set VAR expr' storing into globalvars
        _token_ = self._peek('"set"', 'GEN', 'NUM', 'VAR', '"\\("', '"let"')
        if _token_ != '"set"':
            expr = self.expr([])
            END = self._scan('END')
            print '=', expr
            return expr
        else:# == '"set"'
            self._scan('"set"')
            VAR = self._scan('VAR')
            expr = self.expr([])
            END = self._scan('END')
            globalvars[VAR] = expr
            print VAR, '=', expr
            return expr
    def expr(self, V):
        # left-associative chain of * (generator) and / (inverse generator)
        elem = self.elem(V)
        v = elem
        while self._peek('"[*]"', '"/"', 'END', 'r"\\)"', '"in"', '"_"') in ['"[*]"', '"/"']:
            _token_ = self._peek('"[*]"', '"/"')
            if _token_ == '"[*]"':
                self._scan('"[*]"')
                elem = self.elem(V)
                v = mult(v,elem)
            else:# == '"/"'
                self._scan('"/"')
                elem = self.elem(V)
                v = div(v,elem)
        return v
    def elem(self, V):
        # generator element of the form name_line; the FIRST element of an
        # input seeds MyBraid directly at column 2
        term = self.term(V)
        n = term
        while self._peek('"_"', '"[*]"', '"/"', 'END', 'r"\\)"', '"in"') == '"_"':
            self._scan('"_"')
            term = self.term(V)
            if MyBraid['idx']==2:
                MyBraid[n].append([2,int(term)])
                MyBraid['idx']=3
            n = n+"_"+term
        return n
    def term(self, V):
        # atoms: generator letter, number, variable lookup, parenthesised
        # expression, or a let-binding evaluated in an extended environment
        _token_ = self._peek('GEN', 'NUM', 'VAR', '"\\("', '"let"')
        if _token_ == 'GEN':
            GEN = self._scan('GEN')
            return GEN
        elif _token_ == 'NUM':
            NUM = self._scan('NUM')
            return NUM
        elif _token_ == 'VAR':
            VAR = self._scan('VAR')
            return lookup(V, VAR)
        elif _token_ == '"\\("':
            self._scan('"\\("')
            expr = self.expr(V)
            self._scan('r"\\)"')
            return expr
        else:# == '"let"'
            self._scan('"let"')
            VAR = self._scan('VAR')
            self._scan('"="')
            expr = self.expr(V)
            V = [(VAR, expr)] + V
            self._scan('"in"')
            expr = self.expr(V)
            return expr
def parse(rule, text):
    """Tokenize ``text``, run the given grammar ``rule`` of the sbraid
    parser, and surface syntax errors through yapps' error wrapper."""
    scanner = sbraidScanner(text)
    parser = sbraid(scanner)
    return wrap_error_reporter(parser, rule)
if __name__=='__main__':
    print '?'*31
    print 'Welcome to the sBraid Calculator'
    print 'enter group elements in the form a_i'
    print '?'*31
    # We could have put this loop into the parser, by making the
    # `goal' rule use (expr | set var expr)*, but by putting the
    # loop into Python code, we can make it interactive (i.e., enter
    # one expression, get the result, enter another expression, etc.)
    while 1:
        # Fresh crossing record per input line: one list per generator
        # (and its inverse, key suffixed 'i'); 'idx' is the next crossing
        # index, starting at 2 (see sbraid.elem for the idx==2 special case).
        MyBraid = {'a':[],
                   'ai':[],
                   'b':[],
                   'bi':[],
                   'c':[],
                   'ci':[],
                   'idx':2}
        try: s = raw_input('>>> ')
        except EOFError: break
        # empty line terminates the session
        if not strip(s): break
        parse('goal', s)
        print MyBraid
        # Re-key the crossings into the names expected by MakeBraidM.
        gens = {'magbraid':MyBraid['a'],
                'magibraid':MyBraid['ai'],
                'magbbraid':MyBraid['b'],
                'magibbraid':MyBraid['bi'],
                'magnot':MyBraid['c'],
                'maginot':MyBraid['ci']
                }
        # Find the highest strand number used by any crossing.
        # NOTE(review): the ValueError handler looks vestigial -- comparing
        # ints cannot raise ValueError; confirm what it was meant to catch.
        j=0
        for i in MyBraid.keys():
            if i not in ['idx']:
                try:
                    for k in MyBraid[i]:
                        if k[1]>j:
                            j=k[1]
                except ValueError:
                    j = j
        print 'j', j
        j = j+1
        ids = GetIdsM(MyBraid['idx'],j,gens)
        gens.update({'magident':ids})
        # Render the braid diagram as PostScript.
        eBraid = 'temp'
        MakeBraidM("./from_BraidCalc/"+eBraid+".ps",gens)
    print 'Bye.'
    print MyBraid
|
nasseredine/udacity-dend-p4 | plugins/operators/load_dimension.py | from airflow.hooks.postgres_hook import PostgresHook
from airflow.models import BaseOperator
from airflow.utils.decorators import apply_defaults
class LoadDimensionOperator(BaseOperator):
    """Populates a Redshift dimension table from a SELECT statement.

    In truncate-insert mode (``append_data=False``) the target table is
    emptied before loading; otherwise new rows are simply appended.
    """
    ui_color = '#80BD9E'
    delete_sql = "DELETE FROM {};"
    insert_sql = "INSERT INTO {} ({});"
    @apply_defaults
    def __init__(self,
                 redshift_conn_id,
                 table_name,
                 select_sql,
                 append_data=True,
                 *args, **kwargs):
        super(LoadDimensionOperator, self).__init__(*args, **kwargs)
        self.redshift_conn_id = redshift_conn_id
        self.table_name = table_name
        self.select_sql = select_sql
        self.append_data = append_data
    def execute(self, context):
        """Optionally clear the table, then run the INSERT ... SELECT."""
        hook = PostgresHook(postgres_conn_id=self.redshift_conn_id)
        if not self.append_data:
            # truncate-insert pattern: wipe existing rows first
            hook.run(LoadDimensionOperator.delete_sql.format(self.table_name))
        hook.run(LoadDimensionOperator.insert_sql.format(self.table_name, self.select_sql))
nasseredine/udacity-dend-p4 | plugins/operators/stage_redshift.py | import os
from airflow.contrib.hooks.aws_hook import AwsHook
from airflow.hooks.postgres_hook import PostgresHook
from airflow.models import BaseOperator
from airflow.utils.decorators import apply_defaults
class StageToRedshiftOperator(BaseOperator):
    """Stages JSON data from S3 into a Redshift table via COPY.

    The staging table is cleared first, then loaded from
    ``s3://<s3_bucket>/<s3_key>`` using the given JSON option ('auto' or a
    JSONPaths file URI). ``s3_key`` is a templated field, so it may contain
    Airflow context macros (e.g. the execution date).
    """
    ui_color = '#358140'
    template_fields = ('s3_key',)
    copy_sql = """
        COPY {}
        FROM '{}'
        ACCESS_KEY_ID '{}'
        SECRET_ACCESS_KEY '{}'
        FORMAT AS json '{}';
    """
    s3_prefix = 's3://'
    @apply_defaults
    def __init__(self,
                 redshift_conn_id,
                 aws_credentials_id,
                 table_name,
                 s3_bucket,
                 s3_key,
                 json_option='auto',
                 *args, **kwargs):
        super(StageToRedshiftOperator, self).__init__(*args, **kwargs)
        self.redshift_conn_id = redshift_conn_id
        self.aws_credentials_id = aws_credentials_id
        self.table_name = table_name
        self.s3_bucket = s3_bucket
        self.s3_key = s3_key
        self.json_option = json_option
    def execute(self, context):
        """Clear the staging table and COPY the S3 data into it."""
        aws_hook = AwsHook(self.aws_credentials_id)
        credentials = aws_hook.get_credentials()
        redshift = PostgresHook(postgres_conn_id=self.redshift_conn_id)
        self.log.info(f"Clearing data from {self.table_name} staging table in Redshift")
        redshift.run('DELETE FROM {}'.format(self.table_name))
        rendered_s3_key = self.s3_key.format(**context)
        # Build the URI with explicit '/' separators: the previous
        # os.path.join call used the platform separator, which would yield
        # an invalid 's3:\\...'-style path when run on Windows.
        s3_path = StageToRedshiftOperator.s3_prefix + '/'.join([self.s3_bucket, rendered_s3_key])
        self.log.info(f"Copying data from S3 to {self.table_name} staging table in Redshift")
        self.log.info(f"Data source: {s3_path}")
        formatted_sql = StageToRedshiftOperator.copy_sql.format(
            self.table_name,
            s3_path,
            credentials.access_key,
            credentials.secret_key,
            self.json_option
        )
        redshift.run(formatted_sql)
nasseredine/udacity-dend-p4 | plugins/operators/load_fact.py | <reponame>nasseredine/udacity-dend-p4<gh_stars>0
from airflow.hooks.postgres_hook import PostgresHook
from airflow.models import BaseOperator
from airflow.utils.decorators import apply_defaults
class LoadFactOperator(BaseOperator):
    """Appends rows to a Redshift fact table from a SELECT statement.

    Fact tables are append-only here: the operator never clears the table.
    """
    ui_color = '#F98866'
    insert_sql = "INSERT INTO {} ({});"
    @apply_defaults
    def __init__(self,
                 redshift_conn_id,
                 table_name,
                 select_sql,
                 *args, **kwargs):
        super(LoadFactOperator, self).__init__(*args, **kwargs)
        self.redshift_conn_id = redshift_conn_id
        self.table_name = table_name
        self.select_sql = select_sql
    def execute(self, context):
        """Run the INSERT ... SELECT against Redshift."""
        hook = PostgresHook(postgres_conn_id=self.redshift_conn_id)
        statement = LoadFactOperator.insert_sql.format(self.table_name, self.select_sql)
        hook.run(statement)
nasseredine/udacity-dend-p4 | plugins/operators/data_quality.py | from airflow.hooks.postgres_hook import PostgresHook
from airflow.models import BaseOperator
from airflow.utils.decorators import apply_defaults
class DataQualityOperator(BaseOperator):
    """Runs SQL-based data quality checks against Redshift.

    Each entry in ``tests`` is a dict with a 'sql' query and an expected
    scalar 'result'; the first mismatch raises ValueError and fails the task.
    """
    ui_color = '#89DA59'
    @apply_defaults
    def __init__(self,
                 redshift_conn_id,
                 tests,
                 *args, **kwargs):
        super(DataQualityOperator, self).__init__(*args, **kwargs)
        self.redshift_conn_id = redshift_conn_id
        self.tests = tests
    def execute(self, context):
        """Run each configured check; raise ValueError on the first failure."""
        redshift = PostgresHook(postgres_conn_id=self.redshift_conn_id)
        for test in self.tests:
            record = redshift.get_records(test['sql'])
            # get_records returns a list of row tuples; guard before indexing.
            # Include the query in the message so a failed check is actionable.
            if record is None or len(record) == 0 or len(record[0]) == 0:
                raise ValueError(
                    f"Data quality check failed. No result was returned for: {test['sql']}")
            actual = record[0][0]
            if actual != test['result']:
                raise ValueError(
                    f"Data quality check failed. Got {actual} when expecting "
                    f"{test['result']} for: {test['sql']}")
        self.log.info("Data quality checks cleared.")
nasseredine/udacity-dend-p4 | dags/udac_example_dag.py | from datetime import datetime, timedelta
import os
from airflow import DAG
from airflow.operators.dummy_operator import DummyOperator
from airflow.operators import (StageToRedshiftOperator, LoadFactOperator,
LoadDimensionOperator, DataQualityOperator)
from helpers import SqlQueries
# S3 locations of the raw Sparkify event and song data used for staging.
S3_BUCKET='udacity-dend'
LOG_DATA_KEY='log_data'
# JSONPaths file mapping the event JSON structure onto staging columns.
LOG_JSON_PATH=f's3://{S3_BUCKET}/log_json_path.json'
SONG_DATA_KEY='song_data'
# Task-level defaults applied to every operator in this DAG.
default_args = {
    'owner': 'udacity',
    'start_date': datetime(2019, 1, 12),
    'depends_on_past': False,
    'retries': 3,
    'retry_delay': timedelta(minutes=5),
    'email_on_retry': False
}
dag = DAG('udac_example_dag',
          default_args=default_args,
          description="Load and transform data in Redshift with Airflow",
          schedule_interval='0 * * * *',
          # catchup is a DAG-level argument: placing it in default_args
          # (as before) is silently ignored and backfill runs would occur.
          catchup=False
          )
start_operator = DummyOperator(task_id='Begin_execution', dag=dag)
# Stage raw S3 JSON into Redshift staging tables (events need a JSONPaths
# mapping; songs use the 'auto' default).
stage_events_to_redshift = StageToRedshiftOperator(
    task_id='Stage_events',
    dag=dag,
    redshift_conn_id='redshift',
    aws_credentials_id='aws_credentials',
    table_name='public.staging_events',
    s3_bucket=S3_BUCKET,
    s3_key=LOG_DATA_KEY,
    json_option=LOG_JSON_PATH,
)
stage_songs_to_redshift = StageToRedshiftOperator(
    task_id='Stage_songs',
    dag=dag,
    redshift_conn_id='redshift',
    aws_credentials_id='aws_credentials',
    table_name='public.staging_songs',
    s3_bucket=S3_BUCKET,
    s3_key=SONG_DATA_KEY,
)
# Fact table load (append-only).
load_songplays_table = LoadFactOperator(
    task_id='Load_songplays_fact_table',
    dag=dag,
    redshift_conn_id='redshift',
    table_name='public.songplays',
    select_sql=SqlQueries.songplay_table_insert,
)
# Dimension table loads (default append mode).
load_user_dimension_table = LoadDimensionOperator(
    task_id='Load_user_dim_table',
    dag=dag,
    redshift_conn_id='redshift',
    table_name='public.users',
    select_sql=SqlQueries.user_table_insert,
)
load_song_dimension_table = LoadDimensionOperator(
    task_id='Load_song_dim_table',
    dag=dag,
    redshift_conn_id='redshift',
    table_name='public.songs',
    select_sql=SqlQueries.song_table_insert,
)
load_artist_dimension_table = LoadDimensionOperator(
    task_id='Load_artist_dim_table',
    dag=dag,
    redshift_conn_id='redshift',
    table_name='public.artists',
    select_sql=SqlQueries.artist_table_insert,
)
load_time_dimension_table = LoadDimensionOperator(
    task_id='Load_time_dim_table',
    dag=dag,
    redshift_conn_id='redshift',
    table_name='public.time',
    select_sql=SqlQueries.time_table_insert,
)
# Post-load quality gates: each query must return the expected scalar.
run_quality_checks = DataQualityOperator(
    task_id='Run_data_quality_checks',
    dag=dag,
    redshift_conn_id='redshift',
    tests = [
        {
            'sql': "SELECT COUNT(*) FROM users WHERE userid is NULL;",
            'result': 0
        },
        {
            'sql': "SELECT COUNT(*) FROM songs WHERE songid is NULL;",
            'result': 0
        },
    ]
)
end_operator = DummyOperator(task_id='Stop_execution', dag=dag)
# Pipeline shape: stage both sources in parallel -> fact -> all dimensions
# in parallel -> quality checks -> end.
start_operator >> [stage_events_to_redshift, stage_songs_to_redshift] >> \
    load_songplays_table >> [
        load_user_dimension_table,
        load_song_dimension_table,
        load_artist_dimension_table,
        load_time_dimension_table
    ] >> run_quality_checks >> end_operator
alexhnguyen/CacheTwitterCdk | lib/lambda/cache_twitter_lambda.py | import os
import boto3
import tempfile
import json
from botocore.exceptions import ClientError
import tweepy
# S3 object key holding the id of the most recent tweet already archived
# (the resume marker / high-water mark).
PREVIOUS_ID_FILENAME = "previous_id"
# Twitter account being archived.
SCREEN_NAME = "BarackObama"
# Destination S3 bucket for both the tweets and the resume marker.
BUCKET_NAME = "alngyn-twitter-archive"
def handler(event, context):
    """AWS Lambda entry point: fetch new tweets and archive them to S3.

    Returns an API-Gateway-style dict with 'statusCode' and 'body'.
    """
    print("Authorizing with twitter")
    api = tweepy.API(authorization())
    print("Getting tweets")
    tweets = get_tweets(api)
    print(f"Got {len(tweets)} tweets")
    if not tweets:
        return {"statusCode": 200, "body": "No new tweets found."}
    print("Uploading tweets")
    newest_id = upload_tweets(tweets)
    if newest_id is None:
        # nothing made it to S3; don't advance the resume marker
        print("Unable to upload new tweets")
        return {"statusCode": 500, "body": "Failed to upload new tweets."}
    print("Updating latest uploaded tweet")
    update_latest_uploaded_tweet_id(newest_id)
    return {"statusCode": 200, "body": f"Added {len(tweets)} new tweets."}
def authorization():
    """Build a tweepy OAuth handler from Lambda environment variables.

    Credentials are plain environment variables set manually in the AWS
    console (Secrets Manager was deliberately avoided to keep costs down).
    """
    consumer_key = os.environ.get('CONSUMER_KEY')
    consumer_secret = os.environ.get('CONSUMER_SECRET')
    access_token = os.environ.get('ACCESS_TOKEN')
    access_token_secret = os.environ.get('ACCESS_TOKEN_SECRET')
    handler = tweepy.OAuthHandler(consumer_key, consumer_secret)
    handler.set_access_token(access_token, access_token_secret)
    return handler
def get_tweets(api):
    """Return tweets newer than the archived high-water mark.

    The id of the last archived tweet lives in S3; when it cannot be
    downloaded (e.g. on the very first run) fall back to fetching the
    10 most recent tweets instead.
    """
    try:
        with tempfile.NamedTemporaryFile('w') as marker:
            client = boto3.client('s3')
            client.download_file(BUCKET_NAME, PREVIOUS_ID_FILENAME, marker.name)
            marker.flush()
            with open(marker.name, "r") as marker_io:
                since_id = marker_io.read().strip()
            return api.user_timeline(screen_name=SCREEN_NAME, since_id=since_id)
    except ClientError:
        # marker missing/unreadable: archive the most recent tweets instead
        return api.user_timeline(screen_name=SCREEN_NAME, count=10)
def upload_tweets(tweets):
    """Archive each tweet as a JSON object in S3, oldest first.

    Uploading oldest-first means that after a partial failure the recorded
    high-water mark still lets the next run resume where this one stopped.

    Returns the id of the newest successfully uploaded tweet, or None if
    nothing was uploaded.
    """
    latest_uploaded_tweet_id = None
    try:
        # Iterate oldest -> newest without mutating the caller's list
        # (previously tweets.reverse() reversed it in place as a side effect).
        for tweet in reversed(tweets):
            save_json = {
                "author_id": tweet.author.id,
                "screen_name": tweet.author.screen_name,
                "created_at": tweet.created_at.isoformat(),
                "text": tweet.text,
                "favorite_count": tweet.favorite_count,
                "retweet_count": tweet.retweet_count,
            }
            with tempfile.NamedTemporaryFile('w') as tempfile_json:
                json.dump(save_json, tempfile_json)
                tempfile_json.flush()
                upload_file(
                    BUCKET_NAME, tempfile_json.name,
                    f'{tweet.author.id}/{tweet.created_at.year}/{tweet.created_at.month}/{tweet.created_at.isoformat()}.json'
                )
                latest_uploaded_tweet_id = tweet.id
    except Exception as e:
        # Best-effort: log and return whatever id made it, so the resume
        # marker reflects actual progress rather than failing outright.
        print(e)
    return latest_uploaded_tweet_id
def update_latest_uploaded_tweet_id(latest_uploaded_tweet_id):
    """Persist the newest archived tweet id to S3 as the resume marker."""
    with tempfile.NamedTemporaryFile('w') as marker:
        marker.write(str(latest_uploaded_tweet_id))
        marker.flush()
        upload_file(BUCKET_NAME, marker.name, PREVIOUS_ID_FILENAME)
def upload_file(bucket, file_name, object_name=None):
    """Upload a local file to S3.

    Parameters: target bucket, local file path, and optional object key
    (defaults to the local file name). Returns True on success, False on
    failure (the ClientError is logged, not raised).
    """
    if object_name is None:
        object_name = file_name
    s3_client = boto3.client('s3')
    try:
        # upload_file returns None; the old code bound it to an unused local
        s3_client.upload_file(file_name, bucket, object_name)
    except ClientError as e:
        print(e)
        return False
    return True
handler('', '') |
alexhnguyen/CacheTwitterCdk | lib/lambda/retrieve_twitter_lambda.py | <reponame>alexhnguyen/CacheTwitterCdk
import boto3
import tempfile
import json
# S3 bucket written by the caching Lambda; keys are <author_id>/<year>/<month>/<iso>.json
BUCKET_NAME = "alngyn-twitter-archive"
# this ID comes from twitter
BARACK_OBAMA_ID = '813286'
def get_all_files_in_path(s3_client, bucket_name, file_prefix):
    """Return the keys of every S3 object under ``file_prefix``.

    Uses the list_objects_v2 paginator so result sets larger than one page
    (1000 keys) are fully enumerated. Pages without a 'Contents' entry
    (empty bucket or no keys under the prefix) are skipped instead of
    raising KeyError as the previous version did.
    """
    paginator = s3_client.get_paginator('list_objects_v2')
    pages = paginator.paginate(Bucket=bucket_name, Prefix=file_prefix)
    return [obj['Key'] for page in pages for obj in page.get('Contents', [])]
def handler(event, context):
    """AWS Lambda entry point: return the 5 newest archived tweets.

    Keys embed the ISO creation timestamp, so a lexicographic sort of the
    object keys orders tweets chronologically. Responses carry CORS headers
    for the local front end.
    """
    try:
        s3_client = boto3.client('s3')
        all_files = get_all_files_in_path(s3_client, BUCKET_NAME, f'{BARACK_OBAMA_ID}/')
        all_files.sort()
        # for simplicity, get the 5 newest tweets
        latest = all_files[-5:]
        json_outputs = []
        for s3_filepath in latest:
            with tempfile.NamedTemporaryFile('w') as tempfile_input:
                # flush to make sure the file actually exists on disk
                tempfile_input.flush()
                s3_client.download_file(BUCKET_NAME, s3_filepath, tempfile_input.name)
                # flush to update the file
                tempfile_input.flush()
                with open(tempfile_input.name) as json_file:
                    json_outputs.append(json.load(json_file))
        return {
            "statusCode": 200,
            "headers": {
                "Content-Type" : "application/json",
                "Access-Control-Allow-Headers" : "Content-Type,X-Amz-Date,Authorization,X-Api-Key,X-Amz-Security-Token",
                "Access-Control-Allow-Methods" : "OPTIONS,POST",
                "Access-Control-Allow-Credentials" : True,
                "Access-Control-Allow-Origin" : "http://localhost:8080",
                "X-Requested-With" : "*"
            },
            "body": str(json_outputs)
        }
    except Exception as e:
        # Previously a bare ``except:`` that silently swallowed everything
        # (including KeyboardInterrupt/SystemExit); log for CloudWatch.
        print(e)
        # NOTE(review): the error path still reports 200 with no body,
        # matching the original contract -- confirm whether 500 is wanted.
        return {
            "statusCode": 200,
            "headers": {
                "Content-Type" : "application/json",
                "Access-Control-Allow-Headers" : "Content-Type,X-Amz-Date,Authorization,X-Api-Key,X-Amz-Security-Token",
                "Access-Control-Allow-Methods" : "OPTIONS,POST",
                "Access-Control-Allow-Credentials" : True,
                "Access-Control-Allow-Origin" : "*",
                "X-Requested-With" : "*"
            },
        }
AaronReer/ancp-bids | ancpbids/plugins/plugin_dsloader.py | import inspect
import os
import re
from .. import utils
from ..plugin import DatasetPlugin
class DatasetPopulationPlugin(DatasetPlugin):
    """Populates a dataset model from the file system and expands it
    according to the BIDS schema: plain files become Artifacts, schema
    members get bound, and derivatives folders are re-typed.

    NOTE(review): handlers mutate parent.files/folders while binding
    members, so the phase order in execute() is load-bearing.
    """
    def execute(self, dataset):
        """Entry point: run the four population phases in order."""
        base_dir = dataset.base_dir_
        self.schema = dataset.get_schema()
        # load file system structure
        self._load_folder(dataset, base_dir)
        # transform files to artifacts, i.e. files containing entities in their name
        self._convert_files_to_artifacts(dataset)
        # expand structure based on schema-files
        self._expand_members(dataset)
        # convert Folders within derivatives to DerivativeFolder
        self._convert_derivatives_folders(dataset.derivatives)
    def _convert_derivatives_folders(self, parent):
        """Recursively replace plain Folders under derivatives with
        DerivativeFolder instances and expand their schema members."""
        if not parent:
            return
        # iterate over a copy: entries are replaced in-place below
        for i, folder in enumerate(list(parent.folders)):
            dfolder = self.schema.DerivativeFolder()
            dfolder.parent_object_ = parent
            dfolder.update(folder)
            parent.folders[i] = dfolder
            self._convert_derivatives_folders(dfolder)
            self._expand_members(dfolder)
    def _convert_files_to_artifacts(self, parent):
        """Depth-first walk replacing File nodes whose names parse as BIDS
        names with Artifact nodes (entities + suffix + extension)."""
        for i, file in enumerate(parent.files):
            artifact = self._convert_to_artifact(file)
            if not artifact:
                continue
            artifact.parent_object_ = parent
            parent.files[i] = artifact
        for folder in parent.folders:
            self._convert_files_to_artifacts(folder)
    def _convert_to_artifact(self, file):
        """Return an Artifact for ``file``, or None if its name does not
        parse as a BIDS file name."""
        parts = utils.parse_bids_name(file.name)
        if not parts:
            return None
        artifact = self.schema.Artifact()
        artifact.name = file.name
        for key, value in parts['entities'].items():
            entity = self.schema.EntityRef()
            entity.key = key
            # normalize the raw string value per the schema (e.g. indices)
            value = self.schema.process_entity_value(key, value)
            entity.value = value
            artifact.entities.append(entity)
        artifact.suffix = parts['suffix']
        artifact.extension = parts['extension']
        return artifact
    def _handle_direct_folders(self, parent, member, pattern, new_type):
        """Re-type child folders whose name matches ``pattern`` to
        ``new_type`` and attach them to the member slot on ``parent``."""
        if not isinstance(parent, self.schema.Folder):
            return
        folders = list(filter(lambda f: re.match(pattern, f.name), parent.get_folders_sorted()))
        for folder in folders:
            obj = new_type()
            obj.name = folder.name
            obj.files = folder.files
            obj.folders = folder.folders
            parent.remove_folder(folder.name)
            obj.parent_object_ = parent
            # list-valued members append; scalar members assign
            if member['max'] > 1:
                getattr(parent, member['name']).append(obj)
            else:
                setattr(parent, member['name'], obj)
            self._expand_members(obj)
    def _expand_member(self, parent, member):
        """Dispatch a schema member to its type handler (looked up by the
        member type's class name), falling back to the default handler."""
        typ = member['type']
        if not issubclass(typ, self.schema.Model):
            return
        mapper_name = '_type_handler_%s' % typ.__name__
        if mapper_name not in _TYPE_MAPPERS:
            mapper_name = '_type_handler_default'
        mapper = _TYPE_MAPPERS[mapper_name]
        # _TYPE_MAPPERS holds unbound functions, so self is passed explicitly
        mapper(self, parent, member)
    def _expand_members(self, folder):
        """Expand every schema-declared member of ``folder``."""
        members = folder.get_schema().get_members(type(folder))
        for member in members:
            self._expand_member(folder, member)
    def _load_folder(self, parent, dir_path):
        """Mirror one directory level of the file system into the model;
        recursion (not os.walk) handles depth, names are visited sorted."""
        for root, directories, files in os.walk(dir_path):
            for directory in sorted(directories):
                folder = self.schema.Folder()
                folder.parent_object_ = parent
                folder.name = directory
                parent.folders.append(folder)
                self._load_folder(folder, '/'.join([root, directory]))
            for file in sorted(files):
                model_file = self.schema.File()
                model_file.parent_object_ = parent
                model_file.name = file
                parent.files.append(model_file)
            # consume only the top level of os.walk; recursion does the rest
            break
    def _type_handler_default(self, parent, member):
        """Fallback handler: JSON members get JSON mapping, folder members
        are matched by the member's optional name_pattern."""
        typ = member['type']
        if issubclass(typ, self.schema.JsonFile):
            self._type_handler_JsonFile(parent, member, True)
        elif issubclass(typ, self.schema.Folder):
            pattern = '.*'
            meta = member['meta']
            if 'name_pattern' in meta:
                pattern = meta['name_pattern']
            self._handle_direct_folders(parent, member, pattern=pattern, new_type=typ)
    def _type_handler_File(self, parent, member):
        """Bind a plain file matching the member name to the member slot."""
        if not isinstance(parent, self.schema.Folder):
            return
        file = parent.get_file(member['name'])
        if file:
            setattr(parent, member['name'], file)
            parent.remove_file(file.name)
    def _type_handler_MetadataFile(self, parent, member):
        """Wrap matching Artifacts as MetadataFile nodes (contents loaded
        eagerly) and bind them to the member slot."""
        if not isinstance(parent, self.schema.Folder):
            return
        if member['max'] > 1:
            files = parent.get_files(member['meta']['name_pattern'])
            files = list(filter(lambda f: isinstance(f, self.schema.Artifact), files))
            for file in files:
                mdfile = self.schema.MetadataFile()
                mdfile.parent_object_ = parent
                mdfile.update(file)
                mdfile.contents = mdfile.load_contents()
                getattr(parent, member['name']).append(mdfile)
                parent.remove_file(file.name, from_meta=False)
        else:
            file = parent.get_file(member['name'])
            if isinstance(file, self.schema.Artifact):
                mdfile = self.schema.MetadataFile()
                mdfile.parent_object_ = parent
                mdfile.update(file)
                mdfile.contents = mdfile.load_contents()
                setattr(parent, member['name'], mdfile)
                parent.remove_file(file.name, from_meta=False)
    def _type_handler_Artifact(self, parent, member):
        """Move Artifact children into the member slot: every artifact for
        list-valued members, the name-matching one otherwise."""
        if not isinstance(parent, self.schema.Folder):
            return
        attr = getattr(parent, member['name'])
        multi = isinstance(attr, list)
        name = member['name']
        files = parent.files if multi else list(filter(lambda f: f.name == name, parent.files))
        for file in files:
            if not isinstance(file, self.schema.Artifact):
                continue
            file.parent_object_ = parent
            parent.remove_file(file.name)
            if multi:
                attr.append(file)
            else:
                setattr(parent, member['name'], file)
    def _type_handler_Folder(self, parent, member):
        """Bind a sub-folder matching the member name to the member slot."""
        if not isinstance(parent, self.schema.Folder):
            return
        name = member['name']
        folder = parent.get_folder(name)
        if folder:
            setattr(parent, name, folder)
            parent.remove_folder(name)
    def _map_object(self, model_type, json_object):
        """Recursively map a parsed JSON dict onto an instance of
        ``model_type`` using the schema's declared members."""
        target = model_type()
        members = self.schema.get_members(model_type, False)
        actual_props = json_object.keys()
        direct_props = list(map(lambda m: (m['name'], m), members))
        for prop_name, prop in direct_props:
            if prop_name in actual_props:
                value_type = prop['type']
                value = json_object[prop_name]
                # map nested dicts (and dicts inside lists) to model objects
                if isinstance(value, list) and len(value) > 0:
                    value = list(map(lambda o: self._map_object(value_type, o) if isinstance(o, dict) else o, value))
                if isinstance(value, dict):
                    value = self._map_object(value_type, value)
                setattr(target, prop_name, value)
        return target
    def _type_handler_JsonFile(self, parent, member, is_subclass=False):
        """Parse ``<member>.json`` (if present) into a typed model object
        and bind it to the member slot."""
        name = member['name']
        file_name = name + '.json'
        file = parent.get_file(file_name)
        if not file:
            return
        json_object = file.contents if 'contents' in file else file.load_contents()
        if not json_object:
            return
        model_type = member['type']
        json_file = self._map_object(model_type, json_object)
        json_file.name = file_name
        json_file.contents = json_object
        setattr(parent, member['name'], json_file)
        parent.remove_file(file_name)
        json_file.parent_object_ = parent
# Dispatch table: maps '_type_handler_<TypeName>' names to the (unbound)
# handler functions above; consulted by _expand_member via the member type's
# class name. Built once at import time.
_TYPE_MAPPERS = {name: obj for name, obj in inspect.getmembers(DatasetPopulationPlugin) if
                 inspect.isfunction(obj) and obj.__name__.startswith('_type_handler_')}
|
AaronReer/ancp-bids | tests/auto/test_schema.py | import unittest
from ancpbids import model_v1_7_0, model_v1_7_1, load_dataset, load_schema
from ..base_test_case import BaseTestCase, DS005_DIR, DS005_SMALL_DIR
class SchemaTestCase(BaseTestCase):
    """Checks fuzzy entity-key matching and schema-version detection."""
    def test_entitmatching(self):
        # several spellings/abbreviations must resolve to the same key
        cases = [
            ('sub', 'sub'),
            ('sub', 'subject'),
            ('sub', 'subjects'),
            ('sub', 'subjs'),
            ('desc', 'des'),
            ('desc', 'dscr'),
            ('desc', 'descriptions'),
        ]
        for expected, raw in cases:
            self.assertEqual(expected, model_v1_7_0.fuzzy_match_entity_key(raw))
    def test_schema_versions(self):
        # each dataset advertises the schema module matching its BIDS version
        expectations = [(DS005_DIR, model_v1_7_0, 'v1.7.0'),
                        (DS005_SMALL_DIR, model_v1_7_1, 'v1.7.1')]
        for ds_dir, model, version in expectations:
            schema = load_dataset(ds_dir).get_schema()
            self.assertEqual(schema, model)
            self.assertEqual(version, schema.VERSION)
    def test_load_schema(self):
        schema_v170 = load_schema(DS005_DIR)
        schema_v171 = load_schema(DS005_SMALL_DIR)
        self.assertEqual(schema_v170, model_v1_7_0)
        self.assertEqual('v1.7.0', schema_v170.VERSION)
        self.assertEqual(schema_v171, model_v1_7_1)
        self.assertEqual('v1.7.1', schema_v171.VERSION)
        # the classes of each schema are separate identities
        self.assertFalse(schema_v170.Model == schema_v171.Model)
# the classes of each schema are separate identities
self.assertFalse(schema_v170.Model == schema_v171.Model)
if __name__ == '__main__':
    # allow running this test module directly via the unittest runner
    unittest.main()
|
AaronReer/ancp-bids | ancpbids/plugins/plugin_query.py | <filename>ancpbids/plugins/plugin_query.py
import re
from fnmatch import fnmatch
from ancpbids.plugin import SchemaPlugin
class BoolExpr:
    """Marker base for boolean combinators (TrueExpr, AnyExpr, AllExpr)."""
    pass
class CompExpr:
    """Marker base for leaf comparison expressions (Eq, Re, FnMatch, ...)."""
    pass
class TrueExpr(BoolExpr):
    """Always-true expression; the default, match-everything filter."""
    def __init__(self, *args):
        # accepts (and ignores) any arguments so it can stand in for any expr
        pass
    def eval(self, context) -> bool:
        return True
class AnyExpr(BoolExpr):
    """True if at least one child expression holds (logical OR)."""
    def __init__(self, *bool_ops: CompExpr):
        self.bool_ops = bool_ops
    def eval(self, context) -> bool:
        # evaluate every child (no short-circuit), then OR the results
        results = [child.eval(context) for child in self.bool_ops]
        return any(results)
class AllExpr(BoolExpr):
    """True only if every child expression holds (logical AND)."""
    def __init__(self, *bool_ops: CompExpr):
        self.bool_ops = bool_ops
    def eval(self, context) -> bool:
        # all() with a generator short-circuits on the first falsy child,
        # matching the original loop's early return
        return all(child.eval(context) for child in self.bool_ops)
class EqExpr(CompExpr):
    """Compares an attribute of the context against a fixed value."""
    def __init__(self, attr: property, value):
        self.attr = attr
        self.value = value
    def eval(self, context) -> bool:
        return self.value == self.attr.fget(context)
class ReExpr(CompExpr):
    """Matches an attribute of the context against a regular expression
    (anchored at the start of the stringified value)."""
    def __init__(self, attr: property, regex_pattern: str):
        self.attr = attr
        self.regex_pattern = re.compile(regex_pattern)
    def eval(self, context) -> bool:
        value = self.attr.fget(context)
        value = str(value)
        # match() returns a Match object or None; coerce to a real bool so
        # the declared ``-> bool`` contract holds (the old code leaked the
        # Match/None and only worked by truthiness).
        return self.regex_pattern.match(value) is not None
class CustomOpExpr(CompExpr):
    """Wraps an arbitrary callable ``op(context) -> bool`` as an expression."""
    def __init__(self, op):
        self.op = op
    def eval(self, context) -> bool:
        return self.op(context)
class FnMatchExpr(CompExpr):
    """Matches an attribute of the context against a shell-style wildcard."""
    def __init__(self, attr: property, pattern):
        self.attr = attr
        self.pattern = pattern
    def eval(self, context) -> bool:
        candidate = self.attr.fget(context)
        if candidate is None:
            # a missing attribute can never match
            return False
        return fnmatch(candidate, self.pattern)
class EntityExpr(CompExpr):
    """True if an Artifact carries entity ``key`` whose value satisfies
    ``op`` (wildcard match by default)."""
    def __init__(self, schema, key, value, op=FnMatchExpr):
        self.schema = schema
        self.op = AllExpr(EqExpr(schema.EntityRef.key, key.entity_), op(schema.EntityRef.value, value))
    def eval(self, context) -> bool:
        # entities only exist on Artifacts; anything else can never match
        if not isinstance(context, self.schema.Artifact):
            return False
        return any(self.op.eval(entity) for entity in context.entities)
class Select:
    """Lazy query over the dataset graph: yields nodes of a given type that
    satisfy an optional ``where`` predicate, descending only into branches
    accepted by the ``subtree`` predicate."""
    def __init__(self, context, filter_type):
        self.schema = context.get_schema()
        self.context = context
        self.filter_type = filter_type
        # both predicates default to match-everything
        self._where = TrueExpr()
        self._subtree = TrueExpr()
    def subtree(self, bool_expr: BoolExpr):
        """Restrict which branches of the graph are descended into."""
        self._subtree = bool_expr
    def where(self, bool_expr: BoolExpr):
        """Restrict which nodes are yielded; returns self for chaining."""
        self._where = bool_expr
        return self
    def _exec(self, callback):
        # generator: walk the graph lazily, applying both predicates
        for node in self.context.to_generator(filter_=lambda o: self._subtree.eval(o)):
            if not isinstance(node, self.filter_type):
                continue
            if self._where.eval(node):
                yield callback(node)
    def get_file_paths(self):
        """Yield dataset-relative paths of the matched files."""
        return self._exec(self.schema.File.get_relative_path)
    def get_file_paths_absolute(self):
        """Yield absolute paths of the matched files."""
        return self._exec(self.schema.File.get_absolute_path)
    def get_artifacts(self):
        # TODO filter by Artifact instances
        return self.objects()
    def objects(self, as_list=False):
        """Yield the matched nodes themselves; materialize when ``as_list``."""
        matches = self._exec(lambda node: node)
        return list(matches) if as_list else matches
def select(context, target_type):
    # Bound onto schema.Model by QuerySchemaPlugin so that every model node
    # gains a ``.select(type)`` query entry point.
    return Select(context, target_type)
class QuerySchemaPlugin(SchemaPlugin):
    """Installs the ``select`` query entry point on the schema's Model base."""
    def execute(self, schema):
        # monkey-patch: every model node gains a ``.select(type)`` method
        schema.Model.select = select
|
AaronReer/ancp-bids | ancpbids/pybids_compat.py | import os.path
from collections import OrderedDict
from functools import partial
from typing import List, Union, Dict
import ancpbids
from ancpbids import CustomOpExpr, EntityExpr, AllExpr, ValidationPlugin
from . import load_dataset, LOGGER
from .plugins.plugin_query import FnMatchExpr, AnyExpr
from .utils import deepupdate
class BIDSLayout:
"""A convenience class to provide access to an in-memory representation of a BIDS dataset.
.. code-block::
dataset_path = 'path/to/your/dataset'
layout = BIDSLayout(dataset_path)
Parameters
----------
ds_dir:
the (absolute) path to the dataset to load
"""
def __init__(self, ds_dir: str, **kwargs):
self.dataset = load_dataset(ds_dir)
self.schema = self.dataset.get_schema()
def _to_any_expr(self, value, ctor):
# if the value is a list, then wrap it in an AnyExpr
if isinstance(value, list):
ops = []
for v in value:
ops.append(ctor(v))
return AnyExpr(*ops)
# else just return using the constructor function
return ctor(value)
def __getattr__(self, key, **kwargs):
k = key if not key.startswith("get_") else key[4:]
return partial(self.get, return_type='id', target=k, **kwargs)
def get_metadata(self, *args, **kwargs) -> dict:
"""Returns a dictionary of metadata matching the provided criteria (see :meth:`ancpbids.BIDSLayout.get`).
Also takes the BIDS inheritance principle into account, i.e. any metadata defined at dataset level
may be overridden by a more specific metadata entry at a lower level such as the subject level.
As of the BIDS specification, metadata is kept in JSON files,
i.e. only JSON files will be assumed to contain metadata.
"""
qry_result = filter(lambda a: isinstance(a, self.schema.MetadataFile), self.get(*args, **kwargs))
# build lists of ancestors + the leaf (metadata file)
ancestors = list(map(lambda e: (list(reversed(list(e.iterancestors()))), e), qry_result))
# sort by number of ancestors
# TODO must sort by the items within the list not just by length of list
# example: [xyz,abc] would be treated the same when it should be [abc, xyz]
ancestors.sort(key=lambda e: len(e[0]))
metadata = {}
if ancestors:
# start with first metadata file
deepupdate(metadata, ancestors[0][1].contents)
if len(ancestors) > 1:
for i in range(1, len(ancestors)):
# FIXME ancestors handling is unstable, disable it for now
if False:
a0 = ancestors[i - 1][0]
a1 = ancestors[i][0]
# remove the ancestors from a0 and make sure it is empty, i.e. both nodes have same ancestors
remaining_ancestors = set(a0).difference(*a1)
if remaining_ancestors:
# if remaining ancestors list is not empty,
# this is interpreted as having the leaves from different branches
# for example, metadata from func/sub-01/...json must not be mixed with func/sub-02/...json
LOGGER.warn("Query returned metadata files from incompatible sources.")
deepupdate(metadata, ancestors[i][1].contents)
return metadata
def _require_artifact(self, expr) -> AllExpr:
"""Wraps the provided expression in an expression that makes sure the context of evaluation is an Artifact.
Parameters
----------
expr :
the expression to wrap
Returns
-------
a wrapping expression to make sure that the provided object is an instance of Artifact
"""
return AllExpr(CustomOpExpr(lambda m: isinstance(m, self.schema.Artifact)), expr)
def get(self, return_type: str = 'object', target: str = None, scope: str = None,
extension: Union[str, List[str]] = None, suffix: Union[str, List[str]] = None,
**entities) -> Union[List[str], List[object]]:
"""Depending on the return_type value returns either paths to files that matched the filtering criteria
or :class:`Artifact <ancpbids.model_v1_7_0.Artifact>` objects for further processing by the caller.
Note that all provided filter criteria are AND combined, i.e. subj='02',task='lang' will match files containing
'02' as a subject AND 'lang' as a task. If you provide a list of values for a criteria, they will be OR combined.
.. code-block::
file_paths = layout.get(subj='02', task='lang', suffix='bold', return_type='files')
file_paths = layout.get(subj=['02', '03'], task='lang', return_type='files')
Parameters
----------
return_type:
Either 'files' to return paths of matched files
or 'object' to return :class:`Artifact <ancpbids.model_v1_7_0.Artifact>` object, defaults to 'object'
target:
Either `suffixes`, `extensions` or one of any valid BIDS entities key
(see :class:`EntityEnum <ancpbids.model_v1_7_0.EntityEnum>`, defaults to `None`
scope:
a hint where to search for files
If passed, only nodes/directories that match the specified scope will be
searched. Possible values include:
'all' (default): search all available directories.
'derivatives': search all derivatives directories.
'raw': search only BIDS-Raw directories.
'self': search only the directly called BIDSLayout.
<PipelineName>: the name of a BIDS-Derivatives pipeline.
extension:
criterion to match any files containing the provided extension only
suffix:
criterion to match any files containing the provided suffix only
entities
a list of key-values to match the entities of interest, example: subj='02',task='lang'
Returns
-------
depending on the return_type value either paths to files that matched the filtering criteria
or Artifact objects for further processing by the caller
"""
if scope is None:
scope = 'all'
if return_type == 'id':
if not target:
raise ValueError("return_type=id requires the target parameter to be set")
context = self.dataset
ops = []
target_type = self.schema.File
if scope.startswith("derivatives"):
context = self.dataset.derivatives
# we already consumed the first path segment
segments = os.path.normpath(scope).split(os.sep)[1:]
for segment in segments:
context = context.get_folder(segment)
# derivatives may contain non-artifacts which should also be considered
target_type = self.schema.File
select = context.select(target_type)
if scope == 'raw':
# the raw scope does not consider derivatives folder but everything else
select.subtree(CustomOpExpr(lambda m: not isinstance(m, self.schema.DerivativeFolder)))
result_extractor = None
if target:
if target in 'suffixes':
suffix = '*'
result_extractor = lambda artifacts: [a.suffix for a in artifacts]
elif target in 'extensions':
extension = '*'
result_extractor = lambda artifacts: [a.extension for a in artifacts]
else:
target = self.schema.fuzzy_match_entity_key(target)
entities = {**entities, target: '*'}
result_extractor = lambda artifacts: [entity.value for a in artifacts for entity in
filter(lambda e: e.key == target, a.entities)]
for k, v in entities.items():
entity_key = self.schema.fuzzy_match_entity(k)
v = self.schema.process_entity_value(k, v)
ops.append(
self._require_artifact(self._to_any_expr(v, lambda val: EntityExpr(self.schema, entity_key, val))))
if extension:
ops.append(self._require_artifact(
self._to_any_expr(extension, lambda ext: FnMatchExpr(self.schema.Artifact.extension, ext))))
if suffix:
ops.append(
self._require_artifact(
self._to_any_expr(suffix, lambda suf: FnMatchExpr(self.schema.Artifact.suffix, suf))))
select.where(AllExpr(*ops))
if return_type and return_type.startswith("file"):
return list(select.get_file_paths_absolute())
else:
artifacts = select.objects()
if result_extractor:
return sorted(set(result_extractor(artifacts)))
return list(artifacts)
def get_entities(self, scope: str = None, sort: bool = False) -> dict:
    """Collect the unique entity key/value pairs found within the dataset.

    Each key of the resulting dict maps to the collection of values observed
    for that entity (with at least one element), e.g.::

        {'sub': ['01', '02', '03'], 'task': ['gamblestask']}

    Parameters
    ----------
    scope:
        see BIDSLayout.get()
    sort: default is `False`
        whether to sort the keys (and their value lists) by name

    Returns
    -------
    dict
        a unique set of entities found within the dataset as a dict
    """
    collected = OrderedDict()
    for match in self.get(scope=scope):
        # only artifacts carry entities
        if not isinstance(match, self.schema.Artifact):
            continue
        for entity in match.entities:
            collected.setdefault(entity.key, set()).add(entity.value)
    if sort:
        return {key: sorted(values) for key, values in sorted(collected.items())}
    return collected
def get_dataset_description(self) -> dict:
    """Return the parsed contents of this dataset's dataset_description.json.

    Returns
    -------
    dict or None
        the dataset's dataset_description.json as a dictionary, or None if
        the dataset does not provide one
    """
    description = self.dataset.dataset_description
    return description
def get_dataset(self) -> object:
    """Return the in-memory graph representation of this layout's dataset.

    Returns
    -------
    object
        the in-memory representation of this layout/dataset
    """
    return self.dataset
def write_derivative(self, derivative):
    """Writes the provided derivative folder to the dataset.

    Note that a 'derivatives' folder will be created if not present.

    Parameters
    ----------
    derivative:
        the derivative folder (a schema.DerivativeFolder instance) to write
    """
    # guard: only proper DerivativeFolder instances may be written
    assert isinstance(derivative, self.schema.DerivativeFolder)
    ancpbids.write_derivative(self.dataset, derivative)
def validate(self) -> ValidationPlugin.ValidationReport:
    """Validates a dataset and returns a report object containing any detected validation errors.

    Example:

    .. code-block::

        report = layout.validate()
        for message in report.messages:
            print(message)
        if report.has_errors():
            raise "The dataset contains validation errors, cannot continue".

    Returns
    -------
    a report object containing any detected validation errors or warning
    """
    # delegates to the package-level entry point which runs all registered
    # ValidationPlugin instances against this dataset
    return ancpbids.validate_dataset(self.dataset)
|
AaronReer/ancp-bids | ancpbids/utils.py | <gh_stars>0
import logging
import os
# Registry of reader functions keyed by file extension (without the leading
# dot), e.g. 'json' -> read_json; populated by FileHandlerPlugin implementations.
FILE_READERS = {}
# Registry of writer functions keyed by file extension; populated the same way.
FILE_WRITERS = {}
LOGGER = logging.getLogger(__file__)
def parse_bids_name(name: str):
    """Parses a given string (file name) according to the BIDS naming scheme.

    Parameters
    ----------
    name
        The file name to parse. If a full path (with path separators), the path segments will be ignored.

    Returns
    -------
    dict or None
        A dictionary describing the BIDS naming components, or None if the
        name does not follow the BIDS naming scheme.

    Examples
    --------
    >>> parse_bids_name("sub-11_task-mixedgamblestask_run-02_bold.nii.gz")
    {'entities': {'sub': '11', 'task': 'mixedgamblestask', 'run': '02'}, 'suffix': 'bold', 'extension': '.nii.gz'}
    """
    base_name = os.path.basename(name)
    parts = base_name.split(os.extsep, 1)
    if len(parts) != 2:
        # if extension missing, then not a valid BIDS file
        return None
    extension = os.extsep + parts[1]
    underscore_parts = parts[0].split('_')
    if len(underscore_parts) < 2:
        # at least one entity plus the suffix is required
        return None
    # last segment must be the suffix and must not look like a key-value pair
    suffix = underscore_parts[-1]
    if '-' in suffix:
        return None
    entities = {}
    for segment in underscore_parts[:-1]:
        # split on the first dash only so values containing dashes are kept intact
        dash_parts = segment.split('-', 1)
        if len(dash_parts) < 2:
            # not a key-value pair
            return None
        entities[dash_parts[0]] = dash_parts[1]
    return {
        'entities': entities,
        'suffix': suffix,
        'extension': extension
    }
def load_contents(file_path):
    """Loads the contents of the provided file path.

    Parameters
    ----------
    file_path :
        the file path to load contents from

    Returns
    -------
    The result depends on the extension of the file name.
    For example, a .json file may be returned as an ordinary Python dict or a .txt as a str value.
    Returns None if the file does not exist.
    """
    if not os.path.exists(file_path):
        return None
    reader = None
    file_name = os.path.basename(file_path)
    parts = os.path.splitext(file_name)
    if len(parts) > 1:
        extension = parts[-1][1:]
        if extension in FILE_READERS:
            reader = FILE_READERS[extension]
    if reader is None:
        LOGGER.debug("No reader found for file '%s', defaulting to 'txt' file reader" % file_name)
        # use .get() so a missing 'txt' reader falls through to the explicit
        # ValueError below instead of raising an unexpected KeyError
        reader = FILE_READERS.get('txt')
    if reader is None:
        raise ValueError('No file reader registered to load file %s' % file_path)
    return reader(file_path)
def write_contents(file_path: str, contents):
    """Writes the provided contents to the target file path using a registered file writer.

    A valid file writer is looked up by the target file's extension.
    If no file writer is found for the given file, a `ValueError` is raised.

    Parameters
    ----------
    file_path:
        The file path to write to.
    contents:
        The contents to write to the target file.
    """
    writer = None
    split_result = os.path.splitext(file_path)
    if len(split_result) > 1:
        # extension without the leading dot
        ext = split_result[-1][1:]
        writer = FILE_WRITERS.get(ext)
    if not writer:
        raise ValueError("No file writer registered for file: %s" % file_path)
    writer(file_path, contents)
def deepupdate(target, src):
    """Deep update target dict with src.

    For each k,v in src: if k doesn't exist in target, it is deep copied from
    src to target. Otherwise, if v is a list, target[k] is extended with
    src[k]. If v is a set, target[k] is updated with v. If v is a dict,
    recursively deep-update it.

    Examples:
        >>> t = {'name': 'Ferry', 'hobbies': ['programming', 'sci-fi']}
        >>> deepupdate(t, {'hobbies': ['gaming']})
        >>> print(t)
        {'name': 'Ferry', 'hobbies': ['programming', 'sci-fi', 'gaming']}

    Copyright <NAME>, released under the MIT license.
    """
    import copy
    for k, v in src.items():
        # isinstance (instead of exact type comparison) also merges
        # subclasses such as OrderedDict correctly
        if isinstance(v, list):
            if k not in target:
                target[k] = copy.deepcopy(v)
            else:
                target[k].extend(v)
        elif isinstance(v, dict):
            if k not in target:
                target[k] = copy.deepcopy(v)
            else:
                deepupdate(target[k], v)
        elif isinstance(v, set):
            if k not in target:
                target[k] = v.copy()
            else:
                target[k].update(v.copy())
        else:
            target[k] = copy.copy(v)
def fetch_dataset(dataset_id: str, output_dir='~/.ancp-bids/datasets'):
    """Downloads and extracts an ancpBIDS test dataset from Github.

    Parameters
    ----------
    dataset_id :
        The dataset ID of the ancp-bids-datasets github repository.
        See `https://github.com/ANCPLabOldenburg/ancp-bids-dataset` for more details.
    output_dir :
        The output directory to download and extract the dataset to.
        Default is to write to user's home directory at `~/.ancp-bids/datasets`

    Returns
    -------
    The path of the extracted dataset.
    """
    output_dir = os.path.expanduser(output_dir)
    output_dir = os.path.abspath(os.path.normpath(output_dir))
    output_path = os.path.join(output_dir, dataset_id)
    if os.path.exists(output_path):
        # assume a previous run already extracted the dataset
        return output_path
    os.makedirs(output_path)
    download_file = f'{dataset_id}-testdata.zip'
    download_path = os.path.join(output_dir, download_file)
    if not os.path.exists(download_path):
        # only download when no archive from a previous (possibly interrupted)
        # run is present
        url = f'https://github.com/ANCPLabOldenburg/ancp-bids-dataset/raw/main/{download_file}'
        import urllib.request
        with urllib.request.urlopen(url) as dl_file:
            with open(download_path, 'wb') as out_file:
                out_file.write(dl_file.read())
    # always extract: previously a leftover archive short-circuited here and
    # returned a freshly created but EMPTY dataset directory; the ZipFile is
    # now also properly closed via the context manager
    import zipfile
    with zipfile.ZipFile(download_path) as z:
        z.extractall(output_dir)
    return output_path
|
AaronReer/ancp-bids | ancpbids/plugin.py | <reponame>AaronReer/ancp-bids
import importlib
import inspect
import pkgutil
from typing import List
# global plugins registry (list of plugin metadata/settings)
# each entry is a dict with keys: 'ranking', 'plugin_class', 'props'
__PLUGINS__ = []
class Plugin:
    """Base class of all plugins.
    """

    def __init__(self, **props):
        # arbitrary keyword properties attached to the plugin at registration time
        self.props = props
class SchemaPlugin(Plugin):
    """A schema plugin may extend/modify a BIDS schema representation module.
    For example, to monkey-patch generated classes.
    """

    def execute(self, schema):
        # to be implemented by subclasses
        raise NotImplementedError()
class DatasetPlugin(Plugin):
    """A dataset plugin may enhance an in-memory graph of a dataset.
    """

    def execute(self, dataset):
        # to be implemented by subclasses
        raise NotImplementedError()
class FileHandlerPlugin(Plugin):
    """A file handler plugin may register a reader or writer function to allow handling unknown file extensions.
    """

    def execute(self, file_readers_registry, file_writers_registry):
        # to be implemented by subclasses
        raise NotImplementedError()
class WritingPlugin(Plugin):
    """A writing plugin may write additional files/folders when a dataset is stored back to file system.
    This may be most interesting to write derivatives to a dataset."""

    def execute(self, dataset, target_dir: str, context_folder=None,
                src_dir: str = None):
        # to be implemented by subclasses
        raise NotImplementedError()
class ValidationPlugin(Plugin):
    """A validation plugin may extend the rules to validate a dataset against."""

    class ValidationReport:
        """Collects validation messages (errors/warnings) produced while validating a dataset."""

        def __init__(self):
            self.messages = []

        def _append(self, severity: str, message: str):
            # internal: error() and warn() share this to keep the message shape uniform
            self.messages.append({
                'severity': severity,
                'message': message
            })

        def error(self, message):
            """Adds a new error message to the report.

            Parameters
            ----------
            message:
                the error message to add to the report
            """
            self._append('error', message)

        def warn(self, message):
            """Adds a new warning message to the report.

            Parameters
            ----------
            message:
                the warning message to add to the report
            """
            self._append('warn', message)

        def has_errors(self):
            """
            Returns
            -------
            bool
                whether this report contains at least one error message
            """
            return any(m['severity'] == 'error' for m in self.messages)

    def execute(self, dataset, report: ValidationReport):
        # to be implemented by subclasses
        raise NotImplementedError()
def is_valid_plugin(plugin_class):
    """
    Parameters
    ----------
    plugin_class:
        the class to check if known to be a valid plugin class

    Returns
    -------
    bool
        whether the class is considered a valid plugin class
    """
    known_types = (SchemaPlugin, DatasetPlugin, WritingPlugin, ValidationPlugin, FileHandlerPlugin)
    if plugin_class in known_types:
        # the abstract plugin base types themselves do not count as plugins
        return False
    return issubclass(plugin_class, known_types)
def load_plugins_by_package(ns_pkg, ranking: int = 1000, **props):
    """Loads and registers all valid plugin classes found in the provided package.

    Each direct submodule of the package is imported and scanned for classes
    that qualify as plugins (see :func:`is_valid_plugin`); every match is
    passed to :func:`register_plugin`. Nothing is returned.

    Parameters
    ----------
    ns_pkg:
        the package to scan for plugin classes
    ranking:
        the ranking to use for any detected plugin class
    props
        the properties to assign to the detected plugin classes
    """
    # import every direct submodule of the package
    mods = [importlib.import_module(name) for finder, name, ispkg in
            pkgutil.iter_modules(ns_pkg.__path__, ns_pkg.__name__ + ".")]
    for mod in mods:
        for mem in inspect.getmembers(mod, inspect.isclass):
            plugin_class = mem[1]
            if is_valid_plugin(plugin_class):
                register_plugin(plugin_class, ranking, **props)
def register_plugin(plugin_class, ranking: int = 1000, **props):
    """Registers the provided plugin class. If the class is not considered a valid plugin class a ValueError is raised.

    Parameters
    ----------
    plugin_class:
        The plugin class to register.
    ranking:
        The rank to use for the plugin to help prioritize plugins of same type.
        Note that the lower the ranking the higher its prioritization in the processing.
        System level plugins are registered with `ranking = 0`, i.e.
        if you need your plugin to be prioritized over system plugins, use a ranking below 0.
    props
        Additional (static) properties to attach to the provided plugin class.
    """
    if not is_valid_plugin(plugin_class):
        raise ValueError('Invalid plugin class: %s' % plugin_class.__name__)
    entry = {
        'ranking': ranking,
        'plugin_class': plugin_class,
        'props': props
    }
    __PLUGINS__.append(entry)
def get_plugins(plugin_class, **props) -> List[Plugin]:
    """Returns a list of plugin instances matching the provided plugin class and properties.

    Parameters
    ----------
    plugin_class:
        the plugin class to filter by
    props:
        additional filters found in any attached plugin properties

    Returns
    -------
    a list of plugin instances matching the provided plugin class and properties
    """
    matching = [entry for entry in __PLUGINS__ if issubclass(entry['plugin_class'], plugin_class)]
    matching.sort(key=lambda entry: entry['ranking'])
    # note that a fresh instance of each plugin class is returned
    return [entry['plugin_class'](**entry['props']) for entry in matching]
|
AaronReer/ancp-bids | ancpbids/plugins/plugin_files_handlers.py | from ancpbids.plugin import FileHandlerPlugin
def read_yaml(file_path: str):
    """Reads a YAML file and returns its parsed contents, or None if the file is not valid YAML."""
    import yaml
    with open(file_path, 'r') as stream:
        try:
            return yaml.load(stream, Loader=yaml.FullLoader)
        except yaml.YAMLError:
            # only swallow parse errors; the previous bare 'except' also hid
            # unrelated errors such as KeyboardInterrupt or programming bugs
            return None
def read_json(file_path: str):
    """Reads a JSON file and returns its parsed contents, or None if the file is not valid JSON."""
    # we cannot use yaml to load json if it contains any TABs for indentation
    import json
    with open(file_path, 'r') as stream:
        try:
            return json.load(stream)
        except ValueError:
            # json.JSONDecodeError subclasses ValueError; the previous bare
            # 'except' also hid unrelated errors such as programming bugs
            return None
def read_plain_text(file_path: str):
    """Reads a text file and returns its lines (including trailing newlines) as a list of str."""
    with open(file_path, 'r') as stream:
        return list(stream)
def read_tsv(file_path: str):
    """Reads a tab-separated values file into a numpy structured array.

    The first row supplies the field names (genfromtxt names=True); column
    dtypes are inferred (dtype=None).
    """
    import numpy
    table = numpy.genfromtxt(file_path, delimiter='\t', dtype=None, names=True)
    return table
def write_json(file_path: str, contents: dict):
    """Writes the contents as a .json file to the given file path.

    Parameters
    ----------
    file_path:
        The path to the file to store the contents to.
    contents:
        The contents of the target .json file.
    """
    import json
    serialized = json.dumps(contents)
    with open(file_path, 'w') as fp:
        fp.write(serialized)
def write_txt(file_path: str, contents: dict):
    """Writes the contents as a .txt file to the given file path.

    Parameters
    ----------
    file_path:
        The path to the file to store the contents to.
    contents:
        The contents of the target .txt file (converted via str()).
    """
    text = str(contents)
    with open(file_path, 'w') as fp:
        fp.write(text)
class FilesHandlerPlugin(FileHandlerPlugin):
    """Registers the built-in yaml/json/txt/tsv readers and json/txt writers."""

    def execute(self, file_readers_registry, file_writers_registry):
        # readers keyed by extension (without the leading dot)
        file_readers_registry.update({
            'yaml': read_yaml,
            'json': read_json,
            'txt': read_plain_text,
            'tsv': read_tsv,
        })
        # writers keyed by extension
        file_writers_registry.update({
            'json': write_json,
            'txt': write_txt,
        })
|
AaronReer/ancp-bids | tests/manual/test_benchmark.py | from unittest import skip
import sys
from ..base_test_case import *
# location of the OpenNeuro ds001734 test dataset; base directory is taken
# from the TEST_DATASETS environment variable, defaulting to ~/datasets
OPENNEURO_DS001734 = os.path.join(os.environ.get('TEST_DATASETS', os.path.expanduser('~/datasets')), 'ds001734')
if not os.path.isdir(OPENNEURO_DS001734):
    # abort the whole test module if the (large, manually fetched) dataset is absent
    print('test dataset not found: ' + OPENNEURO_DS001734)
    sys.exit(1)
class BenchmarkTestCase(BaseTestCase):
    # Runs the same set of queries against both the ancpbids and the pybids
    # implementation of BIDSLayout so their runtimes can be compared.

    def _assert_on(self, layout_type):
        """Run a fixed set of queries on ds001734 using the given BIDSLayout implementation."""
        layout = layout_type(OPENNEURO_DS001734, derivatives=True)
        subjects = layout.get_subjects(scope="raw")
        self.assertEqual(108, len(subjects))
        self.assertEqual(0, len(layout.get_sessions()))
        bold_run1 = layout.get(scope="raw", suffix='bold', run='01', extension='.nii.gz', return_type='filename')
        self.assertEqual(108, len(bold_run1))

    def test_ancpbids_openneuro_ds001734(self):
        import ancpbids
        self._assert_on(ancpbids.BIDSLayout)

    def test_pybids_measure_scan_ds001734(self):
        import bids
        self._assert_on(bids.BIDSLayout)
|
AaronReer/ancp-bids | tests/manual/test_performance_regressions.py | from ..base_test_case import *
import ancpbids
class PerformanceRegressionTestCase(BaseTestCase):
    # Intended to be run under a profiler/timer; repeatedly loads the ds005
    # test dataset so loading-time regressions become visible.

    def test_batch_loading(self):
        for i in range(0, 10):
            # result intentionally unused; only the loading work matters here
            ds = ancpbids.load_dataset(DS005_DIR)
|
AaronReer/ancp-bids | ancpbids/plugins/plugin_dssaver.py | import inspect
import json
import os
import ancpbids
from ancpbids.plugin import WritingPlugin
class DatasetWritingPlugin(WritingPlugin):
    """Writes a dataset graph (or a sub-folder of it) to the file system.

    Each model object produced by the graph generator is dispatched to a
    `_type_handler_<TypeName>` method (collected in _TYPE_MAPPERS); types
    without a dedicated handler fall back to `_type_handler_default`.
    """

    def execute(self, ds, target_dir: str, context_folder=None, src_dir: str = None):
        # refuse to write a full dataset into a non-empty directory
        if context_folder is None and os.path.exists(target_dir) and len(os.listdir(target_dir)) > 0:
            raise ValueError("Directory not empty: " + target_dir)
        if context_folder is None:
            context_folder = ds
        if src_dir is None:
            src_dir = ds.get_absolute_path()
        self.schema = ds.get_schema()
        generator = context_folder.to_generator()
        for obj in generator:
            # dispatch by the object's concrete type name
            typ = type(obj)
            mapper_name = '_type_handler_' + typ.__name__
            if mapper_name not in _TYPE_MAPPERS:
                mapper_name = '_type_handler_default'
            mapper = _TYPE_MAPPERS[mapper_name]
            mapper(self, src_dir, target_dir, obj)
        # copy internal children (files/folders)
        self._type_handler_Folder(src_dir, target_dir, context_folder, traverse_children=True)

    def _type_handler_default(self, src_dir, target_dir, obj):
        # fallback handler: route by the object's schema base type
        if isinstance(obj, self.schema.Folder):
            self._type_handler_Folder(src_dir, target_dir, obj)
        elif isinstance(obj, self.schema.File):
            self._type_handler_File(src_dir, target_dir, obj)

    def _type_handler_File(self, src_dir, target_dir, file, new_file_name=None):
        # a callable 'content' attribute is expected to write the file itself
        # (given its absolute target path); otherwise the extension-based file
        # writers are used.
        # NOTE(review): src_dir/target_dir/new_file_name are currently unused
        # here; the target location is derived from the file object — verify intended
        if hasattr(file, 'content') and callable(file.content):
            file.content(file.get_absolute_path())
        else:
            ancpbids.utils.write_contents(file.get_absolute_path(), file)

    def _type_handler_Folder(self, src_dir, target_dir, folder, traverse_children=False):
        new_dir = os.path.join(target_dir, folder.get_relative_path())
        # the new directory may exist because model Artifacts/Folders are processed first
        if not os.path.exists(new_dir):
            os.mkdir(new_dir)
        if traverse_children:
            for child_folder in folder.folders:
                self._type_handler_Folder(src_dir, target_dir, child_folder)
            for child_file in folder.files:
                self._type_handler_File(src_dir, target_dir, child_file)

    def _get_ordered_entity_keys(self, artifact):
        # returns the artifact's entity keys sorted into the canonical order
        # defined by the schema's EntityEnum
        schema = artifact.get_schema()
        entity_refs = artifact.entities
        schema_entities = list(map(lambda e: e.entity_, list(schema.EntityEnum)))
        expected_key_order = {k: i for i, k in enumerate(schema_entities)}
        expected_order_key = {i: k for i, k in enumerate(schema_entities)}
        artifact_keys = list(map(lambda e: e.key, entity_refs))
        actual_keys_order = list(map(lambda k: expected_key_order[k], artifact_keys))
        expected = tuple(map(lambda k: expected_order_key[k], sorted(actual_keys_order)))
        return expected

    def _type_handler_Artifact(self, src_dir, target_dir, artifact):
        # rebuilds the artifact's BIDS-conformant file name from its entities
        segments = []
        schema = artifact.get_schema()
        # add missing entities derived from the enclosing sub-/ses- folders
        for ancestor in artifact.iterancestors():
            if isinstance(ancestor, schema.Folder):
                name = ancestor.name
                if name.startswith("ses-"):
                    artifact.add_entity('ses', name[4:])
                if name.startswith("sub-"):
                    artifact.add_entity('sub', name[4:])
        # sort according order defined in schema
        ordered_keys = self._get_ordered_entity_keys(artifact)
        for ok in ordered_keys:
            seg = '-'.join([ok, artifact.get_entity(ok)])
            segments.append(seg)
        segments.append(artifact.suffix)
        new_file_name = '_'.join(segments) + artifact.extension
        artifact.name = new_file_name
        self._type_handler_File(src_dir, target_dir, artifact, new_file_name)
# name -> function mapping of all `_type_handler_*` methods, collected once at
# import time and used for dispatch in DatasetWritingPlugin.execute()
_TYPE_MAPPERS = {name: obj for name, obj in inspect.getmembers(DatasetWritingPlugin) if
                 inspect.isfunction(obj) and obj.__name__.startswith('_type_handler_')}
|
AaronReer/ancp-bids | ancpbids/__init__.py | <filename>ancpbids/__init__.py<gh_stars>0
import logging
import os
import sys
from . import plugins
from . import utils
from .plugin import get_plugins, load_plugins_by_package, DatasetPlugin, WritingPlugin, ValidationPlugin, SchemaPlugin, \
FileHandlerPlugin
from .plugins.plugin_query import BoolExpr, Select, EqExpr, AnyExpr, AllExpr, ReExpr, CustomOpExpr, \
EntityExpr
# package-level logger
LOGGER = logging.getLogger("ancpbids")

# ENTITIES_PATTERN = regex.compile(r'(([^\W_]+)-([^\W_]+)_)+([^\W_]+)((\.[^\W_]+)+)')
def load_dataset(base_dir: str):
    """Loads a dataset given its directory path on the file system.

    .. code-block::

        from ancpbids import load_dataset, validate_dataset
        dataset_path = 'path/to/your/dataset'
        dataset = load_dataset(dataset_path)

    Parameters
    ----------
    base_dir:
        the dataset path to load from

    Returns
    -------
    object
        an object instance of type :py:class:`ancpbids.model.Dataset` which represents the dataset as an in-memory graph
    """
    schema = load_schema(base_dir)
    dataset = schema.Dataset()
    dataset.name = os.path.basename(base_dir)
    dataset.base_dir_ = base_dir
    # let every registered DatasetPlugin populate/enhance the graph
    for plugin in get_plugins(DatasetPlugin):
        plugin.execute(dataset)
    return dataset
def load_schema(base_dir):
    """Loads a BIDS schema object which represents the static/formal definition of the BIDS specification.

    As per BIDS spec, a BIDS compliant dataset must have a BIDSVersion field in the dataset_description.json
    file at the top level. This field is used to determine which BIDS schema to load.
    In case the BIDSVersion field is missing or not supported, the earliest supported schema will be returned.

    Parameters
    ----------
    base_dir:
        The dataset directory path where a dataset_description.json must be located.

    Returns
    -------
    object
        A BIDS schema object which represents the static/formal definition of the BIDS specification.
    """
    ds_descr_path = os.path.join(base_dir, "dataset_description.json")
    if os.path.exists(ds_descr_path):
        ds_descr = utils.load_contents(ds_descr_path)
        if isinstance(ds_descr, dict) and 'BIDSVersion' in ds_descr:
            # map e.g. '1.7.0' to the module name 'ancpbids.model_v1_7_0'
            schema_version = ds_descr['BIDSVersion']
            schema_version = schema_version.replace('.', '_')
            schema_name = f'ancpbids.model_v{schema_version}'
            # NOTE(review): resolves via sys.modules only, i.e. the model module
            # must already have been imported (the supported ones are imported at
            # the bottom of this package's __init__) — verify all versions are covered
            if schema_name in sys.modules:
                schema = sys.modules[schema_name]
                return schema
    # assume using the earliest supported schema
    from . import model_v1_7_0
    return model_v1_7_0
def save_dataset(ds: object, target_dir: str, context_folder=None):
    """Copies the dataset graph into the provided target directory.

    EXPERIMENTAL/UNSTABLE

    Parameters
    ----------
    ds:
        the dataset graph to save
    target_dir
        the target directory to save to
    context_folder
        a folder node within the dataset graph to limit to
    """
    # every registered WritingPlugin gets a chance to write its share
    for writer in get_plugins(WritingPlugin):
        writer.execute(ds, target_dir, context_folder=context_folder)
def validate_dataset(dataset) -> ValidationPlugin.ValidationReport:
    """Validates a dataset and returns a report object containing any detected validation errors.

    Example:

    .. code-block::

        report = validate_dataset(dataset)
        for message in report.messages:
            print(message)
        if report.has_errors():
            raise "The dataset contains validation errors, cannot continue".

    Parameters
    ----------
    dataset:
        the dataset to validate

    Returns
    -------
    ValidationPlugin.ValidationReport
        a report object containing any detected validation errors or warning
    """
    # run all registered validation plugins without filtering any out
    return _internal_validate_dataset(dataset)
def _internal_validate_dataset(dataset, plugin_acceptor=None):
    """Run all registered ValidationPlugins against `dataset` and return the report.

    `plugin_acceptor` may be a callable receiving a plugin instance and
    returning a truthy value to keep it; plugins it rejects are skipped.
    """
    report = ValidationPlugin.ValidationReport()
    for plugin in get_plugins(ValidationPlugin):
        # if plugin is disabled by the caller, skip it
        if callable(plugin_acceptor) and not plugin_acceptor(plugin):
            continue
        plugin.execute(dataset=dataset, report=report)
    return report
def write_derivative(ds, derivative):
    """Writes the provided derivative folder to the dataset.

    Note that a 'derivatives' folder will be created if not present.

    Parameters
    ----------
    ds:
        the dataset object to extend
    derivative:
        the derivative folder to write
    """
    # writing is limited to the derivative sub-tree via context_folder
    save_dataset(ds, target_dir=ds.get_absolute_path(), context_folder=derivative)
# load system plugins using lowest rank value
load_plugins_by_package(plugins, ranking=0, system=True)

# execute all SchemaPlugins, these plugins may monkey-patch the schema
from ancpbids import model_v1_7_0, model_v1_7_1

for pl in get_plugins(SchemaPlugin):
    for model in [model_v1_7_0, model_v1_7_1]:
        pl.execute(model)

# load file handler plugins
for pl in get_plugins(FileHandlerPlugin):
    pl.execute(utils.FILE_READERS, utils.FILE_WRITERS)

from .pybids_compat import BIDSLayout

# short query-DSL aliases re-exported as part of the public API
select = Select
any_of = AnyExpr
all_of = AllExpr
eq = EqExpr
re = ReExpr
op = CustomOpExpr
entity = EntityExpr

from . import _version

__version__ = _version.get_versions()['version']
|
AaronReer/ancp-bids | ancpbids/plugins/plugin_schema_patches.py | import fnmatch
import inspect
import os
from difflib import SequenceMatcher
from ancpbids.plugin import SchemaPlugin
def has_entity(artifact, entity_):
    """Return True if the artifact carries an entity with the given key."""
    return any(ref.key == entity_ for ref in artifact.entities)
def get_entity(artifact, entity_):
    """Return the value of the artifact's entity with the given key, or None if absent."""
    return next((ref.value for ref in artifact.entities if ref.key == entity_), None)
def add_entity(artifact, key, value):
    """Append an EntityRef(key, value) to the artifact; `key` may be an EntityEnum member or a plain string key."""
    schema = artifact.get_schema()
    if isinstance(key, schema.EntityEnum):
        # normalize enum members to their plain string key
        key = key.entity_
    artifact.entities.append(schema.EntityRef(key, value))
def load_file_contents(folder, file_name):
    """Resolve `file_name` relative to `folder` and load its contents via the registered file readers."""
    from ancpbids import utils
    file_path = get_absolute_path(folder, file_name)
    contents = utils.load_contents(file_path)
    return contents
def load_contents(file):
    """Load the contents of the given File object via the registered file readers."""
    from ancpbids import utils
    file_path = get_absolute_path(file.parent_object_, file.name)
    contents = utils.load_contents(file_path)
    return contents
def get_absolute_path_by_file(file):
    """Return the absolute path of the given File object (patched in as File.get_absolute_path)."""
    return get_absolute_path(file.parent_object_, file.name)
def get_absolute_path(folder, file_name=None):
    """Return the absolute path of `folder`, or of `file_name` within it."""
    return _get_path(folder, file_name, True)
def _folder_get_relative_path(folder):
    """Return the folder's path relative to the dataset root."""
    return _get_path(folder, None, False)
def _file_get_relative_path(file):
    """Return the file's path relative to the dataset root."""
    return _get_path(file.parent_object_, file.name, False)
def _get_path(folder, file_name=None, absolute=True):
    """Build a normalized path by walking `folder`'s parents up to the Dataset root.

    If `absolute` is True the Dataset's base directory is prepended; otherwise
    the resulting path is relative to the dataset root.
    """
    schema = folder.get_schema()
    segments = []
    if file_name:
        segments.append(file_name)
    current_folder = folder
    while current_folder is not None:
        if isinstance(current_folder, schema.Dataset):
            if absolute:
                segments.insert(0, current_folder.base_dir_)
            # assume we reached the highest level, maybe not good for nested datasets
            break
        else:
            segments.insert(0, current_folder.name)
            current_folder = current_folder.parent_object_
    _path = os.path.join(*segments) if segments else ''
    return os.path.normpath(_path)
def remove_file(folder, file_name, from_meta=True):
    """Remove all files named `file_name` from the folder (and, by default, from its metadata files)."""
    folder.files = [f for f in folder.files if f.name != file_name]
    if from_meta:
        folder.metadatafiles = [f for f in folder.metadatafiles if f.name != file_name]
def create_artifact(folder):
    """Create a new schema Artifact, attach it to `folder` and return it."""
    artifact = folder.get_schema().Artifact()
    artifact.parent_object_ = folder
    folder.files.append(artifact)
    return artifact
def create_folder(folder, type_=None, **kwargs):
    """Create a sub-folder (of the given schema type, default schema.Folder), attach it and return it."""
    folder_type = type_ if type_ else folder.get_schema().Folder
    sub_folder = folder_type(**kwargs)
    sub_folder.parent_object_ = folder
    folder.folders.append(sub_folder)
    return sub_folder
def create_derivative(ds, **kwargs):
    """Create a new derivative folder inside the dataset's 'derivatives' folder.

    The top-level 'derivatives' folder is created on demand. The new derivative
    gets a dataset_description pre-filled from the dataset's own description
    (if any) plus a fresh GeneratedBy section.
    """
    schema = ds.get_schema()
    derivatives_folder = ds.derivatives
    if not ds.derivatives:
        # lazily create the top-level 'derivatives' folder
        derivatives_folder = schema.DerivativeFolder()
        derivatives_folder.parent_object_ = ds
        derivatives_folder.name = "derivatives"
        ds.derivatives = derivatives_folder
    derivative = schema.DerivativeFolder(**kwargs)
    derivative.parent_object_ = derivatives_folder
    derivatives_folder.folders.append(derivative)
    derivative.dataset_description = schema.DerivativeDatasetDescriptionFile()
    derivative.dataset_description.parent_object_ = derivative
    derivative.dataset_description.GeneratedBy = schema.GeneratedBy()
    if ds.dataset_description:
        # inherit fields from the parent dataset's description
        derivative.dataset_description.update(ds.dataset_description)
    return derivative
def get_file(folder, file_name, from_meta=True):
    """Return the first file named `file_name` in the folder, or None.

    By default the folder's metadata files are searched as a fallback.
    """
    for candidate in folder.files:
        if candidate.name == file_name:
            return candidate
    if from_meta:
        # fall back to the folder's metadata files
        for candidate in folder.metadatafiles:
            if candidate.name == file_name:
                return candidate
    return None
def get_files(folder, name_pattern):
    """Return all files in the folder whose name matches the given fnmatch pattern."""
    return [f for f in folder.files if fnmatch.fnmatch(f.name, name_pattern)]
def remove_folder(folder, folder_name):
    """Remove all sub-folders named `folder_name` from the folder."""
    folder.folders = [sub for sub in folder.folders if sub.name != folder_name]
def get_folder(folder, folder_name):
    """Return the first sub-folder named `folder_name`, or None."""
    for sub in folder.folders:
        if sub.name == folder_name:
            return sub
    return None
def get_files_sorted(folder):
    """Return the folder's files sorted by name."""
    result = list(folder.files)
    result.sort(key=lambda f: f.name)
    return result
def get_folders_sorted(folder):
    """Return the folder's sub-folders sorted by name."""
    result = list(folder.folders)
    result.sort(key=lambda f: f.name)
    return result
def to_generator(source, depth_first=False, filter_=None):
    """Yield `source` and, recursively, all schema Model objects reachable from it.

    By default nodes are yielded pre-order (parent before children); with
    `depth_first=True` they are yielded post-order. `filter_` may be a
    predicate; a node failing it is skipped together with its entire subtree.
    """
    schema = source.get_schema()
    if not depth_first:
        if filter_ and not filter_(source):
            return
        yield source
    # NOTE(review): assumes Model exposes a dict-like items() over its members — verify
    for key, value in source.items():
        if isinstance(value, schema.Model):
            yield from to_generator(value, depth_first, filter_)
        elif isinstance(value, list):
            # lists may contain nested Model objects
            for item in value:
                if isinstance(item, schema.Model):
                    yield from to_generator(item, depth_first, filter_)
    if depth_first:
        if filter_ and not filter_(source):
            return
        yield source
def iterancestors(source):
    """Yield the chain of parent objects of `source`, nearest first."""
    node = source
    # stop once we hit an object without a parent_object_ attribute
    while node is not None and hasattr(node, 'parent_object_'):
        node = node.parent_object_
        yield node
def to_dict(source):
    # NOTE(review): returns the object unchanged — Model appears to already be
    # dict-like (see to_generator's use of source.items()); verify intended
    return source
def get_model_classes(schema):
    """Return (and cache on the schema module as `_CLASSES`) a name->class mapping of its classes."""
    if not hasattr(schema, '_CLASSES'):
        # compute once and cache on the schema module itself
        schema._CLASSES = dict(inspect.getmembers(schema, inspect.isclass))
    return schema._CLASSES
def _get_element_members(schema, element_type):
    """Return the member descriptors declared directly on `element_type` via its MEMBERS dict.

    Each descriptor dict gets a 'name' key added and its 'type' resolved to an
    actual class via _to_type(). Types without a MEMBERS attribute yield [].
    """
    element_members = []
    try:
        members = element_type.MEMBERS
        element_members = list(
            map(lambda item: {'name': item[0], **item[1], 'type': _to_type(schema, item[1]['type'])},
                members.items()))
    except AttributeError as ae:
        # type declares no MEMBERS -> it has no own members
        pass
    return element_members
def get_members(schema, element_type, include_superclass=True):
    """Return the member descriptors of `element_type`, optionally including inherited ones.

    Walks the MRO up to (excluding) schema.Model, collecting each class's
    MEMBERS; superclass members precede the type's own members.
    """
    if element_type == schema.Model:
        return []
    super_members = []
    if include_superclass:
        superclass = element_type
        while True:
            try:
                # next class in the MRO
                superclass = inspect.getmro(superclass)[1]
                if not superclass or superclass == schema.Model:
                    break
                super_members = super_members + _get_element_members(schema, superclass)
            except AttributeError:
                # NOTE(review): a persistent AttributeError here would loop
                # forever; getmro on a class normally cannot raise — verify
                pass
    element_members = _get_element_members(schema, element_type)
    return super_members + element_members
def _to_type(schema, model_type_name: str):
classes = schema.get_model_classes()
if model_type_name in classes:
return classes[model_type_name]
if model_type_name in __builtins__:
return __builtins__[model_type_name]
return model_type_name
def _trim_int(value):
try:
# remove paddings/fillers in index values: 001 -> 1, 000230 -> 230
return str(int(value))
except ValueError:
return value
def process_entity_value(schema, key, value):
    """Normalize an entity value according to the schema.

    Entities whose schema format is 'index' get their zero padding removed
    ('001' -> '1'), element-wise for list values; everything else (including
    falsy values) is returned unchanged.
    """
    if not value:
        return value
    for entity_def in schema.EntityEnum.__members__.values():
        if entity_def.literal_ != key:
            continue
        if entity_def.format_ == 'index':
            if isinstance(value, list):
                return [_trim_int(item) for item in value]
            return _trim_int(value)
    return value
def fuzzy_match_entity_key(schema, user_key):
    """Like fuzzy_match_entity() but returns the matched entity's string key instead of the enum member."""
    return fuzzy_match_entity(schema, user_key).entity_
def fuzzy_match_entity(schema, user_key):
    """Return the EntityEnum member whose literal best matches `user_key`.

    A literal starting with `user_key` counts as a perfect match (score 1.0);
    otherwise difflib's quick_ratio() similarity decides.
    """
    def _score(candidate):
        if candidate.literal_.startswith(user_key):
            return 1.0
        return SequenceMatcher(None, user_key, candidate.literal_).quick_ratio()

    scored = [(candidate, _score(candidate)) for candidate in list(schema.EntityEnum)]
    scored.sort(key=lambda pair: pair[1])
    # highest score wins; on ties the later enum member prevails (stable sort)
    return scored[-1][0]
class PatchingSchemaPlugin(SchemaPlugin):
    """Monkey-patches the generated schema model classes with the convenience
    functions defined in this module (entity access, path resolution,
    folder/file manipulation, graph traversal and fuzzy entity matching)."""

    def execute(self, schema):
        # artifact entity accessors
        schema.Artifact.has_entity = has_entity
        schema.Artifact.get_entity = get_entity
        schema.Artifact.add_entity = add_entity
        # content loading and path resolution
        schema.Folder.load_file_contents = load_file_contents
        schema.File.load_contents = load_contents
        schema.File.get_absolute_path = get_absolute_path_by_file
        schema.Folder.get_relative_path = _folder_get_relative_path
        schema.Folder.get_absolute_path = get_absolute_path
        schema.File.get_relative_path = _file_get_relative_path
        # folder/file manipulation helpers
        schema.Folder.remove_file = remove_file
        schema.Folder.create_artifact = create_artifact
        schema.Folder.create_folder = create_folder
        schema.Dataset.create_derivative = create_derivative
        schema.Folder.get_file = get_file
        schema.Folder.get_files = get_files
        schema.Folder.remove_folder = remove_folder
        schema.Folder.get_folder = get_folder
        schema.Folder.get_files_sorted = get_files_sorted
        schema.Folder.get_folders_sorted = get_folders_sorted
        # graph traversal helpers
        schema.Model.to_generator = to_generator
        schema.Model.to_dict = to_dict
        schema.Model.iterancestors = iterancestors
        # schema-level (module) helpers bound via closures
        schema.get_model_classes = lambda: get_model_classes(schema)
        schema.get_members = lambda element_type, include_superclass=True: get_members(schema, element_type,
                                                                                       include_superclass)
        schema.process_entity_value = lambda key, value: process_entity_value(schema, key, value)
        schema.fuzzy_match_entity_key = lambda user_key: fuzzy_match_entity_key(schema, user_key)
        schema.fuzzy_match_entity = lambda user_key: fuzzy_match_entity(schema, user_key)
|
AaronReer/ancp-bids | ancpbids/plugins/plugin_dsvalidator.py | <filename>ancpbids/plugins/plugin_dsvalidator.py<gh_stars>0
from ancpbids.plugin import ValidationPlugin
class StaticStructureValidationPlugin(ValidationPlugin):
    """Validates required/recommended member fields of every model object in the dataset graph."""

    def execute(self, dataset, report: ValidationPlugin.ValidationReport):
        self.schema = dataset.get_schema()
        gen = dataset.to_generator()
        for obj in gen:
            # files/folders report their relative path; other objects show '???'
            top_path = obj.get_relative_path().replace("\\", "/") if isinstance(obj, (
                self.schema.File, self.schema.Folder)) else '???'
            members = self.schema.get_members(type(obj))
            for member in members:
                # typ and ub are currently unused but kept for symmetry with
                # the member descriptor structure
                typ = member['type']
                name = member['name']
                lb = member['min']
                ub = member['max']
                val = getattr(obj, name)
                use = member['use']
                if (lb > 0 or use == 'required') and not val:
                    report.error(f"Missing required field {name} at {top_path}.")
                if use == 'recommended' and not val:
                    report.warn(f"Missing recommended field {name} at {top_path}.")
class DatatypesValidationPlugin(ValidationPlugin):
    """Reports datatype folders (per subject/session) whose name is not a schema-defined datatype."""

    def execute(self, dataset, report: ValidationPlugin.ValidationReport):
        invalid = []
        valid_datatypes = [v.literal_ for v in dataset.get_schema().DatatypeEnum.__members__.values()]
        for subject in dataset.subjects:
            invalid.extend([f for f in subject.datatypes if f.name not in valid_datatypes])
            for session in subject.sessions:
                invalid.extend([f for f in session.datatypes if f.name not in valid_datatypes])
        for folder in invalid:
            report.error("Unsupported datatype folder '%s'" % folder.get_relative_path())
class EntitiesValidationPlugin(ValidationPlugin):
    """Validates artifact entity keys: each key must be a known schema entity,
    and the keys must appear in the order the schema's EntityEnum declares."""
    def execute(self, dataset, report: ValidationPlugin.ValidationReport):
        """Check every artifact's entities for validity and ordering."""
        schema = dataset.get_schema()
        artifacts = dataset.select(schema.Artifact).get_artifacts()
        # Canonical entity keys in schema-declared order.
        entities = list(map(lambda e: e.entity_, list(schema.EntityEnum)))
        # key -> position and position -> key lookup tables for order checking.
        expected_key_order = {k: i for i, k in enumerate(entities)}
        expected_order_key = {i: k for i, k in enumerate(entities)}
        for artifact in artifacts:
            entity_refs = artifact.entities
            found_invalid_key = False
            for ref in entity_refs:
                if ref.key not in entities:
                    report.error(
                        "Invalid entity '%s' in artifact '%s'" % (ref.key, artifact.get_relative_path()))
                    found_invalid_key = True
            if found_invalid_key:
                # we cannot check the order of entities if invalid entity found
                continue
            # now, check if order of entities matches order in schema
            keys = list(map(lambda e: e.key, entity_refs))
            # Map the artifact's keys to their canonical positions; any descent
            # in this sequence means the keys are out of schema order.
            actual_keys_order = list(map(lambda k: expected_key_order[k], keys))
            for i in range(0, len(actual_keys_order) - 1):
                if actual_keys_order[i] > actual_keys_order[i + 1]:
                    # Report the correctly ordered variant of the same keys.
                    expected = tuple(map(lambda k: expected_order_key[k], sorted(actual_keys_order)))
                    report.error(
                        "Invalid entities order: expected=%s, found=%s, artifact=%s" % (
                            expected, tuple(keys), artifact.get_relative_path()))
                    break
class SuffixesValidationPlugin(ValidationPlugin):
    """Placeholder for suffix validation; no checks are implemented yet."""
    def execute(self, dataset, report: ValidationPlugin.ValidationReport):
        # TODO: implement suffix validation; intentionally a no-op for now.
        pass
|
AaronReer/ancp-bids | tests/auto/test_validation.py | <gh_stars>0
from ancpbids import load_dataset, _internal_validate_dataset
from ..base_test_case import *
from ancpbids.plugin import ValidationPlugin
from ancpbids.plugins import plugin_dsvalidator
class ValidationTestCase(BaseTestCase):
    """Integration tests exercising each dataset validation plugin in isolation."""
    def createSUT(self, ds_dir, rule_class):
        """Load the dataset at *ds_dir*, run validation with only *rule_class*
        enabled, and return the resulting ValidationReport."""
        test_ds = load_dataset(ds_dir)
        # only test this plugin
        report = _internal_validate_dataset(test_ds, lambda plugin: isinstance(plugin, rule_class))
        self.assertTrue(isinstance(report, ValidationPlugin.ValidationReport))
        return report
    def test_validate_static_structure(self):
        """The conflict dataset lacks dataset_description: expect one error."""
        report = self.createSUT(DS005_CONFLICT_DIR, plugin_dsvalidator.StaticStructureValidationPlugin)
        self.assertEqual(1, len(report.messages))
        self.assertTrue('dataset_description' in report.messages[0]['message'],
                        'dataset_description file should have been reported as missing')
    def test_validate_datatypes(self):
        """Two unknown datatype folders ('abc', 'xyz') must be reported."""
        report = self.createSUT(DS005_CONFLICT_DIR, plugin_dsvalidator.DatatypesValidationPlugin)
        self.assertEqual(2, len(report.messages))
        self.assertEqual("Unsupported datatype folder 'sub-01/abc'", report.messages[0]['message'].replace('\\', '/'))
        self.assertEqual("Unsupported datatype folder 'sub-01/xyz'", report.messages[1]['message'].replace('\\', '/'))
    def test_validation_entities(self):
        """Expect one entity-order error and one invalid-entity-key error."""
        report = self.createSUT(RESOURCES_FOLDER + "/ds005_entities_validation",
                                plugin_dsvalidator.EntitiesValidationPlugin)
        self.assertEqual(2, len(report.messages))
        self.assertEqual(
            "Invalid entities order: expected=('sub', 'task', 'run'), found=('sub', 'run', 'task'), "
            "artifact=sub-01/func/sub-01_run-03_task-mixedgamblestask_events.tsv",
            report.messages[0]['message'].replace('\\', '/'))
        self.assertEqual("Invalid entity 'xyz' in artifact "
                         "'sub-01/func/sub-01_task-mixedgamblestask_run-03_xyz-001_events.tsv'",
                         report.messages[1]['message'].replace('\\', '/'))
if __name__ == '__main__':
unittest.main()
|
AaronReer/ancp-bids | tests/auto/test_fetch_dataset.py | <filename>tests/auto/test_fetch_dataset.py
import os.path
import unittest
import tempfile
from ancpbids import utils, BIDSLayout
from ..base_test_case import BaseTestCase
class FetchDatasetTestCase(BaseTestCase):
    """End-to-end test for downloading and unpacking a remote dataset.

    NOTE(review): this test requires network access to fetch 'ds003483' —
    presumably from OpenNeuro (confirm against utils.fetch_dataset).
    """
    def test_fetch_dataset(self):
        """Fetch ds003483 into a temp dir and sanity-check the resulting layout."""
        with tempfile.TemporaryDirectory() as temp_dir:
            ds_path = utils.fetch_dataset('ds003483', output_dir=temp_dir)
            self.assertEqual(os.path.join(temp_dir, 'ds003483'), ds_path)
            layout = BIDSLayout(ds_path)
            # some basic checks to make sure the dataset is downloaded and unzipped as expected
            self.assertEqual('ds003483', layout.dataset.name)
            self.assertEqual(21, len(layout.get_subjects()))
            self.assertEqual(['channels', 'coordsystem', 'events', 'meg', 'scans'], layout.get_suffixes())
if __name__ == '__main__':
unittest.main()
|
AaronReer/ancp-bids | tests/auto/test_query.py | <filename>tests/auto/test_query.py
import os.path
import ancpbids
from ancpbids import select, re, any_of, all_of, eq, op, entity
from ..base_test_case import *
class QueryTestCase(BaseTestCase):
    """Tests for the BIDSLayout query facade and the low-level query language."""
    def test_bidslayout_entities_formatting(self):
        """Index entities (run) should match regardless of zero-padding; label
        values that are not valid indices must still match literally."""
        layout = ancpbids.BIDSLayout(ENTITIES_DIR)
        files = layout.get(sub='02', run='3', return_type='filename')
        self.assertEqual(1, len(files))
        self.assertTrue(files[0].endswith("sub-02_task-abc_run-00003_events.tsv"))
        files = layout.get(sub='02', run=['000000003'], return_type='filename')
        self.assertEqual(1, len(files))
        self.assertTrue(files[0].endswith("sub-02_task-abc_run-00003_events.tsv"))
        # should also handle invalid formats, i.e. run not as an index but a label
        files = layout.get(sub='02', run='xyz', return_type='filename')
        self.assertEqual(1, len(files))
        self.assertTrue(files[0].endswith("sub-02_task-abc_run-xyz_events.tsv"))
    def test_bidslayout_entities_any(self):
        """The '*' wildcard for an entity value should match any subject."""
        layout = ancpbids.BIDSLayout(ENTITIES_DIR)
        files = layout.get(sub='*', suffix='test', task='abc', return_type='filename')
        self.assertEqual(2, len(files))
        self.assertTrue(files[0].endswith("sub-bar_task-abc_test.txt"))
        self.assertTrue(files[1].endswith("sub-foo_task-abc_test.txt"))
    def test_bidslayout_subjects_filtered(self):
        """get_subjects with an entity filter returns only matching subjects."""
        layout = ancpbids.BIDSLayout(ENTITIES_DIR)
        subjects = layout.get_subjects(task='abc')
        self.assertEqual(3, len(subjects))
        self.assertListEqual(['02', 'bar', 'foo'], subjects)
    def test_bidslayout(self):
        """Basic accessors: subjects, sessions and tasks of ds005."""
        layout = ancpbids.BIDSLayout(DS005_DIR)
        subjects = layout.get_subjects()
        subjects_expected = ['%02d' % i for i in range(1, 17)]
        self.assertListEqual(subjects_expected, subjects)
        sessions = layout.get_sessions()
        self.assertEqual(0, len(sessions))
        tasks = layout.get_tasks()
        self.assertListEqual(['mixedgamblestask'], tasks)
    def test_bidslayout_get(self):
        """Combined entity/suffix/extension filtering within the derivatives scope."""
        layout = ancpbids.BIDSLayout(SYNTHETIC_DIR)
        mask_niftis = layout.get(scope='derivatives',
                                 return_type='filename',
                                 suffix='mask',
                                 extension='.nii',
                                 sub='03',
                                 ses='02',
                                 task='nback',
                                 run=["01", "02"])
        self.assertEqual(4, len(mask_niftis))
        expected_paths = [
            'derivatives/fmriprep/sub-03/ses-02/func/sub-03_ses-02_task-nback_run-01_space-MNI152NLin2009cAsym_desc-brain_mask.nii',
            'derivatives/fmriprep/sub-03/ses-02/func/sub-03_ses-02_task-nback_run-01_space-T1w_desc-brain_mask.nii',
            'derivatives/fmriprep/sub-03/ses-02/func/sub-03_ses-02_task-nback_run-02_space-MNI152NLin2009cAsym_desc-brain_mask.nii',
            'derivatives/fmriprep/sub-03/ses-02/func/sub-03_ses-02_task-nback_run-02_space-T1w_desc-brain_mask.nii',
        ]
        expected_paths = list(map(lambda p: os.path.normpath(os.path.join(SYNTHETIC_DIR, p)), expected_paths))
        for file in expected_paths:
            self.assertTrue(list(filter(lambda p: file == p, mask_niftis)))
    def test_bidslayout_get_entities(self):
        """Entity listing restricted to the raw scope, sorted."""
        layout = ancpbids.BIDSLayout(DS005_DIR)
        sorted_entities = layout.get_entities(scope='raw', sort=True)
        # note: 'ds' and 'type' entities are contained in folder 'models' at dataset level, so considered raw data
        self.assertListEqual(['ds', 'run', 'sub', 'task', 'type'], list(sorted_entities.keys()))
        self.assertListEqual(['1', '2', '3'], sorted_entities['run'])
        self.assertEqual(['%02d' % i for i in range(1, 17)], sorted_entities['sub'])
        self.assertListEqual(['mixedgamblestask'], sorted_entities['task'])
    def test_bidslayout_get_suffixes(self):
        """All distinct suffixes found in ds005."""
        layout = ancpbids.BIDSLayout(DS005_DIR)
        suffixes = layout.get_suffixes()
        self.assertListEqual(['T1w', 'bold', 'dwi', 'events', 'model'], suffixes)
    def test_bidslayout_get_extensions(self):
        """All distinct file extensions found in ds005."""
        layout = ancpbids.BIDSLayout(DS005_DIR)
        extensions = layout.get_extensions()
        self.assertListEqual(['.json', '.nii.gz', '.tsv'], extensions)
    def test_bidslayout_get_metadata(self):
        """Merged sidecar metadata for the bold task files."""
        layout = ancpbids.BIDSLayout(DS005_DIR)
        metadata = layout.get_metadata(task='mixedgamblestask', suffix='bold')
        self.assertTrue(isinstance(metadata, dict))
        self.assertEqual(2.0, metadata['RepetitionTime'])
        self.assertEqual('mixed-gambles task', metadata['TaskName'])
        self.assertListEqual([0.0, 0.0571, 0.1143, 0.1714, 0.2286, 0.2857], metadata['SliceTiming'])
    def test_bidslayout_get_metadata_inheritance(self):
        """Lower-level sidecars override dataset-level metadata (BIDS inheritance)."""
        layout = ancpbids.BIDSLayout(DS005_DIR + "-small")
        task_bold = layout.dataset.get_file('task-mixedgamblestask_bold.json').load_contents()
        # at the top level, RepetionTime is set to 2.0
        self.assertEqual(2.0, task_bold['RepetitionTime'])
        # now load all metadata matching the filter
        metadata = layout.get_metadata(task='mixedgamblestask', suffix='bold')
        self.assertTrue(isinstance(metadata, dict))
        # now, since at subject/run level the RepetionTime is overridden, it should be 2.5
        self.assertEqual(2.5, metadata['RepetitionTime'])
    def test_query_language(self):
        """Direct use of select/where with combined suffix and entity predicates."""
        ds = ancpbids.load_dataset(DS005_DIR)
        schema = ds.get_schema()
        file_paths = ds.select(schema.Artifact) \
            .where(all_of(eq(schema.Artifact.suffix, 'bold'),
                          entity(schema, schema.EntityEnum._subject, '02'))) \
            .get_file_paths()
        file_paths = list(file_paths)
        self.assertEqual(3, len(file_paths))
if __name__ == '__main__':
unittest.main()
|
AaronReer/ancp-bids | tests/base_test_case.py | import os
import unittest
# Absolute location of the tests package and the datasets it uses as fixtures.
TEST_FOLDER = os.path.dirname(__file__)
RESOURCES_FOLDER = TEST_FOLDER + "/data"
# Fixture dataset roots referenced throughout the test suite.
DS005_DIR = RESOURCES_FOLDER + "/ds005"
DS005_CONFLICT_DIR = RESOURCES_FOLDER + "/ds005_conflict"
DS005_SMALL_DIR = RESOURCES_FOLDER + "/ds005-small"
SYNTHETIC_DIR = RESOURCES_FOLDER + "/synthetic"
ENTITIES_DIR = RESOURCES_FOLDER + "/ds005_entities_validation"
class BaseTestCase(unittest.TestCase):
    """Common base class for all test cases; exists so shared fixtures/helpers
    can be added in one place."""
    pass
|
AaronReer/ancp-bids | tests/auto/test_writing.py | import time
import unittest
import numpy as np
import pandas as pd
import shutil
import tempfile
import ancpbids
from ancpbids import model, re
from ..base_test_case import BaseTestCase, DS005_DIR
class WritingTestCase(BaseTestCase):
    """Tests writing a derivative folder tree back to disk.

    NOTE(review): write_test_derivative writes into the shared DS005_DIR fixture
    (unique pipeline name per run) rather than a temp copy — confirm intended.
    """
    def write_test_derivative(self):
        """Create a uniquely named derivative with one text and one TSV artifact
        per subject, write it to disk and return (dataset root, pipeline name)."""
        layout = ancpbids.BIDSLayout(DS005_DIR)
        dataset = layout.get_dataset()
        pipeline_name = "mypipeline-%d" % time.time()
        derivative = dataset.create_derivative(name=pipeline_name)
        derivative.dataset_description.GeneratedBy.Name = "My Test Pipeline"
        task_label = layout.get_tasks()[0]
        for sub_label in layout.get_subjects():
            subject = derivative.create_folder(name='sub-' + sub_label)
            # do some complex task
            # ... doing complex task ...
            # ... done
            txt_artifact = subject.create_artifact()
            txt_artifact.add_entity("desc", "mypipeline")
            txt_artifact.suffix = 'textual'
            txt_artifact.extension = ".txt"
            txt_artifact.content = "Subject %s participated in task %s" % (sub_label, task_label)
            # create some random data
            df = pd.DataFrame(np.random.randint(0, 100, size=(100, 4)), columns=list('ABCD'))
            ev_artifact = subject.create_artifact()
            ev_artifact.add_entity("desc", "mypipeline")
            ev_artifact.suffix = 'events'
            ev_artifact.extension = ".tsv"
            # at this point, the file path is not known and will be provided
            # to lambda when the derivative is written to disk
            ev_artifact.content = lambda file_path: df.to_csv(file_path, index=None)
        layout.write_derivative(derivative)
        return DS005_DIR, pipeline_name
    def test_write_derivative(self):
        """Round-trip: write the derivative, reload the dataset, and verify the
        folder structure, inferred 'sub' entities and dataset_description."""
        # create a temporary dataset with a test derivative and return its root path and the created derivative
        ds_path, pipeline_name = self.write_test_derivative()
        # pretend loading a new dataset
        layout = ancpbids.BIDSLayout(ds_path)
        # get the underlying graph/dataset for further inspection
        dataset = layout.get_dataset()
        derivative_folder = filter(lambda f: f.name == pipeline_name, dataset.derivatives.folders)
        self.assertIsNotNone(derivative_folder)
        derivative_folder = next(derivative_folder)
        schema = dataset.get_schema()
        subjects = derivative_folder.select(schema.Folder) \
            .where(re(schema.Folder.name, r"sub-[\d]+")) \
            .objects(True)
        self.assertEqual(16, len(subjects))
        for i, subject in enumerate(subjects):
            exptected_sub_name = "sub-%02d" % (i + 1)
            self.assertEqual(exptected_sub_name, subject.name)
            for artifact in subject.files:
                # check if 'sub' entity has been automatically added as it is inferrable from its parent directory
                self.assertEqual(exptected_sub_name, "sub-%s" % artifact.get_entity('sub'))
        self.assertTrue(isinstance(derivative_folder.dataset_description, schema.DerivativeDatasetDescriptionFile))
        self.assertEqual(derivative_folder.dataset_description.GeneratedBy.Name, "My Test Pipeline")
if __name__ == '__main__':
unittest.main()
|
AaronReer/ancp-bids | setup.py | #!/usr/bin/env python
import sys, os
from setuptools import setup
import versioneer
# Give setuptools a hint to complain if it's too old a version
# 30.3.0 allows us to put most metadata in setup.cfg
# Should match pyproject.toml
SETUP_REQUIRES = ['setuptools >= 30.3.0']
# This enables setuptools to install wheel on-the-fly
SETUP_REQUIRES += ['wheel'] if 'bdist_wheel' in sys.argv else []
INSTALL_REQUIRES = []
if __name__ == '__main__':
    # Runtime dependencies are maintained in requirements.txt next to this file.
    REQ_FILE_PATH = os.path.dirname(os.path.realpath(__file__)) + "/requirements.txt"
    with open(REQ_FILE_PATH) as f:
        INSTALL_REQUIRES = list(f.read().splitlines())
    # Version string and sdist/build commands are provided by versioneer;
    # most package metadata lives in setup.cfg.
    setup(name="ancpbids",
          version=versioneer.get_version(),
          cmdclass=versioneer.get_cmdclass(),
          setup_requires=SETUP_REQUIRES,
          install_requires=INSTALL_REQUIRES)
|
AaronReer/ancp-bids | tests/auto/test_regressions.py | import ancpbids
from ..base_test_case import *
class RegressionsTestCase(BaseTestCase):
    """Regression tests pinning previously fixed behaviors."""
    def test_get_all_files_from_pipeline(self):
        """Scoped get() must also return plain (non-Artifact) files."""
        layout = ancpbids.BIDSLayout(DS005_DIR)
        all_derivative_files = layout.get(scope='derivatives/affine/matrix', return_type='file')
        # that derivatives folder has no valid BIDS files (of type model.Artifact)
        # but ordinary files (if type model.File)
        self.assertEqual(16, len(all_derivative_files))
    def test_get_metadata_no_params(self):
        """get_metadata() without filters returns the merged top-level metadata."""
        layout = ancpbids.BIDSLayout(DS005_DIR)
        md = layout.get_metadata()
        expected_md = {'RepetitionTime': 2.0, 'TaskName': 'mixed-gambles task',
                       'SliceTiming': [0.0, 0.0571, 0.1143, 0.1714, 0.2286, 0.2857], 'run': 1}
        self.assertEqual(md, expected_md)
    def test_report_errors(self):
        """validate() on ds005 is expected to surface errors."""
        layout = ancpbids.BIDSLayout(DS005_DIR)
        report = layout.validate()
        self.assertTrue(report.has_errors())
    def test_get_entitites_no_scope(self):
        """get_entities(scope=None) must not crash and returns all entity keys."""
        layout = ancpbids.BIDSLayout(DS005_DIR)
        entities = layout.get_entities(scope=None)
        self.assertEqual(['ds', 'type', 'task', 'sub', 'run', 'desc'], list(entities.keys()))
if __name__ == '__main__':
unittest.main()
|
unusualwork/ptf | modules/exploitation/parameth.py | <reponame>unusualwork/ptf
# PTF module descriptor for parameth; constants are read by the PTF installer.
AUTHOR="<NAME>"
# Fixed typo in the user-visible description: "wil" -> "will".
DESCRIPTION="This module will install/update parameth"
INSTALL_TYPE="GIT"
REPOSITORY_LOCATION="https://github.com/maK-/parameth"
INSTALL_LOCATION="parameth"
DEBIAN=""
AFTER_COMMANDS="cd {INSTALL_LOCATION}, pip install -r requirements.txt"
LAUNCHER="parameth"
|
unusualwork/ptf | modules/intelligence-gathering/hacks.py | AUTHOR="<NAME>"
# PTF module descriptor for the EdOverflow/hacks scripts collection.
# Fixed typo in the user-visible description: "wil" -> "will".
DESCRIPTION="This module will install/update some random scripts"
INSTALL_TYPE="GIT"
REPOSITORY_LOCATION="https://github.com/EdOverflow/hacks"
INSTALL_LOCATION="hacks"
DEBIAN=""
AFTER_COMMANDS="cd {INSTALL_LOCATION},cd .., cp -R hacks/. /usr/local/bin/"
|
unusualwork/ptf | modules/intelligence-gathering/turbolist3r.py | AUTHOR="<NAME>"
# PTF module descriptor for Turbolist3r.
# Fixed typo in the user-visible description: "wil" -> "will".
DESCRIPTION="This module will install/update Turbolist3r"
INSTALL_TYPE="GIT"
REPOSITORY_LOCATION="https://github.com/fleetcaptain/Turbolist3r"
INSTALL_LOCATION="turbolist3r"
DEBIAN="python-requests"
AFTER_COMMANDS="cd {INSTALL_LOCATION}, pip install -r requirements.txt"
LAUNCHER="turbolist3r"
|
unusualwork/ptf | modules/vulnerability-analysis/linkfinder.py | <gh_stars>1-10
# PTF module descriptor for LinkFinder.
AUTHOR="<NAME>"
# Fixed typo in the user-visible description: "wil" -> "will".
DESCRIPTION="This module will install/update Linkfinder"
INSTALL_TYPE="GIT"
REPOSITORY_LOCATION="https://github.com/GerbenJavado/LinkFinder"
INSTALL_LOCATION="linkfinder"
AFTER_COMMANDS="cd {INSTALL_LOCATION}, python setup.py install"
LAUNCHER="linkfinder"
|
unusualwork/ptf | modules/exploitation/wpsploit.py | AUTHOR="<NAME>"
# PTF module descriptor for WPSploit.
# Fixed typo in the user-visible description: "wil" -> "will".
DESCRIPTION="This module will install/update WPSploit"
INSTALL_TYPE="GIT"
REPOSITORY_LOCATION="https://github.com/espreto/wpsploit"
INSTALL_LOCATION="wpsploit"
DEBIAN=""
AFTER_COMMANDS="cd {INSTALL_LOCATION},mv modules/auxiliary/ /home/*/.msf4/modules/,mv modules/exploits/ /home/*/.msf4/modules/"
|
unusualwork/ptf | modules/intelligence-gathering/subover.py | <reponame>unusualwork/ptf<filename>modules/intelligence-gathering/subover.py
# PTF module descriptor for SubOver (installed via go get).
AUTHOR="<NAME>"
# Fixed typo in the user-visible description: "wil" -> "will".
DESCRIPTION="This module will install/update SubOver"
INSTALL_TYPE="GIT"
REPOSITORY_LOCATION="https://github.com/Ice3man543/SubOver"
INSTALL_LOCATION="subover"
DEBIAN="golang"
AFTER_COMMANDS="cd ~/go/bin/,go get github.com/Ice3man543/SubOver, mv SubOver /usr/local/bin/subover"
|
unusualwork/ptf | modules/vulnerability-analysis/cloudflair.py | <filename>modules/vulnerability-analysis/cloudflair.py<gh_stars>1-10
# PTF module descriptor for CloudFlair.
AUTHOR="<NAME>"
# Fixed typo in the user-visible description: "wil" -> "will".
DESCRIPTION="This module will install/update cloudflair"
INSTALL_TYPE="GIT"
REPOSITORY_LOCATION="https://github.com/christophetd/cloudflair.git"
INSTALL_LOCATION="cloudflair"
DEBIAN=""
AFTER_COMMANDS="cd {INSTALL_LOCATION}, pip install -r requirements.txt"
LAUNCHER="cloudflair"
|
unusualwork/ptf | modules/intelligence-gathering/gitrob.py | <reponame>unusualwork/ptf
# PTF module descriptor for Gitrob (go toolchain required).
AUTHOR="<NAME>"
# Fixed typo in the user-visible description: "wil" -> "will".
DESCRIPTION="This module will install/update Gitrob"
INSTALL_TYPE="GIT"
REPOSITORY_LOCATION="https://github.com/michenriksen/gitrob"
INSTALL_LOCATION="gitrob"
DEBIAN="golang"
AFTER_COMMANDS="cd {INSTALL_LOCATION}, go get github.com/michenriksen/gitrob,cp ~/go/bin/gitrob /usr/local/bin"
|
unusualwork/ptf | modules/windows-tools/autochrome.py | AUTHOR="<NAME>"
# PTF module descriptor for NCC Group's Autochrome.
# Fixed typo in the user-visible description: "wil" -> "will".
DESCRIPTION="This module will install/update Autochrome"
INSTALL_TYPE="GIT"
REPOSITORY_LOCATION="https://github.com/nccgroup/autochrome"
INSTALL_LOCATION="autochrome"
DEBIAN="unzip, ruby"
AFTER_COMMANDS="cd {INSTALL_LOCATION}, ruby autochrome.rb"
LAUNCHER=""
|
unusualwork/ptf | modules/intelligence-gathering/aquatone.py | <reponame>unusualwork/ptf<filename>modules/intelligence-gathering/aquatone.py
# PTF module descriptor for Aquatone (installed as a ruby gem).
AUTHOR="<NAME>"
# Fixed typo in the user-visible description: "wil" -> "will".
DESCRIPTION="This module will install/update Aquatone"
INSTALL_TYPE="GIT"
REPOSITORY_LOCATION="https://github.com/michenriksen/aquatone"
INSTALL_LOCATION="aquatone"
DEBIAN=""
AFTER_COMMANDS="cd {INSTALL_LOCATION}, gem install aquatone"
LAUNCHER="aquatone"
|
unusualwork/ptf | modules/intelligence-gathering/massdns.py | AUTHOR="<NAME>"
# PTF module descriptor for MassDNS (built from source with make).
# Fixed typo in the user-visible description: "wil" -> "will".
DESCRIPTION="This module will install/update MassDNS"
INSTALL_TYPE="GIT"
REPOSITORY_LOCATION="https://github.com/blechschmidt/massdns"
INSTALL_LOCATION="massdns"
DEBIAN=""
AFTER_COMMANDS="cd {INSTALL_LOCATION},make,cp bin/massdns /usr/local/bin/"
LAUNCHER="massdns"
|
unusualwork/ptf | modules/intelligence-gathering/subfinder.py | AUTHOR="<NAME>"
# PTF module descriptor for subfinder.
# Fixed typo in the user-visible description: "wil" -> "will".
DESCRIPTION="This module will install/update Subfinder"
INSTALL_TYPE="GIT"
# FIX: INSTALL_TYPE is GIT, so this must be a clonable repository URL; it
# previously contained the shell command "go get github.com/Ice3man543/subfinder".
REPOSITORY_LOCATION="https://github.com/Ice3man543/subfinder"
INSTALL_LOCATION="subfinder"
DEBIAN=""
AFTER_COMMANDS="cd ~/go/bin, go get -u github.com/Ice3man543/subfinder, mv subfinder /usr/local/bin/"
|
gregmbi/polyaxon | core/polyaxon/deploy/schemas/deployment.py | <reponame>gregmbi/polyaxon
#!/usr/bin/python
#
# Copyright 2018-2020 Polyaxon, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from marshmallow import ValidationError, fields, validate, validates_schema
from polyaxon.deploy.schemas.deployment_types import DeploymentTypes
from polyaxon.deploy.schemas.email import EmailSchema
from polyaxon.deploy.schemas.ingress import IngressSchema
from polyaxon.deploy.schemas.intervals import IntervalsSchema
from polyaxon.deploy.schemas.rbac import RBACSchema
from polyaxon.deploy.schemas.root_user import RootUserSchema
from polyaxon.deploy.schemas.security_context import SecurityContextSchema
from polyaxon.deploy.schemas.service import (
AgentServiceSchema,
ApiServiceSchema,
DockerRegistrySchema,
ExternalServicesSchema,
HelperServiceSchema,
HooksSchema,
PostgresqlSchema,
RabbitmqSchema,
RedisSchema,
ServiceSchema,
WorkerServiceSchema,
)
from polyaxon.deploy.schemas.service_types import ServiceTypes
from polyaxon.deploy.schemas.ssl import SSLSchema
from polyaxon.schemas.base import BaseCamelSchema, BaseConfig
from polyaxon.schemas.types import ConnectionTypeSchema
def check_redis(redis, external_services):
    """Validate that a redis instance is available.

    Raises ValidationError when the in-cluster redis is explicitly disabled
    and no external redis service is configured.
    """
    in_cluster_disabled = bool(redis) and redis.enabled is False
    external = external_services.redis if external_services else None
    if in_cluster_disabled and not external:
        raise ValidationError(
            "A redis instance is required, please enable the in-cluster redis, "
            "or provide an external instance."
        )
def check_postgres(postgresql, external_services):
    """Validate that a postgresql instance is available.

    Raises ValidationError when the in-cluster postgresql is explicitly
    disabled and no external postgresql service is configured.
    """
    in_cluster_disabled = bool(postgresql) and postgresql.enabled is False
    external = external_services.postgresql if external_services else None
    if in_cluster_disabled and not external:
        raise ValidationError(
            "A postgresql instance is required, "
            "please enable the in-cluster postgresql, "
            "or provide an external instance."
        )
def check_rabbitmq(rabbitmq, external_services, broker):
    """Validate that a rabbitmq instance is available when it is the broker.

    Raises ValidationError when rabbitmq is used as the broker (i.e. the broker
    is anything but "redis"), the in-cluster rabbitmq is explicitly disabled,
    and no external rabbitmq service is configured.
    """
    in_cluster_disabled = bool(rabbitmq) and rabbitmq.enabled is False
    uses_rabbitmq_broker = broker != "redis"
    external = external_services.rabbitmq if external_services else None
    if in_cluster_disabled and uses_rabbitmq_broker and not external:
        raise ValidationError(
            "Rabbitmq is used as a broker, "
            "an instance is required, "
            "please enable the in-cluster rabbitmq, "
            "or provide an external instance."
        )
class DeploymentSchema(BaseCamelSchema):
    """Marshmallow schema describing a complete Polyaxon deployment config file.

    Field names are camelized by BaseCamelSchema unless a data_key overrides
    the serialized name.
    """

    deployment_type = fields.Str(
        allow_none=True, validate=validate.OneOf(DeploymentTypes.VALUES)
    )
    deployment_version = fields.Str(allow_none=True)
    namespace = fields.Str(allow_none=True)
    rbac = fields.Nested(RBACSchema, allow_none=True)
    polyaxon_secret = fields.Str(allow_none=True)
    internal_token = fields.Str(allow_none=True)
    password_length = fields.Int(allow_none=True)
    ssl = fields.Nested(SSLSchema, allow_none=True)
    encryption_secret = fields.Str(allow_none=True)
    service_type = fields.Str(
        allow_none=True, validate=validate.OneOf(ServiceTypes.VALUES)
    )
    admin_view_enabled = fields.Bool(allow_none=True)
    timezone = fields.Str(allow_none=True)
    environment = fields.Str(allow_none=True)
    ingress = fields.Nested(IngressSchema, allow_none=True)
    user = fields.Nested(RootUserSchema, allow_none=True)
    node_selector = fields.Dict(allow_none=True)
    tolerations = fields.List(fields.Dict(allow_none=True), allow_none=True)
    affinity = fields.Dict(allow_none=True)
    limit_resources = fields.Bool(allow_none=True)
    global_replicas = fields.Int(allow_none=True)
    global_concurrency = fields.Int(allow_none=True)
    gateway = fields.Nested(ApiServiceSchema, allow_none=True)
    api = fields.Nested(ApiServiceSchema, allow_none=True)
    streams = fields.Nested(ApiServiceSchema, allow_none=True)
    scheduler = fields.Nested(WorkerServiceSchema, allow_none=True)
    worker = fields.Nested(WorkerServiceSchema, allow_none=True)
    beat = fields.Nested(ServiceSchema, allow_none=True)
    agent = fields.Nested(AgentServiceSchema, allow_none=True)
    operator = fields.Nested(ServiceSchema, allow_none=True)
    init = fields.Nested(HelperServiceSchema, allow_none=True)
    sidecar = fields.Nested(HelperServiceSchema, allow_none=True)
    tables_hook = fields.Nested(ServiceSchema, allow_none=True)
    hooks = fields.Nested(HooksSchema, allow_none=True)
    postgresql = fields.Nested(PostgresqlSchema, allow_none=True)
    redis = fields.Nested(RedisSchema, allow_none=True)
    # FIX: data_key was corrupted to the placeholder "<KEY>"; the helm chart
    # exposes this section as "rabbitmq-ha" — TODO confirm against the chart.
    rabbitmq = fields.Nested(RabbitmqSchema, data_key="rabbitmq-ha", allow_none=True)
    broker = fields.Str(allow_none=True, validate=validate.OneOf(["redis", "rabbitmq"]))
    docker_registry = fields.Nested(
        DockerRegistrySchema, data_key="docker-registry", allow_none=True
    )
    email = fields.Nested(EmailSchema, allow_none=True)
    ldap = fields.Raw(allow_none=True)
    image_pull_secrets = fields.List(fields.Str(), allow_none=True)
    host_name = fields.Str(allow_none=True)
    allowed_hosts = fields.List(fields.Str(), allow_none=True)
    intervals = fields.Nested(IntervalsSchema, allow_none=True)
    artifacts_store = fields.Nested(ConnectionTypeSchema, allow_none=True)
    connections = fields.List(fields.Nested(ConnectionTypeSchema), allow_none=True)
    notification_connections = fields.List(
        fields.Nested(ConnectionTypeSchema), allow_none=True
    )
    admin_models = fields.List(fields.Str(allow_none=True), allow_none=True)
    repos_access_token = fields.Str(allow_none=True)
    log_level = fields.Str(allow_none=True)
    tracker_backend = fields.Str(allow_none=True)
    security_context = fields.Nested(SecurityContextSchema, allow_none=True)
    external_services = fields.Nested(ExternalServicesSchema, allow_none=True)
    debug_mode = fields.Bool(allow_none=True)
    # Pending validation
    dns = fields.Raw(allow_none=True)
    plugins = fields.Raw(allow_none=True)

    @staticmethod
    def schema_config():
        return DeploymentConfig

    @validates_schema
    def validate_deployment(self, data, **kwargs):
        # Cross-field checks: redis/postgresql must be available, and unless
        # redis is the broker a rabbitmq instance must be available too.
        check_redis(data.get("redis"), data.get("external_services"))
        check_postgres(data.get("postgresql"), data.get("external_services"))
        check_rabbitmq(
            data.get("rabbitmq"), data.get("external_services"), data.get("broker")
        )
class DeploymentConfig(BaseConfig):
    """In-memory representation of a deployment config, mirroring
    DeploymentSchema field-for-field; validates service availability on init."""

    SCHEMA = DeploymentSchema

    def __init__(
        self,
        deployment_type=None,
        deployment_version=None,
        namespace=None,
        rbac=None,
        polyaxon_secret=None,
        internal_token=None,
        password_length=None,
        ssl=None,
        encryption_secret=None,
        service_type=None,
        admin_view_enabled=None,
        timezone=None,
        environment=None,
        ingress=None,
        user=None,
        node_selector=None,
        tolerations=None,
        affinity=None,
        limit_resources=None,
        global_replicas=None,
        global_concurrency=None,
        gateway=None,
        api=None,
        streams=None,
        scheduler=None,
        worker=None,
        beat=None,
        agent=None,
        operator=None,
        init=None,
        sidecar=None,
        tables_hook=None,
        hooks=None,
        postgresql=None,
        redis=None,
        rabbitmq=None,
        broker=None,
        docker_registry=None,
        email=None,
        ldap=None,
        image_pull_secrets=None,
        host_name=None,
        allowed_hosts=None,
        intervals=None,
        artifacts_store=None,
        connections=None,
        notification_connections=None,
        admin_models=None,
        repos_access_token=None,
        log_level=None,
        tracker_backend=None,
        security_context=None,
        external_services=None,
        debug_mode=None,
        dns=None,
        plugins=None,
    ):
        # Fail fast on invalid service combinations (same checks the schema runs).
        check_redis(redis, external_services)
        check_postgres(postgresql, external_services)
        check_rabbitmq(rabbitmq, external_services, broker)
        self.deployment_type = deployment_type
        self.deployment_version = deployment_version
        self.namespace = namespace
        self.rbac = rbac
        self.polyaxon_secret = polyaxon_secret
        self.internal_token = internal_token
        # FIX: right-hand side was corrupted to "password_<PASSWORD>"; assign the
        # password_length argument.
        self.password_length = password_length
        self.ssl = ssl
        self.dns = dns
        self.encryption_secret = encryption_secret
        self.service_type = service_type
        self.admin_view_enabled = admin_view_enabled
        self.timezone = timezone
        self.environment = environment
        self.ingress = ingress
        self.user = user
        self.node_selector = node_selector
        self.tolerations = tolerations
        self.affinity = affinity
        self.limit_resources = limit_resources
        self.global_replicas = global_replicas
        self.global_concurrency = global_concurrency
        self.gateway = gateway
        self.api = api
        self.streams = streams
        self.scheduler = scheduler
        self.worker = worker
        self.beat = beat
        self.agent = agent
        self.operator = operator
        self.init = init
        self.sidecar = sidecar
        self.tables_hook = tables_hook
        self.hooks = hooks
        self.postgresql = postgresql
        self.redis = redis
        self.rabbitmq = rabbitmq
        self.broker = broker
        self.docker_registry = docker_registry
        self.email = email
        self.ldap = ldap
        self.image_pull_secrets = image_pull_secrets
        self.host_name = host_name
        self.allowed_hosts = allowed_hosts
        self.intervals = intervals
        self.artifacts_store = artifacts_store
        self.connections = connections
        self.notification_connections = notification_connections
        self.admin_models = admin_models
        self.repos_access_token = repos_access_token
        self.log_level = log_level
        self.tracker_backend = tracker_backend
        self.security_context = security_context
        self.external_services = external_services
        self.debug_mode = debug_mode
        self.plugins = plugins
|
gregmbi/polyaxon | core/polyaxon/managers/client.py | <filename>core/polyaxon/managers/client.py
#!/usr/bin/python
#
# Copyright 2018-2020 Polyaxon, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
from polyaxon.config_reader.manager import ConfigManager
from polyaxon.config_reader.spec import ConfigSpec
from polyaxon.containers.contexts import (
CONTEXT_TMP_POLYAXON_PATH,
CONTEXT_USER_POLYAXON_PATH,
)
from polyaxon.managers.base import BaseConfigManager
from polyaxon.schemas.cli.client_config import ClientConfig
class ClientConfigManager(BaseConfigManager):
    """Manages client configuration .polyaxonclient file."""
    IS_GLOBAL = True  # config lives in the user-global location
    CONFIG_FILE_NAME = ".polyaxonclient"
    CONFIG = ClientConfig
    @classmethod
    def get_config_from_env(cls, **kwargs) -> ClientConfig:
        """Build a ClientConfig by merging the environment with the tmp-context
        and user-context .polyaxonclient JSON files (missing files tolerated).

        NOTE(review): merge precedence between the three sources is defined by
        ConfigManager.read_configs — confirm which source wins on conflicts.
        """
        tmp_path = os.path.join(CONTEXT_TMP_POLYAXON_PATH, cls.CONFIG_FILE_NAME)
        user_path = os.path.join(CONTEXT_USER_POLYAXON_PATH, cls.CONFIG_FILE_NAME)
        config = ConfigManager.read_configs(
            [
                os.environ,
                ConfigSpec(tmp_path, config_type=".json", check_if_exists=False),
                ConfigSpec(user_path, config_type=".json", check_if_exists=False),
            ]
        )
        return ClientConfig.from_dict(config.data)
|
gregmbi/polyaxon | core/polyaxon/polyboard/events/paths.py | <filename>core/polyaxon/polyboard/events/paths.py
def get_resource_path(run_path: str, kind: str = None, name: str = None) -> str:
    """Build the storage path for a run's resource events.

    Layout: ``<run_path>/resources[/<kind>][/<name>.plx]``; the kind and name
    segments are appended only when provided (truthy).
    """
    path = f"{run_path}/resources"
    if kind:
        path = f"{path}/{kind}"
    if name:
        path = f"{path}/{name}.plx"
    return path
def get_event_path(run_path: str, kind: str = None, name: str = None) -> str:
    """Build the storage path for a run's events.

    Layout: ``<run_path>/events[/<kind>][/<name>.plx]``; the kind and name
    segments are appended only when provided (truthy).
    """
    segments = [run_path, "events"]
    if kind:
        segments.append(kind)
    path = "/".join(segments)
    if name:
        path = f"{path}/{name}.plx"
    return path
def get_asset_path(
    run_path: str, kind: str = None, name: str = None, step: int = None, ext=None
) -> str:
    """Build the storage path for a run's asset.

    Layout: ``<run_path>/assets[/<kind>][/<name>][_<step>][.<ext>]``.
    ``step`` is appended whenever it is not None (0 included); ``kind``,
    ``name`` and ``ext`` only when truthy.
    """
    path = f"{run_path}/assets"
    if kind:
        path = f"{path}/{kind}"
    if name:
        path = f"{path}/{name}"
    if step is not None:
        path = f"{path}_{step}"
    if ext:
        path = f"{path}.{ext}"
    return path
|
gregmbi/polyaxon | core/polyaxon/lifecycle.py | #!/usr/bin/python
#
# Copyright 2018-2020 Polyaxon, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import polyaxon_sdk
from polyaxon.utils.tz_utils import now
V1Statuses = polyaxon_sdk.V1Statuses
class StatusColor:
    """Hex colors used to render run statuses."""

    GREEN = "#1aaa55"
    RED = "#aa310f"
    BLUE = "#2e77aa"
    YELLOW = "#aa9e4a"
    GREY = "#485563"

    @classmethod
    def get_color(cls, status: str) -> str:
        """Map a run status to its display color.

        Failure-like statuses are red, success green, skipped/other final
        statuses grey, and anything still in flight yellow.
        """
        red_statuses = {
            V1Statuses.FAILED,
            V1Statuses.STOPPED,
            V1Statuses.UPSTREAM_FAILED,
        }
        if status in red_statuses:
            return cls.RED
        if status == V1Statuses.SUCCEEDED:
            return cls.GREEN
        if status == V1Statuses.SKIPPED or LifeCycle.is_done(status):
            return cls.GREY
        return cls.YELLOW
class LifeCycle:
    """Groupings of run statuses plus predicates to classify a given status.

    The *_VALUES sets partition the status space (pending, running, done,
    ...); the classmethods are thin membership/equality checks over them.
    """

    CHOICES = (
        (V1Statuses.CREATED, V1Statuses.CREATED),
        (V1Statuses.RESUMING, V1Statuses.RESUMING),
        (V1Statuses.WARNING, V1Statuses.WARNING),
        (V1Statuses.UNSCHEDULABLE, V1Statuses.UNSCHEDULABLE),
        (V1Statuses.QUEUED, V1Statuses.QUEUED),
        (V1Statuses.SCHEDULED, V1Statuses.SCHEDULED),
        (V1Statuses.STARTING, V1Statuses.STARTING),
        (V1Statuses.RUNNING, V1Statuses.RUNNING),
        (V1Statuses.SUCCEEDED, V1Statuses.SUCCEEDED),
        (V1Statuses.FAILED, V1Statuses.FAILED),
        (V1Statuses.UPSTREAM_FAILED, V1Statuses.UPSTREAM_FAILED),
        (V1Statuses.STOPPING, V1Statuses.STOPPING),
        (V1Statuses.STOPPED, V1Statuses.STOPPED),
        (V1Statuses.SKIPPED, V1Statuses.SKIPPED),
        (V1Statuses.RETRYING, V1Statuses.RETRYING),
        (V1Statuses.UNKNOWN, V1Statuses.UNKNOWN),
    )
    # All known statuses, derived from CHOICES so the two cannot drift apart.
    VALUES = {choice for choice, _ in CHOICES}
    WARNING_VALUES = {V1Statuses.UNSCHEDULABLE, V1Statuses.WARNING}
    PENDING_VALUES = {V1Statuses.CREATED, V1Statuses.RESUMING, V1Statuses.SCHEDULED}
    SCHEDULABLE_VALUES = {V1Statuses.CREATED, V1Statuses.RESUMING, V1Statuses.RETRYING}
    RUNNING_VALUES = {V1Statuses.STARTING, V1Statuses.RUNNING}
    DONE_VALUES = {
        V1Statuses.FAILED,
        V1Statuses.UPSTREAM_FAILED,
        V1Statuses.STOPPED,
        V1Statuses.SKIPPED,
        V1Statuses.SUCCEEDED,
    }

    @classmethod
    def can_check_heartbeat(cls, status: str) -> bool:
        """Heartbeat checks only make sense while the run is starting/running."""
        return status in LifeCycle.RUNNING_VALUES

    @classmethod
    def is_unschedulable(cls, status: str) -> bool:
        return V1Statuses.UNSCHEDULABLE == status

    @classmethod
    def is_warning(cls, status: str) -> bool:
        return status in cls.WARNING_VALUES

    @classmethod
    def is_pending(cls, status: str) -> bool:
        return status in cls.PENDING_VALUES

    @classmethod
    def is_queued(cls, status: str) -> bool:
        return V1Statuses.QUEUED == status

    @classmethod
    def is_starting(cls, status: str) -> bool:
        return V1Statuses.STARTING == status

    @classmethod
    def is_running(cls, status: str) -> bool:
        return status in LifeCycle.RUNNING_VALUES

    @classmethod
    def is_unknown(cls, status: str) -> bool:
        return V1Statuses.UNKNOWN == status

    @classmethod
    def is_k8s_stoppable(cls, status: str) -> bool:
        """The status maps to a live k8s resource that can still be stopped."""
        checks = (cls.is_running, cls.is_unschedulable, cls.is_warning, cls.is_unknown)
        return any(check(status=status) for check in checks)

    @classmethod
    def is_stoppable(cls, status: str) -> bool:
        """Anything not yet in a final state can still be stopped."""
        return not cls.is_done(status)

    @classmethod
    def is_stopping(cls, status: str) -> bool:
        return V1Statuses.STOPPING == status

    @classmethod
    def is_done(cls, status: str) -> bool:
        return status in cls.DONE_VALUES

    @classmethod
    def failed(cls, status: str) -> bool:
        # Upstream failures count as failures for this run too.
        return status in (V1Statuses.FAILED, V1Statuses.UPSTREAM_FAILED)

    @classmethod
    def succeeded(cls, status: str) -> bool:
        return V1Statuses.SUCCEEDED == status

    @classmethod
    def stopped(cls, status: str) -> bool:
        return V1Statuses.STOPPED == status

    @classmethod
    def skipped(cls, status: str) -> bool:
        return V1Statuses.SKIPPED == status
class V1StatusCondition(polyaxon_sdk.V1StatusCondition):
    """Status condition with a convenience constructor defaulting timestamps to now."""

    @classmethod
    def get_condition(
        cls,
        type=None,  # noqa
        status=None,
        last_update_time=None,
        last_transition_time=None,
        reason=None,
        message=None,
    ) -> "V1StatusCondition":
        """Build a condition.

        Missing timestamps default to the current time, and the condition type
        is normalized to lowercase.
        """
        current_time = now()
        last_update_time = last_update_time or current_time
        last_transition_time = last_transition_time or current_time
        return cls(
            type=type.lower() if type else type,
            status=status,
            last_update_time=last_update_time,
            last_transition_time=last_transition_time,
            reason=reason,
            message=message,
        )

    def __eq__(self, other: "V1StatusCondition"):
        """Compare by type/status/reason/message; timestamps are deliberately ignored.

        Bug fix: `reason` and `message` were previously compared against `self`
        (``self.reason == self.reason``), making those two checks always True.
        """
        return (
            self.type == other.type
            and self.status == other.status
            and self.reason == other.reason
            and self.message == other.message
        )
|
gregmbi/polyaxon | core/tests/test_polyflow/test_workflows/test_matrix.py | <reponame>gregmbi/polyaxon<filename>core/tests/test_polyflow/test_workflows/test_matrix.py
#!/usr/bin/python
#
# Copyright 2018-2020 Polyaxon, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import numpy as np
import pytest
from marshmallow.exceptions import ValidationError
from tests.utils import BaseTestCase
from polyaxon.polyflow.parallel.matrix import (
V1HpChoice,
V1HpGeomSpace,
V1HpLinSpace,
V1HpLogNormal,
V1HpLogSpace,
V1HpLogUniform,
V1HpNormal,
V1HpPChoice,
V1HpQLogNormal,
V1HpQLogUniform,
V1HpQNormal,
V1HpQUniform,
V1HpRange,
V1HpUniform,
)
from polyaxon.polytune.matrix.utils import (
get_length,
get_max,
get_min,
sample,
to_numpy,
)
@pytest.mark.workflow_mark
class TestMatrixConfigs(BaseTestCase):
    """Round-trip and sampling tests for the hyperparameter matrix configs.

    Each test follows the same shape: build a config from a dict, check
    dict round-trip, numpy expansion (or that expansion raises for pure
    distributions), sampling, the classification flags, and min/max.
    Inputs are exercised in all accepted forms: list, string, and dict.
    """
    def test_matrix_values_option(self):
        config_dict = {"kind": "choice", "value": [1, 2, 3]}
        config = V1HpChoice.from_dict(config_dict)
        assert config.to_dict() == config_dict
        assert to_numpy(config) == config_dict["value"]
        assert sample(config) in [1, 2, 3]
        assert get_length(config) == 3
        assert config.is_categorical is False
        assert config.is_distribution is False
        assert config.is_range is False
        assert config.is_uniform is False
        assert config.is_discrete is True
        assert config.is_continuous is False
        assert get_min(config) == 1
        assert get_max(config) == 3
        # Non-numeric choices are categorical and have no min/max.
        config_dict["value"] = ["ok", "nook"]
        config = V1HpChoice.from_dict(config_dict)
        assert config.to_dict() == config_dict
        assert to_numpy(config) == config_dict["value"]
        assert sample(config) in ["ok", "nook"]
        assert get_length(config) == 2
        assert config.is_categorical is True
        assert config.is_distribution is False
        assert config.is_range is False
        assert config.is_uniform is False
        assert config.is_discrete is True
        assert config.is_continuous is False
        assert get_min(config) is None
        assert get_max(config) is None
        # Nested (list) choices are treated as categorical as well.
        config_dict["value"] = [[1, 2], [2, 4]]
        config = V1HpChoice.from_dict(config_dict)
        assert config.to_dict() == config_dict
        assert to_numpy(config) == config_dict["value"]
        assert sample(config) in [[1, 2], [2, 4]]
        assert get_length(config) == 2
        assert config.is_categorical is True
        assert config.is_distribution is False
        assert config.is_range is False
        assert config.is_uniform is False
        assert config.is_discrete is True
        assert config.is_continuous is False
        assert get_min(config) is None
        assert get_max(config) is None
    def test_matrix_pchoice_option(self):
        # Probabilities must be in [0, 1] and sum to 1; invalid combos raise.
        config_dict = {"kind": "pchoice", "value": [(1, 0.1), (2, 0.3), (3, 6)]}
        with self.assertRaises(ValidationError):
            V1HpPChoice.from_dict(config_dict)
        config_dict["value"] = [(1, 0.1), (2, 0.3), (3, 0.8)]
        with self.assertRaises(ValidationError):
            V1HpPChoice.from_dict(config_dict)
        config_dict["value"] = [(1, 0.1), (2, 0.3), (3, -0.6)]
        with self.assertRaises(ValidationError):
            V1HpPChoice.from_dict(config_dict)
        config_dict["value"] = ["ok", "nook"]
        with self.assertRaises(ValidationError):
            V1HpPChoice.from_dict(config_dict)
        # Pass for correct config
        config_dict["value"] = [(1, 0.1), (2, 0.1), (3, 0.8)]
        config = V1HpPChoice.from_dict(config_dict)
        assert config.to_dict() == config_dict
        with self.assertRaises(ValidationError):
            to_numpy(config)
        assert sample(config) in [1, 2, 3]
        assert get_length(config) == 3
        assert config.is_categorical is False
        assert config.is_distribution is True
        assert config.is_range is False
        assert config.is_uniform is False
        assert config.is_discrete is True
        assert config.is_continuous is False
        assert get_min(config) is None
        assert get_max(config) is None
    def test_matrix_range_option(self):
        # Helper: verify dict round-trip, numpy expansion, sampling and flags.
        def assert_equal(config, v1, v2, v3):
            result = {"start": v1, "stop": v2, "step": v3}
            assert config.to_dict()["value"] == result
            np.testing.assert_array_equal(to_numpy(config), np.arange(**result))
            assert get_length(config) == len(np.arange(**result))
            assert sample(config) in np.arange(**result)
            assert config.is_categorical is False
            assert config.is_distribution is False
            assert config.is_range is True
            assert config.is_uniform is False
            assert config.is_discrete is True
            assert config.is_continuous is False
            assert get_min(config) == v1
            assert get_max(config) == v2
        # as list
        config_dict = {"kind": "range", "value": [1, 2, 3]}
        config = V1HpRange.from_dict(config_dict)
        assert_equal(config, *config_dict["value"])
        # as string
        config_dict["value"] = "0:10:1"
        config = V1HpRange.from_dict(config_dict)
        assert_equal(config, 0, 10, 1)
        # as dict
        config_dict["value"] = {"start": 1.2, "stop": 1.8, "step": 0.1}
        config = V1HpRange.from_dict(config_dict)
        assert config.to_dict() == config_dict
    def test_matrix_linspace_option(self):
        def assert_equal(config, v1, v2, v3):
            result = {"start": v1, "stop": v2, "num": v3}
            assert config.to_dict()["value"] == result
            np.testing.assert_array_equal(to_numpy(config), np.linspace(**result))
            assert get_length(config) == len(np.linspace(**result))
            assert sample(config) in np.linspace(**result)
            assert config.is_categorical is False
            assert config.is_distribution is False
            assert config.is_range is True
            assert config.is_uniform is False
            assert config.is_discrete is True
            assert config.is_continuous is False
            assert get_min(config) == v1
            assert get_max(config) == v2
        # as list
        config_dict = {"kind": "linspace", "value": [1, 2, 3]}
        config = V1HpLinSpace.from_dict(config_dict)
        assert_equal(config, *config_dict["value"])
        # as string
        config_dict["value"] = "0:10:1"
        config = V1HpLinSpace.from_dict(config_dict)
        assert_equal(config, 0, 10, 1)
        # as dict
        config_dict["value"] = {"start": 1.2, "stop": 1.8, "num": 0.1}
        config = V1HpLinSpace.from_dict(config_dict)
        assert config.to_dict() == config_dict
    def test_matrix_geomspace_option(self):
        def assert_equal(config, v1, v2, v3):
            result = {"start": v1, "stop": v2, "num": v3}
            assert config.to_dict()["value"] == result
            np.testing.assert_array_equal(to_numpy(config), np.geomspace(**result))
            assert get_length(config) == len(np.geomspace(**result))
            assert sample(config) in np.geomspace(**result)
            assert config.is_categorical is False
            assert config.is_distribution is False
            assert config.is_range is True
            assert config.is_uniform is False
            assert config.is_discrete is True
            assert config.is_continuous is False
            assert get_min(config) == v1
            assert get_max(config) == v2
        # as list
        config_dict = {"kind": "geomspace", "value": [1, 2, 3]}
        config = V1HpGeomSpace.from_dict(config_dict)
        assert_equal(config, *config_dict["value"])
        # as string
        config_dict["value"] = "1:10:1"
        config = V1HpGeomSpace.from_dict(config_dict)
        assert_equal(config, 1, 10, 1)
        # as dict
        config_dict["value"] = {"start": 1.2, "stop": 1.8, "num": 1}
        config = V1HpGeomSpace.from_dict(config_dict)
        assert config.to_dict() == config_dict
    def test_matrix_logspace_option(self):
        # `v4` is the optional log base.
        def assert_equal(config, v1, v2, v3, v4=None):
            result = {"start": v1, "stop": v2, "num": v3}
            if v4:
                result["base"] = v4
            assert config.to_dict()["value"] == result
            np.testing.assert_array_equal(to_numpy(config), np.logspace(**result))
            assert get_length(config) == len(np.logspace(**result))
            assert sample(config) in np.logspace(**result)
            assert config.is_categorical is False
            assert config.is_distribution is False
            assert config.is_range is True
            assert config.is_uniform is False
            assert config.is_discrete is True
            assert config.is_continuous is False
            assert get_min(config) == v1
            assert get_max(config) == v2
        # as list
        config_dict = {"kind": "logspace", "value": [1, 2, 3]}
        config = V1HpLogSpace.from_dict(config_dict)
        assert_equal(config, *config_dict["value"])
        # with base
        config_dict["value"] = [1, 2, 3, 2]
        config = V1HpLogSpace.from_dict(config_dict)
        assert_equal(config, *config_dict["value"])
        # as string
        config_dict["value"] = "0:10:1"
        config = V1HpLogSpace.from_dict(config_dict)
        assert_equal(config, 0, 10, 1)
        # with base
        config_dict["value"] = "0:10:1:2"
        config = V1HpLogSpace.from_dict(config_dict)
        assert_equal(config, 0, 10, 1, 2)
        # as dict
        config_dict["value"] = {"start": 1.2, "stop": 1.8, "num": 0.1}
        config = V1HpLogSpace.from_dict(config_dict)
        assert config.to_dict() == config_dict
        # with base
        config_dict["value"] = {"start": 1.2, "stop": 1.8, "num": 0.1, "base": 2}
        config = V1HpLogSpace.from_dict(config_dict)
        assert config.to_dict() == config_dict
    def test_matrix_uniform_option(self):
        def assert_equal(config, v1, v2, v3=None):
            result = {"low": v1, "high": v2}
            if v3:
                result["size"] = v3
            assert config.to_dict()["value"] == result
            # NOTE(review): the next two assertRaises blocks are duplicates;
            # the sibling tests check to_numpy once and get_length once.
            with self.assertRaises(ValidationError):
                to_numpy(config)
            with self.assertRaises(ValidationError):
                to_numpy(config)
            with self.assertRaises(ValidationError):
                get_length(config)
            assert v1 <= sample(config) <= v2
            assert config.is_categorical is False
            assert config.is_distribution is True
            assert config.is_range is False
            assert config.is_uniform is True
            assert config.is_discrete is False
            assert config.is_continuous is True
            assert get_min(config) == v1
            assert get_max(config) == v2
        # as list
        config_dict = {"kind": "uniform", "value": [0, 1]}
        config = V1HpUniform.from_dict(config_dict)
        assert_equal(config, *config_dict["value"])
        # as string
        config_dict["value"] = "0:1"
        config = V1HpUniform.from_dict(config_dict)
        assert_equal(config, 0, 1)
        # as dict
        config_dict["value"] = {"low": 0, "high": 1}
        config = V1HpUniform.from_dict(config_dict)
        assert config.to_dict() == config_dict
    def test_matrix_quniform_option(self):
        def assert_equal(config, v1, v2, q, v3=None):
            result = {"low": v1, "high": v2, "q": q}
            if v3:
                result["size"] = v3
            assert config.to_dict()["value"] == result
            with self.assertRaises(ValidationError):
                to_numpy(config)
            with self.assertRaises(ValidationError):
                get_length(config)
            assert isinstance(sample(config), float)
            assert config.is_categorical is False
            assert config.is_distribution is True
            assert config.is_range is False
            assert config.is_uniform is False
            assert config.is_discrete is False
            assert config.is_continuous is True
            assert get_min(config) is None
            assert get_max(config) is None
        # as list
        config_dict = {"kind": "quniform", "value": [0, 1, 0.1]}
        config = V1HpQUniform.from_dict(config_dict)
        assert_equal(config, *config_dict["value"])
        # as string
        config_dict["value"] = "0:1:0.1"
        config = V1HpQUniform.from_dict(config_dict)
        assert_equal(config, 0, 1, 0.1)
        # as dict
        config_dict["value"] = {"low": 0, "high": 1, "q": 0.1}
        config = V1HpQUniform.from_dict(config_dict)
        assert config.to_dict() == config_dict
    def test_matrix_loguniform_option(self):
        def assert_equal(config, v1, v2, v3=None):
            result = {"low": v1, "high": v2}
            if v3:
                result["size"] = v3
            assert config.to_dict()["value"] == result
            with self.assertRaises(ValidationError):
                to_numpy(config)
            with self.assertRaises(ValidationError):
                get_length(config)
            assert isinstance(sample(config), float)
            assert config.is_categorical is False
            assert config.is_distribution is True
            assert config.is_range is False
            assert config.is_uniform is False
            assert config.is_discrete is False
            assert config.is_continuous is True
            assert get_min(config) is None
            assert get_max(config) is None
        # as list
        config_dict = {"kind": "loguniform", "value": [0, 1]}
        config = V1HpLogUniform.from_dict(config_dict)
        assert_equal(config, *config_dict["value"])
        # as string
        config_dict["value"] = "0:1"
        config = V1HpLogUniform.from_dict(config_dict)
        assert_equal(config, 0, 1)
        # as dict
        config_dict["value"] = {"low": 0, "high": 1}
        config = V1HpLogUniform.from_dict(config_dict)
        assert config.to_dict() == config_dict
    def test_matrix_qloguniform_option(self):
        def assert_equal(config, v1, v2, q, v3=None):
            result = {"low": v1, "high": v2, "q": q}
            if v3:
                result["size"] = v3
            assert config.to_dict()["value"] == result
            with self.assertRaises(ValidationError):
                to_numpy(config)
            with self.assertRaises(ValidationError):
                get_length(config)
            assert isinstance(sample(config), float)
            assert config.is_categorical is False
            assert config.is_distribution is True
            assert config.is_range is False
            assert config.is_uniform is False
            assert config.is_discrete is False
            assert config.is_continuous is True
            assert get_min(config) is None
            assert get_max(config) is None
        # as list
        config_dict = {"kind": "qloguniform", "value": [0, 1, 0.1]}
        config = V1HpQLogUniform.from_dict(config_dict)
        assert_equal(config, *config_dict["value"])
        # as string
        config_dict["value"] = "0:1:0.1"
        config = V1HpQLogUniform.from_dict(config_dict)
        assert_equal(config, 0, 1, 0.1)
        # as dict
        config_dict["value"] = {"low": 0, "high": 1, "q": 0.1}
        config = V1HpQLogUniform.from_dict(config_dict)
        assert config.to_dict() == config_dict
    def test_matrix_normal_option(self):
        def assert_equal(config, v1, v2, v3=None):
            result = {"loc": v1, "scale": v2}
            if v3:
                result["size"] = v3
            assert config.to_dict()["value"] == result
            with self.assertRaises(ValidationError):
                to_numpy(config)
            with self.assertRaises(ValidationError):
                get_length(config)
            assert isinstance(sample(config), float)
            assert config.is_categorical is False
            assert config.is_distribution is True
            assert config.is_range is False
            assert config.is_uniform is False
            assert config.is_discrete is False
            assert config.is_continuous is True
            assert get_min(config) is None
            assert get_max(config) is None
        # as list
        config_dict = {"kind": "normal", "value": [0, 1]}
        config = V1HpNormal.from_dict(config_dict)
        assert_equal(config, *config_dict["value"])
        # as string
        config_dict["value"] = "0:1"
        config = V1HpNormal.from_dict(config_dict)
        assert_equal(config, 0, 1)
        # as dict
        config_dict["value"] = {"loc": 0, "scale": 1}
        config = V1HpNormal.from_dict(config_dict)
        assert config.to_dict() == config_dict
        # as list
        config_dict["value"] = [66, 30]
        config = V1HpNormal.from_dict(config_dict)
        assert_equal(config, *config_dict["value"])
        # as string
        config_dict["value"] = "66:30"
        config = V1HpNormal.from_dict(config_dict)
        assert_equal(config, 66, 30)
        # as dict
        config_dict["value"] = {"loc": 60, "scale": 30}
        config = V1HpNormal.from_dict(config_dict)
        assert config.to_dict() == config_dict
    def test_matrix_qnormal_option(self):
        def assert_equal(config, v1, v2, q, v3=None):
            result = {"loc": v1, "scale": v2, "q": q}
            if v3:
                result["size"] = v3
            assert config.to_dict()["value"] == result
            with self.assertRaises(ValidationError):
                to_numpy(config)
            with self.assertRaises(ValidationError):
                get_length(config)
            assert isinstance(sample(config), float)
            assert config.is_categorical is False
            assert config.is_distribution is True
            assert config.is_range is False
            assert config.is_uniform is False
            assert config.is_discrete is False
            assert config.is_continuous is True
            assert get_min(config) is None
            assert get_max(config) is None
        # as list
        config_dict = {"kind": "qnormal", "value": [0, 1, 0.1]}
        config = V1HpQNormal.from_dict(config_dict)
        assert_equal(config, *config_dict["value"])
        # as string
        config_dict["value"] = "0:1:0.1"
        config = V1HpQNormal.from_dict(config_dict)
        assert_equal(config, 0, 1, 0.1)
        # as dict
        config_dict["value"] = {"loc": 0, "scale": 1, "q": 0.1}
        config = V1HpQNormal.from_dict(config_dict)
        assert config.to_dict() == config_dict
    def test_matrix_lognormal_option(self):
        def assert_equal(config, v1, v2, v3=None):
            result = {"loc": v1, "scale": v2}
            if v3:
                result["size"] = v3
            assert config.to_dict()["value"] == result
            with self.assertRaises(ValidationError):
                to_numpy(config)
            with self.assertRaises(ValidationError):
                get_length(config)
            assert isinstance(sample(config), float)
            assert config.is_categorical is False
            assert config.is_distribution is True
            assert config.is_range is False
            assert config.is_uniform is False
            assert config.is_discrete is False
            assert config.is_continuous is True
            assert get_min(config) is None
            assert get_max(config) is None
        # as list
        config_dict = {"kind": "lognormal", "value": [0, 1]}
        config = V1HpLogNormal.from_dict(config_dict)
        assert_equal(config, *config_dict["value"])
        # as string
        config_dict["value"] = "0:1"
        config = V1HpLogNormal.from_dict(config_dict)
        assert_equal(config, 0, 1)
        # as dict
        config_dict["value"] = {"loc": 0, "scale": 1}
        config = V1HpLogNormal.from_dict(config_dict)
        assert config.to_dict() == config_dict
    def test_matrix_qlognormal_option(self):
        def assert_equal(config, v1, v2, q, v3=None):
            result = {"loc": v1, "scale": v2, "q": q}
            if v3:
                result["size"] = v3
            assert config.to_dict()["value"] == result
            with self.assertRaises(ValidationError):
                to_numpy(config)
            with self.assertRaises(ValidationError):
                get_length(config)
            assert isinstance(sample(config), float)
            assert config.is_categorical is False
            assert config.is_distribution is True
            assert config.is_range is False
            assert config.is_uniform is False
            assert config.is_discrete is False
            assert config.is_continuous is True
            assert get_min(config) is None
            assert get_max(config) is None
        # as list
        config_dict = {"kind": "qlognormal", "value": [0, 1, 0.1]}
        config = V1HpQLogNormal.from_dict(config_dict)
        assert_equal(config, *config_dict["value"])
        # as string
        config_dict["value"] = "0:1:0.1"
        config = V1HpQLogNormal.from_dict(config_dict)
        assert_equal(config, 0, 1, 0.1)
        # as dict
        config_dict["value"] = {"loc": 0, "scale": 1, "q": 0.1}
        config = V1HpQLogNormal.from_dict(config_dict)
        assert config.to_dict() == config_dict
|
gregmbi/polyaxon | core/polyaxon/cli/admin.py | <filename>core/polyaxon/cli/admin.py
#!/usr/bin/python
#
# Copyright 2018-2020 Polyaxon, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import sys
import click
from polyaxon.cli.errors import handle_cli_error
from polyaxon.deploy import reader
from polyaxon.logger import clean_outputs
from polyaxon.managers.deploy import DeployManager
from polyaxon.utils.formatting import Printer
from polyaxon.utils.list_utils import to_list
def read_deployment_config(filepaths):
    """Validate the given deployment file path(s) and parse them into a config.

    Returns None when no paths are given; exits the CLI with status 1 when a
    path is not a regular file or when parsing fails.
    """
    if not filepaths:
        return None
    filepaths = to_list(filepaths)
    for path in filepaths:
        if os.path.isfile(path):
            continue
        Printer.print_error("`{}` must be a valid file".format(path))
        sys.exit(1)
    try:
        return reader.read(filepaths)
    except Exception as e:
        handle_cli_error(e, message="Polyaxon deployment file is not valid.")
        sys.exit(1)
# Root click group: `admin.command()` below attaches the deploy/upgrade/teardown
# subcommands to it. The docstring doubles as the CLI help text.
@click.group()
@clean_outputs
def admin():
    """Commands for admin management."""
@admin.command()
@click.option(
    "-f",
    "--file",
    "config_file",
    type=click.Path(exists=True),
    help="The polyaxon deployment config file(s) to check.",
)
@click.option(
    "--manager-path",
    "--manager_path",
    type=click.Path(exists=True),
    help="The path of the deployment manager, e.g. local chart.",
)
@click.option(
    "--check",
    is_flag=True,
    default=False,
    help="Check if deployment file and other requirements are met.",
)
@click.option(
    "--dry_run",
    "--dry-run",
    is_flag=True,
    default=False,
    help="Dry run the configuration and generate a debuggable output.",
)
@clean_outputs
def deploy(config_file, manager_path, check, dry_run):
    """Deploy polyaxon."""
    config = read_deployment_config(config_file)
    manager = DeployManager(
        config=config, filepath=config_file, manager_path=manager_path, dry_run=dry_run
    )
    # --check only validates the deployment file; it never installs.
    if check:
        try:
            manager.check()
        except Exception as e:
            handle_cli_error(e, message="Polyaxon deployment file is not valid.")
            sys.exit(1)
        Printer.print_success("Polyaxon deployment file is valid.")
        return
    try:
        manager.install()
    except Exception as e:
        Printer.print_error("Polyaxon could not be installed.")
        Printer.print_error("Error message: {}.".format(e))
@admin.command()
@click.option(
    "-f",
    "--file",
    "config_file",
    type=click.Path(exists=True),
    help="The polyaxon deployment config file(s) to check.",
)
@click.option(
    "--manager-path",
    "--manager_path",
    type=click.Path(exists=True),
    help="The path of the deployment manager, e.g. local chart.",
)
@click.option(
    "--check",
    is_flag=True,
    default=False,
    help="Check if deployment file and other requirements are met.",
)
@click.option(
    "--dry_run",
    "--dry-run",
    is_flag=True,
    default=False,
    help="Dry run the configuration and generate a debuggable output.",
)
@clean_outputs
def upgrade(config_file, manager_path, check, dry_run):
    """Upgrade a Polyaxon deployment."""
    config = read_deployment_config(config_file)
    manager = DeployManager(
        config=config, filepath=config_file, manager_path=manager_path, dry_run=dry_run
    )
    # --check only validates the deployment file; it never upgrades.
    if check:
        try:
            manager.check()
        except Exception as e:
            handle_cli_error(e, message="Polyaxon deployment file is not valid.")
            sys.exit(1)
        Printer.print_success("Polyaxon deployment file is valid.")
        return
    try:
        manager.upgrade()
    except Exception as e:
        Printer.print_error("Polyaxon could not upgrade the deployment.")
        Printer.print_error("Error message: {}.".format(e))
@admin.command()
@click.option(
    "-f",
    "--file",
    "config_file",
    type=click.Path(exists=True),
    help="The polyaxon deployment config file(s) to check.",
)
@clean_outputs
def teardown(config_file):
    """Teardown a polyaxon deployment given a config file."""
    config = read_deployment_config(config_file)
    manager = DeployManager(config=config, filepath=config_file)
    try:
        # Pre-delete hooks are opt-out; the default answer is yes.
        run_hooks = click.confirm(
            "Would you like to execute pre-delete hooks?", default=True
        )
        manager.teardown(hooks=run_hooks)
    except Exception as e:
        Printer.print_error("Polyaxon could not teardown the deployment.")
        Printer.print_error("Error message: {}.".format(e))
|
gregmbi/polyaxon | core/polyaxon/cli/executor/platform.py | <reponame>gregmbi/polyaxon<gh_stars>0
#!/usr/bin/python
#
# Copyright 2018-2020 Polyaxon, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
from typing import Callable, List
import click
from polyaxon_sdk import V1OperationBody
from polyaxon_sdk.rest import ApiException
from urllib3.exceptions import HTTPError
from polyaxon.cli.errors import handle_cli_error
from polyaxon.cli.operations import logs as run_logs
from polyaxon.cli.operations import statuses
from polyaxon.cli.upload import upload as upload_cmd
from polyaxon.client import PolyaxonClient
from polyaxon.managers.run import RunManager
from polyaxon.polyflow import V1Operation
from polyaxon.utils import cache
from polyaxon.utils.formatting import Printer
def run(
    ctx,
    name: str,
    owner: str,
    project_name: str,
    description: str,
    tags: List[str],
    op_spec: V1Operation,
    upload: Callable,
    log: bool,
    watch: bool,
    can_upload: bool,
):
    """Create a run for ``op_spec`` under ``owner/project_name``.

    Optionally uploads code first, then optionally watches statuses and/or
    streams the run's logs.
    """

    def create_run():
        click.echo("Creating a run.")
        body = V1OperationBody(
            name=name,
            description=description,
            tags=tags,
            content=op_spec.to_dict(dump=True),
        )
        try:
            client = PolyaxonClient()
            response = client.runs_v1.create_run(owner, project_name, body)
            run_config = client.sanitize_for_serialization(response)
            cache.cache(config_manager=RunManager, config=run_config)
            Printer.print_success("A new run `{}` was created".format(response.uuid))
        except (ApiException, HTTPError) as e:
            handle_cli_error(e, message="Could not create a run.")
            sys.exit(1)

    # Upload the code before creating the run, if requested.
    if upload:
        # NOTE(review): despite its name, `can_upload` being True *blocks* the
        # upload here (the error refers to a project context switch) -- confirm
        # the caller's semantics and consider renaming.
        if can_upload:
            Printer.print_error(
                "Uploading is not supported when switching project context!"
            )
            click.echo(
                "Please, either omit the `-u` option or `-p` / `--project=` option."
            )
            sys.exit(1)
        ctx.invoke(upload_cmd, sync=False)

    create_run()

    # Watch statuses until the run reaches a final state, if requested.
    if watch:
        ctx.obj = {"project": "{}/{}".format(owner, project_name)}
        ctx.invoke(statuses, watch=True)
    # Stream the run's logs, if requested.
    if log:
        ctx.obj = {"project": "{}/{}".format(owner, project_name)}
        ctx.invoke(run_logs)
|
gregmbi/polyaxon | core/polyaxon/constants.py | <filename>core/polyaxon/constants.py
# Sentinel string used when a value/status cannot be determined.
UNKNOWN = "unknown"
# Generic default identifier; presumably the default entity name — confirm at call sites.
DEFAULT = "default"
|
gregmbi/polyaxon | sdks/python/http_client/v1/polyaxon_sdk/models/v1_io.py | <reponame>gregmbi/polyaxon<gh_stars>0
#!/usr/bin/python
#
# Copyright 2018-2020 Polyaxon, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# coding: utf-8
"""
Polyaxon SDKs and REST API specification.
Polyaxon SDKs and REST API specification. # noqa: E501
The version of the OpenAPI document: 1.0.79
Contact: <EMAIL>
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
from polyaxon_sdk.configuration import Configuration
class V1IO(object):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
openapi_types = {
"name": "str",
"description": "str",
"iotype": "str",
"value": "object",
"is_optional": "bool",
"is_list": "bool",
"is_flag": "bool",
"delay_validation": "bool",
"options": "list[object]",
}
attribute_map = {
"name": "name",
"description": "description",
"iotype": "iotype",
"value": "value",
"is_optional": "is_optional",
"is_list": "is_list",
"is_flag": "is_flag",
"delay_validation": "delay_validation",
"options": "options",
}
    def __init__(
        self,
        name=None,
        description=None,
        iotype=None,
        value=None,
        is_optional=None,
        is_list=None,
        is_flag=None,
        delay_validation=None,
        options=None,
        local_vars_configuration=None,
    ):  # noqa: E501
        """V1IO - a model defined in OpenAPI"""  # noqa: E501
        # Fall back to a fresh client Configuration when none is injected.
        if local_vars_configuration is None:
            local_vars_configuration = Configuration()
        self.local_vars_configuration = local_vars_configuration
        # Backing fields for the generated properties below.
        self._name = None
        self._description = None
        self._iotype = None
        self._value = None
        self._is_optional = None
        self._is_list = None
        self._is_flag = None
        self._delay_validation = None
        self._options = None
        self.discriminator = None
        # Assign only explicitly provided values; omitted fields stay None.
        if name is not None:
            self.name = name
        if description is not None:
            self.description = description
        if iotype is not None:
            self.iotype = iotype
        if value is not None:
            self.value = value
        if is_optional is not None:
            self.is_optional = is_optional
        if is_list is not None:
            self.is_list = is_list
        if is_flag is not None:
            self.is_flag = is_flag
        if delay_validation is not None:
            self.delay_validation = delay_validation
        if options is not None:
            self.options = options
@property
def name(self):
"""Gets the name of this V1IO. # noqa: E501
:return: The name of this V1IO. # noqa: E501
:rtype: str
"""
return self._name
@name.setter
def name(self, name):
"""Sets the name of this V1IO.
:param name: The name of this V1IO. # noqa: E501
:type: str
"""
self._name = name
@property
def description(self):
"""Gets the description of this V1IO. # noqa: E501
:return: The description of this V1IO. # noqa: E501
:rtype: str
"""
return self._description
@description.setter
def description(self, description):
"""Sets the description of this V1IO.
:param description: The description of this V1IO. # noqa: E501
:type: str
"""
self._description = description
@property
def iotype(self):
"""Gets the iotype of this V1IO. # noqa: E501
:return: The iotype of this V1IO. # noqa: E501
:rtype: str
"""
return self._iotype
@iotype.setter
def iotype(self, iotype):
"""Sets the iotype of this V1IO.
:param iotype: The iotype of this V1IO. # noqa: E501
:type: str
"""
self._iotype = iotype
@property
def value(self):
"""Gets the value of this V1IO. # noqa: E501
:return: The value of this V1IO. # noqa: E501
:rtype: object
"""
return self._value
@value.setter
def value(self, value):
"""Sets the value of this V1IO.
:param value: The value of this V1IO. # noqa: E501
:type: object
"""
self._value = value
@property
def is_optional(self):
"""Gets the is_optional of this V1IO. # noqa: E501
:return: The is_optional of this V1IO. # noqa: E501
:rtype: bool
"""
return self._is_optional
@is_optional.setter
def is_optional(self, is_optional):
"""Sets the is_optional of this V1IO.
:param is_optional: The is_optional of this V1IO. # noqa: E501
:type: bool
"""
self._is_optional = is_optional
@property
def is_list(self):
"""Gets the is_list of this V1IO. # noqa: E501
:return: The is_list of this V1IO. # noqa: E501
:rtype: bool
"""
return self._is_list
@is_list.setter
def is_list(self, is_list):
"""Sets the is_list of this V1IO.
:param is_list: The is_list of this V1IO. # noqa: E501
:type: bool
"""
self._is_list = is_list
@property
def is_flag(self):
"""Gets the is_flag of this V1IO. # noqa: E501
:return: The is_flag of this V1IO. # noqa: E501
:rtype: bool
"""
return self._is_flag
@is_flag.setter
def is_flag(self, is_flag):
"""Sets the is_flag of this V1IO.
:param is_flag: The is_flag of this V1IO. # noqa: E501
:type: bool
"""
self._is_flag = is_flag
@property
def delay_validation(self):
"""Gets the delay_validation of this V1IO. # noqa: E501
:return: The delay_validation of this V1IO. # noqa: E501
:rtype: bool
"""
return self._delay_validation
@delay_validation.setter
def delay_validation(self, delay_validation):
"""Sets the delay_validation of this V1IO.
:param delay_validation: The delay_validation of this V1IO. # noqa: E501
:type: bool
"""
self._delay_validation = delay_validation
@property
def options(self):
"""Gets the options of this V1IO. # noqa: E501
:return: The options of this V1IO. # noqa: E501
:rtype: list[object]
"""
return self._options
@options.setter
def options(self, options):
"""Sets the options of this V1IO.
:param options: The options of this V1IO. # noqa: E501
:type: list[object]
"""
self._options = options
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(
map(lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value)
)
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(
map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict")
else item,
value.items(),
)
)
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, V1IO):
return False
return self.to_dict() == other.to_dict()
def __ne__(self, other):
"""Returns true if both objects are not equal"""
if not isinstance(other, V1IO):
return True
return self.to_dict() != other.to_dict()
|
gregmbi/polyaxon | core/polyaxon/main.py | <gh_stars>0
#!/usr/bin/python
#
# Copyright 2018-2020 Polyaxon, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import click
import click_completion
from marshmallow import ValidationError
from polyaxon import settings
from polyaxon.cli.admin import admin
from polyaxon.cli.auth import login, logout, whoami
from polyaxon.cli.check import check
from polyaxon.cli.completion import completion
from polyaxon.cli.config import config
from polyaxon.cli.dashboard import dashboard
from polyaxon.cli.init import init
from polyaxon.cli.operations import ops
from polyaxon.cli.projects import project
from polyaxon.cli.run import run
from polyaxon.cli.upload import upload
from polyaxon.cli.version import check_cli_version, upgrade, version
from polyaxon.logger import clean_outputs, configure_logger
from polyaxon.managers.client import ClientConfigManager
click_completion.init()
@click.group()
@click.option(
    "-v", "--verbose", is_flag=True, default=False, help="Turn on debug logging"
)
@click.option(
    "--offline",
    is_flag=True,
    default=False,
    help="Run command in offline mode if supported. "
    "Currently used for run command in --local mode.",
)
@click.pass_context
@clean_outputs
def cli(context, verbose, offline):
    """ Polyaxon CLI tool to:
    * Parse, Validate, and Check Polyaxonfiles.
    * Interact with Polyaxon server.
    * Run and Monitor experiments.
    Check the help available for each command listed below.
    """
    # Configure logging first; a corrupted client config falls back to a purge.
    try:
        configure_logger(verbose or ClientConfigManager.get_value("debug"))
    except ValidationError:
        ClientConfigManager.purge()

    # Sub-commands that never trigger a CLI version check against the API.
    version_check_exempt = {
        "completion",
        "config",
        "version",
        "login",
        "logout",
        "deploy",
        "admin",
        "teardown",
        "docker",
        "initializer",
        "sidecar",
        "proxy",
        "notify",
    }

    context.obj = context.obj or {}
    if not settings.CLIENT_CONFIG.client_header:
        settings.CLIENT_CONFIG.set_cli_header()
    context.obj["offline"] = offline
    if offline:
        # Propagate offline mode to child processes and the in-memory config.
        os.environ["POLYAXON_IS_OFFLINE"] = "true"
        settings.CLIENT_CONFIG.is_offline = True

    # Guard clauses instead of one negated compound condition.
    if context.invoked_subcommand in version_check_exempt:
        return
    if offline or settings.CLIENT_CONFIG.no_api:
        return
    check_cli_version()
# Register the core command set on the root group (order preserved).
for _command in (
    login,
    logout,
    whoami,
    upgrade,
    version,
    config,
    check,
    init,
    project,
    ops,
    upload,
    run,
    dashboard,
    admin,
    completion,
):
    cli.add_command(_command)

if settings.CLIENT_CONFIG.is_ops:
    # Ops-only components are imported lazily to keep the default CLI light.
    from polyaxon.cli.components.agent import agent
    from polyaxon.cli.components.docker import docker
    from polyaxon.cli.components.initializer import initializer
    from polyaxon.cli.components.notifier import notify
    from polyaxon.cli.components.proxies import proxy
    from polyaxon.cli.components.sidecar import sidecar

    for _command in (agent, docker, initializer, sidecar, proxy, notify):
        cli.add_command(_command)
|
gregmbi/polyaxon | core/polyaxon/polyflow/run/kinds.py | <reponame>gregmbi/polyaxon
#!/usr/bin/python
#
# Copyright 2018-2020 Polyaxon, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import polyaxon_sdk
class V1RunKind(polyaxon_sdk.V1RunKind):
    """Run-kind enum wrapper exposing Django-style CHOICES and a VALUES set."""

    # Single source of truth for the supported kinds; order drives CHOICES.
    _ALL_KINDS = (
        polyaxon_sdk.V1RunKind.JOB,
        polyaxon_sdk.V1RunKind.SERVICE,
        polyaxon_sdk.V1RunKind.DAG,
        polyaxon_sdk.V1RunKind.PARALLEL,
        polyaxon_sdk.V1RunKind.SPARK,
        polyaxon_sdk.V1RunKind.DASK,
        polyaxon_sdk.V1RunKind.FLINK,
        polyaxon_sdk.V1RunKind.RAY,
        polyaxon_sdk.V1RunKind.MPIJOB,
        polyaxon_sdk.V1RunKind.TFJOB,
        polyaxon_sdk.V1RunKind.PYTORCHJOB,
        polyaxon_sdk.V1RunKind.SCHEDULER,
        polyaxon_sdk.V1RunKind.TUNER,
        polyaxon_sdk.V1RunKind.WATCHDOG,
        polyaxon_sdk.V1RunKind.NOTIFIER,
    )
    # Each choice pairs the value with itself (value doubles as its label).
    CHOICES = tuple((kind, kind) for kind in _ALL_KINDS)
    VALUES = set(_ALL_KINDS)
class V1CloningKind(polyaxon_sdk.V1CloningKind):
    """Cloning-kind enum wrapper exposing CHOICES pairs and a VALUES set."""

    _ALL_KINDS = (
        polyaxon_sdk.V1CloningKind.COPY,
        polyaxon_sdk.V1CloningKind.RESTART,
        polyaxon_sdk.V1CloningKind.CACHE,
        polyaxon_sdk.V1CloningKind.SCHEDULE,
    )
    CHOICES = tuple((kind, kind) for kind in _ALL_KINDS)
    VALUES = set(_ALL_KINDS)
class V1PipelineKind(polyaxon_sdk.V1PipelineKind):
    """Pipeline-kind enum wrapper exposing CHOICES pairs and a VALUES set."""

    _ALL_KINDS = (
        polyaxon_sdk.V1PipelineKind.DAG,
        polyaxon_sdk.V1PipelineKind.PARALLEL,
    )
    CHOICES = tuple((kind, kind) for kind in _ALL_KINDS)
    VALUES = set(_ALL_KINDS)
|
gregmbi/polyaxon | core/polyaxon/streams/tasks/notification.py | <reponame>gregmbi/polyaxon
#!/usr/bin/python
#
# Copyright 2018-2020 Polyaxon, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import List
import ujson
from polyaxon import settings, types
from polyaxon.agents.spawners.async_spawner import AsyncSpawner
from polyaxon.containers.containers import get_default_notification_container
from polyaxon.lifecycle import V1StatusCondition
from polyaxon.logger import logger
from polyaxon.polyaxonfile import OperationSpecification
from polyaxon.polyflow import V1IO, V1Component, V1Operation, V1Plugins, V1Termination
from polyaxon.polyflow.run import V1Notifier
from polyaxon.polypod import compiler
async def notify_run(
    namespace: str,
    owner: str,
    project: str,
    run_uuid: str,
    run_name: str,
    condition: V1StatusCondition,
    connections: List[str],
):
    """Fan out a run status notification to every requested connection.

    For each connection name, resolves the agent's notification connection
    config, compiles a one-off notifier operation, and spawns it in the
    given namespace. Unknown connection names are logged and skipped.

    Args:
        namespace: k8s namespace to spawn the notifier jobs in.
        owner/project/run_uuid/run_name: identity of the notifying run.
        condition: status condition to forward (serialized into params).
        connections: names of notification connections to notify.
    """
    spawner = AsyncSpawner(namespace=namespace)
    await spawner.k8s_manager.setup()
    for connection in connections:
        connection_type = settings.AGENT_CONFIG.notification_connections_by_names.get(
            connection
        )
        if not connection_type:
            # BUG FIX: report the requested connection *name*; the previous
            # code formatted `connection_type`, which is None on this branch,
            # producing a useless "connection None" message.
            logger.warning(
                "Could not create notification using connection {}, "
                "the connection was not found or not set correctly.".format(
                    connection
                )
            )
            continue
        operation = V1Operation(
            params={
                "kind": connection_type.kind,
                "owner": owner,
                "project": project,
                "run_uuid": run_uuid,
                "run_name": run_name,
                "condition": ujson.dumps(condition.to_dict()),
            },
            termination=V1Termination(max_retries=3),
            component=V1Component(
                # NOTE(review): name is "slack-notification" for every
                # connection kind — presumably historical; confirm intent.
                name="slack-notification",
                plugins=V1Plugins(
                    auth=False,
                    collect_logs=False,
                    collect_artifacts=False,
                    collect_resources=False,
                    sync_statuses=False,
                ),
                inputs=[
                    V1IO(name="kind", iotype=types.STR, is_optional=False),
                    V1IO(name="owner", iotype=types.STR, is_optional=False),
                    V1IO(name="project", iotype=types.STR, is_optional=False),
                    V1IO(name="run_uuid", iotype=types.STR, is_optional=False),
                    V1IO(name="run_name", iotype=types.STR, is_optional=True),
                    V1IO(name="condition", iotype=types.STR, is_optional=True),
                    V1IO(name="connection", iotype=types.STR, is_optional=True),
                ],
                run=V1Notifier(
                    connections=[connection],
                    container=get_default_notification_container(),
                ),
            ),
        )
        compiled_operation = OperationSpecification.compile_operation(operation)
        resource = compiler.make(
            owner_name=owner,
            project_name=project,
            project_uuid=project,
            run_uuid=run_uuid,
            run_name=run_name,
            run_path=run_uuid,
            compiled_operation=compiled_operation,
            params=operation.params,
        )
        await spawner.create(
            run_uuid=run_uuid,
            run_kind=compiled_operation.get_run_kind(),
            resource=resource,
        )
|
gregmbi/polyaxon | examples/in_cluster/save_resume_restart/pytorch_save_resume/utils.py | from __future__ import division
from __future__ import print_function
import logging
import torch
# Polyaxon
from polyaxon_client.tracking import get_outputs_path
logging.basicConfig(level=logging.INFO)
def get_weight_filename():
    """Build the checkpoint file path under the Polyaxon-managed outputs dir."""
    # Polyaxon resolves the experiment's outputs location at runtime.
    return "/".join((get_outputs_path(), "checkpoint.pth.tar"))
def set_seed(seed, cuda):
    """Seed torch's RNGs for reproducibility.

    Always seeds the CPU generator; seeds the CUDA generator only when
    ``cuda`` is truthy.
    """
    # Seed for reproducibility
    torch.manual_seed(seed)
    if cuda:
        # Only touch the CUDA generator when a GPU run was requested.
        torch.cuda.manual_seed(seed)
|
gregmbi/polyaxon | core/polyaxon/deploy/schemas/service.py | <filename>core/polyaxon/deploy/schemas/service.py<gh_stars>0
#!/usr/bin/python
#
# Copyright 2018-2020 Polyaxon, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from marshmallow import EXCLUDE, fields
from polyaxon.deploy.schemas.celery import CelerySchema
from polyaxon.k8s import k8s_schemas
from polyaxon.schemas.base import BaseCamelSchema, BaseConfig
from polyaxon.schemas.fields.swagger import SwaggerField
class ServiceSchema(BaseCamelSchema):
    # Base deploy schema for a single Polyaxon service; field declarations
    # define the accepted (camelCased) config keys.
    enabled = fields.Bool(allow_none=True)
    image = fields.Str(allow_none=True)
    image_tag = fields.Str(allow_none=True)
    image_pull_policy = fields.Str(allow_none=True)
    replicas = fields.Int(allow_none=True)
    concurrency = fields.Int(allow_none=True)
    # Raw k8s resource requirements, validated against the swagger class.
    resources = SwaggerField(cls=k8s_schemas.V1ResourceRequirements, allow_none=True)

    class Meta:
        # Ignore unknown keys instead of failing validation.
        unknown = EXCLUDE

    @staticmethod
    def schema_config():
        # Config class this schema deserializes into.
        return V1Service
class V1Service(BaseConfig):
    """Config object backing ServiceSchema; holds one service's deploy options."""

    SCHEMA = ServiceSchema
    # camelCase attribute names dropped from serialization when unset.
    REDUCED_ATTRIBUTES = [
        "enabled",
        "image",
        "imageTag",
        "imagePullPolicy",
        "replicas",
        "concurrency",
        "resources",
    ]

    def __init__(
        self,
        enabled=None,
        image=None,
        image_tag=None,
        image_pull_policy=None,
        replicas=None,
        concurrency=None,
        resources=None,
    ):
        self.enabled = enabled
        self.image = image
        self.image_tag = image_tag
        self.image_pull_policy = image_pull_policy
        self.replicas = replicas
        self.concurrency = concurrency
        self.resources = resources
class WorkerServiceSchema(ServiceSchema):
    # Worker service schema: extends the base service with celery settings.
    celery = fields.Nested(CelerySchema, allow_none=True)

    @staticmethod
    def schema_config():
        return WorkerServiceConfig
class WorkerServiceConfig(V1Service):
    """Deploy config for the worker service; adds celery tuning options."""

    SCHEMA = WorkerServiceSchema
    REDUCED_ATTRIBUTES = V1Service.REDUCED_ATTRIBUTES + ["celery"]

    def __init__(
        self,
        enabled=None,
        image=None,
        image_tag=None,
        image_pull_policy=None,
        replicas=None,
        concurrency=None,
        resources=None,
        celery=None,
    ):
        super().__init__(
            enabled=enabled,
            image=image,
            image_tag=image_tag,
            image_pull_policy=image_pull_policy,
            replicas=replicas,
            concurrency=concurrency,
            resources=resources,
        )
        self.celery = celery
class HelperServiceSchema(ServiceSchema):
    # NOTE(review): these field names are declared camelCase directly, unlike
    # sibling schemas that rely on BaseCamelSchema's snake->camel conversion —
    # confirm this is intentional.
    sleepInterval = fields.Int(allow_none=True)
    syncInterval = fields.Int(allow_none=True)

    @staticmethod
    def schema_config():
        return HelperServiceConfig
class HelperServiceConfig(V1Service):
    """Deploy config for the helper service; adds sleep/sync interval knobs."""

    SCHEMA = HelperServiceSchema
    REDUCED_ATTRIBUTES = V1Service.REDUCED_ATTRIBUTES + [
        "sleepInterval",
        "syncInterval",
    ]

    def __init__(
        self,
        enabled=None,
        image=None,
        image_tag=None,
        image_pull_policy=None,
        replicas=None,
        concurrency=None,
        resources=None,
        sleep_interval=None,
        sync_interval=None,
    ):
        super().__init__(
            enabled=enabled,
            image=image,
            image_tag=image_tag,
            image_pull_policy=image_pull_policy,
            replicas=replicas,
            concurrency=concurrency,
            resources=resources,
        )
        self.sleep_interval = sleep_interval
        self.sync_interval = sync_interval
class AgentServiceSchema(ServiceSchema):
    # Agent service schema: adds the agent's instance identity and token.
    instance = fields.String(allow_none=True)
    token = fields.String(allow_none=True)

    @staticmethod
    def schema_config():
        return AgentServiceConfig
class AgentServiceConfig(V1Service):
    """Deploy config for the agent service; adds instance identity and token."""

    SCHEMA = AgentServiceSchema
    REDUCED_ATTRIBUTES = V1Service.REDUCED_ATTRIBUTES + ["instance", "token"]

    def __init__(
        self,
        enabled=None,
        image=None,
        image_tag=None,
        image_pull_policy=None,
        replicas=None,
        concurrency=None,
        resources=None,
        instance=None,
        token=None,
    ):
        super().__init__(
            enabled=enabled,
            image=image,
            image_tag=image_tag,
            image_pull_policy=image_pull_policy,
            replicas=replicas,
            concurrency=concurrency,
            resources=resources,
        )
        self.instance = instance
        self.token = token
class ApiServiceSchema(ServiceSchema):
    # API service schema: adds a free-form k8s service spec dict.
    service = fields.Dict(allow_none=True)

    @staticmethod
    def schema_config():
        return ApiServiceConfig
class ApiServiceConfig(V1Service):
    """Deploy config for the API service; adds a k8s service spec dict."""

    # NOTE(review): unlike the sibling configs, "service" is not appended to
    # REDUCED_ATTRIBUTES here — confirm whether that omission is intentional.
    SCHEMA = ApiServiceSchema

    def __init__(
        self,
        enabled=None,
        image=None,
        image_tag=None,
        image_pull_policy=None,
        replicas=None,
        concurrency=None,
        resources=None,
        service=None,
    ):
        super().__init__(
            enabled=enabled,
            image=image,
            image_tag=image_tag,
            image_pull_policy=image_pull_policy,
            replicas=replicas,
            concurrency=concurrency,
            resources=resources,
        )
        self.service = service
class HooksSchema(ServiceSchema):
    # Hooks schema: adds a flag controlling whether fixtures are loaded.
    load_fixtures = fields.Bool(allow_none=True)

    @staticmethod
    def schema_config():
        return HooksConfig
class HooksConfig(V1Service):
    """Deploy config for hooks; adds the load_fixtures toggle."""

    SCHEMA = HooksSchema
    REDUCED_ATTRIBUTES = V1Service.REDUCED_ATTRIBUTES + ["loadFixtures"]

    def __init__(
        self,
        enabled=None,
        image=None,
        image_tag=None,
        image_pull_policy=None,
        replicas=None,
        concurrency=None,
        resources=None,
        load_fixtures=None,
    ):
        super().__init__(
            enabled=enabled,
            image=image,
            image_tag=image_tag,
            image_pull_policy=image_pull_policy,
            replicas=replicas,
            concurrency=concurrency,
            resources=resources,
        )
        self.load_fixtures = load_fixtures
class ThirdPartyServiceSchema(ServiceSchema):
    # Schema for bundled third-party services; adds persistence/scheduling keys.
    enabled = fields.Bool(allow_none=True)
    persistence = fields.Dict(allow_none=True)
    node_selector = fields.Dict(allow_none=True)
    affinity = fields.Dict(allow_none=True)
    tolerations = fields.List(fields.Dict(allow_none=True), allow_none=True)

    @staticmethod
    def schema_config():
        return ThirdPartyV1Service
class ThirdPartyV1Service(V1Service):
    """Base deploy config for bundled third-party services (postgres, redis, ...)."""

    SCHEMA = ThirdPartyServiceSchema
    REDUCED_ATTRIBUTES = [
        "enabled",
        "image",
        "imageTag",
        "imagePullPolicy",
        "replicas",
        "concurrency",
        "resources",
        "persistence",
        "nodeSelector",
        "affinity",
        "tolerations",
    ]

    def __init__(
        self,
        enabled=None,
        image=None,
        image_tag=None,
        image_pull_policy=None,
        replicas=None,
        resources=None,
        persistence=None,
        node_selector=None,
        affinity=None,
        tolerations=None,
        # BUG FIX: `concurrency` is listed in REDUCED_ATTRIBUTES and supported
        # by V1Service, but was never accepted here, so it could never be set
        # on third-party services. Appended last so existing positional
        # callers keep working.
        concurrency=None,
    ):
        super().__init__(
            image=image,
            image_tag=image_tag,
            image_pull_policy=image_pull_policy,
            replicas=replicas,
            concurrency=concurrency,
            resources=resources,
        )
        self.enabled = enabled
        self.persistence = persistence
        self.node_selector = node_selector
        self.affinity = affinity
        self.tolerations = tolerations
class PostgresqlSchema(ThirdPartyServiceSchema):
    # PostgreSQL schema: adds credentials, database name, and connection aging.
    postgres_user = fields.Str(allow_none=True)
    postgres_password = fields.Str(allow_none=True)
    postgres_database = fields.Str(allow_none=True)
    conn_max_age = fields.Int(allow_none=True)

    @staticmethod
    def schema_config():
        return PostgresqlConfig
class PostgresqlConfig(ThirdPartyV1Service):
    """Deploy config for the bundled PostgreSQL service."""

    SCHEMA = PostgresqlSchema
    # BUG FIX: the password entry had been replaced by a "<PASSWORD>"
    # placeholder (a syntax-breaking anonymization artifact); restored to the
    # camelCase name matching the `postgres_password` attribute.
    REDUCED_ATTRIBUTES = ThirdPartyV1Service.REDUCED_ATTRIBUTES + [
        "postgresUser",
        "postgresPassword",
        "postgresDatabase",
        "connMaxAge",
    ]

    def __init__(
        self,
        enabled=None,
        postgres_user=None,
        postgres_password=None,  # restored from "<PASSWORD>" placeholder
        postgres_database=None,
        conn_max_age=None,
        image=None,
        image_tag=None,
        image_pull_policy=None,
        replicas=None,
        resources=None,
        persistence=None,
        node_selector=None,
        affinity=None,
        tolerations=None,
    ):
        super().__init__(
            enabled=enabled,
            image=image,
            image_tag=image_tag,
            image_pull_policy=image_pull_policy,
            replicas=replicas,
            resources=resources,
            persistence=persistence,
            node_selector=node_selector,
            affinity=affinity,
            tolerations=tolerations,
        )
        self.postgres_user = postgres_user
        self.postgres_password = postgres_password
        self.postgres_database = postgres_database
        self.conn_max_age = conn_max_age
class RedisSchema(ThirdPartyServiceSchema):
    # Redis schema: image is Raw (not Str) — presumably to allow structured
    # image specs; confirm against the chart values.
    image = fields.Raw(allow_none=True)
    use_password = fields.Bool(allow_none=True)
    password = fields.Str(allow_none=True)

    @staticmethod
    def schema_config():
        return RedisConfig
class RedisConfig(ThirdPartyV1Service):
    """Deploy config for the bundled Redis service."""

    SCHEMA = RedisSchema
    REDUCED_ATTRIBUTES = ThirdPartyV1Service.REDUCED_ATTRIBUTES + [
        "usePassword",
        "password",
    ]

    def __init__(
        self,
        enabled=None,
        use_password=None,
        password=None,  # BUG FIX: restored from "<PASSWORD>" placeholder
        image=None,
        image_tag=None,
        image_pull_policy=None,
        replicas=None,
        resources=None,
        persistence=None,
        node_selector=None,
        affinity=None,
        tolerations=None,
    ):
        super().__init__(
            enabled=enabled,
            image=image,
            image_tag=image_tag,
            image_pull_policy=image_pull_policy,
            replicas=replicas,
            resources=resources,
            persistence=persistence,
            node_selector=node_selector,
            affinity=affinity,
            tolerations=tolerations,
        )
        # BUG FIX: use_password was assigned from a corrupted placeholder
        # token instead of its own parameter.
        self.use_password = use_password
        self.password = password
class RabbitmqSchema(ThirdPartyServiceSchema):
    # RabbitMQ schema: adds broker credentials.
    rabbitmq_username = fields.Str(allow_none=True)
    rabbitmq_password = fields.Str(allow_none=True)

    @staticmethod
    def schema_config():
        return RabbitmqConfig
class RabbitmqConfig(ThirdPartyV1Service):
    """Deploy config for the bundled RabbitMQ service."""

    SCHEMA = RabbitmqSchema
    REDUCED_ATTRIBUTES = ThirdPartyV1Service.REDUCED_ATTRIBUTES + [
        "rabbitmqUsername",
        "rabbitmqPassword",
    ]

    def __init__(
        self,
        enabled=None,
        rabbitmq_username=None,
        rabbitmq_password=None,  # BUG FIX: restored from "<PASSWORD>" placeholder
        image=None,
        image_tag=None,
        image_pull_policy=None,
        replicas=None,
        resources=None,
        persistence=None,
        node_selector=None,
        affinity=None,
        tolerations=None,
    ):
        super().__init__(
            enabled=enabled,
            image=image,
            image_tag=image_tag,
            image_pull_policy=image_pull_policy,
            replicas=replicas,
            resources=resources,
            persistence=persistence,
            node_selector=node_selector,
            affinity=affinity,
            tolerations=tolerations,
        )
        self.rabbitmq_username = rabbitmq_username
        # BUG FIX: assignment was corrupted by a placeholder token.
        self.rabbitmq_password = rabbitmq_password
class DockerRegistrySchema(ThirdPartyServiceSchema):
    # Docker registry schema: adds registry credentials.
    registry_user = fields.Str(allow_none=True)
    registry_password = fields.Str(allow_none=True)

    @staticmethod
    def schema_config():
        return DockerRegistryConfig
class DockerRegistryConfig(ThirdPartyV1Service):
    """Deploy config for the bundled Docker registry service."""

    SCHEMA = DockerRegistrySchema
    REDUCED_ATTRIBUTES = ThirdPartyV1Service.REDUCED_ATTRIBUTES + [
        "registryUser",
        "registryPassword",
    ]

    def __init__(
        self,
        enabled=None,
        registry_user=None,
        registry_password=None,  # BUG FIX: restored from "<PASSWORD>" placeholder
        image=None,
        image_tag=None,
        image_pull_policy=None,
        replicas=None,
        resources=None,
        persistence=None,
        node_selector=None,
        affinity=None,
        tolerations=None,
    ):
        super().__init__(
            enabled=enabled,
            image=image,
            image_tag=image_tag,
            image_pull_policy=image_pull_policy,
            replicas=replicas,
            resources=resources,
            persistence=persistence,
            node_selector=node_selector,
            affinity=affinity,
            tolerations=tolerations,
        )
        self.registry_user = registry_user
        # BUG FIX: assignment was corrupted by a placeholder token.
        self.registry_password = registry_password
class ExternalServiceSchema(BaseCamelSchema):
    # Connection settings for an externally-managed service (host/port/creds).
    user = fields.Str(allow_none=True)
    password = fields.Str(allow_none=True)
    host = fields.Str(allow_none=True)
    port = fields.Int(allow_none=True)
    database = fields.Str(allow_none=True)
    use_password = fields.Bool(allow_none=True)
    conn_max_age = fields.Int(allow_none=True)

    @staticmethod
    def schema_config():
        return ExternalV1Service
class ExternalV1Service(BaseConfig):
    """Connection settings for a service managed outside the deployment."""

    SCHEMA = ExternalServiceSchema
    REDUCED_ATTRIBUTES = [
        "user",
        "password",
        "host",
        "port",
        "database",
        "usePassword",
        "connMaxAge",
    ]

    def __init__(
        self,
        user=None,
        password=None,  # BUG FIX: restored from "<PASSWORD>" placeholder
        host=None,
        port=None,
        database=None,
        use_password=None,
        conn_max_age=None,
    ):
        self.user = user
        self.password = password
        self.host = host
        self.port = port
        self.database = database
        # BUG FIX: use_password was assigned from a corrupted placeholder
        # token instead of its own parameter.
        self.use_password = use_password
        self.conn_max_age = conn_max_age
class ExternalServicesSchema(BaseCamelSchema):
    # Groups the supported external service connection blocks.
    redis = fields.Nested(ExternalServiceSchema, allow_none=True)
    rabbitmq = fields.Nested(ExternalServiceSchema, allow_none=True)
    postgresql = fields.Nested(ExternalServiceSchema, allow_none=True)

    @staticmethod
    def schema_config():
        return ExternalServicesConfig
class ExternalServicesConfig(BaseConfig):
    """Groups optional externally-managed services (redis, rabbitmq, postgresql)."""

    SCHEMA = ExternalServicesSchema
    REDUCED_ATTRIBUTES = ["redis", "rabbitmq", "postgresql"]

    def __init__(self, redis=None, rabbitmq=None, postgresql=None):
        self.redis = redis
        self.rabbitmq = rabbitmq
        self.postgresql = postgresql
|
gregmbi/polyaxon | core/polyaxon/polyflow/init/init_container.py | #!/usr/bin/python
#
# Copyright 2018-2020 Polyaxon, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import random
import polyaxon_sdk
from marshmallow import ValidationError, fields, validates_schema
from polyaxon.containers.names import INIT_CONTAINER
from polyaxon.k8s import k8s_schemas
from polyaxon.schemas.base import BaseCamelSchema, BaseConfig
from polyaxon.schemas.fields.swagger import SwaggerField
from polyaxon.schemas.types import (
ArtifactsTypeSchema,
DockerfileTypeSchema,
GitTypeSchema,
)
class InitSchema(BaseCamelSchema):
    """Schema for one init step: at most one of artifacts/git/dockerfile."""

    artifacts = fields.Nested(ArtifactsTypeSchema, allow_none=True)
    git = fields.Nested(GitTypeSchema, allow_none=True)
    dockerfile = fields.Nested(DockerfileTypeSchema, allow_none=True)
    connection = fields.Str(allow_none=True)
    path = fields.Str(allow_none=True)
    # NOTE(review): the default container name is computed once at import
    # time, so every default init container shares the same random suffix —
    # confirm whether a per-instance name was intended.
    container = SwaggerField(
        cls=k8s_schemas.V1Container,
        defaults={"name": INIT_CONTAINER.format(random.randint(1, 100))},
        allow_none=True,
    )

    @staticmethod
    def schema_config():
        return V1Init

    @validates_schema
    def validate_init(self, data, **kwargs):
        """Reject configs with multiple sources or a url-less git without a connection."""
        artifacts = data.get("artifacts")
        git = data.get("git")
        dockerfile = data.get("dockerfile")
        connection = data.get("connection")
        # Count how many mutually exclusive sources were provided.
        schemas = sum(1 for source in (artifacts, git, dockerfile) if source)
        if schemas > 1:
            raise ValidationError("One of artifacts, git, or dockerfile can be set")
        if not connection and git and not git.url:
            # BUG FIX: the original message was garbled
            # ("...requires a connection is required to be passed.").
            raise ValidationError(
                "git field without a valid url requires a connection to be passed."
            )
class V1Init(BaseConfig, polyaxon_sdk.V1Init):
    """Config object for one init step of an operation."""

    IDENTIFIER = "init"
    SCHEMA = InitSchema
    REDUCED_ATTRIBUTES = [
        "artifacts",
        "git",
        "dockerfile",
        "connection",
        "path",
        "container",
    ]

    def has_connection(self):
        """Return True when any connection-backed init source is configured."""
        return bool(self.connection or self.git or self.dockerfile or self.artifacts)
|
gregmbi/polyaxon | core/polyaxon/connections/azure/azure_blobstore.py | <gh_stars>0
#!/usr/bin/python
#
# Copyright 2018-2020 Polyaxon, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
from concurrent import futures
from typing import Optional
from azure.common import AzureHttpError
from azure.storage.blob import BlockBlobService
from azure.storage.blob.models import BlobPrefix
from polyaxon.connections.azure.base import (
AzureService,
get_account_key,
get_account_name,
get_connection_string,
)
from polyaxon.connections.reader import get_connection_context_path
from polyaxon.exceptions import (
PolyaxonPathException,
PolyaxonSchemaError,
PolyaxonStoresException,
)
from polyaxon.parser import parser
from polyaxon.stores.base_store import StoreMixin
from polyaxon.utils.date_utils import file_modified_since
from polyaxon.utils.path_utils import (
append_basename,
check_dirname_exists,
get_files_in_path_context,
)
def get_blob_service_connection(
    account_name=None,
    account_key=None,
    connection_string=None,
    context_path: Optional[str] = None,
):
    """Build a BlockBlobService, filling missing credentials from the context.

    Explicit arguments win; anything not supplied is looked up in the
    connection context at ``context_path``.
    """
    resolved_name = account_name or get_account_name(context_path=context_path)
    resolved_key = account_key or get_account_key(context_path=context_path)
    resolved_conn_str = connection_string or get_connection_string(
        context_path=context_path
    )
    return BlockBlobService(
        account_name=resolved_name,
        account_key=resolved_key,
        connection_string=resolved_conn_str,
    )
class AzureBlobStoreService(AzureService, StoreMixin):
"""
Azure store Service.
"""
    @property
    def connection(self):
        # Lazily build the BlockBlobService on first access using the
        # credentials stashed on the instance; cached in self._connection.
        if self._connection is None:
            self.set_connection(
                account_name=self._account_name,
                account_key=self._account_key,
                connection_string=self._connection_string,
            )
        return self._connection
    def set_connection(
        self,
        connection=None,
        connection_type=None,
        account_key=None if False else None,  # see below
        account_name=None,
        connection_string=None,
    ):
        """
        Sets a new Blob service connection.
        Args:
            connection: unused in this implementation; kept for interface
                compatibility with sibling store services.
            connection_type: connection spec whose name locates the
                connection context holding credential files; falls back to
                `self._connection_type`.
            account_name: `str`. The storage account name.
            account_key: `str`. The storage account key.
            connection_string: `str`. If specified, this will override all other parameters besides
                request session.
        Returns:
            None. The created BlockBlobService is stored on `self._connection`.
        """
        connection_type = connection_type or self._connection_type
        connection_name = connection_type.name if connection_type else None
        context_path = get_connection_context_path(name=connection_name)
        self._connection = get_blob_service_connection(
            account_name=account_name,
            account_key=account_key,
            connection_string=connection_string,
            context_path=context_path,
        )
def set_env_vars(self):
if self._account_name:
os.environ["AZURE_ACCOUNT_NAME"] = self._account_name
if self._account_key:
os.environ["AZURE_ACCOUNT_KEY"] = self._account_key
if self._connection_string:
os.environ["AZURE_CONNECTION_STRING"] = self._connection_string
    @staticmethod
    def parse_wasbs_url(wasbs_url):
        """
        Parses and validates a wasbs url.
        Returns:
            tuple(container, storage_account, path).
        Raises:
            PolyaxonStoresException: if the url cannot be parsed.
        """
        try:
            spec = parser.parse_wasbs_path(wasbs_url)
            return spec.container, spec.storage_account, spec.path
        except PolyaxonSchemaError as e:
            # Re-raise schema errors under the stores exception hierarchy.
            raise PolyaxonStoresException("Connection error: %s" % e) from e
def check_blob(self, blob, container_name=None):
"""
Checks if a blob exists.
Args:
blob: `str`. Name of existing blob.
container_name: `str`. Name of existing container.
Returns:
bool
"""
if not container_name:
container_name, _, blob = self.parse_wasbs_url(blob)
try:
return self.connection.get_blob_properties(container_name, blob)
except AzureHttpError:
return None
def ls(self, path):
results = self.list(key=path)
return {"files": results["blobs"], "dirs": results["prefixes"]}
def list(self, key, container_name=None, path=None, delimiter="/", marker=None):
"""
Checks if a blob exists.
Args:
key: `str`. key prefix.
container_name: `str`. Name of existing container.
path: `str`. an extra path to append to the key.
delimiter: `str`. the delimiter marks key hierarchy.
marker: `str`. An opaque continuation token.
"""
if not container_name:
container_name, _, key = self.parse_wasbs_url(key)
if key and not key.endswith("/"):
key += "/"
prefix = key
if path:
prefix = os.path.join(prefix, path)
if prefix and not prefix.endswith("/"):
prefix += "/"
list_blobs = []
list_prefixes = []
while True:
results = self.connection.list_blobs(
container_name, prefix=prefix, delimiter=delimiter, marker=marker
)
for r in results:
if isinstance(r, BlobPrefix):
name = r.name[len(key) :]
list_prefixes.append(name)
else:
name = r.name[len(key) :]
list_blobs.append((name, r.properties.content_length))
if results.next_marker:
marker = results.next_marker
else:
break
return {"blobs": list_blobs, "prefixes": list_prefixes}
def upload_file(self, filename, blob, container_name=None, use_basename=True):
"""
Uploads a local file to Google Cloud Storage.
Args:
filename: `str`. the file to upload.
blob: `str`. blob to upload to.
container_name: `str`. the name of the container.
use_basename: `bool`. whether or not to use the basename of the filename.
"""
if not container_name:
container_name, _, blob = self.parse_wasbs_url(blob)
if use_basename:
blob = append_basename(blob, filename)
self.connection.create_blob_from_path(container_name, blob, filename)
def upload_dir(
self,
dirname,
blob,
container_name=None,
use_basename=True,
workers=0,
last_time=None,
):
"""
Uploads a local directory to to Google Cloud Storage.
Args:
dirname: `str`. name of the directory to upload.
blob: `str`. blob to upload to.
container_name: `str`. the name of the container.
use_basename: `bool`. whether or not to use the basename of the directory.
last_time: `datetime`. If provided will only upload the file if changed after last_time.
"""
if not container_name:
container_name, _, blob = self.parse_wasbs_url(blob)
if use_basename:
blob = append_basename(blob, dirname)
pool, future_results = self.init_pool(workers)
# Turn the path to absolute paths
dirname = os.path.abspath(dirname)
with get_files_in_path_context(dirname) as files:
for f in files:
# If last time is provided we check if we should re-upload the file
if last_time and not file_modified_since(
filepath=f, last_time=last_time
):
continue
file_blob = os.path.join(blob, os.path.relpath(f, dirname))
future_results = self.submit_pool(
workers=workers,
pool=pool,
future_results=future_results,
fn=self.upload_file,
filename=f,
blob=file_blob,
container_name=container_name,
use_basename=False,
)
if workers:
futures.wait(future_results)
self.close_pool(pool=pool)
def download_file(self, blob, local_path, container_name=None, use_basename=True):
"""
Downloads a file from Google Cloud Storage.
Args:
blob: `str`. blob to download.
local_path: `str`. the path to download to.
container_name: `str`. the name of the container.
use_basename: `bool`. whether or not to use the basename of the blob.
"""
if not container_name:
container_name, _, blob = self.parse_wasbs_url(blob)
local_path = os.path.abspath(local_path)
if use_basename:
local_path = append_basename(local_path, blob)
try:
check_dirname_exists(local_path)
except PolyaxonPathException as e:
raise PolyaxonStoresException("Connection error: %s" % e) from e
try:
self.connection.get_blob_to_path(container_name, blob, local_path)
except AzureHttpError as e:
raise PolyaxonStoresException("Connection error: %s" % e) from e
def download_dir(
self, blob, local_path, container_name=None, use_basename=True, workers=0
):
"""
Download a directory from Google Cloud Storage.
Args:
blob: `str`. blob to download.
local_path: `str`. the path to download to.
container_name: `str`. the name of the container.
use_basename: `bool`. whether or not to use the basename of the key.
workers: number of workers threads to use for parallel execution.
"""
if not container_name:
container_name, _, blob = self.parse_wasbs_url(blob)
local_path = os.path.abspath(local_path)
if use_basename:
local_path = append_basename(local_path, blob)
try:
check_dirname_exists(local_path, is_dir=True)
except PolyaxonPathException:
os.makedirs(local_path)
results = self.list(container_name=container_name, key=blob, delimiter="/")
# Create directories
for prefix in sorted(results["prefixes"]):
direname = os.path.join(local_path, prefix)
prefix = os.path.join(blob, prefix)
# Download files under
self.download_dir(
blob=prefix,
local_path=direname,
container_name=container_name,
use_basename=False,
)
pool, future_results = self.init_pool(workers)
# Download files
for file_key in results["blobs"]:
file_key = file_key[0]
filename = os.path.join(local_path, file_key)
file_key = os.path.join(blob, file_key)
future_results = self.submit_pool(
workers=workers,
pool=pool,
future_results=future_results,
fn=self.download_file,
blob=file_key,
local_path=filename,
container_name=container_name,
use_basename=False,
)
if workers:
futures.wait(future_results)
self.close_pool(pool=pool)
def delete(self, blob, container_name=None, workers=0):
if not container_name:
container_name, _, blob = self.parse_wasbs_url(blob)
results = self.list(container_name=container_name, key=blob, delimiter="/")
if not any([results["prefixes"], results["blobs"]]):
self.delete_file(blob=blob, container_name=container_name)
# Delete directories
for prefix in sorted(results["prefixes"]):
prefix = os.path.join(blob, prefix)
# Download files under
self.delete(blob=prefix, container_name=container_name)
pool, future_results = self.init_pool(workers)
# Delete files
for file_key in results["blobs"]:
file_key = file_key[0]
file_key = os.path.join(blob, file_key)
future_results = self.submit_pool(
workers=workers,
pool=pool,
future_results=future_results,
fn=self.delete_file,
blob=file_key,
container_name=container_name,
)
if workers:
futures.wait(future_results)
self.close_pool(pool=pool)
def delete_file(self, blob, container_name=None):
"""
Deletes if a blob exists.
Args:
blob: `str`. Name of existing blob.
container_name: `str`. Name of existing container.
"""
if not container_name:
container_name, _, blob = self.parse_wasbs_url(blob)
try:
self.connection.delete_blob(container_name, blob)
except AzureHttpError:
pass
|
gregmbi/polyaxon | core/polyaxon/cli/operations.py | #!/usr/bin/python
#
# Copyright 2018-2020 Polyaxon, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
import click
from polyaxon_sdk.rest import ApiException
from urllib3.exceptions import HTTPError
from polyaxon.cli.errors import handle_cli_error
from polyaxon.cli.upload import upload
from polyaxon.client import RunClient
from polyaxon.client.run import get_run_logs
from polyaxon.config_reader import reader
from polyaxon.env_vars.getters import get_project_or_local, get_project_run_or_local
from polyaxon.exceptions import PolyaxonClientException
from polyaxon.logger import clean_outputs
from polyaxon.managers.run import RunManager
from polyaxon.utils import cache
from polyaxon.utils.formatting import (
Printer,
dict_tabulate,
dict_to_tabulate,
get_meta_response,
get_runs_with_keys,
list_dicts_to_tabulate,
)
from polyaxon.utils.validation import validate_tags
def get_run_details(run):  # pylint:disable=redefined-outer-name
    """Print a run's description, inputs, outputs and general info tables."""
    if run.description:
        Printer.print_header("Run description:")
        click.echo("{}\n".format(run.description))
    # Inputs and outputs share the same header-then-table rendering.
    for header, data in (("Run inputs:", run.inputs), ("Run outputs:", run.outputs)):
        if data:
            Printer.print_header(header)
            dict_tabulate(data)
    colored = Printer.add_status_color(run.to_dict())
    info = dict_to_tabulate(
        colored,
        humanize_values=True,
        exclude_attrs=[
            "project",
            "description",
            "readme",
            "content",
            "inputs",
            "outputs",
            "is_managed",
        ],
    )
    Printer.print_header("Run info:")
    dict_tabulate(info)
@click.group()
@click.option(
    "--project", "-p", type=str, help="The project name, e.g. 'mnist' or 'adam/mnist'."
)
@click.option("--uid", "-uid", type=str, help="The run uuid.")
@click.pass_context
@clean_outputs
def ops(ctx, project, uid):
    """Commands for ops/runs."""
    # Share the group-level options with subcommands via the click context.
    ctx.obj = {} if not ctx.obj else ctx.obj
    ctx.obj["project"] = project
    # `ls` operates on a whole project, so it does not receive a run uuid.
    if ctx.invoked_subcommand not in ["ls"]:
        ctx.obj["run_uuid"] = uid
@ops.command()
@click.option(
    "--io",
    "-io",
    is_flag=True,
    help="List runs with their inputs/outputs (params, metrics, results, ...).",
)
@click.option(
    "--query", "-q", type=str, help="To filter the runs based on this query spec."
)
@click.option("--sort", "-s", type=str, help="To change order by of the runs.")
@click.option("--limit", type=int, help="To limit the list of runs.")
@click.option("--offset", type=int, help="To offset the list of runs.")
@click.pass_context
@clean_outputs
def ls(ctx, io, query, sort, limit, offset):
    """List runs for this project.
    Uses [Caching](/references/polyaxon-cli/#caching)
    Examples:
    Get all runs:
    \b
    ```bash
    $ polyaxon project runs
    ```
    Get all runs with with status {created or running}, and
    creation date between 2018-01-01 and 2018-01-02, and params activation equal to sigmoid
    and metric loss less or equal to 0.2
    \b
    ```bash
    $ polyaxon project runs \
    -q "status:created|running, started_at:2018-01-01..2018-01-02, \
    params.activation:sigmoid, metric.loss:<=0.2"
    ```
    Get all runs sorted by update date
    \b
    ```bash
    $ polyaxon project runs -s "-updated_at"
    ```
    """
    # Fix: the decorator stack previously applied @clean_outputs twice.
    owner, project_name = get_project_or_local(ctx.obj.get("project"), is_cli=True)
    try:
        polyaxon_client = RunClient(owner=owner, project=project_name)
        response = polyaxon_client.list(
            limit=limit, offset=offset, query=query, sort=sort
        )
    except (ApiException, HTTPError) as e:
        handle_cli_error(
            e, message="Could not get runs for project `{}`.".format(project_name)
        )
        sys.exit(1)
    meta = get_meta_response(response)
    if meta:
        # Fix: header previously said "Experiments", but this lists runs.
        Printer.print_header(
            "Runs for project `{}/{}`.".format(owner, project_name)
        )
        Printer.print_header("Navigation:")
        dict_tabulate(meta)
    else:
        Printer.print_header(
            "No runs found for project `{}/{}`.".format(owner, project_name)
        )
    objects = [Printer.add_status_color(o.to_dict()) for o in response.results]
    # Both branches share the same base exclusions; without --io the
    # inputs/outputs columns are excluded as well.
    exclude_attrs = [
        "owner",
        "project",
        "description",
        "content",
        "deleted",
        "readme",
    ]
    if io:
        objects = get_runs_with_keys(objects=objects, params_keys=["inputs", "outputs"])
    else:
        exclude_attrs += ["inputs", "outputs"]
    exclude_attrs.append("kind")
    objects = list_dicts_to_tabulate(objects, exclude_attrs=exclude_attrs)
    if objects:
        Printer.print_header("Runs:")
        objects.pop("project_name", None)
        dict_tabulate(objects, is_list_dict=True)
@ops.command()
@click.pass_context
@clean_outputs
def get(ctx):
    """Get run.
    Uses [Caching](/references/polyaxon-cli/#caching)
    Examples for getting a run:
    \b
    ```bash
    $ polyaxon runs get  # if run is cached
    ```
    \b
    ```bash
    $ polyaxon runs --uid=8aac02e3a62a4f0aaa257c59da5eab80 get  # project is cached
    ```
    \b
    ```bash
    $ polyaxon runs --project=cats-vs-dogs -id 8aac02e3a62a4f0aaa257c59da5eab80 get
    ```
    \b
    ```bash
    $ polyaxon runs -p alain/cats-vs-dogs --uid=8aac02e3a62a4f0aaa257c59da5eab80 get
    ```
    """
    owner, project_name, run_uuid = get_project_run_or_local(
        ctx.obj.get("project"), ctx.obj.get("run_uuid"), is_cli=True
    )
    try:
        run_client = RunClient(
            owner=owner, project=project_name, run_uuid=run_uuid
        )
        run_client.refresh_data()
        # Cache the freshly-fetched run locally for later commands.
        cache.cache(
            config_manager=RunManager,
            config=run_client.client.sanitize_for_serialization(run_client.run_data),
        )
    except (ApiException, HTTPError) as e:
        handle_cli_error(e, message="Could not load run `{}` info.".format(run_uuid))
        sys.exit(1)
    get_run_details(run_client.run_data)
@ops.command()
@click.pass_context
@clean_outputs
def delete(ctx):
    """Delete a run.
    Uses [Caching](/references/polyaxon-cli/#caching)
    Example:
    \b
    ```bash
    $ polyaxon runs delete
    ```
    \b
    ```bash
    $ polyaxon runs --uid=8aac02e3a62a4f0aaa257c59da5eab80 delete  # project is cached
    ```
    \b
    ```bash
    $ polyaxon runs --project=cats-vs-dogs -id 8aac02e3a62a4f0aaa257c59da5eab80 delete
    ```
    """
    owner, project_name, run_uuid = get_project_run_or_local(
        ctx.obj.get("project"), ctx.obj.get("run_uuid"), is_cli=True
    )
    # Fix: messages previously read "Are sure you", "Existing without",
    # and "was delete successfully".
    # NOTE(review): declining the prompt exits 1 here but 0 in `stop` —
    # confirm which exit code is intended.
    if not click.confirm("Are you sure you want to delete run `{}`".format(run_uuid)):
        click.echo("Exiting without deleting the run.")
        sys.exit(1)
    try:
        polyaxon_client = RunClient(
            owner=owner, project=project_name, run_uuid=run_uuid
        )
        polyaxon_client.delete()
        # Purge caching so stale run info is not reused.
        RunManager.purge()
    except (ApiException, HTTPError) as e:
        handle_cli_error(e, message="Could not delete run `{}`.".format(run_uuid))
        sys.exit(1)
    Printer.print_success("Run `{}` was deleted successfully".format(run_uuid))
@ops.command()
@click.option("--name", type=str, help="Name of the run (optional).")
@click.option("--description", type=str, help="Description of the run (optional).")
@click.option(
    "--tags", type=str, help="Tags of the run, comma separated values (optional)."
)
@click.pass_context
@clean_outputs
def update(ctx, name, description, tags):
    """Update run.
    Uses [Caching](/references/polyaxon-cli/#caching)
    Examples:
    \b
    ```bash
    $ polyaxon runs --uid=8aac02e3a62a4f0aaa257c59da5eab80 update
    --description="new description for my runs"
    ```
    \b
    ```bash
    $ polyaxon runs --project=cats-vs-dogs -id 8aac02e3a62a4f0aaa257c59da5eab80 update
    --tags="foo, bar" --name="unique-name"
    ```
    """
    owner, project_name, run_uuid = get_project_run_or_local(
        ctx.obj.get("project"), ctx.obj.get("run_uuid"), is_cli=True
    )
    tags = validate_tags(tags)
    # Collect only the fields the user actually provided.
    update_dict = {
        key: value
        for key, value in (("name", name), ("description", description), ("tags", tags))
        if value
    }
    if not update_dict:
        Printer.print_warning("No argument was provided to update the run.")
        sys.exit(0)
    try:
        run_client = RunClient(owner=owner, project=project_name, run_uuid=run_uuid)
        response = run_client.update(update_dict)
    except (ApiException, HTTPError) as e:
        handle_cli_error(e, message="Could not update run `{}`.".format(run_uuid))
        sys.exit(1)
    Printer.print_success("Run updated.")
    get_run_details(response)
@ops.command()
@click.option(
    "--yes",
    "-y",
    is_flag=True,
    default=False,
    help="Automatic yes to prompts. "
    'Assume "yes" as answer to all prompts and run non-interactively.',
)
@click.pass_context
@clean_outputs
def stop(ctx, yes):
    """Stop run.
    Uses [Caching](/references/polyaxon-cli/#caching)
    Examples:
    \b
    ```bash
    $ polyaxon runs stop
    ```
    \b
    ```bash
    $ polyaxon runs --uid=8aac02e3a62a4f0aaa257c59da5eab80 stop
    ```
    """
    owner, project_name, run_uuid = get_project_run_or_local(
        ctx.obj.get("project"), ctx.obj.get("run_uuid"), is_cli=True
    )
    # Fix: messages previously read "Are sure you" and "Existing without".
    if not yes and not click.confirm(
        "Are you sure you want to stop run `{}`".format(run_uuid)
    ):
        click.echo("Exiting without stopping run.")
        sys.exit(0)
    try:
        polyaxon_client = RunClient(
            owner=owner, project=project_name, run_uuid=run_uuid
        )
        polyaxon_client.stop()
    except (ApiException, HTTPError) as e:
        handle_cli_error(e, message="Could not stop run `{}`.".format(run_uuid))
        sys.exit(1)
    Printer.print_success("Run is being stopped.")
@ops.command()
@click.option(
    "--copy",
    "-c",
    is_flag=True,
    default=False,
    help="To copy the run before restarting.",
)
@click.option(
    "-f",
    "--file",
    "polyaxonfile",
    multiple=True,
    type=click.Path(exists=True),
    help="The polyaxonfiles to update with.",
)
@click.option(
    "-u", is_flag=True, default=False, help="To upload the repo before restarting."
)
@click.pass_context
@clean_outputs
def restart(ctx, copy, polyaxonfile, u):
    """Restart run.
    Uses [Caching](/references/polyaxon-cli/#caching)
    Examples:
    \b
    ```bash
    $ polyaxon run --uid=8aac02e3a62a4f0aaa257c59da5eab80 restart
    ```
    """
    # Optional polyaxonfile content overrides the original run config.
    override_config = None
    if polyaxonfile:
        override_config = "{}".format(reader.read(polyaxonfile))
    # Upload the local repo first when requested.
    if u:
        ctx.invoke(upload, sync=False)
    owner, project_name, run_uuid = get_project_run_or_local(
        ctx.obj.get("project"), ctx.obj.get("run_uuid"), is_cli=True
    )
    try:
        run_client = RunClient(owner=owner, project=project_name, run_uuid=run_uuid)
        response = run_client.restart(override_config=override_config, copy=copy)
    except (ApiException, HTTPError) as e:
        handle_cli_error(e, message="Could not restart run `{}`.".format(run_uuid))
        sys.exit(1)
    Printer.print_success(
        "Run was {} with uid {}".format(
            "copied" if copy else "restarted", response.uuid
        )
    )
@ops.command()
@click.option(
    "-f",
    "--file",
    "polyaxonfile",
    multiple=True,
    type=click.Path(exists=True),
    help="The polyaxonfiles to update with.",
)
@click.option(
    "-u", is_flag=True, default=False, help="To upload the repo before resuming."
)
@click.pass_context
@clean_outputs
def resume(ctx, polyaxonfile, u):
    """Resume run.
    Uses [Caching](/references/polyaxon-cli/#caching)
    Examples:
    \b
    ```bash
    $ polyaxon runs --uid=8aac02e3a62a4f0aaa257c59da5eab80 resume
    ```
    """
    # Optional polyaxonfile content overrides the original run config.
    override_config = "{}".format(reader.read(polyaxonfile)) if polyaxonfile else None
    # Upload the local repo first when requested.
    if u:
        ctx.invoke(upload, sync=False)
    owner, project_name, run_uuid = get_project_run_or_local(
        ctx.obj.get("project"), ctx.obj.get("run_uuid"), is_cli=True
    )
    try:
        run_client = RunClient(owner=owner, project=project_name, run_uuid=run_uuid)
        response = run_client.resume(override_config=override_config)
    except (ApiException, HTTPError) as e:
        handle_cli_error(e, message="Could not resume run `{}`.".format(run_uuid))
        sys.exit(1)
    Printer.print_success("Run was resumed with uid {}".format(response.uuid))
@ops.command()
@click.pass_context
@clean_outputs
def invalidate_run(ctx):
    """Invalidate the run's cached state.
    Uses [Caching](/references/polyaxon-cli/#caching)
    Examples:
    \b
    ```bash
    $ polyaxon ops invalidate_run
    ```
    """
    owner, project_name, run_uuid = get_project_run_or_local(
        ctx.obj.get("project"), ctx.obj.get("run_uuid"), is_cli=True
    )
    try:
        polyaxon_client = RunClient(
            owner=owner, project=project_name, run_uuid=run_uuid
        )
        response = polyaxon_client.invalidate()
        Printer.print_success("Run was invalidated with uid {}".format(response.uuid))
    except (ApiException, HTTPError) as e:
        handle_cli_error(e, message="Could not invalidate run `{}`.".format(run_uuid))
        sys.exit(1)
@ops.command()
@click.option("--watch", "-w", is_flag=True, help="Watch statuses.")
@click.pass_context
@clean_outputs
def statuses(ctx, watch):
    """Get run or run job statuses.
    Uses [Caching](/references/polyaxon-cli/#caching)
    Examples getting run statuses:
    \b
    ```bash
    $ polyaxon runs statuses
    ```
    \b
    ```bash
    $ polyaxon runs -uid=8aac02e3a62a4f0aaa257c59da5eab80 statuses
    ```
    """
    def _handle_run_statuses(status, conditions):
        # Print the latest status and the run's condition history.
        # Takes the values explicitly instead of relying on closure variables.
        if not conditions:
            return
        Printer.print_header("Latest status:")
        latest_status = Printer.add_status_color(
            {"status": status}, status_key="status"
        )
        click.echo("{}\n".format(latest_status["status"]))
        objects = list_dicts_to_tabulate(
            [
                Printer.add_status_color(o.to_dict(), status_key="type")
                for o in conditions
            ]
        )
        if objects:
            Printer.print_header("Conditions:")
            dict_tabulate(objects, is_list_dict=True)
    owner, project_name, run_uuid = get_project_run_or_local(
        ctx.obj.get("project"), ctx.obj.get("run_uuid"), is_cli=True
    )
    client = RunClient(owner=owner, project=project_name, run_uuid=run_uuid)
    # Fix: error message previously read "Could get status" (missing "not");
    # the duplicated try/except for the watch and one-shot paths is merged.
    try:
        if watch:
            for status, conditions in client.watch_statuses():
                _handle_run_statuses(status, conditions)
        else:
            status, conditions = client.get_statuses()
            _handle_run_statuses(status, conditions)
    except (ApiException, HTTPError, PolyaxonClientException) as e:
        handle_cli_error(
            e, message="Could not get status for run `{}`.".format(run_uuid)
        )
        sys.exit(1)
# @ops.command()
# @click.option("--gpu", "-g", is_flag=True, help="List run GPU resources.")
# @click.pass_context
# @clean_outputs
# def resources(ctx, gpu):
# """Get run or run job resources.
#
# Uses [Caching](/references/polyaxon-cli/#caching)
#
# Examples for getting run resources:
#
# \b
# ```bash
# $ polyaxon runs -uid=8aac02e3a62a4f0aaa257c59da5eab80 resources
# ```
#
# For GPU resources
#
# \b
# ```bash
# $ polyaxon runs -uid=8aac02e3a62a4f0aaa257c59da5eab80 resources --gpu
# ```
# """
#
# def get_run_resources():
# try:
# message_handler = Printer.gpu_resources if gpu else Printer.resources
# PolyaxonClient().run.resources(
# owner, project_name, run_uuid, message_handler=message_handler
# )
# except (ApiException, HTTPError) as e:
# handle_cli_error(
# e, message="Could not get resources for run `{}`.".format(run_uuid)
# )
# sys.exit(1)
#
# owner, project_name, run_uuid = get_project_run_or_local(
# ctx.obj.get("project"), ctx.obj.get("run_uuid"), is_cli=True
# )
#
# get_run_resources()
@ops.command()
@click.option(
    "--follow",
    "-f",
    is_flag=True,
    default=True,
    help="Stream logs after showing past logs.",
)
@click.option(
    "--hide_time",
    "--hide-time",
    is_flag=True,
    default=False,
    help="Whether or not to hide timestamps from the log stream.",
)
@click.option(
    "--all_info",
    "--all-info",
    is_flag=True,
    default=False,
    help="Whether to stream logs from all containers.",
)
@click.pass_context
@clean_outputs
def logs(ctx, follow, hide_time, all_info):
    """Get run or run job logs.
    Uses [Caching](/references/polyaxon-cli/#caching)
    Examples for getting run logs:
    \b
    ```bash
    $ polyaxon run logs
    ```
    \b
    ```bash
    $ polyaxon runs -uid=8aac02e3a62a4f0aaa257c59da5eab80 -p mnist logs
    ```
    """
    owner, project_name, run_uuid = get_project_run_or_local(
        ctx.obj.get("project"), ctx.obj.get("run_uuid"), is_cli=True
    )
    run_client = RunClient(owner=owner, project=project_name, run_uuid=run_uuid)
    # Delegate fetching/streaming to the client helper.
    get_run_logs(
        client=run_client,
        hide_time=hide_time,
        all_info=all_info,
        follow=follow,
    )
@ops.command()
@click.pass_context
@clean_outputs
def artifacts(ctx):
    """Download outputs/artifacts for run.
    Uses [Caching](/references/polyaxon-cli/#caching)
    Examples:
    \b
    ```bash
    $ polyaxon runs -uid=8aac02e3a62a4f0aaa257c59da5eab80 artifacts
    ```
    """
    owner, project_name, run_uuid = get_project_run_or_local(
        ctx.obj.get("project"), ctx.obj.get("run_uuid"), is_cli=True
    )
    try:
        run_client = RunClient(owner=owner, project=project_name, run_uuid=run_uuid)
        run_client.download_artifacts()
    except (ApiException, HTTPError) as e:
        handle_cli_error(
            e, message="Could not download outputs for run `{}`.".format(run_uuid)
        )
        sys.exit(1)
    Printer.print_success("Files downloaded.")
|
gregmbi/polyaxon | core/polyaxon/connections/schemas/connections.py | <filename>core/polyaxon/connections/schemas/connections.py
#!/usr/bin/python
#
# Copyright 2018-2020 Polyaxon, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import polyaxon_sdk
from marshmallow import ValidationError, fields
from polyaxon.connections.kinds import V1ConnectionKind
from polyaxon.schemas.base import BaseCamelSchema, BaseConfig, BaseOneOfSchema
class BucketConnectionSchema(BaseCamelSchema):
    """Serialization schema for a bucket (blob store) connection."""
    bucket = fields.Str(required=True)
    @staticmethod
    def schema_config():
        # The config class this schema deserializes into.
        return V1BucketConnection
class V1BucketConnection(BaseConfig, polyaxon_sdk.V1BucketConnection):
    """Config object for a bucket (blob store) connection."""
    SCHEMA = BucketConnectionSchema
    IDENTIFIER = "bucket"
    def patch(self, schema: "V1BucketConnection"):
        # Only override the current value when the patch carries a truthy one.
        if schema.bucket:
            self.bucket = schema.bucket
class ClaimConnectionSchema(BaseCamelSchema):
    """Serialization schema for a Kubernetes volume-claim connection."""
    volume_claim = fields.Str(required=True)
    mount_path = fields.Str(required=True)
    read_only = fields.Bool(allow_none=True)
    @staticmethod
    def schema_config():
        # The config class this schema deserializes into.
        return V1ClaimConnection
class V1ClaimConnection(BaseConfig, polyaxon_sdk.V1ClaimConnection):
    """Config object for a Kubernetes volume-claim connection."""
    SCHEMA = ClaimConnectionSchema
    IDENTIFIER = "volume_claim"
    def patch(self, schema: "V1ClaimConnection"):
        # Truthy patch values override the current values.
        # NOTE(review): a patch value of False for read_only cannot unset a
        # previous True here — confirm this is the intended semantics.
        if schema.volume_claim:
            self.volume_claim = schema.volume_claim
        if schema.mount_path:
            self.mount_path = schema.mount_path
        if schema.read_only:
            self.read_only = schema.read_only
class HostPathConnectionSchema(BaseCamelSchema):
    """Serialization schema for a host-path mount connection."""
    host_path = fields.Str(required=True)
    mount_path = fields.Str(required=True)
    read_only = fields.Bool(allow_none=True)
    @staticmethod
    def schema_config():
        # The config class this schema deserializes into.
        return V1HostPathConnection
class V1HostPathConnection(BaseConfig, polyaxon_sdk.V1HostPathConnection):
    """Config object for a host-path mount connection."""
    SCHEMA = HostPathConnectionSchema
    IDENTIFIER = "host_path"
    def patch(self, schema: "V1HostPathConnection"):
        # Truthy patch values override the current values.
        # NOTE(review): a patch value of False for read_only cannot unset a
        # previous True here — confirm this is the intended semantics.
        if schema.host_path:
            self.host_path = schema.host_path
        if schema.mount_path:
            self.mount_path = schema.mount_path
        if schema.read_only:
            self.read_only = schema.read_only
class HostConnectionSchema(BaseCamelSchema):
    """Serialization schema for a host (url-based) connection."""
    url = fields.Str(required=True)
    insecure = fields.Bool(allow_none=True)
    @staticmethod
    def schema_config():
        # The config class this schema deserializes into.
        return V1HostConnection
class V1HostConnection(BaseConfig, polyaxon_sdk.V1HostConnection):
    """Config object for a host (url-based) connection, e.g. a registry."""
    SCHEMA = HostConnectionSchema
    IDENTIFIER = "host"
    def patch(self, schema: "V1HostConnection"):
        # Truthy patch values override the current values.
        # NOTE(review): a patch value of False for insecure cannot unset a
        # previous True here — confirm this is the intended semantics.
        if schema.url:
            self.url = schema.url
        if schema.insecure:
            self.insecure = schema.insecure
class GitConnectionSchema(BaseCamelSchema):
    """Serialization schema for a git repository connection."""
    url = fields.Str(allow_none=True)
    revision = fields.Str(allow_none=True)
    @staticmethod
    def schema_config():
        # The config class this schema deserializes into.
        return V1GitConnection
class V1GitConnection(BaseConfig, polyaxon_sdk.V1GitConnection):
    """Config object for a git repository connection."""
    SCHEMA = GitConnectionSchema
    IDENTIFIER = "git"
    # url/revision are dropped from serialization when unset.
    REDUCED_ATTRIBUTES = ["url", "revision"]
    def get_name(self):
        """Return the repo name derived from the url (e.g. `.../repo.git` -> `repo`)."""
        if self.url:
            return self.url.split("/")[-1].split(".")[0]
        return None
    def patch(self, schema: "V1GitConnection"):
        # Annotation fixed: siblings annotate with the V1 config type, and the
        # body reads `schema.url`/`schema.revision` (config attributes).
        self.url = schema.url or self.url
        self.revision = schema.revision or self.revision
def validate_connection(kind, definition):
    """Validate a connection `definition` dict against the config for `kind`.

    Raises:
        ValidationError: when the kind is unknown or the definition is invalid.
    """
    if kind not in V1ConnectionKind.VALUES:
        raise ValidationError("Connection with kind {} is not supported.".format(kind))
    # All blob-store kinds share the bucket config.
    if kind in V1ConnectionKind.BLOB_VALUES:
        V1BucketConnection.from_dict(definition)
    # Remaining kinds each map to a dedicated config class.
    kind_to_config = {
        V1ConnectionKind.VOLUME_CLAIM: V1ClaimConnection,
        V1ConnectionKind.HOST_PATH: V1HostPathConnection,
        V1ConnectionKind.REGISTRY: V1HostConnection,
        V1ConnectionKind.GIT: V1GitConnection,
    }
    config = kind_to_config.get(kind)
    if config is not None:
        config.from_dict(definition)
class ConnectionSchema(BaseOneOfSchema):
    """Polymorphic schema dispatching on the `kind` field to the matching
    connection schema."""
    TYPE_FIELD = "kind"
    # The kind discriminator is stripped before passing data to the sub-schema.
    TYPE_FIELD_REMOVE = True
    SCHEMAS = {
        V1BucketConnection.IDENTIFIER: BucketConnectionSchema,
        V1ClaimConnection.IDENTIFIER: ClaimConnectionSchema,
        V1HostPathConnection.IDENTIFIER: HostPathConnectionSchema,
        V1HostConnection.IDENTIFIER: HostConnectionSchema,
        V1GitConnection.IDENTIFIER: GitConnectionSchema,
    }
|
gregmbi/polyaxon | core/polyaxon/sidecar/__init__.py | #!/usr/bin/python
#
# Copyright 2018-2020 Polyaxon, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import time
from kubernetes.client.rest import ApiException
from polyaxon.client import RunClient
from polyaxon.env_vars.getters import get_run_info
from polyaxon.exceptions import PolyaxonClientException, PolyaxonContainerException
from polyaxon.k8s.manager import K8SManager
from polyaxon.k8s.monitor import is_pod_running
from polyaxon.logger import logger
from polyaxon.settings import CLIENT_CONFIG
from polyaxon.sidecar.intervals import get_sync_interval
from polyaxon.sidecar.logging import sync_logs
from polyaxon.sidecar.outputs import sync_artifacts, sync_summaries
def start_sidecar(
    container_id: str,
    sleep_interval: int,
    sync_interval: int,
    monitor_outputs: bool,
    monitor_logs: bool,
):
    """Run the sidecar loop syncing artifacts/logs while the main container runs.

    Args:
        container_id: id of the main container to watch.
        sleep_interval: seconds to sleep between pod status checks.
        sync_interval: desired seconds between syncs; converted to a number
            of sleep cycles by `get_sync_interval`.
        monitor_outputs: whether to sync artifacts and summaries.
        monitor_logs: whether to sync container logs.

    Raises:
        PolyaxonContainerException: if run info or the pod id is missing.
    """
    sync_interval = get_sync_interval(
        interval=sync_interval, sleep_interval=sleep_interval
    )
    try:
        owner, project, run_uuid = get_run_info()
    except PolyaxonClientException as e:
        # Chain the cause for easier debugging.
        raise PolyaxonContainerException(e) from e
    client = RunClient(owner=owner, project=project, run_uuid=run_uuid)
    pod_id = CLIENT_CONFIG.pod_id
    if not pod_id:
        raise PolyaxonContainerException(
            "Please make sure that this job has been "
            "started by Polyaxon with all required context."
        )
    k8s_manager = K8SManager(namespace=CLIENT_CONFIG.namespace, in_cluster=True)
    retry = 1
    is_running = True
    counter = 0
    # Mutable dict so the nested monitor() can update the checkpoints.
    state = {
        "last_artifacts_check": None,
        "last_logs_check": None,
    }
    def monitor():
        # Sync artifacts/summaries and/or logs since the last checkpoint.
        if monitor_outputs:
            last_check = state["last_artifacts_check"]
            state["last_artifacts_check"] = sync_artifacts(
                last_check=last_check, run_uuid=run_uuid,
            )
            sync_summaries(
                last_check=last_check, run_uuid=run_uuid, client=client,
            )
        if monitor_logs:
            state["last_logs_check"] = sync_logs(
                k8s_manager=k8s_manager,
                client=client,
                last_check=state["last_logs_check"],
                run_uuid=run_uuid,
                pod_id=pod_id,
                container_id=container_id,
                owner=owner,
                project=project,
            )
    # Poll the pod; give up after 3 consecutive API failures.
    while is_running and retry <= 3:
        time.sleep(sleep_interval)
        try:
            is_running = is_pod_running(k8s_manager, pod_id, container_id)
        except ApiException as e:
            retry += 1
            # Back off a little more on each consecutive failure.
            time.sleep(1 * retry)
            logger.info("Exception %s", repr(e))
            logger.info("Sleeping ...")
        logger.debug("Syncing ...")
        if is_running:
            retry = 1
        counter += 1
        if counter == sync_interval:
            counter = 0
            try:
                monitor()
            except Exception as e:
                # Fix: `%e` is a float format specifier and is invalid for an
                # exception object; `%s` logs the message correctly.
                logger.warning("Polyaxon sidecar error: %s", e)
    # Final sync after the main container stops.
    monitor()
    logger.info("Cleaning non main containers")
|
gregmbi/polyaxon | core/tests/test_tracking/test_events/test_event_recorder.py | <reponame>gregmbi/polyaxon
#!/usr/bin/python
#
# Copyright 2018-2020 Polyaxon, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import tempfile
import pytest
from tests.utils import BaseTestCase
from polyaxon.polyboard.events.schemas import (
LoggedEventListSpec,
LoggedEventSpec,
V1Event,
V1Events,
)
from polyaxon.tracking.events.writer import (
EventAsyncManager,
EventFileWriter,
EventWriter,
)
@pytest.mark.tracking_mark
class TestEventWriter(BaseTestCase):
    """Unit tests for the low-level EventWriter: path layout, file init,
    in-memory batching, and flushing on close."""

    def test_event_file(self):
        # Event files live under <run_path>/events/<kind>/<name>.plx
        run_path = tempfile.mkdtemp()
        ew = EventWriter(run_path=run_path, backend=EventWriter.EVENTS_BACKEND)
        assert ew._get_event_path(
            kind="kind", name="name"
        ) == "{}/events/kind/name.plx".format(run_path)

    def test_init_events(self):
        # _init_events creates the backing file (idempotently) carrying the
        # (name, kind) metadata; existing files of other kinds are untouched.
        events = LoggedEventListSpec(name="test", kind="metric", events=[])
        run_path = tempfile.mkdtemp()
        ew = EventWriter(run_path=run_path, backend=EventWriter.EVENTS_BACKEND)
        event_file = ew._get_event_path(name=events.name, kind=events.kind)
        assert os.path.exists(event_file) is False
        ew._init_events(events)
        assert os.path.exists(event_file) is True
        expected_events = V1Events.read(kind="metric", name="test", data=event_file)
        assert expected_events.name == events.name
        assert expected_events.kind == events.kind
        # Init same file
        ew._init_events(events)
        assert os.path.exists(event_file) is True
        # New file
        events = LoggedEventListSpec(name="new", kind="text", events=[])
        new_event_file = ew._get_event_path(name=events.name, kind=events.kind)
        assert os.path.exists(new_event_file) is False
        ew._init_events(events)
        assert os.path.exists(new_event_file) is True
        expected_events = V1Events.read(kind="text", name="new", data=new_event_file)
        assert expected_events.name == events.name
        assert expected_events.kind == events.kind
        # Previous file should still be there
        assert os.path.exists(event_file) is True

    def test_add_event_create_files_and_batch_events(self):
        # write() creates one file per (kind, name) pair and queues the events
        # in-memory under the "<kind>.<name>" key until a flush.
        run_path = tempfile.mkdtemp()
        ew = EventWriter(run_path=run_path, backend=EventWriter.EVENTS_BACKEND)
        events = [
            LoggedEventSpec(
                name="test", kind="metric", event=V1Event.make(step=1, metric=1.12)
            ),
            LoggedEventSpec(
                name="test", kind="metric", event=V1Event.make(step=2, metric=1.13)
            ),
            LoggedEventSpec(
                name="test2", kind="metric", event=V1Event.make(step=2, metric=1.13)
            ),
            LoggedEventSpec(
                name="test", kind="text", event=V1Event.make(step=1, text="text")
            ),
            LoggedEventSpec(
                name="test", kind="html", event=V1Event.make(step=1, html="html")
            ),
        ]
        ew.write(events)
        for event in events:
            new_event_file = ew._get_event_path(kind=event.kind, name=event.name)
            assert os.path.exists(new_event_file) is True
        # Three kind directories: metric, text, html.
        assert len(os.listdir(run_path + "/events")) == 3
        # Check the queues
        assert len(ew._files) == 4
        assert ew._files["metric.test"].name == "test"
        assert ew._files["metric.test"].kind == "metric"
        assert [e.to_dict() for e in ew._files["metric.test"].events] == [
            events[0].event.to_dict(),
            events[1].event.to_dict(),
        ]
        assert ew._files["metric.test2"].name == "test2"
        assert ew._files["metric.test2"].kind == "metric"
        assert [e.to_dict() for e in ew._files["metric.test2"].events] == [
            events[2].event.to_dict()
        ]
        assert ew._files["text.test"].name == "test"
        assert ew._files["text.test"].kind == "text"
        assert [e.to_dict() for e in ew._files["text.test"].events] == [
            events[3].event.to_dict()
        ]
        assert ew._files["html.test"].name == "test"
        assert ew._files["html.test"].kind == "html"
        assert [e.to_dict() for e in ew._files["html.test"].events] == [
            events[4].event.to_dict()
        ]

    def test_expect_closing_flushes_data_to_files(self):
        # close() persists all buffered events to disk, empties the queues,
        # and later writes append to the same files.
        run_path = tempfile.mkdtemp()
        ew = EventWriter(run_path=run_path, backend=EventWriter.EVENTS_BACKEND)
        events = [
            LoggedEventSpec(
                name="test", kind="metric", event=V1Event.make(step=1, metric=1.12)
            ),
            LoggedEventSpec(
                name="test", kind="metric", event=V1Event.make(step=12, metric=1.12)
            ),
        ]
        ew.write(events)
        ew.close()
        assert len(os.listdir(run_path + "/events")) == 1
        assert len(os.listdir(run_path + "/events/metric")) == 1
        results = V1Events.read(
            name="test",
            kind="metric",
            data=ew._get_event_path(kind="metric", name="test"),
        )
        assert results.name == "test"
        assert results.kind == "metric"
        assert len(results.df.values) == 2
        assert results.get_event_at(0).to_dict() == events[0].event.to_dict()
        assert results.get_event_at(1).to_dict() == events[1].event.to_dict()
        # all flushed
        assert ew._files["metric.test"].events == []
        # Adding more events
        new_events = [
            LoggedEventSpec(
                name="test", kind="metric", event=V1Event.make(step=13, metric=1.12)
            ),
            LoggedEventSpec(
                name="test", kind="metric", event=V1Event.make(step=14, metric=1.12)
            ),
            LoggedEventSpec(
                name="test2", kind="metric", event=V1Event.make(step=14, metric=1.12)
            ),
            LoggedEventSpec(
                name="test", kind="html", event=V1Event.make(step=12, html="some div")
            ),
        ]
        ew.write(new_events)
        ew.close()
        assert len(os.listdir(run_path + "/events")) == 2  # metric and html
        assert len(os.listdir(run_path + "/events/metric")) == 2
        results = V1Events.read(
            name="test",
            kind="metric",
            data=ew._get_event_path(kind="metric", name="test"),
        )
        assert results.name == "test"
        assert results.kind == "metric"
        assert len(results.df.values) == 4
        assert results.get_event_at(0).to_dict() == events[0].event.to_dict()
        assert results.get_event_at(1).to_dict() == events[1].event.to_dict()
        assert results.get_event_at(2).to_dict() == new_events[0].event.to_dict()
        assert results.get_event_at(3).to_dict() == new_events[1].event.to_dict()
@pytest.mark.tracking_mark
class TestEventFileWriter(BaseTestCase):
    """Tests for EventFileWriter: run-directory bootstrap, add/flush/close
    lifecycle, batching, resume-after-close, and post-close failures."""

    def test_event_file_writer_initializes_paths(self):
        # Constructing the writer materializes the run directory structure.
        some_path = tempfile.mkdtemp()
        assert os.path.exists(some_path + "/run_uid") is False
        EventFileWriter(some_path + "/run_uid")
        assert os.path.exists(some_path + "/run_uid") is True
        assert os.path.exists(some_path + "/run_uid/events") is True
        assert os.path.exists(some_path + "/run_uid/assets") is True

    def test_event_file_writer(self):
        # Events added one by one are persisted on flush and appended to
        # the same per-(kind, name) files on subsequent flushes.
        run_path = tempfile.mkdtemp()
        ew = EventFileWriter(run_path)
        events = [
            LoggedEventSpec(
                name="test", kind="metric", event=V1Event.make(step=1, metric=1.12)
            ),
            LoggedEventSpec(
                name="test", kind="metric", event=V1Event.make(step=12, metric=1.12)
            ),
        ]
        for e in events:
            ew.add_event(e)
        ew.flush()
        assert len(os.listdir(run_path + "/events")) == 1
        assert len(os.listdir(run_path + "/events/metric")) == 1
        results = V1Events.read(
            name="test", kind="metric", data=run_path + "/events/metric/test.plx"
        )
        assert results.name == "test"
        assert results.kind == "metric"
        assert len(results.df.values) == 2
        assert results.get_event_at(0).to_dict() == events[0].event.to_dict()
        assert results.get_event_at(1).to_dict() == events[1].event.to_dict()
        # Adding more events
        new_events = [
            LoggedEventSpec(
                name="test", kind="metric", event=V1Event.make(step=13, metric=1.12)
            ),
            LoggedEventSpec(
                name="test", kind="metric", event=V1Event.make(step=14, metric=1.12)
            ),
            LoggedEventSpec(
                name="test2", kind="metric", event=V1Event.make(step=14, metric=1.12)
            ),
            LoggedEventSpec(
                name="test", kind="html", event=V1Event.make(step=12, html="some div")
            ),
        ]
        for e in new_events:
            ew.add_event(e)
        ew.flush()
        assert len(os.listdir(run_path + "/events")) == 2  # metric and html
        assert len(os.listdir(run_path + "/events/metric")) == 2
        results = V1Events.read(
            name="test", kind="metric", data=run_path + "/events/metric/test.plx"
        )
        assert results.name == "test"
        assert results.kind == "metric"
        assert len(results.df.values) == 4
        assert results.get_event_at(0).to_dict() == events[0].event.to_dict()
        assert results.get_event_at(1).to_dict() == events[1].event.to_dict()
        assert results.get_event_at(2).to_dict() == new_events[0].event.to_dict()
        assert results.get_event_at(3).to_dict() == new_events[1].event.to_dict()

    def test_write_batch_events_file_writer(self):
        # add_events accepts a whole batch in a single call.
        run_path = tempfile.mkdtemp()
        ew = EventFileWriter(run_path)
        events = [
            LoggedEventSpec(
                name="test", kind="metric", event=V1Event.make(step=1, metric=1.12)
            ),
            LoggedEventSpec(
                name="test", kind="metric", event=V1Event.make(step=12, metric=1.12)
            ),
        ]
        ew.add_events(events)
        ew.flush()
        assert len(os.listdir(run_path + "/events")) == 1
        assert len(os.listdir(run_path + "/events/metric")) == 1
        results = V1Events.read(
            name="test", kind="metric", data=run_path + "/events/metric/test.plx"
        )
        assert results.name == "test"
        assert results.kind == "metric"
        assert len(results.df.values) == 2
        assert results.get_event_at(0).to_dict() == events[0].event.to_dict()
        assert results.get_event_at(1).to_dict() == events[1].event.to_dict()

    def test_event_file_writer_append_after_close_reopen(self):
        # A fresh writer pointed at the same run path appends to the
        # existing event files instead of overwriting them.
        run_path = tempfile.mkdtemp()
        ew = EventFileWriter(run_path)
        events = [
            LoggedEventSpec(
                name="test", kind="metric", event=V1Event.make(step=1, metric=1.12)
            ),
            LoggedEventSpec(
                name="test", kind="metric", event=V1Event.make(step=12, metric=1.12)
            ),
        ]
        for e in events:
            ew.add_event(e)
        ew.close()
        assert len(os.listdir(run_path + "/events")) == 1
        assert len(os.listdir(run_path + "/events/metric")) == 1
        results = V1Events.read(
            name="test", kind="metric", data=run_path + "/events/metric/test.plx"
        )
        assert results.name == "test"
        assert results.kind == "metric"
        assert len(results.df.values) == 2
        assert results.get_event_at(0).to_dict() == events[0].event.to_dict()
        assert results.get_event_at(1).to_dict() == events[1].event.to_dict()
        # New writer should resume work
        ew = EventFileWriter(run_path)
        # Adding more events
        new_events = [
            LoggedEventSpec(
                name="test", kind="metric", event=V1Event.make(step=13, metric=1.12)
            ),
            LoggedEventSpec(
                name="test", kind="metric", event=V1Event.make(step=14, metric=1.12)
            ),
            LoggedEventSpec(
                name="test2", kind="metric", event=V1Event.make(step=14, metric=1.12)
            ),
            LoggedEventSpec(
                name="test", kind="html", event=V1Event.make(step=12, html="some div")
            ),
        ]
        for e in new_events:
            ew.add_event(e)
        ew.close()
        assert len(os.listdir(run_path + "/events")) == 2  # metric and html
        assert len(os.listdir(run_path + "/events/metric")) == 2
        results = V1Events.read(
            name="test", kind="metric", data=run_path + "/events/metric/test.plx"
        )
        assert results.name == "test"
        assert results.kind == "metric"
        assert len(results.df.values) == 4
        assert results.get_event_at(0).to_dict() == events[0].event.to_dict()
        assert results.get_event_at(1).to_dict() == events[1].event.to_dict()
        assert results.get_event_at(2).to_dict() == new_events[0].event.to_dict()
        assert results.get_event_at(3).to_dict() == new_events[1].event.to_dict()

    def test_event_file_writer_raise_after_close(self):
        # Once closed, the writer refuses further events.
        run_path = tempfile.mkdtemp()
        ew = EventFileWriter(run_path)
        events = [
            LoggedEventSpec(
                name="test", kind="metric", event=V1Event.make(step=1, metric=1.12)
            ),
            LoggedEventSpec(
                name="test", kind="metric", event=V1Event.make(step=12, metric=1.12)
            ),
        ]
        for e in events:
            ew.add_event(e)
        ew.close()
        assert len(os.listdir(run_path + "/events")) == 1
        assert len(os.listdir(run_path + "/events/metric")) == 1
        results = V1Events.read(
            name="test", kind="metric", data=run_path + "/events/metric/test.plx"
        )
        assert results.name == "test"
        assert results.kind == "metric"
        assert len(results.df.values) == 2
        assert results.get_event_at(0).to_dict() == events[0].event.to_dict()
        assert results.get_event_at(1).to_dict() == events[1].event.to_dict()
        # Adding event raises
        with self.assertRaises(OSError):
            ew.add_event(
                LoggedEventSpec(
                    name="test", kind="metric", event=V1Event.make(step=13, metric=1.12)
                )
            )

    def test_async_writer_without_write(self):
        # Closing a writer that never received events leaves the events
        # directory empty (no spurious files).
        run_path = tempfile.mkdtemp()
        ew = EventFileWriter(run_path)
        ew.close()
        assert len(os.listdir(run_path + "/events")) == 0
@pytest.mark.tracking_mark
class TestEventAsyncManager(BaseTestCase):
    """Tests for EventAsyncManager: queued writes are drained to the wrapped
    EventWriter, including under queue pressure and at close time."""

    def test_async_writer_write_once(self):
        run_path = tempfile.mkdtemp()
        ew = EventAsyncManager(
            event_writer=EventWriter(run_path, backend=EventWriter.EVENTS_BACKEND)
        )
        event = LoggedEventSpec(
            name="test", kind="metric", event=V1Event.make(step=13, metric=1.12)
        )
        ew.write(event)
        ew.close()
        assert len(os.listdir(run_path + "/events")) == 1
        assert len(os.listdir(run_path + "/events/metric")) == 1
        results = V1Events.read(
            name="test", kind="metric", data=run_path + "/events/metric/test.plx"
        )
        assert results.name == "test"
        assert results.kind == "metric"
        assert len(results.df.values) == 1
        assert results.get_event_at(0).to_dict() == event.event.to_dict()

    def test_async_writer_write_queue_full(self):
        # Many rapid writes must all make it to disk (no events dropped
        # when the internal queue fills up).
        run_path = tempfile.mkdtemp()
        ew = EventAsyncManager(
            event_writer=EventWriter(run_path, backend=EventWriter.EVENTS_BACKEND)
        )
        event = LoggedEventSpec(
            name="test", kind="metric", event=V1Event.make(step=13, metric=1.12)
        )
        repeat = 100
        for i in range(repeat):
            ew.write(event)
        ew.close()
        assert len(os.listdir(run_path + "/events")) == 1
        assert len(os.listdir(run_path + "/events/metric")) == 1
        results = V1Events.read(
            name="test", kind="metric", data=run_path + "/events/metric/test.plx"
        )
        assert results.name == "test"
        assert results.kind == "metric"
        assert len(results.df.values) == 100
        assert results.get_event_at(0).to_dict() == event.event.to_dict()

    def test_async_writer_write_one_slot_queue(self):
        # Even with max_queue_size=1 (producer blocks on every write),
        # all events are delivered.
        run_path = tempfile.mkdtemp()
        ew = EventAsyncManager(
            event_writer=EventWriter(run_path, backend=EventWriter.EVENTS_BACKEND),
            max_queue_size=1,
        )
        event = LoggedEventSpec(
            name="test", kind="metric", event=V1Event.make(step=13, metric=1.12)
        )
        repeat = 10
        for i in range(repeat):
            ew.write(event)
        ew.close()
        assert len(os.listdir(run_path + "/events")) == 1
        assert len(os.listdir(run_path + "/events/metric")) == 1
        results = V1Events.read(
            name="test", kind="metric", data=run_path + "/events/metric/test.plx"
        )
        assert results.name == "test"
        assert results.kind == "metric"
        assert len(results.df.values) == 10
        assert results.get_event_at(0).to_dict() == event.event.to_dict()

    def test_async_writer_close_triggers_flush(self):
        # close() drains any pending events before shutting down.
        run_path = tempfile.mkdtemp()
        ew = EventAsyncManager(
            event_writer=EventWriter(run_path, backend=EventWriter.EVENTS_BACKEND),
            max_queue_size=1,
        )
        event = LoggedEventSpec(
            name="test", kind="metric", event=V1Event.make(step=13, metric=1.12)
        )
        ew.write(event)
        ew.close()
        assert len(os.listdir(run_path + "/events")) == 1
        assert len(os.listdir(run_path + "/events/metric")) == 1
        results = V1Events.read(
            name="test", kind="metric", data=run_path + "/events/metric/test.plx"
        )
        assert results.name == "test"
        assert results.kind == "metric"
        assert len(results.df.values) == 1
        assert results.get_event_at(0).to_dict() == event.event.to_dict()

    def test_write_after_async_writer_closed(self):
        # Writing after close raises and leaves no files behind.
        run_path = tempfile.mkdtemp()
        ew = EventAsyncManager(
            event_writer=EventWriter(run_path, backend=EventWriter.EVENTS_BACKEND),
            max_queue_size=1,
        )
        event = LoggedEventSpec(
            name="test", kind="metric", event=V1Event.make(step=13, metric=1.12)
        )
        ew.close()
        with self.assertRaises(IOError):
            ew.write(event)
        # nothing is written to the file after close
        assert len(os.listdir(run_path)) == 0
|
gregmbi/polyaxon | core/polyaxon/polyaxonfile/specs/libs/parser.py | #!/usr/bin/python
#
# Copyright 2018-2020 Polyaxon, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import ast
import copy
import jinja2
from collections import Mapping
from typing import Dict
from polyaxon.config_reader.utils import deep_update
from polyaxon.exceptions import PolyaxonSchemaError
from polyaxon.polyaxonfile.specs.sections import Sections
from polyaxon.polyflow import ParamSpec
try:
import numpy as np
except (ImportError, ModuleNotFoundError):
np = None
class Parser(object):
    """Parses the Polyaxonfile.

    Expressions are rendered with jinja2 against the provided params, then
    coerced back to Python literals where possible. Mappings, lists, and
    tuples are resolved recursively; single-key mappings whose key names a
    known operator are dispatched to that operator.
    """

    env = jinja2.Environment()

    @staticmethod
    def _get_section_data(section_data):
        # Config objects expose `to_dict`; plain values pass through unchanged.
        if hasattr(section_data, "to_dict"):
            return section_data.to_dict()
        return section_data

    @classmethod
    def _get_section(cls, config, section):
        # Return the section as plain data (dict/list/primitive), or None
        # when the config does not define it.
        if not hasattr(config, section):
            return None
        section_data = getattr(config, section)
        if isinstance(section_data, list):
            return [cls._get_section_data(d) for d in section_data]
        return cls._get_section_data(section_data)

    @classmethod
    def parse(
        cls, config, params: Dict[str, ParamSpec]
    ):  # pylint:disable=too-many-branches
        """Parse a whole config into a plain dict, resolving params section by section.

        The run section is copied verbatim; it is resolved later via `parse_run`.
        """
        params = params or {}
        parsed_params = {param: params[param].display_value for param in params}

        parsed_data = {Sections.VERSION: config.version, Sections.KIND: config.kind}

        if config.name:
            parsed_data[Sections.NAME] = config.name
        if config.description:
            parsed_data[Sections.DESCRIPTION] = config.description
        if config.tags:
            parsed_data[Sections.TAGS] = config.tags
        inputs = getattr(config, Sections.INPUTS)
        if inputs:
            parsed_data[Sections.INPUTS] = [io.to_dict() for io in inputs]
        outputs = getattr(config, Sections.OUTPUTS)
        if outputs:
            parsed_data[Sections.OUTPUTS] = [
                cls.parse_expression(io.to_dict(), parsed_params) for io in outputs
            ]

        # Check workflow
        parallel_section = cls._get_section(config, Sections.PARALLEL)
        if parallel_section:
            parsed_data[Sections.PARALLEL] = cls.parse_expression(
                parallel_section, parsed_params
            )
            parallel_params = copy.copy(parsed_data[Sections.PARALLEL])
            if parallel_params:
                # Parallel values take part in resolving the remaining sections.
                parsed_params = deep_update(parallel_params, parsed_params)

        for section in Sections.PARSING_SECTIONS:
            config_section = cls._get_section(config, section)
            if config_section:
                parsed_data[section] = cls.parse_expression(
                    config_section, parsed_params
                )

        for section in Sections.OP_PARSING_SECTIONS:
            config_section = cls._get_section(config, section)
            if config_section:
                parsed_data[section] = cls.parse_expression(
                    config_section, parsed_params
                )

        config_section = cls._get_section(config, Sections.RUN)
        if config_section:
            parsed_data[Sections.RUN] = config_section

        return parsed_data

    @classmethod
    def parse_run(cls, parsed_data, params: Dict[str, ParamSpec]):
        """Resolve only the run section of already-parsed data."""
        config_section = cls.parse_section(
            parsed_data.get(Sections.RUN), params=params, parse_params=True
        )
        if config_section:
            parsed_data[Sections.RUN] = config_section
        return parsed_data

    @classmethod
    def parse_section(
        cls, config_section, params: Dict[str, ParamSpec], parse_params: bool = True
    ):
        """Parse a single section; optionally convert ParamSpecs to display values."""
        params = params or {}
        if parse_params:
            params = {param: params[param].display_value for param in params}
        if config_section:
            return cls.parse_expression(config_section, params)
        return config_section

    @classmethod
    def parse_expression(  # pylint:disable=too-many-branches
        cls, expression, params: Dict, check_operators: bool = False
    ):
        """Recursively resolve an expression, wrapping template failures.

        Raises:
            PolyaxonSchemaError: when the jinja2 template cannot be rendered.
        """
        try:
            return cls._parse_expression(expression, params, check_operators)
        except jinja2.exceptions.TemplateError as e:
            # Fix: message grammar ("An problem" -> "A problem") and chain the
            # original jinja2 error for easier debugging.
            raise PolyaxonSchemaError(
                "A problem parsing the template, please make sure your variables are resolvable. "
                "Error: {}".format(repr(e))
            ) from e

    @classmethod
    def _parse_expression(  # pylint:disable=too-many-branches
        cls, expression, params: Dict, check_operators: bool = False
    ):
        # Primitives (and None) need no resolution.
        if isinstance(expression, (int, float, complex, type(None))):
            return expression
        # Normalize numpy scalars to builtins when numpy is available.
        if np and isinstance(expression, np.integer):
            return int(expression)
        if np and isinstance(expression, np.floating):
            return float(expression)
        # NOTE(review): `Mapping` is imported from `collections` at the top of
        # this file; that alias is removed in Python 3.10 and should come from
        # `collections.abc` — confirm before upgrading the runtime.
        if isinstance(expression, Mapping):
            if len(expression) == 1:
                old_key, value = list(expression.items())[0]
                # always parse the keys, they must be base object or evaluate to base objects
                key = cls._parse_expression(old_key, params)
                if check_operators and cls.is_operator(key):
                    return cls._parse_operator({key: value}, params)
                else:
                    return {key: cls.parse_expression(value, params, check_operators)}

            new_expression = {}
            for k, v in expression.items():
                new_expression.update(
                    cls._parse_expression({k: v}, params, check_operators)
                )
            return new_expression

        if isinstance(expression, list):
            return list(
                cls.parse_expression(v, params, check_operators) for v in expression
            )
        if isinstance(expression, tuple):
            return tuple(
                cls.parse_expression(v, params, check_operators) for v in expression
            )
        if isinstance(expression, str):
            return cls._evaluate_expression(expression, params, check_operators)
        # Unhandled types fall through and yield None (implicit), as before.

    @classmethod
    def _evaluate_expression(cls, expression, params, check_operators):
        result = cls.env.from_string(expression).render(**params)
        if result == expression:
            # Nothing was templated: try to coerce the string to a literal.
            try:
                return ast.literal_eval(result)
            except (ValueError, SyntaxError):
                pass
            return result
        # The template produced new content; resolve it recursively.
        return cls.parse_expression(result, params, check_operators)

    @classmethod
    def _parse_operator(cls, expression, params):
        # Single-key mapping {operator_name: payload} -> operator instance.
        k, v = list(expression.items())[0]
        op = Sections.OPERATORS[k].from_dict(v)
        return op.parse(parser=cls, params=params)

    @staticmethod
    def is_operator(key):
        # True when the key names a known section operator.
        return key in Sections.OPERATORS
|
gregmbi/polyaxon | core/polyaxon/config_reader/reader.py | #!/usr/bin/python
#
# Copyright 2018-2020 Polyaxon, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from collections import Mapping
from polyaxon.config_reader.spec import ConfigSpec
from polyaxon.config_reader.utils import deep_update
from polyaxon.exceptions import PolyaxonSchemaError
from polyaxon.utils.list_utils import to_list
def read(config_values, config_type=None):
    """Read an ordered list of configuration values and deep-merge them.

    Raises:
        PolyaxonSchemaError: when no values are given, or when a required
            config value cannot be read.
    """
    if not config_values:
        raise PolyaxonSchemaError(
            "Cannot read config_value: `{}`".format(config_values)
        )

    merged = {}
    for value in to_list(config_values, check_none=True):
        spec = ConfigSpec.get_from(value=value, config_type=config_type)
        spec.check_type()
        data = spec.read()
        if data and isinstance(data, Mapping):
            merged = deep_update(merged, data)
        elif spec.check_if_exists:
            # Note: the error reports the spec object, matching the original.
            raise PolyaxonSchemaError(
                "Cannot read config_value: `{}`".format(spec)
            )
    return merged
|
gregmbi/polyaxon | core/polyaxon/streams/tasks/logs.py | <reponame>gregmbi/polyaxon
#!/usr/bin/python
#
# Copyright 2018-2020 Polyaxon, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from datetime import datetime
from typing import List
from starlette import status
from starlette.exceptions import HTTPException
from polyaxon import settings
from polyaxon.polyboard.logging import V1Log, V1Logs
from polyaxon.streams.stores.async_manager import download_file, upload_data
async def upload_logs(run_uuid: str, logs: List[V1Log]):
    """Persist the run's logs to the configured artifacts store, chunk by chunk.

    Each chunk is stored under `<run_uuid>/logs/<timestamp-of-last-entry>`.

    Raises:
        HTTPException: 400 when no artifacts store is configured.
    """
    if not settings.AGENT_CONFIG.artifacts_store:
        raise HTTPException(
            detail="Run's logs was not collected, resource was not found.",
            status_code=status.HTTP_400_BAD_REQUEST,
        )
    for chunk in V1Logs.chunk_logs(logs):
        last_timestamp = datetime.timestamp(chunk.logs[-1].timestamp)
        await upload_data(
            subpath="{}/logs/{}".format(run_uuid, last_timestamp),
            data=chunk.to_dict(dump=True),
        )
async def download_logs_file(run_uuid: str, last_file: str) -> str:
    """Fetch a single stored logs file for the given run."""
    return await download_file("{}/logs/{}".format(run_uuid, last_file))
|
gregmbi/polyaxon | core/polyaxon/polypod/compiler/config.py | <filename>core/polyaxon/polypod/compiler/config.py
#!/usr/bin/python
#
# Copyright 2018-2020 Polyaxon, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import List
from polyaxon import settings
from polyaxon.containers.containers import (
get_default_init_container,
get_default_sidecar_container,
)
from polyaxon.exceptions import PolyaxonCompilerError
from polyaxon.polyflow import V1CompiledOperation, V1Init
from polyaxon.schemas.cli.agent_config import AgentConfig
class PolypodConfig:
    """Resolver for the pod-level configuration of a compiled operation.

    Collects from the agent configuration everything Polypod needs to build
    the run's pod: referenced connections, the artifacts store, secrets and
    config maps, and the sidecar/init container definitions.
    """

    def __init__(self, internal_auth: bool = False):
        # All resolved state starts empty; `resolve()` populates it.
        self.polyaxon_sidecar = None
        self.polyaxon_init = None
        self.namespace = None
        self.secrets = None
        self.config_maps = None
        self.connection_by_names = {}
        self.artifacts_store = None
        self.internal_auth = internal_auth

    def resolve(
        self, compiled_operation: V1CompiledOperation, agent_config: AgentConfig = None
    ):
        """Populate this config from `agent_config` (defaults to global settings).

        Raises:
            PolyaxonCompilerError: when no agent configuration is available.
        """
        agent_config = agent_config or settings.AGENT_CONFIG
        if not agent_config:
            raise PolyaxonCompilerError("Polypod is not configured.")

        self._resolve_run_connections(
            compiled_operation=compiled_operation, agent_config=agent_config
        )
        self.artifacts_store = agent_config.artifacts_store

        self.secrets = agent_config.secrets
        self.config_maps = agent_config.config_maps

        # Fall back to the default container specs when the agent does not
        # override them.
        self.polyaxon_sidecar = agent_config.sidecar or get_default_sidecar_container()
        self.polyaxon_init = agent_config.init or get_default_init_container()
        self.namespace = agent_config.namespace

    def _resolve_run_connections(
        self, compiled_operation: V1CompiledOperation, agent_config: AgentConfig
    ):
        # Mutates self.connection_by_names based on the operation's runtime kind.
        if agent_config.artifacts_store:  # Resolve default artifacts store
            self.connection_by_names[
                agent_config.artifacts_store.name
            ] = agent_config.artifacts_store

        if compiled_operation.is_job_run or compiled_operation.is_service_run:
            self._resolve_replica_connections(
                compiled_operation=compiled_operation, agent_config=agent_config
            )
        if compiled_operation.is_dag_run:
            self._resolve_connections(
                connections=compiled_operation.run.connections,
                agent_config=agent_config,
            )
        if compiled_operation.is_notifier:
            self._resolve_notification_connections(
                connections=compiled_operation.run.connections,
                agent_config=agent_config,
            )

    def _get_init_connections(self, init: List[V1Init]):
        # Connection names referenced by init specs (entries without one are skipped).
        init = init or []
        return [i.connection for i in init if i.connection]

    def _resolve_connections(self, connections: List[str], agent_config: AgentConfig):
        # Look up each named connection in the agent catalog; an undeclared
        # name surfaces as a KeyError.
        if connections:
            connection_by_names = {
                c: agent_config.connections_by_names[c] for c in connections
            }
            self.connection_by_names.update(connection_by_names)

    def _resolve_notification_connections(
        self, connections: List[str], agent_config: AgentConfig
    ):
        # Same as _resolve_connections but against the notification catalog.
        if connections:
            connection_by_names = {
                c: agent_config.notification_connections_by_names[c]
                for c in connections
            }
            self.connection_by_names.update(connection_by_names)

    def _resolve_replica_connections(
        self, compiled_operation: V1CompiledOperation, agent_config: AgentConfig
    ):
        # Jobs/services reference connections both directly and via init specs.
        self._resolve_connections(
            connections=compiled_operation.run.connections, agent_config=agent_config
        )
        init_connections = self._get_init_connections(compiled_operation.run.init)
        self._resolve_connections(
            connections=init_connections, agent_config=agent_config
        )
|
gregmbi/polyaxon | core/polyaxon/containers/names.py | MAIN_JOB_CONTAINER = "polyaxon-main"
# Canonical container names used when building run pods. Names containing
# "{}" are formatted with a per-container suffix at build time.
INIT_AUTH_CONTAINER = "polyaxon-init-auth"
INIT_DOCKERFILE_CONTAINER = "polyaxon-init-dockerfile-{}"
INIT_GIT_CONTAINER = "polyaxon-init-git-{}"
INIT_ARTIFACTS_CONTAINER = "polyaxon-init-artifacts-{}"
INIT_CONTAINER = "polyaxon-init-{}"
SIDECAR_CONTAINER = "polyaxon-sidecar"
# Container names mandated by the Kubeflow TFJob/PyTorchJob operators.
TFJOBS_CONTAINER = "tensorflow"
PYTORCHJOBS_CONTAINER = "pytorch"
|
gregmbi/polyaxon | examples/in_cluster/tensorflow/cifar10/run.py | <filename>examples/in_cluster/tensorflow/cifar10/run.py
import os
import argparse
from cifar10_main import train
from generate_cifar10_tfrecords import generate_data
# Polyaxon
from polyaxon_client.tracking import get_data_paths, get_outputs_path
if __name__ == '__main__':
    # CLI for training the cifar10 model under Polyaxon: parse hyperparams,
    # validate combinations, generate TFRecords data if missing, then train.
    parser = argparse.ArgumentParser()
    parser.add_argument(
        '--variable-strategy',
        choices=['CPU', 'GPU'],
        type=str,
        default='CPU',
        help='Where to locate variable operations')
    parser.add_argument(
        '--num-gpus',
        type=int,
        default=0,
        help='The number of gpus used. Uses only CPU if set to 0.')
    parser.add_argument(
        '--num-layers',
        type=int,
        default=44,
        help='The number of layers of the model.')
    parser.add_argument(
        '--train-steps',
        type=int,
        default=80000,
        help='The number of steps to use for training.')
    parser.add_argument(
        '--train-batch-size',
        type=int,
        default=128,
        help='Batch size for training.')
    parser.add_argument(
        '--eval-batch-size',
        type=int,
        default=100,
        help='Batch size for validation.')
    parser.add_argument(
        '--momentum',
        type=float,
        default=0.9,
        help='Momentum for MomentumOptimizer.')
    parser.add_argument(
        '--weight-decay',
        type=float,
        default=2e-4,
        help='Weight decay for convolutions.')
    parser.add_argument(
        '--learning-rate',
        type=float,
        default=0.1,
        help="""\
        This is the initial learning rate value. The learning rate will decrease
        during training. For more details check the model_fn implementation in
        this file.\
        """)
    parser.add_argument(
        '--use-distortion-for-training',
        # Fix: argparse's `type=bool` treats ANY non-empty string (even
        # "False") as True; parse the value explicitly instead.
        type=lambda v: str(v).lower() in ('true', '1', 'yes'),
        default=True,
        help='If doing image distortion for training.')
    parser.add_argument(
        '--sync',
        action='store_true',
        default=False,
        help="""\
        If present when running in a distributed environment will run on sync mode.\
        """)
    parser.add_argument(
        '--num-intra-threads',
        type=int,
        default=0,
        help="""\
        Number of threads to use for intra-op parallelism. When training on CPU
        set to 0 to have the system pick the appropriate number or alternatively
        set it to the number of physical CPU cores.\
        """)
    parser.add_argument(
        '--num-inter-threads',
        type=int,
        default=0,
        help="""\
        Number of threads to use for inter-op parallelism. If set to 0, the
        system will pick an appropriate number.\
        """)
    parser.add_argument(
        '--data-format',
        type=str,
        default=None,
        help="""\
        If not set, the data format best for the training device is used.
        Allowed values: channels_first (NCHW) channels_last (NHWC).\
        """)
    parser.add_argument(
        '--log-device-placement',
        action='store_true',
        default=False,
        help='Whether to log device placement.')
    parser.add_argument(
        '--batch-norm-decay',
        type=float,
        default=0.997,
        help='Decay for batch norm.')
    parser.add_argument(
        '--batch-norm-epsilon',
        type=float,
        default=1e-5,
        help='Epsilon for batch norm.')
    args = parser.parse_args()

    # Validate argument combinations before touching any data.
    if args.num_gpus < 0:
        raise ValueError(
            'Invalid GPU count: \"--num-gpus\" must be 0 or a positive integer.')
    if args.num_gpus == 0 and args.variable_strategy == 'GPU':
        raise ValueError('num-gpus=0, CPU must be used as parameter server. Set'
                         '--variable-strategy=CPU.')
    if (args.num_layers - 2) % 6 != 0:
        raise ValueError('Invalid --num-layers parameter.')
    if args.num_gpus != 0 and args.train_batch_size % args.num_gpus != 0:
        raise ValueError('--train-batch-size must be multiple of --num-gpus.')
    if args.num_gpus != 0 and args.eval_batch_size % args.num_gpus != 0:
        raise ValueError('--eval-batch-size must be multiple of --num-gpus.')

    # Polyaxon: resolve the data path mounted for this run.
    data_dir = os.path.join(list(get_data_paths().values())[0], 'cifar-10-data')

    # We create data for the project if it does not exists
    if not os.path.exists(os.path.join(data_dir, 'train.tfrecords')):
        generate_data(data_dir)

    # Polyaxon: write checkpoints/summaries to the run's outputs path.
    train(job_dir=get_outputs_path(), data_dir=data_dir, **vars(args))
|
gregmbi/polyaxon | core/polyaxon/polyflow/io/io.py | #!/usr/bin/python
#
# Copyright 2018-2020 Polyaxon, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import Any, List
import polyaxon_sdk
from marshmallow import ValidationError, fields, validate, validates_schema
from polyaxon import types
from polyaxon.exceptions import PolyaxonSchemaError
from polyaxon.parser import parser
from polyaxon.schemas.base import BaseCamelSchema, BaseConfig
def validate_io_value(
    name: str,
    iotype: str,
    value: Any,
    default: Any,
    is_optional: bool,
    is_list: bool,
    options: List[Any],
    parse: bool = True,
):
    """Validate `value` for the IO `name` against its declared type.

    Delegates type-checking/parsing to the parser's TYPE_MAPPING. When
    `parse` is False, the original (unparsed) value is returned, falling
    back to `default` when the value is None.

    Raises:
        ValidationError: if the value cannot be parsed as `iotype`.
    """
    try:
        parsed_value = parser.TYPE_MAPPING[iotype](
            key=name,
            value=value,
            is_list=is_list,
            is_optional=is_optional,
            default=default,
            options=options,
        )
        if parse:
            return parsed_value
        # Return the original value, the parser will return specs sometimes
        if value is not None:
            return value
        return default
    except PolyaxonSchemaError as e:
        # Chain the original error so the root cause stays in the traceback.
        raise ValidationError(
            "Could not parse value `%s`, an error was encountered: %s" % (value, e)
        ) from e
def validate_io(name, iotype, value, is_optional, is_list, is_flag, options):
    """Validate the declaration of a single IO entry.

    Checks that any default value parses as the declared type, that a
    default value is only set on optional IOs, and that only boolean IOs
    can be flags.

    Raises:
        ValidationError: on any violated constraint.
    """
    if iotype and value:
        validate_io_value(
            name=name,
            iotype=iotype,
            value=value,
            default=None,
            is_list=is_list,
            is_optional=is_optional,
            options=options,
        )

    if not is_optional and value:
        raise ValidationError(
            "IO `{}` is not optional and has default value `{}`. "
            "Please either make it optional or remove the default value.".format(
                name, value
            )
        )

    if is_flag and iotype != types.BOOL:
        # Fixed typo in the error message ("iut" -> "it").
        raise ValidationError(
            "IO type `{}` cannot be a flag, it must be a `{}`".format(
                iotype, types.BOOL
            )
        )
class IOSchema(BaseCamelSchema):
    """Schema for a component input/output declaration (camelCase payloads)."""

    name = fields.Str(required=True)
    description = fields.Str(allow_none=True)
    # Exposed as "type" in payloads; restricted to the known polyaxon types.
    iotype = fields.Str(
        allow_none=True, data_key="type", validate=validate.OneOf(types.VALUES)
    )
    # Default/declared value; checked against `iotype` in validate_io below.
    value = fields.Raw(allow_none=True)
    is_optional = fields.Bool(allow_none=True)
    is_list = fields.Bool(allow_none=True)
    # Flags must be boolean IOs (enforced by validate_io).
    is_flag = fields.Bool(allow_none=True)
    delay_validation = fields.Bool(allow_none=True)
    options = fields.List(fields.Raw(), allow_none=True)

    @staticmethod
    def schema_config():
        return V1IO

    @validates_schema
    def validate_io(self, values, **kwargs):
        # Cross-field validation: type/value/optional/flag consistency.
        validate_io(
            name=values.get("name"),
            iotype=values.get("iotype"),
            value=values.get("value"),
            is_list=values.get("is_list"),
            is_optional=values.get("is_optional"),
            is_flag=values.get("is_flag"),
            options=values.get("options"),
        )
class V1IO(BaseConfig, polyaxon_sdk.V1IO):
    """Config object for a component IO; wraps the SDK model with validation."""

    SCHEMA = IOSchema
    IDENTIFIER = "io"
    # Attributes dropped from the serialized form when unset.
    REDUCED_ATTRIBUTES = [
        "description",
        "type",
        "value",
        "isOptional",
        "isFlag",
        "isList",
        "delayValidation",
        "options",
    ]

    def validate_value(self, value: Any, parse: bool = True):
        """Validate `value` against this IO's declared type.

        Untyped IOs (iotype is None) accept any value unchanged.
        """
        if self.iotype is None:
            return value
        return validate_io_value(
            name=self.name,
            iotype=self.iotype,
            value=value,
            default=self.value,
            is_list=self.is_list,
            is_optional=self.is_optional,
            options=self.options,
            parse=parse,
        )

    def get_repr_from_value(self, value):
        """Return a light name/type/value dict used to build the hash cache."""
        value = self.validate_value(value=value, parse=False)
        io_dict = self.to_light_dict(include_attrs=["name", "type"])
        io_dict["value"] = value
        return io_dict

    def get_repr(self):
        """Return a light name/type/value dict used to build the hash cache."""
        io_dict = self.to_light_dict(include_attrs=["name", "type", "value"])
        return io_dict
|
gregmbi/polyaxon | core/polyaxon/polyflow/schedule/interval.py | <filename>core/polyaxon/polyflow/schedule/interval.py
#!/usr/bin/python
#
# Copyright 2018-2020 Polyaxon, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import polyaxon_sdk
from marshmallow import fields, validate
from polyaxon.schemas.base import BaseCamelSchema, BaseConfig
class IntervalScheduleSchema(BaseCamelSchema):
    """Schema for fixed-frequency schedules running between start_at and
    an optional end_at."""

    kind = fields.Str(allow_none=True, validate=validate.Equal("interval"))
    start_at = fields.DateTime(required=True)
    end_at = fields.DateTime(allow_none=True)
    frequency = fields.TimeDelta(required=True)
    # NOTE(review): presumably gates a run on the previous run's outcome - confirm.
    depends_on_past = fields.Bool(allow_none=True)

    @staticmethod
    def schema_config():
        return V1IntervalSchedule
class V1IntervalSchedule(BaseConfig, polyaxon_sdk.V1IntervalSchedule):
    """Config wrapper for the interval schedule SDK model."""

    SCHEMA = IntervalScheduleSchema
    IDENTIFIER = "interval"
|
gregmbi/polyaxon | core/polyaxon/polypod/mixins.py | <reponame>gregmbi/polyaxon<gh_stars>0
#!/usr/bin/python
#
# Copyright 2018-2020 Polyaxon, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from polyaxon.k8s.custom_resources import operation
from polyaxon.polyflow import V1RunKind
class BaseMixin:
    """Shared operation CRD coordinates and the common `part-of` k8s label
    for all polyaxon run kinds."""

    SPEC_KIND = operation.KIND
    API_VERSION = operation.API_VERSION
    PLURAL = operation.PLURAL
    GROUP = operation.GROUP
    K8S_LABELS_PART_OF = "polyaxon-runs"
class JobMixin(BaseMixin):
    # k8s name/component labels for batch job runs.
    K8S_LABELS_NAME = V1RunKind.JOB
    K8S_LABELS_COMPONENT = "polyaxon-jobs"
class NotifierMixin(BaseMixin):
    # k8s name/component labels for notifier runs.
    K8S_LABELS_NAME = V1RunKind.NOTIFIER
    K8S_LABELS_COMPONENT = "polyaxon-notifiers"
class ServiceMixin(BaseMixin):
    # k8s name/component labels for service runs.
    K8S_LABELS_NAME = V1RunKind.SERVICE
    K8S_LABELS_COMPONENT = "polyaxon-services"
class TFJobMixin(BaseMixin):
    # k8s name/component labels for TensorFlow (TFJob) runs.
    K8S_LABELS_NAME = V1RunKind.TFJOB
    K8S_LABELS_COMPONENT = "polyaxon-tfjobs"
class PytorchJobMixin(BaseMixin):
    # k8s name/component labels for PyTorch job runs.
    K8S_LABELS_NAME = V1RunKind.PYTORCHJOB
    K8S_LABELS_COMPONENT = "polyaxon-pytorch-jobs"
class MPIJobMixin(BaseMixin):
    # k8s name/component labels for MPI job runs.
    K8S_LABELS_NAME = V1RunKind.MPIJOB
    K8S_LABELS_COMPONENT = "polyaxon-mpi-jobs"
# Lookup of run kind -> mixin carrying its k8s labels/CRD metadata.
MIXIN_MAPPING = {
    V1RunKind.NOTIFIER: NotifierMixin,
    V1RunKind.JOB: JobMixin,
    V1RunKind.SERVICE: ServiceMixin,
    V1RunKind.TFJOB: TFJobMixin,
    V1RunKind.PYTORCHJOB: PytorchJobMixin,
    V1RunKind.MPIJOB: MPIJobMixin,
}
|
gregmbi/polyaxon | core/polyaxon/config_reader/spec.py | #!/usr/bin/python
#
# Copyright 2018-2020 Polyaxon, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import os
import sys

from collections.abc import Mapping

import yaml
from yaml.parser import ParserError  # noqa
from yaml.scanner import ScannerError  # noqa

from polyaxon.exceptions import PolyaxonSchemaError
class ConfigSpec(object):
    """Wraps a raw config source (mapping, file path, `file.py:obj` reference,
    or yaml/json stream) and knows how to read it into a mapping."""

    def __init__(self, value, config_type=None, check_if_exists=True):
        self.value = value
        self.config_type = config_type
        self.check_if_exists = check_if_exists

    @classmethod
    def get_from(cls, value, config_type=None):
        """Return `value` unchanged if already a ConfigSpec, otherwise wrap it."""
        if isinstance(value, cls):
            return value
        return cls(value=value, config_type=config_type)

    def check_type(self):
        """Reject untyped values that are neither mappings nor strings."""
        if self.config_type is None and not isinstance(self.value, (Mapping, str)):
            raise PolyaxonSchemaError(
                "Expects Mapping, string, or list of Mapping/string instances, "
                "received {} instead".format(type(self.value))
            )

    def read(self):
        """Resolve the wrapped value into config data.

        Resolution order: mapping as-is -> existing file -> python
        `file.py:obj` reference -> raw yaml/json stream.
        """
        if isinstance(self.value, Mapping):
            return self.value

        if os.path.isfile(self.value):
            return _read_from_file(self.value, self.config_type)

        if isinstance(self.value, str) and ".py" in self.value:
            _f_path, _f_module = _get_python_file_def(self.value)
            if _f_path and _f_module:
                return _read_from_python(_f_path, _f_module)

        try:
            return _read_from_stream(self.value)
        except (ScannerError, ParserError):
            raise PolyaxonSchemaError(
                "Received an invalid yaml stream: `{}`".format(self.value)
            )
def _read_from_stream(stream):
    """Parse a raw string as yaml first, falling back to json when the yaml
    parse yields nothing."""
    return _read_from_yml(stream, is_stream=True) or _read_from_json(
        stream, is_stream=True
    )
def _get_python_file_def(f_path):
results = f_path.split(":")
if len(results) != 2 or not results[1]:
return None, None
_f_path = results[0].strip("")
_module_name = results[1].strip("")
if not os.path.exists(_f_path):
raise PolyaxonSchemaError(
"Received non existing python file: `{}`".format(f_path)
)
if not _module_name:
raise PolyaxonSchemaError(
"Received an invalid python module: `{}`".format(f_path)
)
return _f_path, _module_name
def _import_py_module(f_path, f_module):
    """Load module `f_module` from the file at `f_path`.

    Reuses the entry already in sys.modules only when it was loaded from
    this exact file; otherwise a fresh module is created, registered, and
    executed.
    """
    import importlib.util

    spec = importlib.util.spec_from_file_location(f_module, f_path)
    # Only reuse the cached module if its __file__ matches this file,
    # so same-named modules from different paths do not collide.
    if sys.modules.get(spec.name) and sys.modules[
        spec.name
    ].__file__ == os.path.abspath(spec.origin):
        module = sys.modules[spec.name]
    else:
        module = importlib.util.module_from_spec(spec)
        # Register before exec so recursive imports can resolve the name.
        sys.modules[spec.name] = module
        spec.loader.exec_module(module)
    return module
def _read_from_python(f_path, f_module):
    """Import the python file at `f_path` and return its attribute `f_module`."""
    f_path = os.path.abspath(f_path)

    # Make sibling imports inside the target file resolvable.
    parent_dir = os.path.dirname(f_path)
    if parent_dir not in sys.path:
        sys.path.append(parent_dir)

    base_name, _ = os.path.splitext(os.path.basename(f_path))
    module = _import_py_module(f_path, base_name)
    return getattr(module, f_module)
def _read_from_file(f_path, file_type):
    """Read a config file, dispatching on its extension or explicit file_type.

    Raises:
        PolyaxonSchemaError: if the extension is not yaml or json.
    """
    _, ext = os.path.splitext(f_path)
    if ext in (".yml", ".yaml") or file_type in (".yml", ".yaml"):
        return _read_from_yml(f_path)
    elif ext == ".json" or file_type == ".json":
        return _read_from_json(f_path)
    # Fixed error message: the accepted json extension is `.json`.
    raise PolyaxonSchemaError(
        "Expects a file with extension: `.yml`, `.yaml`, or `.json`, "
        "received instead `{}`".format(ext)
    )
def _read_from_yml(f_path, is_stream=False):
    """Parse yaml from a raw string (is_stream=True) or from a file path.

    Raises:
        PolyaxonSchemaError: when the yaml is invalid.
    """
    try:
        if is_stream:
            return yaml.safe_load(f_path)
        with open(f_path) as f:
            return yaml.safe_load(f)
    except (ScannerError, ParserError) as e:
        # Chain the parser error for easier debugging, consistent with
        # _read_from_json.
        raise PolyaxonSchemaError(
            "Received non valid yaml: `{}`".format(f_path)
        ) from e
def _read_from_json(f_path, is_stream=False):
if is_stream:
try:
return json.loads(f_path)
except ValueError as e:
raise PolyaxonSchemaError("Json error: %s" % e) from e
try:
return json.loads(open(f_path).read())
except ValueError as e:
raise PolyaxonSchemaError("Json error: %s" % e) from e
|
gregmbi/polyaxon | core/polyaxon/proxies/schemas/api/uwsgi.py | #!/usr/bin/python
#
# Copyright 2018-2020 Polyaxon, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from polyaxon.proxies.schemas.base import get_config
UWSGI_OPTIONS = """
location / {{
include /etc/nginx/uwsgi_params;
uwsgi_pass <PASSWORD>;
uwsgi_param Host $host;
uwsgi_param X-Real-IP $remote_addr;
uwsgi_param X-Forwarded-For $proxy_add_x_forwarded_for;
uwsgi_param X-Forwarded-Proto $http_x_forwarded_proto;
}}
"""
def get_uwsgi_config():
    """Return the rendered nginx config block proxying to the uwsgi backend."""
    return get_config(options=UWSGI_OPTIONS, indent=0)
|
gregmbi/polyaxon | core/tests/test_notifiers/test_hipchat_webhook.py | #!/usr/bin/python
#
# Copyright 2018-2020 Polyaxon, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from tests.test_notifiers.test_webhook_notification import TestWebHookNotification
from polyaxon.connections.kinds import V1ConnectionKind
# pylint:disable=protected-access
from polyaxon.exceptions import PolyaxonNotificationException
from polyaxon.notifiers.hipchat_webhook import HipChatWebHookNotifier
class TestHipChatWebHookNotifier(TestWebHookNotification):
    """Tests HipChat-specific attributes and payload preparation, reusing
    the generic webhook notification test suite."""

    webhook = HipChatWebHookNotifier

    def test_attrs(self):
        assert self.webhook.notification_key == V1ConnectionKind.HIPCHAT
        assert self.webhook.name == "HipChat WebHook"

    def test_prepare(self):
        # _prepare requires a context carrying at least a "message" key.
        with self.assertRaises(PolyaxonNotificationException):
            self.webhook._prepare(None)
        with self.assertRaises(PolyaxonNotificationException):
            self.webhook._prepare({})

        context = {"message": "message"}
        assert self.webhook._prepare(context) == {
            "message": context.get("message"),
            "message_format": context.get("message_format", "html"),
            "color": context.get("color"),
            "from": "Polyaxon",
            "attach_to": context.get("attach_to"),
            "notify": context.get("notify", False),
            "card": context.get("card"),
        }
del TestWebHookNotification
|
gregmbi/polyaxon | core/polyaxon/managers/ignore.py | <reponame>gregmbi/polyaxon
#!/usr/bin/python
#
# Copyright 2018-2020 Polyaxon, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import re
from collections import namedtuple
from pathlib import PurePath
from polyaxon.logger import logger
from polyaxon.managers.base import BaseConfigManager
from polyaxon.utils import constants
from polyaxon.utils.path_utils import unix_style_path
class Pattern(namedtuple("Pattern", "pattern is_exclude re")):
    """A single ignore pattern: raw text, exclude/include flag, compiled regex."""

    @staticmethod
    def create(pattern):
        # A leading "!" negates the pattern (re-includes matching paths),
        # mirroring .gitignore semantics; a leading "\" escapes a literal "!".
        if pattern[0:1] == "!":
            is_exclude = False
            pattern = pattern[1:]
        else:
            if pattern[0:1] == "\\":
                pattern = pattern[1:]
            is_exclude = True
        return Pattern(
            pattern=pattern,
            is_exclude=is_exclude,
            re=re.compile(translate(pattern), re.IGNORECASE),
        )

    def match(self, path):
        # The translated regex ends with \Z, so this is a full-path match.
        return bool(self.re.match(path))
def translate(pat):
    """Translate a gitignore-style pattern into a regex source string.

    Supports `*` (any run of chars within one path segment), `?` (one
    char), character classes `[...]`/`[!...]`, `**` (any depth), and the
    gitignore anchoring rules for patterns with or without a `/`.
    """

    def _translate_segment():
        # pylint:disable=undefined-loop-variable
        # `segment` is bound by the enclosing for-loop below.
        if segment == "*":
            return "[^/]+"
        res = ""
        i, n = 0, len(segment)
        while i < n:
            c = segment[i : i + 1]
            i = i + 1
            if c == "*":
                res += "[^/]*"
            elif c == "?":
                res += "[^/]"
            elif c == "[":
                # Scan a [...] class; "!" negates, and "]" may appear
                # literally as the first class member.
                j = i
                if j < n and segment[j : j + 1] == "!":
                    j = j + 1
                if j < n and segment[j : j + 1] == "]":
                    j = j + 1
                while j < n and segment[j : j + 1] != "]":
                    j = j + 1
                if j >= n:
                    # Unterminated class: treat "[" as a literal char.
                    res += "\\["
                else:
                    stuff = segment[i:j].replace("\\", "\\\\")
                    i = j + 1
                    if stuff.startswith("!"):
                        stuff = "^" + stuff[1:]
                    elif stuff.startswith("^"):
                        stuff = "\\" + stuff
                    res += "[" + stuff + "]"
            else:
                res += re.escape(c)
        return res

    res = "(?ms)"

    if "/" not in pat[:-1]:
        # Patterns without an inner slash match at any directory depth.
        res += "(.*/)?"

    if pat.startswith("**/"):
        # Leading **/ means "in any directory".
        pat = pat[2:]
        res += "(.*/)?"

    if pat.startswith("/"):
        # A leading slash anchors the pattern at the root.
        pat = pat[1:]

    for i, segment in enumerate(pat.split("/")):
        if segment == "**":
            res += "(/.*)?"
            continue
        else:
            res += (re.escape("/") if i > 0 else "") + _translate_segment()

    if not pat.endswith("/"):
        res += "/?"

    return res + "\\Z"
class IgnoreManager(BaseConfigManager):
    """Manages the .polyaxonignore file in the current directory."""

    IS_GLOBAL = False
    CONFIG_FILE_NAME = ".polyaxonignore"

    @staticmethod
    def _is_empty_or_comment(line):
        # Blank lines and "#" comments carry no pattern.
        return not line or line.startswith("#")

    @staticmethod
    def _remove_trailing_spaces(line):
        """Remove trailing spaces unless they are quoted with a backslash."""
        while line.endswith(" ") and not line.endswith("\\ "):
            line = line[:-1]
        return line.replace("\\ ", " ")

    @classmethod
    def init_config(cls):
        cls.set_config(constants.DEFAULT_IGNORE_LIST, init=True)

    @classmethod
    def find_matching(cls, path, patterns):
        """Yield all matching patterns for path."""
        for pattern in patterns:
            if pattern.match(path):
                yield pattern

    @classmethod
    def is_ignored(cls, path, patterns):
        """Check whether a path is ignored. For directories, include a trailing slash."""
        # The last matching pattern wins, so later "!" patterns can
        # re-include paths excluded earlier (gitignore-style).
        status = None
        for pattern in cls.find_matching(path, patterns):
            status = pattern.is_exclude
        return status

    @classmethod
    def read_file(cls, ignore_file):
        # Yield normalized, non-empty pattern lines from the file object.
        for line in ignore_file:
            line = line.rstrip("\r\n")
            if cls._is_empty_or_comment(line):
                continue
            yield cls._remove_trailing_spaces(line)

    @classmethod
    def get_patterns(cls, ignore_file):
        return [Pattern.create(line) for line in cls.read_file(ignore_file)]

    @classmethod
    def get_config(cls):
        # A missing ignore file means "no patterns", not an error.
        config_filepath = cls.get_config_filepath()

        if not os.path.isfile(config_filepath):
            return []

        with open(config_filepath) as ignore_file:
            return cls.get_patterns(ignore_file)

    @classmethod
    def get_unignored_filepaths(cls):
        """Walk the current directory and return files not ignored by the config."""
        config = cls.get_config()
        unignored_files = []

        for root, dirs, files in os.walk("."):
            logger.debug("Root:%s, Dirs:%s", root, dirs)

            if cls.is_ignored(unix_style_path(root), config):
                dirs[:] = []  # prune: do not descend into ignored directories
                logger.debug("Ignoring directory : %s", root)
                continue

            for file_name in files:
                filepath = unix_style_path(os.path.join(root, file_name))
                if cls.is_ignored(filepath, config):
                    logger.debug("Ignoring file : %s", file_name)
                    continue

                unignored_files.append(os.path.join(root, file_name))

        return unignored_files

    @staticmethod
    def _matches_patterns(path, patterns):
        """Return True if the path matches any of the given glob patterns."""
        for glob in patterns:
            try:
                if PurePath(path).match(glob):
                    return True
            except TypeError:
                pass
        return False

    @classmethod
    def _ignore_path(cls, path, ignore_list=None, white_list=None):
        """Return whether a path should be ignored (matches the ignore list
        and is not white-listed)."""
        ignore_list = ignore_list or []
        white_list = white_list or []
        return cls._matches_patterns(path, ignore_list) and not cls._matches_patterns(
            path, white_list
        )

    @classmethod
    def get_value(cls, key):
        # Ignore configs are pattern lists, not key/value stores.
        pass
|
gregmbi/polyaxon | core/polyaxon/cli/init.py | <filename>core/polyaxon/cli/init.py<gh_stars>0
#!/usr/bin/python
#
# Copyright 2018-2020 Polyaxon, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import sys
import click
from polyaxon_sdk.rest import ApiException
from urllib3.exceptions import HTTPError
from polyaxon.cli.errors import handle_cli_error
from polyaxon.client import PolyaxonClient
from polyaxon.env_vars.getters import get_project_or_local
from polyaxon.logger import clean_outputs
from polyaxon.managers.ignore import IgnoreManager
from polyaxon.managers.project import ProjectManager
from polyaxon.polyaxonfile import PolyaxonFile
from polyaxon.utils import constants, indentation
from polyaxon.utils.formatting import Printer
from polyaxon.utils.path_utils import create_debug_file, create_init_file
def create_polyaxonfile():
    """Ensure a valid polyaxonfile exists: validate if present, else create one."""
    if os.path.isfile(constants.INIT_FILE_PATH):
        try:
            _ = PolyaxonFile(constants.INIT_FILE_PATH).specification  # noqa
            Printer.print_success("A valid polyaxonfile.yaml was found in the project.")
        except Exception as e:
            handle_cli_error(e, message="A Polyaxonfile was found but it is not valid.")
            sys.exit(1)
        return

    create_init_file(constants.INIT_FILE)
    # The helper does not raise on failure; verify on disk that the file exists.
    if not os.path.isfile(constants.INIT_FILE_PATH):
        Printer.print_error(
            "Something went wrong, init command did not create a file.\n"
            "Possible reasons: you don't have enough rights to create the file."
        )
        sys.exit(1)
    Printer.print_success(
        "{} was created successfully.".format(constants.INIT_FILE_PATH)
    )
def create_debug_polyaxonfile():
    """Ensure a polyaxonfile.debug.yaml exists, creating one when missing."""
    if os.path.isfile(constants.DEBUG_FILE_PATH):
        Printer.print_success("A polyaxonfile.debug.yaml was found in the project.")
        return

    create_debug_file(constants.DEBUG_FILE)
    # The helper does not report failure; check the filesystem ourselves.
    if not os.path.isfile(constants.DEBUG_FILE_PATH):
        Printer.print_error(
            "Something went wrong, init command did not create a debug file.\n"
            "Possible reasons: you don't have enough rights to create the file."
        )
        sys.exit(1)
    Printer.print_success(
        "{} was created successfully.".format(constants.DEBUG_FILE_PATH)
    )
@click.command()
@click.argument("project", type=str)
@click.option(
    "--polyaxonfile",
    is_flag=True,
    default=False,
    show_default=False,
    help="Init a polyaxon file in this project.",
)
@click.option(
    "--purge",
    is_flag=True,
    default=False,
    show_default=False,
    help="Purge previous configs before calling init.",
)
@clean_outputs
def init(project, polyaxonfile, purge):
    """Initialize a new polyaxonfile specification."""
    owner, project_name = get_project_or_local(project, is_cli=True)
    try:
        polyaxon_client = PolyaxonClient()
        project_config = polyaxon_client.projects_v1.get_project(owner, project_name)
    except (ApiException, HTTPError) as e:
        Printer.print_error(
            "Make sure you have a project with this name `{}`".format(project)
        )
        # Fixed grammar in the hint ("can a create" -> "can create a").
        handle_cli_error(
            e,
            message="You can create a new project with this command: "
            "polyaxon project create "
            "--name={} [--description=...] [--tags=...]".format(project_name),
        )
        sys.exit(1)

    if purge:
        ProjectManager.purge()
        IgnoreManager.purge()

    # Ask before overriding an already-initialized local project config.
    init_project = False
    if ProjectManager.is_initialized():
        local_project = ProjectManager.get_config()
        click.echo(
            "Warning! This project is already initialized with the following project:"
        )
        with indentation.indent(4):
            indentation.puts("User: {}".format(local_project.user))
            indentation.puts("Project: {}".format(local_project.name))
        if click.confirm(
            "Would you like to override this current config?", default=False
        ):
            init_project = True
    else:
        init_project = True

    if init_project:
        ProjectManager.purge()
        config = polyaxon_client.sanitize_for_serialization(project_config)
        ProjectManager.set_config(config, init=True)
        Printer.print_success("Project was initialized")
    else:
        Printer.print_header("Project config was not changed.")

    # Same confirm-before-override flow for the .polyaxonignore file.
    init_ignore = False
    if IgnoreManager.is_initialized():
        click.echo("Warning! Found a .polyaxonignore file.")
        if click.confirm("Would you like to override it?", default=False):
            init_ignore = True
    else:
        init_ignore = True

    if init_ignore:
        IgnoreManager.init_config()
        Printer.print_success("New .polyaxonignore file was created.")
    else:
        Printer.print_header(".polyaxonignore file was not changed.")

    if polyaxonfile:
        create_polyaxonfile()
    create_debug_polyaxonfile()
|
gregmbi/polyaxon | core/polyaxon/cli/projects.py | #!/usr/bin/python
#
# Copyright 2018-2020 Polyaxon, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
import click
from polyaxon_sdk import V1Project
from polyaxon_sdk.rest import ApiException
from urllib3.exceptions import HTTPError
from polyaxon.cli.errors import handle_cli_error
from polyaxon.cli.init import init as init_project
from polyaxon.client import ProjectClient
from polyaxon.env_vars.getters import get_project_or_local
from polyaxon.logger import clean_outputs
from polyaxon.managers.auth import AuthConfigManager
from polyaxon.managers.project import ProjectManager
from polyaxon.utils import cache
from polyaxon.utils.formatting import (
Printer,
dict_tabulate,
dict_to_tabulate,
get_meta_response,
list_dicts_to_tabulate,
)
def get_project_details(project):
    """Print a tabulated summary of a project, description first."""
    if project.description:
        Printer.print_header("Project description:")
        click.echo("{}\n".format(project.description))

    info = dict_to_tabulate(
        project.to_dict(), humanize_values=True, exclude_attrs=["description"]
    )
    Printer.print_header("Project info:")
    dict_tabulate(info)
@click.group()
@click.option("--project", "-p", type=str)
@click.pass_context
@clean_outputs
def project(ctx, project):  # pylint:disable=redefined-outer-name
    """Commands for projects."""
    # `create` and `ls` do not operate on a specific project, so the
    # -p/--project value is only stored for the other subcommands.
    if ctx.invoked_subcommand not in ["create", "ls"]:
        ctx.obj = ctx.obj or {}
        ctx.obj["project"] = project
@project.command()
@click.option(
    "--name",
    required=True,
    type=str,
    help="Name of the project, must be unique for the owner namespace.",
)
@click.option(
    "--owner",
    type=str,
    help="Name of the owner/namespace, "
    "if not provided it will default to the namespace of the current user.",
)
@click.option("--description", type=str, help="Description of the project.")
@click.option(
    "--private", is_flag=True, help="Set the visibility of the project to private."
)
@click.option("--init", is_flag=True, help="Initialize the project after creation.")
@click.pass_context
@clean_outputs
def create(ctx, name, owner, description, private, init):
    """Create a new project.

    Uses [Caching](/references/polyaxon-cli/#caching)

    Example:

    \b
    ```bash
    $ polyaxon project create --name=cats-vs-dogs --description="Image Classification with DL"
    ```
    """
    # Fall back to the logged-in user's namespace when --owner is not given.
    owner = owner or AuthConfigManager.get_value("username")
    if not owner:
        Printer.print_error(
            "Please login first or provide a valid owner --owner. "
            "`polyaxon login --help`"
        )
        sys.exit(1)

    try:
        project_config = V1Project(
            name=name, description=description, is_public=not private
        )
        _project = ProjectClient(owner=owner).create(project_config)
    except (ApiException, HTTPError) as e:
        handle_cli_error(e, message="Could not create project `{}`.".format(name))
        sys.exit(1)
    Printer.print_success(
        "Project `{}` was created successfully.".format(_project.name)
    )
    if init:
        # Reset the shared context object before delegating to `polyaxon init`.
        ctx.obj = {}
        ctx.invoke(init_project, project=name)
@project.command()
@click.option(
    "--owner",
    type=str,
    help="Name of the owner/namespace, "
    "if not provided it will default to the namespace of the current user.",
)
@click.option("--limit", type=int, help="To limit the list of projects.")
@click.option("--offset", type=int, help="To offset the list of projects.")
@clean_outputs
def ls(owner, limit, offset):
    """List projects.

    Uses [Caching](/references/polyaxon-cli/#caching)
    """
    owner = owner or AuthConfigManager.get_value("username")
    if not owner:
        Printer.print_error(
            "Please login first or provide a valid owner --owner. "
            "`polyaxon login --help`"
        )
        sys.exit(1)

    try:
        polyaxon_client = ProjectClient(owner=owner)
        response = polyaxon_client.list(limit=limit, offset=offset)
    except (ApiException, HTTPError) as e:
        handle_cli_error(e, message="Could not get list of projects.")
        sys.exit(1)

    # Pagination metadata is printed before the result rows.
    meta = get_meta_response(response)
    if meta:
        Printer.print_header("Projects for current user")
        Printer.print_header("Navigation:")
        dict_tabulate(meta)
    else:
        Printer.print_header("No projects found for current user")

    objects = list_dicts_to_tabulate(
        [o.to_dict() for o in response.results],
        humanize_values=True,
        exclude_attrs=["uuid", "description"],
    )
    if objects:
        Printer.print_header("Projects:")
        dict_tabulate(objects, is_list_dict=True)
@project.command()
@click.pass_context
@clean_outputs
def get(ctx):
    """Get info for current project, by project_name, or user/project_name.

    Uses [Caching](/references/polyaxon-cli/#caching)

    Examples:

    To get current project:

    \b
    ```bash
    $ polyaxon project get
    ```

    To get a project by name

    \b
    ```bash
    $ polyaxon project get user/project
    ```
    """
    owner, project_name = get_project_or_local(ctx.obj.get("project"), is_cli=True)
    try:
        polyaxon_client = ProjectClient(owner=owner, project=project_name)
        polyaxon_client.refresh_data()
        config = polyaxon_client.client.sanitize_for_serialization(
            polyaxon_client.project_data
        )
        # Cache the fetched project locally for subsequent commands.
        cache.cache(config_manager=ProjectManager, config=config)
    except (ApiException, HTTPError) as e:
        handle_cli_error(e, message="Could not get project `{}`.".format(project_name))
        sys.exit(1)

    get_project_details(polyaxon_client.project_data)
@project.command()
@click.pass_context
@clean_outputs
def delete(ctx):
    """Delete project.

    Uses [Caching](/references/polyaxon-cli/#caching)
    """
    owner, project_name = get_project_or_local(ctx.obj.get("project"), is_cli=True)

    # Fixed user-facing typos: "Are sure you" -> "Are you sure you",
    # "Existing" -> "Exiting", "was delete" -> "was deleted".
    if not click.confirm(
        "Are you sure you want to delete project `{}/{}`".format(owner, project_name)
    ):
        click.echo("Exiting without deleting project.")
        sys.exit(1)

    try:
        polyaxon_client = ProjectClient(owner=owner, project=project_name)
        response = polyaxon_client.delete()
        # Drop the local cache when the deleted project is the cached one.
        local_project = ProjectManager.get_config()
        if local_project and (owner, project_name) == (
            local_project.user,
            local_project.name,
        ):
            # Purge caching
            ProjectManager.purge()
    except (ApiException, HTTPError) as e:
        handle_cli_error(
            e, message="Could not delete project `{}/{}`.".format(owner, project_name)
        )
        sys.exit(1)

    if response.status_code == 204:
        Printer.print_success(
            "Project `{}/{}` was deleted successfully".format(owner, project_name)
        )
@project.command()
@click.option(
    "--name", type=str, help="Name of the project, must be unique for the same user."
)
@click.option("--description", type=str, help="Description of the project.")
@click.option(
    "--private", type=bool, help="Set the visibility of the project to private/public."
)
@click.pass_context
@clean_outputs
def update(ctx, name, description, private):
    """Update project.

    Uses [Caching](/references/polyaxon-cli/#caching)

    Example:

    \b
    ```bash
    $ polyaxon update foobar --description="Image Classification with DL using TensorFlow"
    ```

    \b
    ```bash
    $ polyaxon update mike1/foobar --description="Image Classification with DL using TensorFlow"
    ```

    \b
    ```bash
    $ polyaxon update --tags="foo, bar"
    ```
    """
    owner, project_name = get_project_or_local(ctx.obj.get("project"), is_cli=True)

    update_dict = {}
    if name:
        update_dict["name"] = name
    if description:
        update_dict["description"] = description
    if private is not None:
        update_dict["is_public"] = not private
    if not update_dict:
        Printer.print_warning("No argument was provided to update the project.")
        sys.exit(1)

    try:
        # BUGFIX: scope the client to the project being updated; previously
        # only the owner was passed (inconsistent with `get`/`delete`).
        polyaxon_client = ProjectClient(owner=owner, project=project_name)
        response = polyaxon_client.update(update_dict)
    except (ApiException, HTTPError) as e:
        handle_cli_error(
            e, message="Could not update project `{}`.".format(project_name)
        )
        sys.exit(1)

    Printer.print_success("Project updated.")
    get_project_details(response)
|
gregmbi/polyaxon | core/polyaxon/schemas/cli/cli_config.py | <reponame>gregmbi/polyaxon<filename>core/polyaxon/schemas/cli/cli_config.py
#!/usr/bin/python
#
# Copyright 2018-2020 Polyaxon, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from marshmallow import fields
from polyaxon.schemas.api.log_handler import LogHandlerSchema
from polyaxon.schemas.base import BaseConfig, BaseSchema
class CliConfigurationSchema(BaseSchema):
    """Schema for the locally persisted CLI configuration."""

    # NOTE(review): appears to count version checks (to throttle them) - confirm.
    check_count = fields.Int(allow_none=True)
    current_version = fields.Str(allow_none=True)
    # Per-service version constraints reported by the server; consumed by
    # CliConfigurationConfig.min_version / latest_version.
    server_versions = fields.Dict(allow_none=True)
    log_handler = fields.Nested(LogHandlerSchema, allow_none=True)

    @staticmethod
    def schema_config():
        return CliConfigurationConfig
class CliConfigurationConfig(BaseConfig):
    """Locally persisted CLI state: version-check counters and the version
    bounds advertised by the server."""

    SCHEMA = CliConfigurationSchema
    IDENTIFIER = "cli"
    MIN = "0.0.0"
    LATEST = "9.9.9"

    def __init__(
        self,
        check_count=0,
        current_version=None,
        server_versions=None,
        log_handler=None,
    ):
        self.check_count = check_count
        self.current_version = current_version
        self.server_versions = server_versions
        self.log_handler = log_handler

    def _get_cli_version_field(self, field, fallback):
        # Fall back when the server reported no cli version info at all.
        if not self.server_versions or "cli" not in self.server_versions:
            return fallback
        cli_versions = self.server_versions["cli"] or {}
        return cli_versions.get(field, fallback)

    @property
    def min_version(self):
        """Lowest CLI version the server still supports."""
        return self._get_cli_version_field("min_version", self.MIN)

    @property
    def latest_version(self):
        """Newest CLI version known to the server."""
        return self._get_cli_version_field("latest_version", self.LATEST)
|
gregmbi/polyaxon | core/polyaxon/k8s/monitor.py | <filename>core/polyaxon/k8s/monitor.py
#!/usr/bin/python
#
# Copyright 2018-2020 Polyaxon, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from polyaxon.k8s.events import get_container_status, get_container_statuses_by_name
from polyaxon.k8s.pods import PodLifeCycle
def is_container_terminated(status, container_id):
    """Return the ``terminated`` state entry for *container_id*, if any.

    Looks up the container's status inside a pod-status dict and returns
    whatever sits under ``state.terminated`` — falsy/None when the
    container is absent or has not terminated.
    """
    by_name = get_container_statuses_by_name(
        status.get("container_statuses") or []
    )
    container_status = get_container_status(by_name, (container_id,)) or {}
    return container_status.get("state", {}).get("terminated")
def is_pod_running(k8s_manager, pod_id, container_id):
    """Return True while the pod is alive and the container has not terminated.

    Reads the pod status from the Kubernetes API and treats the pod as
    "running" when its phase is RUNNING, PENDING, or CONTAINER_CREATING
    and *container_id* has no ``terminated`` state entry.
    """
    pod_event = k8s_manager.k8s_api.read_namespaced_pod_status(
        pod_id, k8s_manager.namespace
    ).to_dict()
    pod_status = pod_event.get("status", {})
    alive_phases = {
        PodLifeCycle.RUNNING,
        PodLifeCycle.PENDING,
        PodLifeCycle.CONTAINER_CREATING,
    }
    terminated = is_container_terminated(
        status=pod_status, container_id=container_id
    )
    return pod_status.get("phase") in alive_phases and not terminated
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.