Dataset schema (one row per file; ⌀ marks nullable columns):

| column | dtype | values / range |
|---|---|---|
| hexsha | string | length 40 |
| size | int64 | 4 to 1.02M |
| ext | string | 8 classes |
| lang | string | 1 class |
| max_stars_repo_path | string | length 4 to 209 |
| max_stars_repo_name | string | length 5 to 121 |
| max_stars_repo_head_hexsha | string | length 40 |
| max_stars_repo_licenses | list | length 1 to 10 |
| max_stars_count | int64 | 1 to 191k ⌀ |
| max_stars_repo_stars_event_min_datetime | string | length 24 ⌀ |
| max_stars_repo_stars_event_max_datetime | string | length 24 ⌀ |
| max_issues_repo_path | string | length 4 to 209 |
| max_issues_repo_name | string | length 5 to 121 |
| max_issues_repo_head_hexsha | string | length 40 |
| max_issues_repo_licenses | list | length 1 to 10 |
| max_issues_count | int64 | 1 to 67k ⌀ |
| max_issues_repo_issues_event_min_datetime | string | length 24 ⌀ |
| max_issues_repo_issues_event_max_datetime | string | length 24 ⌀ |
| max_forks_repo_path | string | length 4 to 209 |
| max_forks_repo_name | string | length 5 to 121 |
| max_forks_repo_head_hexsha | string | length 40 |
| max_forks_repo_licenses | list | length 1 to 10 |
| max_forks_count | int64 | 1 to 105k ⌀ |
| max_forks_repo_forks_event_min_datetime | string | length 24 ⌀ |
| max_forks_repo_forks_event_max_datetime | string | length 24 ⌀ |
| content | string | length 4 to 1.02M |
| avg_line_length | float64 | 1.07 to 66.1k |
| max_line_length | int64 | 4 to 266k |
| alphanum_fraction | float64 | 0.01 to 1 |
hexsha: a3fe9c901be68d7a575fda5944667c98f252847d | size: 25,500 | ext: py | lang: Python
max_stars: hw2/stud/utils.py @ LeonardoEmili/Aspect-Based-Sentiment-Analysis (head d6a37132931401c1a6d4a53208055cc507e2a7a8, licenses ["MIT"]) | count: 6 | 2021-09-19T20:35:54.000Z to 2022-03-20T08:07:30.000Z
max_issues: hw2/stud/utils.py @ LeonardoEmili/Aspect-Based-Sentiment-Analysis (head d6a37132931401c1a6d4a53208055cc507e2a7a8, licenses ["MIT"]) | count: null | null to null
max_forks: hw2/stud/utils.py @ LeonardoEmili/Aspect-Based-Sentiment-Analysis (head d6a37132931401c1a6d4a53208055cc507e2a7a8, licenses ["MIT"]) | count: null | null to null
content:
from typing import *
from stud.constants import *
from stud.torch_utils import gpus, compute_scatter_mask
from tqdm.notebook import tqdm
from torchtext.vocab import build_vocab_from_iterator, Vocab, vocab
from collections import OrderedDict, namedtuple, Counter
from torch.nn.utils.rnn import pad_sequence
from torch.utils.data import Dataset
from dataclasses import dataclass, asdict, field
from transformers import PreTrainedTokenizerFast, logging
from pytorch_lightning.callbacks import ModelCheckpoint
from pytorch_lightning import Trainer
from pytorch_lightning.loggers import WandbLogger
from transformers import BertTokenizer
from operator import itemgetter
from random import sample
import math
import torchtext.vocab
import torch
import torch.nn as nn
import os
import nltk
# Set verbosity level to ERROR, ignoring warnings
logging.set_verbosity_error()
@dataclass
class HParams:
output_vocab: Vocab
input_dim: int = 768
batch_size: int = 256
hidden_dim: int = 256
dropout: float = 0.6
lr: float = 0.1
model_name: str = 'bert_lstm'
bert_name: str = 'bert-base-uncased'
cached_bert_path: str = 'model/bert-base-uncased'
layers_to_merge: List[int] = field(default_factory=lambda: [-1, -2, -3, -4])
strategy: str = 'cat'
epochs: int = 10
ner_model_path: Optional[str] = None
polarity_model_path: Optional[str] = None
device: str = 'cuda'
test_only: bool = False
extended_bio: bool = False
pos_embedding_dim: int = 30
w2v_embedding_dim: int = 300
sentence_encoder: str = 'lstm'
num_heads: int = 2
use_crf: bool = True
Sentence = namedtuple('Sentence', 'id spans categories polarities')
Span = namedtuple('Span', 'text tokens aspect polarity')
Token = namedtuple('Token', 'text lemma pos')
class Vectors(object):
'''
Extend torchtext.Vectors to support Word2Vec pretrained word embeddings:
https://pytorch.org/text/stable/_modules/torchtext/vocab.html#Vectors
'''
def __init__(
self,
name: str,
cache: Optional[str] = None,
url: Optional[str] = None,
unk_init: Optional[Callable[..., torch.Tensor]] = torch.Tensor.zero_,
max_vectors: Optional[int] = None,
train_vocab: Optional[Vocab] = None,
):
self.name = name
if name == 'Word2Vec':
url = WORD2VEC_BIN_PATH if url is None else url
self.vocab, self.vectors = load_pretrained_embeddings(
train_vocab=train_vocab, path=url,
words_limit=max_vectors, tag=name,
)
else:
# NOTE: the torchtext Vectors object built here is never stored, so
# non-Word2Vec embeddings are effectively discarded by this branch.
vocab.Vectors(name, cache, url, unk_init, max_vectors)
def __len__(self):
return len(self.vectors)
@staticmethod
def from_cached(path: str = WORD2VEC_CACHE_PATH):
return torch.load(path)
@property
def dim(self):
return self.vectors.shape[1]
@property
def itos(self):
return self.vocab.get_itos()
@property
def stoi(self):
return self.vocab.get_stoi()
def __getitem__(self, token: str):
return self.vectors[self.vocab[token]]
def get_vecs_by_tokens(
self,
tokens: List[str],
lower_case_backup: bool = False
):
to_reduce = False
if not isinstance(tokens, list):
tokens = [tokens]
to_reduce = True
if not lower_case_backup:
indices = [self[token] for token in tokens]
else:
indices = [self[token] if token in self.stoi
else self[token.lower()]
for token in tokens]
vecs = torch.stack(indices)
return vecs[0] if to_reduce else vecs
def build_vocab(
symbols: List[str],
specials: List[str] = [PAD_TOKEN],
min_freq: int = 1
) -> Vocab:
''' Returns a torchtext.Vocab object from input symbols. '''
vocab = build_vocab_from_iterator(
[symbols], specials=specials,
special_first=True, min_freq=min_freq
)
vocab.set_default_index(PADDING_INDEX)
return vocab
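# Hedged usage sketch (not part of the original module): `build_vocab` wraps torchtext's
# `build_vocab_from_iterator` and routes out-of-vocabulary lookups to the default index
# set via `set_default_index`. The token list below is made up.
def _example_build_vocab():
    v = build_vocab(['the', 'cat', 'sat', 'on', 'the', 'mat'])
    assert 'cat' in v
    assert v['never-seen-token'] == v.get_default_index()  # OOV tokens map to PADDING_INDEX
    return v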
def load_pretrained_embeddings(
train_vocab: Optional[Vocab] = None,
path: str = WORD2VEC_BIN_PATH,
words_limit: int = 1_000_000,
tag: str = 'Word2Vec',
delim: str = ' ',
) -> Tuple[Vocab, torch.Tensor]:
''' Loads pretrained embeddings from file and maps vocabulary words to vectors. '''
if tag == 'Word2Vec':
# Word2Vec embeddings ship as a binary file; convert them to a plain-text file for later use
decode_word2vec_binaries(path)
# Define the mapping to vectorize sentences and the embedding tensor
vocab_words = [PAD_TOKEN, UNK_TOKEN]
vectors_store = []
with open(f'{path}.txt', 'r') as f:
if tag == 'Word2Vec':
n, embedding_size = map(int, next(f).split())
elif tag == 'GloVe':
n, embedding_size = (None, 300)
else:
raise Exception('Supported embeddings are Word2Vec and GloVe.')
# Initialize two zero vectors, for the <PAD> and <UNK> tokens respectively
vectors_store.append(torch.zeros(embedding_size))
vectors_store.append(torch.zeros(embedding_size))
n = min(n, words_limit) if n is not None and words_limit is not None else (n or words_limit)  # GloVe files have no count header, so n may be None
progress = tqdm(f, total=n, desc=f'Loading pretrained {tag} embeddings')
for i, line in enumerate(progress):
# Read up to words_limit elements (special tokens excluded)
if words_limit is not None and len(vocab_words) >= words_limit + 2: break
word, *embedding = line.split(delim)
# It is important to only use words that are present in the training set
if train_vocab is not None and word not in train_vocab: continue
vocab_words.append(word)
embedding = torch.tensor([float(c) for c in embedding])
vectors_store.append(embedding)
out_vocab = vocab(OrderedDict([(w,1) for w in vocab_words]))
out_vocab.set_default_index(PADDING_INDEX)
vectors_store = torch.stack(vectors_store)
return out_vocab, vectors_store
# TODO: remove this function
def decode_word2vec_binaries(path: str) -> None:
''' Utility function used to decode Word2Vec embeddings from .bin file. '''
if not os.path.exists(f'{path}.txt'):
from gensim.models.keyedvectors import KeyedVectors
# Import KeyedVectors to extract the word2vec data structure and save it into .txt file
word2vec = KeyedVectors.load_word2vec_format(f'{path}.bin', binary=True)
word2vec.save_word2vec_format(f'{path}.txt', binary=False)
del word2vec # Free it from memory and build the vocab by ourselves for the sake of the hw
def simple_collate_fn(
batch: List[Tuple[torch.Tensor,Dict[str, Union[torch.Tensor, List[int]]]]],
padding_value: int = PADDING_INDEX
) -> Tuple[torch.Tensor, torch.Tensor, Dict[str, torch.Tensor]]:
''' A simple collate function that expects pre-tokenized input sentences.'''
X, Y = zip(*batch)
X, X_pos_tags, tokens, aspect_indexes = zip(*[(
x['indices'],x['pos_indices'], x['tokens'],x['aspect_indexes']) for x in X])
lengths = torch.tensor([len(x) for x in X])
X = pad_sequence(X, batch_first=True, padding_value=padding_value)
X_pos_tags = pad_sequence(X_pos_tags, batch_first=True, padding_value=padding_value)
ner_labels, polarity_indexes, aspect_labels, polarity_labels = zip(*[
(y['ner'],y['polarity'],y['aspect'], y['polarity_labels']) for y in Y])
ner_labels = pad_sequence(ner_labels, batch_first=True, padding_value=padding_value)
polarity_indexes = pad_sequence(polarity_indexes, batch_first=True, padding_value=padding_value)
return {
'indices': X,
'pos_indices': X_pos_tags,
'lengths': lengths,
'tokens': tokens,
'aspect_indexes': aspect_indexes
}, {
'ner': ner_labels,
'polarity': polarity_indexes,
'aspect': aspect_labels,
'polarity_labels': polarity_labels
}
def collate_fn(
tokenizer: PreTrainedTokenizerFast,
batch: List[Tuple[torch.Tensor,Dict[str, Union[torch.Tensor, List[int]]]]],
padding_value: int = PADDING_INDEX
) -> Tuple[torch.Tensor, torch.Tensor, Dict[str, torch.Tensor]]:
'''
Efficient collate function that pads pre-tokenized input sentences in a batch;
it is the default when using BERT-like tokenizers.
'''
X, Y = zip(*batch)
X, X_pos_tags, tokens, mask, aspect_indexes = zip(*[(
x['indices'], x['pos_indices'], x['tokens'], x['mask'], x['aspect_indexes']) for x in X])
lengths = torch.tensor([len(t) for t in tokens])
X = pad_sequence(X, batch_first=True, padding_value=padding_value)
X_pos_tags = pad_sequence(X_pos_tags, batch_first=True, padding_value=padding_value)
padding_mask = torch.arange(max(lengths))[None, :] < lengths[:, None]
batch = tokenizer(
[[x.text for x in t] for t in tokens], is_split_into_words = True,
padding = True, truncation = True, return_tensors = 'pt'
)
ner_labels, polarity_indexes, aspect_labels, polarity_labels = zip(*[
(y['ner'], y['polarity'], y['aspect'], y['polarity_labels']) for y in Y])
category_labels, category_polarities_labels, category_indices, category_idxs, category_polarity_idxs = zip(*[
(y['category_labels'], y['category_polarities_labels'], y['category_indices'],
y['category_idxs'], y['category_polarity_idxs']) for y in Y])
category_indices = torch.stack(category_indices)
category_idxs = torch.stack(category_idxs)
category_polarity_idxs = torch.stack(category_polarity_idxs)
ner_labels = pad_sequence(ner_labels, batch_first=True, padding_value=padding_value)
polarity_indexes = pad_sequence(polarity_indexes, batch_first=True, padding_value=padding_value)
return {
'indices': X,
'batch': batch,
'mask': mask,
'pos_indices': X_pos_tags,
'lengths': lengths,
'tokens': tokens,
'aspect_indexes': aspect_indexes,
'padding_mask': padding_mask
}, {
'ner': ner_labels,
'polarity': polarity_indexes,
'aspect': aspect_labels,
'polarity_labels': polarity_labels,
'category_labels': category_labels,
'category_polarities_labels': category_polarities_labels,
'category_indices': category_indices,
'category_idxs': category_idxs,
'category_polarity_idxs': category_polarity_idxs
}
def extract_ner_labels(
X: Dict[str, torch.Tensor],
Y: torch.Tensor,
ner_vocab: Vocab
) -> List[List[str]]:
outputs = []
for k, (y, l, tokens) in enumerate(zip(Y, X['lengths'], X['tokens'])):
span_tokens, tmp_tokens = [], []
for j, label in enumerate(y[:l]):
if ner_vocab['B'] == label or ner_vocab['I'] == label:
tmp_tokens.append(tokens[j].text)
elif len(tmp_tokens) > 0:
span_tokens.append(' '.join(tmp_tokens))
tmp_tokens = []
outputs.append(span_tokens)
return outputs
def log_n_samples(
predicted_labels: List[List[str]],
gold_labels: List[List[str]],
n: int = 10
) -> None:
''' Debugging function to log model predictions. '''
assert len(predicted_labels) == len(gold_labels) and len(predicted_labels) >= n
for gold, pred in sample(list(zip(gold_labels, predicted_labels)), n):
print(f'Gold: {gold}, predicted {pred}')
print('\n======================\n')
def download_nltk_resources() -> None:
''' Downloader function for NLTK resources. '''
success = True
success &= nltk.download('wordnet')
success &= nltk.download('punkt')
success &= nltk.download('averaged_perceptron_tagger')
success &= nltk.download('universal_tagset')
if not success:
raise Exception('Some of the needed resources cannot be downloaded, please try again.')
def wordpiece_mask(
sent: List[List[str]],
tokenizer: PreTrainedTokenizerFast,
add_special_tokens: bool = True,
pad_max_length: Optional[int] = None
) -> torch.Tensor:
''' Utility function used to apply wordpiece-level tokenization to a list of tokens. '''
# Get wordpiece embeddings for each token in the input sentence
encoded_span = tokenizer(
sent, add_special_tokens=False, return_tensors='pt',
padding=True, truncation=True
)
# Compute the mask to identify tokens that are from the same input token
mask = compute_scatter_mask(encoded_span.input_ids, add_special_tokens)
if pad_max_length and len(mask) > 1:
mask = mask[:-1]
n = pad_max_length + 2 if add_special_tokens else pad_max_length
if n - len(mask) > 0:
padding_mask = torch.arange(n - len(mask)) + mask[-1] + 1
mask = torch.cat([mask, padding_mask])
return mask
def merge_layers(
x: Tuple[torch.Tensor], # tuple of n layers, usually n=12 with BERT
layers_to_merge: List[int] = [-1, -2, -3, -4],
strategy: str = 'cat'
) -> torch.Tensor:
''' Applies a pooling strategy to the input layers. '''
if strategy == 'cat':
return torch.cat([x[l] for l in layers_to_merge], dim=-1)
elif strategy == 'sum':
return sum([x[l] for l in layers_to_merge])
elif strategy == 'mean':
raise NotImplementedError('TODO: missing implementation of the mean strategy.')
else:
raise NotImplementedError('Use `cat` or `sum` as strategy.')
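# Hedged usage sketch (not part of the original module): `merge_layers` pools a tuple of
# hidden-state tensors such as the one returned by a BERT model with output_hidden_states=True.
# Shapes below are illustrative (batch=2, seq_len=8, hidden=768).
def _example_merge_layers():
    hidden_states = tuple(torch.randn(2, 8, 768) for _ in range(13))  # embeddings + 12 layers
    cat_merged = merge_layers(hidden_states, layers_to_merge=[-1, -2, -3, -4], strategy='cat')
    sum_merged = merge_layers(hidden_states, layers_to_merge=[-1, -2, -3, -4], strategy='sum')
    assert cat_merged.shape == (2, 8, 4 * 768)  # concatenation stacks the hidden dimension
    assert sum_merged.shape == (2, 8, 768)      # summation keeps the hidden dimension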
def pl_trainer(
monitor: str = LOGGER_VALID_LOSS,
mode: str = 'min',
dirpath: str = TRAINER_DIRPATH,
max_epochs: int = 50,
log_every_n_steps: int = 5,
deterministic: bool = True,
use_cuda: bool = True,
precision: int = 32
) -> Trainer:
''' Returns a pytorch_lightning trainer object according to the specified params. '''
return Trainer(
callbacks=[ModelCheckpoint(monitor=monitor, mode=mode, dirpath=dirpath)],
logger=WandbLogger(),
gpus=gpus(use_cuda=use_cuda),
max_epochs=max_epochs,
deterministic=deterministic,
log_every_n_steps=log_every_n_steps,
precision=precision
)
def safe_index(
arr: List[Any],
obj: Any,
k: int = 0,
fallback_fn: Callable[..., int] = len
) -> int:
'''
Safer implementation of the list `index()` method that calls a fallback function
when the object is not in the array. Its safety therefore relies on the safety of
the provided `fallback_fn`.
:param arr: the input array
:param obj: the object to search for
:param k: the offset from which starting to search for
:param fallback_fn: the fallback function used in case of fails
:return int
'''
return k + arr[k:].index(obj) if obj in arr[k:] else fallback_fn(arr)
def safe_indices(
arr: List[Any],
objs: List[Any],
k: int = 0,
fallback_fn: Callable[..., int] = len
) -> int:
'''
Calls `safe_index` on every object in `objs` and returns the smallest index found
(i.e. the first match in the array).
:param arr: the input array
:param objs: the objects to search for
:param k: the offset from which starting to search for
:param fallback_fn: the fallback function used in case of fails
:return int
'''
return min([safe_index(arr, obj, k, fallback_fn) for obj in objs])
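# Hedged usage sketch (not part of the original module): `safe_index` falls back to
# `fallback_fn(arr)` (here `len`) when the object is missing, while `safe_indices`
# returns the smallest index found over several candidates.
def _example_safe_index():
    tags = ['O', 'B', 'I', 'O', 'B']
    assert safe_index(tags, 'B') == 1           # first occurrence
    assert safe_index(tags, 'B', k=2) == 4      # search restarted from offset 2
    assert safe_index(tags, 'X') == len(tags)   # fallback: len(arr) == 5
    assert safe_indices(tags, ['I', 'B']) == 1  # min over the two searches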
def extract_aspect_indices(
indices: Union[torch.Tensor, List[str], List[int]],
length: Union[int, torch.Tensor],
b_tokens: List[Union[str, int]],
i_tokens: List[Union[str, int]],
o_token: Union[str, int],
enforce_bio_schema: bool = True,
return_tensors: bool = False
) -> Union[List[List[int]], List[torch.Tensor]]:
'''
Extracts indexes and predicted labels for aspect terms in the input sentence.
:param indices: the list of BIO tags that denote the presence of NER entities
:param b_tokens: the begin tokens (either the token itself or its identifier)
:param i_tokens: the inside tokens (either the token itself or its identifier)
:param o_token: the outside token (either the token itself or its identifier)
:param enforce_bio_schema: whether the BIO schema should apply to the input indices (e.g. predictions)
:param return_tensors: whether to return the output as pt tensor or list
:return a list of lists (or tensors) specifying the position of NER entities
'''
assert len(indices) >= 1
# Match function signature
if isinstance(indices, torch.Tensor):
indices = indices.tolist()
if isinstance(b_tokens, int):
b_tokens = [b_tokens]
if isinstance(i_tokens, int):
i_tokens = [i_tokens]
if isinstance(length, torch.Tensor):
length = length.item()
indices = indices[:length]
if enforce_bio_schema:
new_indices = [indices[0] if indices[0] in b_tokens or indices[0] == o_token else o_token]
last_idx = new_indices[0]
for idx in indices[1:]:
if idx in i_tokens and not (last_idx in b_tokens or last_idx in i_tokens):
last_idx = o_token
else:
last_idx = idx
new_indices.append(last_idx)
indices = new_indices
aspect_indexes = [[k] + [u for u in range(k+1, safe_indices(indices, b_tokens, k+1))
if indices[u] in i_tokens]
for k,idx in enumerate(indices)
if idx in b_tokens]
aspect_labels = [[indices[idx] for idx in idxs] for idxs in aspect_indexes]
if return_tensors:
aspect_indexes = [torch.tensor(aspect_idx) for aspect_idx in aspect_indexes]
return aspect_indexes, aspect_labels
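# Hedged worked example (not part of the original module): with string BIO tags,
# `extract_aspect_indices` groups the positions of each aspect term ('B' plus its 'I' tail)
# and returns the matching labels.
def _example_extract_aspect_indices():
    tags = ['B', 'I', 'O', 'B', 'O']
    idxs, labels = extract_aspect_indices(tags, length=len(tags),
                                          b_tokens=['B'], i_tokens=['I'], o_token='O')
    assert idxs == [[0, 1], [3]]
    assert labels == [['B', 'I'], ['B']]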
def vocab_tokens_startswith(vocab: Vocab, pattern: str):
''' Utility function used to look up the indices of vocabulary tokens starting with `pattern`. '''
return vocab.lookup_indices([t for t in vocab.get_itos() if t.startswith(pattern)])
def build_train_vocab(
train_ds: Dataset,
min_freq: int = 1,
pad_token: str = PAD_TOKEN
) -> Vocab:
''' Returns the vocabulary computed on the train dataset. '''
return build_vocab(
(getattr(t, train_ds.token_field) for sent in train_ds for s in sent.spans for t in s.tokens),
specials=[pad_token],
min_freq=min_freq
)
def build_target_vocabs(
specials: List[str] = [NONE_TOKEN],
pad_token: str = PAD_TOKEN
) -> Dict[str, Vocab]:
''' Builds output vocabularies for each subtask. '''
polarity_vocab = build_vocab(POLARITY_TAGS, specials=[pad_token]+specials)
ner_vocab = build_vocab(BIO_TAGS, specials=[pad_token])
pos_vocab = build_vocab(POS_TAGS, specials=[pad_token])
category_vocab = build_vocab(CATEGORY_TAGS, specials=[])
ner_vocab_ext = build_vocab(
[f'{b}_{p}' for b in ['B', 'I'] for p in POLARITY_TAGS if str(p) != NONE_TOKEN] + ['O'],
specials=[pad_token])
category_vocab_ext = build_vocab(
[f'{c}_{p}' for c in CATEGORY_TAGS for p in POLARITY_TAGS if str(p) != NONE_TOKEN],
specials=[])
return {
'ner': ner_vocab,
'ner_ext': ner_vocab_ext,
'polarity': polarity_vocab,
'pos': pos_vocab,
'category': category_vocab,
'category_ext': category_vocab_ext
}
def get_bert_path(hparams: HParams) -> str:
''' Prevents downloading BERT weights if already available. '''
return (hparams.cached_bert_path
if os.path.exists(hparams.cached_bert_path)
else hparams.bert_name)
def load_hparams_dict(path: str, strict: bool = False, **kwargs) -> HParams:
''' Retrieves multiple hyperparameter sets from file, conveniently packed into a utility function. '''
hparams_dict = {k: HParams(**v, **kwargs) if isinstance(v, dict) else v for k,v in torch.load(path).items()}
hparams_dict['strict'] = strict
return hparams_dict
def load_hparams(path: str, **kwargs) -> HParams:
''' Retrieves the hyperparams dict from file. '''
return HParams(**torch.load(path), **kwargs)
def load_tokenizer(hparams: HParams) -> BertTokenizer:
return BertTokenizer.from_pretrained(get_bert_path(hparams))
def aggregate_polarities(
polarity_indexes: List[int],
polarity_vocab: Vocab,
strategy: str = 'first'
) -> str:
''' Aggregates polarity predictions for multi-word expressions (MWEs). '''
polarities = polarity_vocab.lookup_tokens(polarity_indexes)
if len(polarities) == 0: return polarities
if strategy == 'first':
polarity = polarities[0]
elif strategy == 'frequent':
polarity, _ = Counter(polarities).most_common(1)[0]
return polarity if polarity == 'O' else polarity[2:]
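# Hedged usage sketch (not part of the original module): aggregate the polarity predicted for
# each token of a multi-word aspect term. Tag names follow the `B_<polarity>` convention used
# by the extended BIO vocabulary built in `build_target_vocabs`.
def _example_aggregate_polarities():
    v = build_vocab(['B_positive', 'B_negative', 'I_positive', 'O'])
    preds = [v['B_positive'], v['B_negative'], v['B_positive']]
    assert aggregate_polarities(preds, v, strategy='first') == 'positive'
    assert aggregate_polarities(preds, v, strategy='frequent') == 'positive'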
def safe_itemgetter(*items, fallback_value: Optional = None):
'''
Implementation of safe itemgetter from operator that returns elements from
a collection of *valid* keys. Whenever a given key is not present in obj, it
is simply ignored instead of throwing KeyError.
'''
if len(items) == 1:
item = items[0]
def g(obj):
return obj[item]
else:
def g(obj):
return tuple(obj[item] if item in obj else fallback_value for item in items)
return g
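# Hedged usage sketch (not part of the original module): unlike `operator.itemgetter`,
# missing keys yield `fallback_value` instead of raising KeyError (note that the
# single-key path above performs a plain lookup and therefore still raises).
def _example_safe_itemgetter():
    getter = safe_itemgetter('ner', 'category', fallback_value=None)
    assert getter({'ner': 1, 'polarity': 2}) == (1, None)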
def pad_sentence(
spans: List[Span],
pad_max_length: Optional[int],
pad_token: str,
trailing: bool = True,
truncation: bool = True
) -> List[Span]:
''' Pads the input sentence with `pad_token` tokens until it reaches `pad_max_length` tokens. '''
if pad_max_length:
n_tokens = len([t for s in spans for t in s.tokens])
pad_tokens = [Token(pad_token, pad_token, 'X')] * (pad_max_length - n_tokens)
pad_text = ' '.join([t.text for t in pad_tokens])
if len(spans) and spans[-1].polarity == NONE_TOKEN:
span_text = spans[-1].text + ' ' + pad_text if trailing else pad_text + ' ' + spans[-1].text
span_tokens = spans[-1].tokens + pad_tokens if trailing else pad_tokens + spans[-1].tokens
spans[-1] = Span(span_text, span_tokens, spans[-1].aspect, spans[-1].polarity)
else:
spans.append(Span(pad_text, pad_tokens, NONE_TOKEN, NONE_TOKEN))
if truncation:
new_spans = []
cnt = 0
for span in spans:
new_tokens = []
for token in span.tokens:
if cnt >= pad_max_length:
continue
cnt += 1
new_tokens.append(token)
new_text = ' '.join([t.text for t in new_tokens])
if len(new_tokens) > 0:
new_spans.append(Span(new_text, new_tokens, span.aspect, span.polarity))
spans = new_spans
if cnt > pad_max_length:
print(f'Sequence of length {cnt} was truncated to maximum length {pad_max_length}.')
return spans
def compute_class_weight(
train_ds: Dataset,
aspects_vocab: Vocab,
categories_vocab: Vocab
) -> Tuple[torch.Tensor, torch.Tensor]:
aspect_polarities = dict(Counter([p for _, s in train_ds for p in s['polarity_labels']]))
category_polarities = dict(Counter([p for _, s in train_ds for p in s['category_polarities_labels']]))
total_aspects = sum(aspect_polarities.values())
total_categories = sum(category_polarities.values())
ner_symbols = aspects_vocab.get_itos()
aspect_weights = {f'{t}_{k}':total_aspects/v for k,v in aspect_polarities.items() for t in ['B', 'I']}
aspect_weights['O'] = min(aspect_weights.values())
aspect_weights[PAD_TOKEN] = PADDING_INDEX
polarity_symbols = categories_vocab.get_itos()
category_weights = {k:total_categories/v for k,v in category_polarities.items()}
category_weights[NONE_TOKEN] = min(category_weights.values())
category_weights[PAD_TOKEN] = PADDING_INDEX
aspect_weights = torch.tensor(itemgetter(*ner_symbols)(aspect_weights), dtype=torch.float)
category_weights = torch.tensor(itemgetter(*polarity_symbols)(category_weights), dtype=torch.float)
return aspect_weights, category_weights
def _mask_duplicated(
grouped: List[Tuple[Dict[str, torch.Tensor], Dict[str, torch.Tensor]]],
pad_value: int = PADDING_INDEX,
ignore_value: int = 9
) -> List[Tuple[Dict[str, torch.Tensor], Dict[str, torch.Tensor]]]:
''' Applies masking to duplicated samples to avoid inconsistencies among them. '''
first = grouped[0][1]['ner']
# Stack grouped samples and check which elements differ using an arithmetic trick
stacked = torch.stack([y['ner'] for _, y in grouped])
mask = torch.tensor([sum(stacked[:, k]) != t*stacked.shape[0] for k,t in enumerate(first)])
# Store valid values **before** masking
prev_mask = stacked != ignore_value
prev_values = stacked[prev_mask]
# Apply padding and restore previous valid values
stacked[:, mask] = pad_value
stacked[prev_mask] = prev_values
# Save masked ner indices as ground truth
for k in range(len(grouped)): grouped[k][1]['ner'] = stacked[k]
return grouped
def mask_duplicated(
samples: List[Tuple[Dict[str, torch.Tensor], Dict[str, torch.Tensor]]]
) -> List[Tuple[Dict[str, torch.Tensor], Dict[str, torch.Tensor]]]:
# Group duplicated samples to see the differences among them
grouped_by_id = []
for x, y in samples:
if x['id'] > len(grouped_by_id) - 1:
grouped_by_id.append([])
grouped_by_id[-1].append((x,y))
masked_samples = [masked_sample for grouped in grouped_by_id for masked_sample in _mask_duplicated(grouped)]
return masked_samples
avg_line_length: 39.291217 | max_line_length: 113 | alphanum_fraction: 0.659765

hexsha: 99868c002776d3d8b8d89146a048b98642af3f02 | size: 3,269 | ext: py | lang: Python
max_stars: sigmou/cogs/info.py @ Sigmanificient/30d_challenge_discord_bot (head f308c2a8834c57ae9363557732131af3e4041ad0, licenses ["MIT"]) | count: 1 | 2021-11-08T11:17:52.000Z to 2021-11-08T11:17:52.000Z
max_issues: sigmou/cogs/info.py @ Sigmanificient/30d_challenge_discord_bot (head f308c2a8834c57ae9363557732131af3e4041ad0, licenses ["MIT"]) | count: 7 | 2021-11-11T20:23:50.000Z to 2022-03-03T20:23:19.000Z
max_forks: sigmou/cogs/info.py @ Sigmanificient/Sigmou-Bot (head f308c2a8834c57ae9363557732131af3e4041ad0, licenses ["MIT"]) | count: 1 | 2021-06-21T13:00:35.000Z to 2021-06-21T13:00:35.000Z
content:
from os import listdir
from typing import Dict, Tuple
import psutil
from pincer import command
from pincer.objects import Embed
from sigmou.bot import Bot
from sigmou.constants import TEST_GUILD_ID
class InfoCog:
"""A simple commands cog template."""
def __init__(self, client: Bot):
"""Link to bot instance."""
self.client: Bot = client
# Preloading file content
self.files_info: Dict[str, str] = {}
folders: Tuple[str, ...] = (".", "sigmou", "sigmou/cogs")
for file, path in {
_f: path for path in folders
for _f in listdir(path) if _f.endswith(".py")
}.items():
with open(f"{path}/{file}", encoding="utf-8") as f:
self.files_info[file] = f.read()
self.files_info['Total'] = "\n".join(self.files_info.values())
@command(name="code_stats", guild=TEST_GUILD_ID)
async def code_command(self) -> Embed:
return Embed(
title="Code Structure",
description=(
"> This is the whole code structure of "
f"{self.client.bot.username}!"
)
).add_fields(
self.files_info,
map_title=lambda name: (
f"📝 {name}" if name != "Total" else "📊 Total"
),
map_values=lambda file: (
f"`{len(file)}` characters"
f"\n `{len(file.splitlines())}` lines"
),
)
@command(
name="panel",
guild=TEST_GUILD_ID
)
async def panel_stats(self) -> Embed:
mb: int = 1024 ** 2
vm = psutil.virtual_memory()
cpu_freq = psutil.cpu_freq()
cpu_percent = psutil.cpu_percent()
disk = psutil.disk_usage('.')
stats = {
'ram': (
100 * (vm.used / vm.total),
f'{(vm.total / mb) / 1000:,.3f}',
'Gb'
),
'cpu': (
cpu_percent,
f"{cpu_freq.current / 1000:.1f}`/`{cpu_freq.max / 1000:.1f}",
'Ghz'
),
'disk': (
100 * (disk.used / disk.total),
f'{disk.total / mb:,.0f}', 'Mb'
)
}
return Embed(
title="Server Report",
description="The bot is hosted on a private vps."
).add_fields(
stats.items(),
map_title=lambda name: name.upper(),
map_values=lambda percent, info, unit: (
f"> `{percent:.3f}` **%**\n- `{info}` **{unit}**"
)
)
@command(
name="invite",
guild=TEST_GUILD_ID
# aliases=("inv", "i"),
# brief="A link to invite the bot"
)
async def invite(self) -> Embed:
"""Command to get bot invitation link."""
return Embed(
title="Invite the Bot !",
description=(
"> Click this link to invite this bot on your servers !\n"
"You need to have the required permissions on the server.\n"
"[invite me now](https://discord.com/api/oauth2/authorize"
f"?client_id={self.client.bot}&permissions=8&scope=bot)"
)
)
setup = InfoCog
avg_line_length: 29.1875 | max_line_length: 77 | alphanum_fraction: 0.494341

hexsha: 614d0c23171100b73c4ee8f729ae8a21248ef7ae | size: 1,206 | ext: py | lang: Python
max_stars: game/common/player.py @ HagenSR/byte_le_engine (head 5895d8559627594dd3c222b4c4754caad2817841, licenses ["MIT"]) | count: 1 | 2020-04-05T05:06:32.000Z to 2020-04-05T05:06:32.000Z
max_issues: game/common/player.py @ HagenSR/byte_le_engine (head 5895d8559627594dd3c222b4c4754caad2817841, licenses ["MIT"]) | count: 2 | 2021-01-27T03:54:41.000Z to 2021-05-17T05:11:16.000Z
max_forks: game/common/player.py @ HagenSR/byte_le_engine (head 5895d8559627594dd3c222b4c4754caad2817841, licenses ["MIT"]) | count: 5 | 2021-01-27T02:43:27.000Z to 2022-03-04T04:34:57.000Z
content:
import uuid
from game.common.action import Action
from game.common.game_object import GameObject
from game.common.enums import *
class Player(GameObject):
def __init__(self, code=None, team_name=None, action=None):
super().__init__()
self.object_type = ObjectType.player
self.functional = True
self.error = None
self.team_name = team_name
self.code = code
self.action = action
def to_json(self):
data = super().to_json()
data['functional'] = self.functional
data['error'] = self.error
data['team_name'] = self.team_name
data['action'] = self.action.to_json() if self.action is not None else None
return data
def from_json(self, data):
super().from_json(data)
self.functional = data['functional']
self.error = data['error']
self.team_name = data['team_name']
act = Action()
self.action = act.from_json(data['action']) if data['action'] is not None else None
def __str__(self):
p = f"""ID: {self.id}
Team name: {self.team_name}
Action: {self.action}
"""
return p
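# Hedged round-trip sketch (not part of the original file): serialize a Player and restore it
# from the produced dictionary. Relies on GameObject and Action behaving as they are used above.
def _example_player_round_trip():
    original = Player(team_name="Team A", action=Action())
    data = original.to_json()
    restored = Player()
    restored.from_json(data)
    return restored.team_name == original.team_name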
avg_line_length: 28.046512 | max_line_length: 91 | alphanum_fraction: 0.593698

hexsha: 564bc3f028abac92637760c65792d12b6b49f959 | size: 17,061 | ext: py | lang: Python
max_stars: tools/replay/unlogger.py @ eyx092/openpilot (head ebc294296aa17a5aabeb9d1bb6f9cc38970173ba, licenses ["MIT"]) | count: 3 | 2021-06-14T22:23:03.000Z to 2021-09-18T08:08:50.000Z
max_issues: tools/replay/unlogger.py @ eyx092/openpilot (head ebc294296aa17a5aabeb9d1bb6f9cc38970173ba, licenses ["MIT"]) | count: null | null to null
max_forks: tools/replay/unlogger.py @ eyx092/openpilot (head ebc294296aa17a5aabeb9d1bb6f9cc38970173ba, licenses ["MIT"]) | count: 1 | 2021-06-15T22:52:29.000Z to 2021-06-15T22:52:29.000Z
content:
#!/usr/bin/env python3
import argparse
import os
import sys
import zmq
import time
import signal
import multiprocessing
from uuid import uuid4
from collections import namedtuple
from collections import deque
from datetime import datetime
from cereal import log as capnp_log
from cereal.services import service_list
from cereal.messaging import pub_sock, MultiplePublishersError
from cereal.visionipc.visionipc_pyx import VisionIpcServer, VisionStreamType # pylint: disable=no-name-in-module, import-error
from common import realtime
from common.transformations.camera import eon_f_frame_size, tici_f_frame_size
from tools.lib.kbhit import KBHit
from tools.lib.logreader import MultiLogIterator
from tools.lib.route import Route
from tools.lib.route_framereader import RouteFrameReader
# Commands.
SetRoute = namedtuple("SetRoute", ("name", "start_time", "data_dir"))
SeekAbsoluteTime = namedtuple("SeekAbsoluteTime", ("secs",))
SeekRelativeTime = namedtuple("SeekRelativeTime", ("secs",))
TogglePause = namedtuple("TogglePause", ())
StopAndQuit = namedtuple("StopAndQuit", ())
VIPC_RGB = "rgb"
VIPC_YUV = "yuv"
class UnloggerWorker(object):
def __init__(self):
self._frame_reader = None
self._cookie = None
self._readahead = deque()
def run(self, commands_address, data_address, pub_types):
zmq.Context._instance = None
commands_socket = zmq.Context.instance().socket(zmq.PULL)
commands_socket.connect(commands_address)
data_socket = zmq.Context.instance().socket(zmq.PUSH)
data_socket.connect(data_address)
poller = zmq.Poller()
poller.register(commands_socket, zmq.POLLIN)
# We can't publish frames without roadEncodeIdx, so add when it's missing.
if "roadCameraState" in pub_types:
pub_types["roadEncodeIdx"] = None
# gc.set_debug(gc.DEBUG_LEAK | gc.DEBUG_OBJECTS | gc.DEBUG_STATS | gc.DEBUG_SAVEALL |
# gc.DEBUG_UNCOLLECTABLE)
# TODO: WARNING pycapnp leaks memory all over the place after unlogger runs for a while;
# gc pauses become huge because there are so many tracked objects. The solution will be
# to switch to the new cython capnp.
try:
route = None
while True:
while poller.poll(0.) or route is None:
cookie, cmd = commands_socket.recv_pyobj()
route = self._process_commands(cmd, route, pub_types)
# **** get message ****
self._read_logs(cookie, pub_types)
self._send_logs(data_socket)
finally:
if self._frame_reader is not None:
self._frame_reader.close()
data_socket.close()
commands_socket.close()
def _read_logs(self, cookie, pub_types):
fullHEVC = capnp_log.EncodeIndex.Type.fullHEVC
lr = self._lr
while len(self._readahead) < 1000:
route_time = lr.tell()
msg = next(lr)
typ = msg.which()
if typ not in pub_types:
continue
# **** special case certain message types ****
if typ == "roadEncodeIdx" and msg.roadEncodeIdx.type == fullHEVC:
# this assumes the roadEncodeIdx always comes before the frame
self._frame_id_lookup[
msg.roadEncodeIdx.frameId] = msg.roadEncodeIdx.segmentNum, msg.roadEncodeIdx.segmentId
#print "encode", msg.roadEncodeIdx.frameId, len(self._readahead), route_time
self._readahead.appendleft((typ, msg, route_time, cookie))
def _send_logs(self, data_socket):
while len(self._readahead) > 500:
typ, msg, route_time, cookie = self._readahead.pop()
smsg = msg.as_builder()
if typ == "roadCameraState":
frame_id = msg.roadCameraState.frameId
# Frame exists, make sure we have a framereader.
# load the frame readers as needed
s1 = time.time()
try:
img = self._frame_reader.get(frame_id, pix_fmt="rgb24")
except Exception:
img = None
fr_time = time.time() - s1
if fr_time > 0.05:
print("FRAME(%d) LAG -- %.2f ms" % (frame_id, fr_time*1000.0))
if img is not None:
img = img[:, :, ::-1] # Convert RGB to BGR, which is what the camera outputs
img = img.flatten()
bts = img.tobytes()
smsg.roadCameraState.image = bts
extra = (smsg.roadCameraState.frameId, smsg.roadCameraState.timestampSof, smsg.roadCameraState.timestampEof)
data_socket.send_pyobj((cookie, VIPC_RGB, msg.logMonoTime, route_time, extra), flags=zmq.SNDMORE)
data_socket.send(bts, copy=False)
img_yuv = self._frame_reader.get(frame_id, pix_fmt="yuv420p")
if img_yuv is not None:
data_socket.send_pyobj((cookie, VIPC_YUV, msg.logMonoTime, route_time, extra), flags=zmq.SNDMORE)
data_socket.send(img_yuv.flatten().tobytes(), copy=False)
data_socket.send_pyobj((cookie, typ, msg.logMonoTime, route_time), flags=zmq.SNDMORE)
data_socket.send(smsg.to_bytes(), copy=False)
def _process_commands(self, cmd, route, pub_types):
seek_to = None
if route is None or (isinstance(cmd, SetRoute) and route.name != cmd.name):
seek_to = cmd.start_time
route = Route(cmd.name, cmd.data_dir)
self._lr = MultiLogIterator(route.log_paths(), wraparound=True)
if self._frame_reader is not None:
self._frame_reader.close()
if "roadCameraState" in pub_types or "roadEncodeIdx" in pub_types:
# reset frames for a route
self._frame_id_lookup = {}
self._frame_reader = RouteFrameReader(
route.camera_paths(), None, self._frame_id_lookup, readahead=True)
# always reset this on a seek
if isinstance(cmd, SeekRelativeTime):
seek_to = self._lr.tell() + cmd.secs
elif isinstance(cmd, SeekAbsoluteTime):
seek_to = cmd.secs
elif isinstance(cmd, StopAndQuit):
exit()
if seek_to is not None:
print("seeking", seek_to)
if not self._lr.seek(seek_to):
print("Can't seek: time out of bounds")
else:
next(self._lr) # ignore one
return route
def _get_address_send_func(address):
sock = pub_sock(address)
return sock.send
def _get_vipc_server(length):
w, h = {3 * w * h: (w, h) for (w, h) in [tici_f_frame_size, eon_f_frame_size]}[length]
vipc_server = VisionIpcServer("camerad")
vipc_server.create_buffers(VisionStreamType.VISION_STREAM_RGB_BACK, 4, True, w, h)
vipc_server.create_buffers(VisionStreamType.VISION_STREAM_YUV_BACK, 40, False, w, h)
vipc_server.start_listener()
return vipc_server
def unlogger_thread(command_address, forward_commands_address, data_address, run_realtime,
address_mapping, publish_time_length, bind_early, no_loop, no_visionipc):
# Clear context to avoid problems with multiprocessing.
zmq.Context._instance = None
context = zmq.Context.instance()
command_sock = context.socket(zmq.PULL)
command_sock.bind(command_address)
forward_commands_socket = context.socket(zmq.PUSH)
forward_commands_socket.bind(forward_commands_address)
data_socket = context.socket(zmq.PULL)
data_socket.bind(data_address)
# Set readahead to a reasonable number.
data_socket.setsockopt(zmq.RCVHWM, 10000)
poller = zmq.Poller()
poller.register(command_sock, zmq.POLLIN)
poller.register(data_socket, zmq.POLLIN)
if bind_early:
send_funcs = {
typ: _get_address_send_func(address)
for typ, address in address_mapping.items()
}
# Give subscribers a chance to connect.
time.sleep(0.1)
else:
send_funcs = {}
start_time = float("inf")
printed_at = 0
generation = 0
paused = False
reset_time = True
prev_msg_time = None
vipc_server = None
while True:
evts = dict(poller.poll())
if command_sock in evts:
cmd = command_sock.recv_pyobj()
if isinstance(cmd, TogglePause):
paused = not paused
if paused:
poller.modify(data_socket, 0)
else:
poller.modify(data_socket, zmq.POLLIN)
else:
# Forward the command to the log data thread.
# TODO: Remove everything on data_socket.
generation += 1
forward_commands_socket.send_pyobj((generation, cmd))
if isinstance(cmd, StopAndQuit):
return
reset_time = True
elif data_socket in evts:
msg_generation, typ, msg_time, route_time, *extra = data_socket.recv_pyobj(flags=zmq.RCVMORE)
msg_bytes = data_socket.recv()
if msg_generation < generation:
# Skip packets.
continue
if no_loop and prev_msg_time is not None and prev_msg_time > msg_time + 1e9:
generation += 1
forward_commands_socket.send_pyobj((generation, StopAndQuit()))
return
prev_msg_time = msg_time
msg_time_seconds = msg_time * 1e-9
if reset_time:
msg_start_time = msg_time_seconds
real_start_time = realtime.sec_since_boot()
start_time = min(start_time, msg_start_time)
reset_time = False
if publish_time_length and msg_time_seconds - start_time > publish_time_length:
generation += 1
forward_commands_socket.send_pyobj((generation, StopAndQuit()))
return
# Print time.
if abs(printed_at - route_time) > 5.:
print("at", route_time)
printed_at = route_time
if typ not in send_funcs and typ not in [VIPC_RGB, VIPC_YUV]:
if typ in address_mapping:
# Remove so we don't keep printing warnings.
address = address_mapping.pop(typ)
try:
print("binding", typ)
send_funcs[typ] = _get_address_send_func(address)
except Exception as e:
print("couldn't replay {}: {}".format(typ, e))
continue
else:
# Skip messages that we are not registered to publish.
continue
# Sleep as needed for real time playback.
if run_realtime:
msg_time_offset = msg_time_seconds - msg_start_time
real_time_offset = realtime.sec_since_boot() - real_start_time
lag = msg_time_offset - real_time_offset
if lag > 0 and lag < 30: # a large jump is OK, likely due to an out of order segment
if lag > 1:
print("sleeping for", lag)
time.sleep(lag)
elif lag < -1:
# Relax the real time schedule when we slip far behind.
reset_time = True
# Send message.
try:
if typ in [VIPC_RGB, VIPC_YUV]:
if not no_visionipc:
if vipc_server is None:
vipc_server = _get_vipc_server(len(msg_bytes))
i, sof, eof = extra[0]
stream = VisionStreamType.VISION_STREAM_RGB_BACK if typ == VIPC_RGB else VisionStreamType.VISION_STREAM_YUV_BACK
vipc_server.send(stream, msg_bytes, i, sof, eof)
else:
send_funcs[typ](msg_bytes)
except MultiplePublishersError:
del send_funcs[typ]
def timestamp_to_s(tss):
return time.mktime(datetime.strptime(tss, '%Y-%m-%d--%H-%M-%S').timetuple())
def absolute_time_str(s, start_time):
try:
# first try if it's a float
return float(s)
except ValueError:
# now see if it's a timestamp
return timestamp_to_s(s) - start_time
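# Hedged usage sketch (not part of the original file): `absolute_time_str` accepts either a
# plain number of seconds or a route-style timestamp; timestamps are converted relative to
# the route start time. The timestamps below are made up.
def _example_absolute_time_str():
    route_start = timestamp_to_s("2021-06-14--22-23-03")
    assert absolute_time_str("42.5", route_start) == 42.5
    assert absolute_time_str("2021-06-14--22-24-03", route_start) == 60.0  # one minute in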
def _get_address_mapping(args):
if args.min is not None:
services_to_mock = [
'deviceState', 'can', 'pandaState', 'sensorEvents', 'gpsNMEA', 'roadCameraState', 'roadEncodeIdx',
'modelV2', 'liveLocation',
]
elif args.enabled is not None:
services_to_mock = args.enabled
else:
services_to_mock = service_list.keys()
address_mapping = {service_name: service_name for service_name in services_to_mock}
address_mapping.update(dict(args.address_mapping))
for k in args.disabled:
address_mapping.pop(k, None)
non_services = set(address_mapping) - set(service_list)
if non_services:
print("WARNING: Unknown services {}".format(list(non_services)))
return address_mapping
def keyboard_controller_thread(q, route_start_time):
print("keyboard waiting for input")
kb = KBHit()
while 1:
c = kb.getch()
if c == 'm': # Move forward by 1m
q.send_pyobj(SeekRelativeTime(60))
elif c == 'M': # Move backward by 1m
q.send_pyobj(SeekRelativeTime(-60))
elif c == 's': # Move forward by 10s
q.send_pyobj(SeekRelativeTime(10))
elif c == 'S': # Move backward by 10s
q.send_pyobj(SeekRelativeTime(-10))
elif c == 'G': # Jump back to the beginning of the route
q.send_pyobj(SeekAbsoluteTime(0.))
elif c == "\x20": # Space bar.
q.send_pyobj(TogglePause())
elif c == "\n":
try:
seek_time_input = input('time: ')
seek_time = absolute_time_str(seek_time_input, route_start_time)
# If less than 60, assume segment number
if seek_time < 60:
seek_time *= 60
q.send_pyobj(SeekAbsoluteTime(seek_time))
except Exception as e:
print("Time not understood: {}".format(e))
def get_arg_parser():
parser = argparse.ArgumentParser(
description="Mock openpilot components by publishing logged messages.",
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument("route_name", type=(lambda x: x.replace("#", "|")), nargs="?",
help="The route whose messages will be published.")
parser.add_argument("data_dir", nargs='?', default=os.getenv('UNLOGGER_DATA_DIR'),
help="Path to directory in which log and camera files are located.")
parser.add_argument("--no-loop", action="store_true", help="Stop at the end of the replay.")
def key_value_pair(x):
return x.split("=")
parser.add_argument("address_mapping", nargs="*", type=key_value_pair,
help="Pairs <service>=<zmq_addr> to publish <service> on <zmq_addr>.")
def comma_list(x):
return x.split(",")
to_mock_group = parser.add_mutually_exclusive_group()
to_mock_group.add_argument("--min", action="store_true", default=os.getenv("MIN"))
to_mock_group.add_argument("--enabled", default=os.getenv("ENABLED"), type=comma_list)
parser.add_argument("--disabled", type=comma_list, default=os.getenv("DISABLED") or ())
parser.add_argument(
"--tl", dest="publish_time_length", type=float, default=None,
help="Length of interval in event time for which messages should be published.")
parser.add_argument(
"--no-realtime", dest="realtime", action="store_false", default=True,
help="Publish messages as quickly as possible instead of realtime.")
parser.add_argument(
"--no-interactive", dest="interactive", action="store_false", default=True,
help="Disable interactivity.")
parser.add_argument(
"--bind-early", action="store_true", default=False,
help="Bind early to avoid dropping messages.")
parser.add_argument(
"--no-visionipc", action="store_true", default=False,
help="Do not output video over visionipc")
parser.add_argument(
"--start-time", type=float, default=0.,
help="Seek to this absolute time (in seconds) upon starting playback.")
return parser
def main(argv):
args = get_arg_parser().parse_args(sys.argv[1:])
command_address = "ipc:///tmp/{}".format(uuid4())
forward_commands_address = "ipc:///tmp/{}".format(uuid4())
data_address = "ipc:///tmp/{}".format(uuid4())
address_mapping = _get_address_mapping(args)
command_sock = zmq.Context.instance().socket(zmq.PUSH)
command_sock.connect(command_address)
if args.route_name is not None:
route_name_split = args.route_name.split("|")
if len(route_name_split) > 1:
route_start_time = timestamp_to_s(route_name_split[1])
else:
route_start_time = 0
command_sock.send_pyobj(
SetRoute(args.route_name, args.start_time, args.data_dir))
else:
print("waiting for external command...")
route_start_time = 0
subprocesses = {}
try:
subprocesses["data"] = multiprocessing.Process(
target=UnloggerWorker().run,
args=(forward_commands_address, data_address, address_mapping.copy()))
subprocesses["control"] = multiprocessing.Process(
target=unlogger_thread,
args=(command_address, forward_commands_address, data_address, args.realtime,
_get_address_mapping(args), args.publish_time_length, args.bind_early, args.no_loop, args.no_visionipc))
subprocesses["data"].start()
subprocesses["control"].start()
# Exit if any of the children die.
def exit_if_children_dead(*_):
for _, p in subprocesses.items():
if not p.is_alive():
[p.terminate() for p in subprocesses.values()]
exit()
signal.signal(signal.SIGCHLD, signal.SIG_IGN)
signal.signal(signal.SIGCHLD, exit_if_children_dead)
if args.interactive:
keyboard_controller_thread(command_sock, route_start_time)
else:
# Wait forever for children.
while True:
time.sleep(10000.)
finally:
for p in subprocesses.values():
if p.is_alive():
try:
p.join(3.)
except multiprocessing.TimeoutError:
p.terminate()
continue
return 0
if __name__ == "__main__":
sys.exit(main(sys.argv[1:]))
avg_line_length: 34.606491 | max_line_length: 127 | alphanum_fraction: 0.678389

hexsha: 6ebab58e355c740b2ecc9d1c5f5101379eb00f9e | size: 23,573 | ext: py | lang: Python
max_stars: code-experiments/build/python/python/__init__.py @ nuaa-codemonkey/coco (head 11ce3ccdf3e19ddbd56b3f9f56dc0829fc47817b, licenses ["BSD-3-Clause"]) | count: 181 | 2016-01-21T16:42:21.000Z to 2022-03-15T10:28:39.000Z
max_issues: code-experiments/build/python/python/__init__.py @ nuaa-codemonkey/coco (head 11ce3ccdf3e19ddbd56b3f9f56dc0829fc47817b, licenses ["BSD-3-Clause"]) | count: 1,019 | 2016-01-17T15:59:43.000Z to 2022-03-31T19:51:03.000Z
max_forks: code-experiments/build/python/python/__init__.py @ nuaa-codemonkey/coco (head 11ce3ccdf3e19ddbd56b3f9f56dc0829fc47817b, licenses ["BSD-3-Clause"]) | count: 89 | 2016-01-18T09:48:30.000Z to 2022-03-30T12:12:25.000Z
content:
"""Experimentation module of the COCO - COmparing Continuous Optimizers -
framework.
The module provides benchmark test beds in the `Suite` class
and output data facilities in the `Observer` class.
See the documentation of the `Suite` class:
>>> import cocoex as ex
>>> help(ex.Suite) # doctest: +ELLIPSIS
Help on class Suite...
>>> print(ex.known_suite_names) # doctest: +ELLIPSIS
[...
A more complete example use case can be found in the `example_experiment.py`
file.
"""
from __future__ import absolute_import, division, print_function, unicode_literals
from . import solvers, utilities
try:
from . import _interface
from ._interface import Suite as _Suite, Observer as _Observer
from ._interface import known_suite_names, log_level
except Exception as _e:
from . import interface as _interface
# print("numbbo/code-experiments/build/python/python/__init__.py: could not import '_interface', trying 'interface'", _e)
from .interface import Suite as _Suite, Observer as _Observer
from .interface import known_suite_names, log_level
del absolute_import, division, print_function, unicode_literals
# from .utilities import about_equal
# from .exceptions import NoSuchProblemException, InvalidProblemException
import pkg_resources
__version__ = pkg_resources.require('cocoex')[0].version
del pkg_resources
__all__ = ['Observer', 'Suite', 'known_suite_names', 'default_observers']
def default_observers(update=None):
"""return a map from suite names to default observer names.
This function can also be used to update this map using
a `dict` or a `list` of key-value pairs.
"""
# this is a function only to make the doc available and
# because @property doesn't work on module level
_default_observers.update(update or {})
return _default_observers
_default_observers = {
'bbob': 'bbob',
'bbob-biobj': 'bbob-biobj',
'bbob-biobj-ext': 'bbob-biobj',
'bbob-constrained': 'bbob',
'bbob-largescale': 'bbob',
'bbob-mixint': 'bbob',
'bbob-biobj-mixint': 'bbob-biobj',
}
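# Hedged usage sketch (not part of the original file): look up the observer name that is
# paired with a suite by default, e.g. before creating an `Observer` for an experiment.
def _example_default_observer_name(suite_name='bbob-biobj'):
    return default_observers()[suite_name]  # -> 'bbob-biobj'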
class Suite(_Suite):
"""Suite of benchmark problems.
Input arguments to `Suite` are `name: str`, `instance: str`, `options: str`,
and passed to the respective C code (see `coco.h`).
>>> import cocoex as ex
>>> suite = ex.Suite("bbob", "", "")
>>> f = suite.next_problem()
>>> assert f.number_of_objectives == 1
>>> assert f.evaluations == 0
>>> print("f([1,2]) = %.11f" % f([1,2]))
f([1,2]) = 90.00369408000
>>> assert f.evaluations == 1
Sweeping through all problems is as simple as:
>>> import cocoex as ex
>>> suite = ex.Suite("bbob-biobj", "", "")
>>> observer = ex.Observer("bbob-biobj", "result_folder:doctest")
>>> for fun in suite:
... if fun.index == 0:
... print("Number of objectives %d, %d, %d" %
... (fun.number_of_objectives,
... suite.number_of_objectives[0],
... suite.number_of_objectives[-1]))
... fun.observe_with(observer)
... assert fun.evaluations == 0
... assert fun.number_of_objectives == suite.number_of_objectives[0]
... # run run run using fun # doctest: +ELLIPSIS
Number of objectives 2, 2, 2...
In the example, an observer was added to produce output data for the
COCO post-processing.
The following example runs the entire bbob2009 benchmark suite
on random search:
>>> import numpy as np
>>> from cocoex import Suite, Observer
...
>>> MAX_FE = 22 # max f-evaluations
>>> def random_search(f, lb, ub, m): # don't use m >> 1e5 with this implementation
... candidates = lb + (ub - lb) * np.random.rand(m, len(lb))
... return candidates[np.argmin([f(x) for x in candidates])]
...
>>> solver = random_search
>>> suite = Suite("bbob", "year:2009", "")
>>> observer = Observer("bbob",
... "result_folder: %s_on_%s" % (solver.__name__, "bbob2009"))
>>> for fun in suite:
... assert fun.evaluations == 0
... if fun.dimension >= 10:
... break
... print('Current problem index = %d' % fun.index)
... fun.observe_with(observer)
... assert fun.evaluations == 0
... solver(fun, fun.lower_bounds, fun.upper_bounds, MAX_FE)
... # data should be now in the "exdata/random_search_on_bbob2009" folder
... assert fun.evaluations == MAX_FE # depends on the solver
... # doctest: +ELLIPSIS
Current problem index = 0...
>>> #
>>> # Exactly the same using another looping technique:
>>> for id in suite.ids():
... fun = suite.get_problem(id, observer)
... _ = solver(fun, fun.lower_bounds, fun.upper_bounds, MAX_FE)
... print("Evaluations on %s: %d" % (fun.name, fun.evaluations))
... fun.free() # this is absolutely necessary here
... # doctest: +ELLIPSIS
Evaluations on ...
We can select a single function, say BBOB f9 in 20D, of a given suite like:
>>> import cocoex as ex
>>> suite = ex.Suite("bbob", "", "dimensions:20 instance_indices:1")
>>> len(suite)
24
>>> f9 = suite.get_problem(8)
>>> x = f9.initial_solution # a copy of a feasible point
>>> all(x == 0)
True
See module attribute `cocoex.known_suite_names` for known suite names:
>>> import cocoex as ex
>>> for suite_name in ex.known_suite_names:
... suite = ex.Suite(suite_name, "", "")
... print(suite.dimensions)
... for f in suite:
... assert f.dimension in suite.dimensions
... assert f.evaluations == 0
... # doctest: +ELLIPSIS
[2, 3, 5, 10, 20, 40]...
See file `example_experiment.py` for a full example use case.
Details: depending on the benchmark suite and observer, only one problem can
be open at a time. Using `get_problem` without `free` or mixing the use of
`next_problem` and `get_problem` may not be possible. For example, in this
case the "bbob" observer is known to lead to a crash of the Python
interpreter.
See also `Observer` and `example_experiment.py`.
"""
def __init__(self, suite_name, suite_instance, suite_options):
"""``suite_instance`` and ``suite_options`` can be empty strings."""
# this __init__ defines the arguments for _Suite.__cinit__,
# which is called implicitly. Calling the super class init fails in Python 3.
# super(Suite, self).__init__(suite_name, suite_instance, suite_options)
# _Suite.__cinit__(self, suite_name, suite_instance, suite_options)
def reset(self):
"""reset to original state, affecting `next_problem()`,
`current_problem`, `current_index`"""
super(Suite, self).reset()
def next_problem(self, observer=None):
"""return the "next" problem in this `Suite`.
return the first problem on the first call or after
`reset()`.
`next_problem` serves to sweep through the `Suite` smoothly.
"""
return super(Suite, self).next_problem(observer)
def get_problem(self, id, observer=None):
"""return a `Problem` instance, by default unobserved, using ``id: str``
or index (where ``id: int``) to identify the desired problem.
All values between zero and `len(self) - 1` are valid index values:
>>> import cocoex as ex
>>> suite = ex.Suite("bbob-biobj", "", "")
>>> for index in range(len(suite)):
... problem = suite.get_problem(index)
... # work work work using problem
... problem.free()
A shortcut for `suite.get_problem(index)` is `suite[index]`, they are
synonym.
Details:
- Here an `index` takes values between 0 and `len(self) - 1` and can in
principle be different from the problem index in the benchmark suite.
- This call does not affect the state of the `current_problem` and
`current_index` attributes.
- For some suites and/or observers, the `free()` method of the problem
must be called before the next call of `get_problem`. Otherwise Python
might just silently die, which is e.g. a known issue of the "bbob"
observer.
See also `ids`.
"""
return super(Suite, self).get_problem(id, observer)
def get_problem_by_function_dimension_instance(self, function, dimension, instance, observer=None):
"""return a `Problem` instance, by default unobserved, using function,
dimension and instance to identify the desired problem.
If a suite contains multiple problems with the same function, dimension
and instance, the first corresponding problem is returned.
>>> import cocoex as ex
>>> suite = ex.Suite("bbob-biobj", "", "")
>>> problem = suite.get_problem_by_function_dimension_instance(1, 2, 3)
>>> # work work work using problem
>>> problem.free()
Details:
- Function, dimension and instance are integer values from 1 on.
- This call does not affect the state of the `current_problem` and
`current_index` attributes.
- For some suites and/or observers, the `free()` method of the problem
must be called before the next call of
`get_problem_by_function_dimension_instance`. Otherwise Python might
just silently die, which is e.g. a known issue of the "bbob" observer.
"""
return super(Suite, self).get_problem_by_function_dimension_instance(
function, dimension, instance, observer)
def __getitem__(self, key):
"""`self[i]` is a synonym for `self.get_problem(i)`, see `get_problem`
"""
return self.get_problem(key)
def free(self):
"""free underlying C structures"""
super(Suite, self).free()
def find_problem_ids(self, *args, **kwargs):
"""has been renamed to `ids`"""
raise NotImplementedError(
"`find_problem_ids()` has been renamed to `ids()`")
def ids(self, *id_snippets, **kwargs): # get_problem=False, verbose=False):
"""`ids(*id_snippets, get_problem=False, verbose=False)`
return all problem IDs that contain all of the `id_snippets`.
An ID can be used for indexing, that is, when calling the method
`get_problem(id)`.
If `get_problem is True`, the problem for the first matching ID is
returned.
>>> import cocoex as ex
>>> s = ex.Suite("bbob", "", "")
>>> s.ids("f001", "d10", "i01")
['bbob_f001_i01_d10']
We can sweep through all instances of the ellipsoidal function f10
in 20-D of the BBOB suite like this:
>>> import cocoex as ex
>>> suite = ex.Suite("bbob", "", "")
>>> ids = suite.ids("f010", "d20")
>>> used_indices = []
>>> for p in suite:
... if p.id in ids:
... # work work work with problem `p`
... used_indices.append(p.index)
>>> print(used_indices)
[1575, 1576, 1577, 1578, 1579, 1580, 1581, 1582, 1583, 1584, 1585, 1586, 1587, 1588, 1589]
A desired problem can also be filtered out during creation:
>>> import cocoex as ex
>>> f9 = ex.Suite("bbob", "",
... "function_indices:9 dimensions:20 instance_indices:1-5")[0]
>>> print(f9.id)
bbob_f009_i01_d20
"""
return super(Suite, self).ids(*id_snippets, **kwargs)
@property
def current_problem(self):
"""current "open/active" problem to be benchmarked"""
return super(Suite, self).current_problem
@property
def current_index(self):
"""index in the enumerator of all problems in this suite.
Details: To get the index in the underlying C implementation, which
usually matches `current_index` one-to-one, use:
>>> import cocoex as ex
>>> suite = ex.Suite("bbob", "", "")
>>> suite.current_index is None
True
>>> suite.next_problem().id[-17:].lower()
'bbob_f001_i01_d02'
>>> suite.current_index, suite.indices[suite.current_index]
(0, 0)
"""
return super(Suite, self).current_index
@property
def problem_names(self):
"""list of problem names in this `Suite`, see also `ids`"""
return super(Suite, self).problem_names
@property
def dimensions(self):
"""list of problem dimensions occuring at least once in this `Suite`"""
return super(Suite, self).dimensions
@property
def number_of_objectives(self):
"""list of number of objectives occuring in this `Suite`"""
return super(Suite, self).number_of_objectives
@property
def indices(self):
"""list of all problem indices, deprecated.
These values are (only) used to call the underlying C structures.
Indices used in the Python interface run between 0 and `len(self)`.
"""
return super(Suite, self).indices
@property
def name(self):
"""name of this suite as used to instantiate the suite via `Suite(name, ...)`"""
return super(Suite, self).name
@property
def instance(self):
"""instance of this suite as used to instantiate the suite via
`Suite(name, instance, ...)`"""
return super(Suite, self).instance
@property
def options(self):
"""options for this suite as used to instantiate the suite via
`Suite(name, instance, options)`"""
return super(Suite, self).options
@property
def info(self):
return str(self)
class Observer(_Observer):
"""Observer which can be "attached to" one or several problems, however not
necessarily at the same time.
The typical observer records data to be used in the COCO post-processing
module `cocopp` afterwards.
>>> import cocoex as ex
>>> suite = ex.Suite("bbob", "", "")
>>> assert len(suite) == 2160
>>> f = suite.get_problem(33)
>>> assert f.id.endswith('f003_i04_d02')
>>> observer = ex.Observer("bbob",
... "result_folder: doctest")
>>> f.observe_with(observer) # the same as observer.observe(f) # doctest: +ELLIPSIS
<cocoex...
>>> # work work work with observed f
>>> f.free()
Details
-------
    - ``f.free()`` in the above example must be called before observing
another problem with the "bbob" observer. Otherwise the Python
interpreter will crash due to an error raised from the C code.
    - Due to technical subtleties between Python/Cython/C, the pointer to the
underlying C observer is passed by global assignment with
`_update_current_observer_global()`
"""
def __init__(self, name, options):
"""``options`` can be a string or a `dict`"""
# this __init__ defines the arguments for _Observer.__cinit__,
# which is called implicitly
# super(Observer, self).__init__(name, options) # fails (only) in Python 3
def observe(self, problem):
"""`observe(problem)` let `self` observe the `problem: Problem` by
calling `problem.observe_with(self)`.
"""
problem.observe_with(self)
return self
@property
def name(self):
"""name of the observer as used with `Observer(name, ...)` to instantiate
`self` before
"""
return super(Observer, self).name
@property
def options(self):
return super(Observer, self).options
@property
def state(self):
return super(Observer, self).state
@property
def result_folder(self):
"""name of the output folder.
This name may not be the same as input option `result_folder`.
"""
return super(Observer, self).result_folder
# this definition is copy-edited from interface, solely to pass docstrings to pydoctor
class Problem(_interface.Problem):
"""`Problem` instances are usually generated using class `Suite`.
The main feature of a problem instance is that it is callable, returning the
objective function value when called with a candidate solution as input.
It provides other useful properties and methods like `dimension`,
`number_of_constraints`, `observe_with`, `initial_solution_proposal`...
"""
def __init__(self):
super(Problem, self).__init__()
def constraint(self, x):
"""return constraint values for `x`.
By convention, constraints with values <= 0 are satisfied.
"""
return super(Problem, self).constraint(x)
def logger_biobj_feed_solution(self, evaluation, y):
"""Feed the given solution to logger_biobj in order to reconstruct its
output.
Return 1 if the given solution updated the archive and 0 otherwise.
Used by preprocessing when updating the .info, .dat and .tdat files
with new indicator reference values.
"""
return super(Problem, self).logger_biobj_feed_solution(evaluation, y)
def add_observer(self, observer):
"""`add_observer(self, observer: Observer)`, see `observe_with`.
"""
return self.observe_with(observer)
def observe_with(self, observer):
"""``observe_with(self, observer: Observer)`` attaches an `Observer`
instance to this problem.
Attaching an observer can be considered as wrapping the observer
around the problem. For the observer to be finalized, the problem
        must be free'd (implicitly or explicitly).
Return the observed problem `self`.
Details: `observer` can be `None`, in which case nothing is done.
See also: class `Observer`
"""
return super(Problem, self).observe_with(observer)
def _f0(self, x):
""""inofficial" interface to `self` with target f-value of zero. """
return self(x) - self.final_target_fvalue1
def initial_solution_proposal(self, restart_number=None):
"""return feasible initial solution proposals.
For unconstrained problems, the proposal is different for each
consecutive call without argument and for each `restart_number`
and may be different under repeated calls with the same
`restart_number`. ``self.initial_solution_proposal(0)`` is the
same as ``self.initial_solution``.
Conceptual example::
# given: a suite instance, a budget, and fmin
for problem in suite:
# restart until budget is (over-)exhausted
while problem.evaluations < budget and not problem.final_target_hit:
fmin(problem, problem.initial_solution_proposal())
Details: by default, the first proposal is the domain middle or
the (only) known feasible solution.
Subsequent proposals are coordinate-wise sampled as the sum
of two iid random variates uniformly distributed within the
domain boundaries. On the ``'bbob'`` suite their density is
0.2 * (x / 5 + 1) for x in [-5, 0] and
0.2 * (1 - x / 5) for x in [0, 5] and zero otherwise.
"""
return super(Problem, self).initial_solution_proposal(restart_number)
@property
def initial_solution(self):
"""return feasible initial solution"""
return super(Problem, self).initial_solution()
@property
def observers(self):
"""list of observers wrapped around this problem"""
return super(Problem, self).list_of_observers
@property
def is_observed(self):
"""problem ``p`` is observed ``p.is_observed`` times.
See also: the list of observers in property `observers`.
"""
return super(Problem, self).is_observed
@property
def number_of_variables(self): # this is cython syntax, not known in Python
# this is a class definition which is instantiated automatically!?
"""Number of variables this problem instance expects as input."""
return super(Problem, self).number_of_variables
@property
def dimension(self):
"""alias for `number_of_variables` of the input space"""
return self.number_of_variables
@property
def number_of_objectives(self):
"number of objectives, if equal to 1, call returns a scalar"
return super(Problem, self).number_of_objectives
@property
def number_of_constraints(self):
"number of constraints"
return super(Problem, self).number_of_constraints
@property
def lower_bounds(self):
"""depending on the test bed, these are not necessarily strict bounds
"""
return super(Problem, self).lower_bounds
@property
def upper_bounds(self):
"""depending on the test bed, these are not necessarily strict bounds
"""
return super(Problem, self).upper_bounds
@property
def evaluations(self):
"""number of times this `Problem` instance was evaluated"""
return super(Problem, self).evaluations()
@property
def final_target_hit(self):
"""return 1 if the final target is known and has been hit, 0 otherwise
"""
return super(Problem, self).final_target_hit(self.problem)
@property
def final_target_fvalue1(self):
return super(Problem, self).final_target_fvalue1(self.problem)
@property
def best_observed_fvalue1(self):
return super(Problem, self).best_observed_fvalue1()
def free(self, force=False):
"""Free the given test problem.
Not strictly necessary (unless, possibly, for the observer). `free`
ensures that all files associated with the problem are closed as
soon as possible and any memory is freed. After free()ing the
problem, all other operations are invalid and will raise an
exception.
"""
super(Problem, self).free(force)
@property
def id(self):
"ID as string without spaces or weird characters"
return super(Problem, self).id
@property
def id_function(self):
"function number inferred from `id`"
return super(Problem, self).id_function
@property
def id_instance(self):
"instance number inferred from `id`"
return super(Problem, self).id_instance
@property
def name(self):
"""human readible short description with spaces"""
return super(Problem, self).name
@property
def index(self):
"""problem index in the benchmark `Suite` of origin"""
return super(Problem, self).index
@property
def suite(self):
"""benchmark suite this problem is from"""
return super(Problem, self).suite
@property
def info(self):
"""human readible info, alias for ``str(self)``.
The format of this info string is not guarantied and may change
in future.
See also: ``repr(self)``
"""
return str(self)
def log_level(level=None):
"""``log_level(level=None)`` return current log level and
set new log level if ``level is not None and level``.
:param level: must be 'error' or 'warning' or 'info' or 'debug', listed
with increasing verbosity, or '' which doesn't change anything.
"""
return _interface.log_level(level)
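# Usage sketch (illustrative): query or adjust the log level before a
# benchmarking run, e.g.
#     import cocoex
#     previous = cocoex.log_level()   # query only
#     cocoex.log_level("warning")     # reduce verbosity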
| 37.596491
| 125
| 0.631612
|
1571837119c2324db71f872aa17ba048d8030662
| 707
|
py
|
Python
|
SoftLayer/CLI/sshkey/edit.py
|
dvzrv/softlayer-python
|
9a5f6c6981bcc370084537b4d1769383499ce90d
|
[
"MIT"
] | 126
|
2015-01-05T05:09:22.000Z
|
2021-07-02T00:16:35.000Z
|
SoftLayer/CLI/sshkey/edit.py
|
dvzrv/softlayer-python
|
9a5f6c6981bcc370084537b4d1769383499ce90d
|
[
"MIT"
] | 969
|
2015-01-05T15:55:31.000Z
|
2022-03-31T19:55:20.000Z
|
SoftLayer/CLI/sshkey/edit.py
|
dvzrv/softlayer-python
|
9a5f6c6981bcc370084537b4d1769383499ce90d
|
[
"MIT"
] | 176
|
2015-01-22T11:23:40.000Z
|
2022-02-11T13:16:58.000Z
|
"""Edits an SSH key."""
# :license: MIT, see LICENSE for more details.
import click
import SoftLayer
from SoftLayer.CLI import environment
from SoftLayer.CLI import exceptions
from SoftLayer.CLI import helpers
@click.command()
@click.argument('identifier')
@click.option('--label', '-k', help="The new label for the key")
@click.option('--note', help="New notes for the key")
@environment.pass_env
def cli(env, identifier, label, note):
"""Edits an SSH key."""
mgr = SoftLayer.SshKeyManager(env.client)
key_id = helpers.resolve_id(mgr.resolve_ids, identifier, 'SshKey')
if not mgr.edit_key(key_id, label=label, notes=note):
raise exceptions.CLIAbort('Failed to edit SSH key')
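# Example invocation (a sketch assuming the standard `slcli` entry point;
# identifier and values are illustrative):
#     slcli sshkey edit 12345 --label my-key --note "rotated key"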
| 27.192308
| 70
| 0.718529
|
a2573337d7d6defb261177f3f5e8ef1e7a996123
| 153
|
py
|
Python
|
tests/model_control/detailed/transf_Integration/model_control_one_enabled_Integration_PolyTrend_NoCycle_NoAR.py
|
shaido987/pyaf
|
b9afd089557bed6b90b246d3712c481ae26a1957
|
[
"BSD-3-Clause"
] | 377
|
2016-10-13T20:52:44.000Z
|
2022-03-29T18:04:14.000Z
|
tests/model_control/detailed/transf_Integration/model_control_one_enabled_Integration_PolyTrend_NoCycle_NoAR.py
|
ysdede/pyaf
|
b5541b8249d5a1cfdc01f27fdfd99b6580ed680b
|
[
"BSD-3-Clause"
] | 160
|
2016-10-13T16:11:53.000Z
|
2022-03-28T04:21:34.000Z
|
tests/model_control/detailed/transf_Integration/model_control_one_enabled_Integration_PolyTrend_NoCycle_NoAR.py
|
ysdede/pyaf
|
b5541b8249d5a1cfdc01f27fdfd99b6580ed680b
|
[
"BSD-3-Clause"
] | 63
|
2017-03-09T14:51:18.000Z
|
2022-03-27T20:52:57.000Z
|
import tests.model_control.test_ozone_custom_models_enabled as testmod
testmod.build_model( ['Integration'] , ['PolyTrend'] , ['NoCycle'] , ['NoAR'] );
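# The four argument lists select, respectively, the transformation, trend,
# cycle and autoregression components to enable (here: Integration transform,
# polynomial trend, no cycle, no AR), as the module path suggests.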
| 38.25
| 80
| 0.745098
|
af64bf3758182bcf395ef3d4dfc376ac548c3525
| 451
|
py
|
Python
|
src/tests/order/test_limit.py
|
rvillebro/binance
|
1b92a35f8deb00afb904b4c25e84be064f1b07ca
|
[
"MIT"
] | 5
|
2021-11-02T10:16:38.000Z
|
2022-01-28T21:39:41.000Z
|
src/tests/order/test_limit.py
|
rvillebro/binance
|
1b92a35f8deb00afb904b4c25e84be064f1b07ca
|
[
"MIT"
] | null | null | null |
src/tests/order/test_limit.py
|
rvillebro/binance
|
1b92a35f8deb00afb904b4c25e84be064f1b07ca
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
import pytest
from typing import TYPE_CHECKING
from binance.order.limit import Limit
if TYPE_CHECKING:
from binance.client import Client
def test_limit(client: 'Client'):
"""
"""
pytest.skip()
o = Limit(symbol='BTCUSDT',
side='BUY',
quantity=0.1,
price=1,
timeInForce='GTC')
client.trade.new_order(order=o)
print(o.dict(exclude_none=True))
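# Note: the test above is skipped via pytest.skip(); a manual run would need a
# configured `client` fixture with API access (sketch):
#     pytest src/tests/order/test_limit.py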
| 20.5
| 37
| 0.611973
|
19c751a5b96ea4dc7fa8c2d593831de0108b3250
| 227,567
|
py
|
Python
|
cfdm/read_write/netcdf/netcdfread.py
|
sadielbartholomew/cfdm
|
04c1e6a271a569631826e08dc69dc2884000475e
|
[
"MIT"
] | null | null | null |
cfdm/read_write/netcdf/netcdfread.py
|
sadielbartholomew/cfdm
|
04c1e6a271a569631826e08dc69dc2884000475e
|
[
"MIT"
] | null | null | null |
cfdm/read_write/netcdf/netcdfread.py
|
sadielbartholomew/cfdm
|
04c1e6a271a569631826e08dc69dc2884000475e
|
[
"MIT"
] | null | null | null |
import logging
import operator
import os
import re
import struct
import subprocess
import tempfile
from ast import literal_eval
from collections import OrderedDict
from copy import deepcopy
from distutils.version import LooseVersion
from functools import reduce
import numpy
import netCDF4
import netcdf_flattener
from ...decorators import _manage_log_level_via_verbosity
from ...functions import is_log_level_debug
from .. import IORead
logger = logging.getLogger(__name__)
_cached_temporary_files = {}
_flattener_separator = netcdf_flattener._Flattener._Flattener__new_separator
class NetCDFRead(IORead):
"""A container for instantiating Fields from a netCDF dataset."""
_code0 = {
# Physically meaningful and corresponding to constructs
"Cell measures variable": 100,
"cell_measures attribute": 101,
"Bounds variable": 200,
"bounds attribute": 201,
"Ancillary variable": 120,
"ancillary_variables attribute": 121,
"Formula terms variable": 130,
"formula_terms attribute": 131,
"Bounds formula terms variable": 132,
"Bounds formula_terms attribute": 133,
"Auxiliary/scalar coordinate variable": 140,
"coordinates attribute": 141,
"Grid mapping variable": 150,
"grid_mapping attribute": 151,
"Grid mapping coordinate variable": 152,
"Cell method interval": 160,
"External variable": 170,
"Geometry variable": 180,
"geometry attribute": 181,
"Node coordinate variable": 190,
# Purely structural
"Compressed dimension": 300,
"compress attribute": 301,
"Instance dimension": 310,
"instance_dimension attribute": 311,
"Count dimension": 320,
"count_dimension attribute": 321,
}
_code1 = {
"is incorrectly formatted": 2,
"is not in file": 3,
"spans incorrect dimensions": 4,
(
"is not in file nor referenced by the external_variables global "
"attribute"
): 5,
"has incompatible terms": 6,
"that spans the vertical dimension has no bounds": 7,
(
"that does not span the vertical dimension is inconsistent with "
"the formula_terms of the parametric coordinate variable"
): 8,
"is not referenced in file": 9,
"exists in the file": 10,
"does not exist in file": 11,
"exists in multiple external files": 12,
"has incorrect size": 13,
"is missing": 14,
"is not used by data variable": 15,
"not in node_coordinates": 16,
"is not locatable in the group hierarchy": 17,
}
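    # The two tables above are typically combined when reporting
    # non-compliance: a message tuple such as
    # ("External variable", "is not referenced in file") corresponds to the
    # numeric pair (170, 9). (Interpretation based on how message tuples are
    # used elsewhere in this class.)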
def cf_datum_parameters(self):
"""Datum-defining parameters names."""
return (
"earth_radius",
"geographic_crs_name",
"geoid_name",
"geopotential_datum_name",
"horizontal_datum_name",
"inverse_flattening",
"longitude_of_prime_meridian",
"prime_meridian_name",
"reference_ellipsoid_name",
"semi_major_axis",
"semi_minor_axis",
"towgs84",
)
def cf_coordinate_reference_coordinates(self):
"""Maps canonical names to applicable coordinates.
Specifically it is a mapping of each coordinate reference
canonical name to the coordinates to which it applies. The
coordinates are defined by their standard names.
A coordinate reference canonical name is either the value of the
grid_mapping_name attribute of a grid mapping variable (e.g.
'lambert_azimuthal_equal_area'), or the standard name of a
vertical coordinate variable with a formula_terms attribute
        (e.g. 'ocean_sigma_coordinate').
"""
return {
"albers_conical_equal_area": (
"projection_x_coordinate",
"projection_y_coordinate",
"latitude",
"longitude",
),
"azimuthal_equidistant": (
"projection_x_coordinate",
"projection_y_coordinate",
"latitude",
"longitude",
),
"geostationary": (
"projection_x_coordinate",
"projection_y_coordinate",
"latitude",
"longitude",
),
"lambert_azimuthal_equal_area": (
"projection_x_coordinate",
"projection_y_coordinate",
"latitude",
"longitude",
),
"lambert_conformal_conic": (
"projection_x_coordinate",
"projection_y_coordinate",
"latitude",
"longitude",
),
"lambert_cylindrical_equal_area": (
"projection_x_coordinate",
"projection_y_coordinate",
"latitude",
"longitude",
),
"latitude_longitude": (
"latitude",
"longitude",
),
"mercator": (
"projection_x_coordinate",
"projection_y_coordinate",
"latitude",
"longitude",
),
"orthographic": (
"projection_x_coordinate",
"projection_y_coordinate",
"latitude",
"longitude",
),
"polar_stereographic": (
"projection_x_coordinate",
"projection_y_coordinate",
"latitude",
"longitude",
),
"rotated_latitude_longitude": (
"grid_latitude",
"grid_longitude",
"latitude",
"longitude",
),
"sinusoidal": (
"projection_x_coordinate",
"projection_y_coordinate",
"latitude",
"longitude",
),
"stereographic": (
"projection_x_coordinate",
"projection_y_coordinate",
"latitude",
"longitude",
),
"transverse_mercator": (
"projection_x_coordinate",
"projection_y_coordinate",
"latitude",
"longitude",
),
"vertical_perspective": (
"projection_x_coordinate",
"projection_y_coordinate",
"latitude",
"longitude",
),
"atmosphere_ln_pressure_coordinate": (
"atmosphere_ln_pressure_coordinate",
),
"atmosphere_sigma_coordinate": ("atmosphere_sigma_coordinate",),
"atmosphere_hybrid_sigma_pressure_coordinate": (
"atmosphere_hybrid_sigma_pressure_coordinate",
),
"atmosphere_hybrid_height_coordinate": (
"atmosphere_hybrid_height_coordinate",
),
"atmosphere_sleve_coordinate": ("atmosphere_sleve_coordinate",),
"ocean_sigma_coordinate": ("ocean_sigma_coordinate",),
"ocean_s_coordinate": ("ocean_s_coordinate",),
"ocean_sigma_z_coordinate": ("ocean_sigma_z_coordinate",),
"ocean_double_sigma_coordinate": (
"ocean_double_sigma_coordinate",
),
}
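    # Reference counting: each netCDF variable carries a count of how many
    # other variables refer to it; only variables that remain unreferenced are
    # turned into top-level field constructs (see `read`).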
def _is_unreferenced(self, ncvar):
"""True if a netCDF variable is not referenced by another.
Return True if the netCDF variable is not referenced by any
other netCDF variable.
.. versionadded:: (cfdm) 1.7.0
.. seealso:: `_reference`
:Parameters:
ncvar: `str`
The netCDF variable name.
:Returns:
`bool`
**Examples:**
>>> x = r._is_unreferenced('tas')
"""
return self.read_vars["references"].get(ncvar, 0) <= 0
def _reference(self, ncvar, referencing_ncvar):
"""Increment by one the reference count to a netCDF variable.
.. versionadded:: (cfdm) 1.7.0
.. seealso:: _is_unreferenced
:Parameters:
ncvar: `str`
The netCDF variable name.
referencing_ncvar: `str`
                The netCDF name of the variable that is doing the
referencing.
                .. versionadded:: (cfdm) 1.8.6.0
:Returns:
`int`
The new reference count.
**Examples:**
        >>> r._reference('longitude', 'eastward_wind')
"""
g = self.read_vars
count = g["references"].setdefault(ncvar, 0)
count += 1
g["references"][ncvar] = count
# Keep a note of which variables are doing the referencing
g["referencers"].setdefault(ncvar, set()).add(referencing_ncvar)
return count
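    # Bookkeeping sketch: after calling r._reference('lat', 'tas') twice,
    # read_vars['references']['lat'] == 2 and
    # read_vars['referencers']['lat'] == {'tas'}.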
def file_close(self):
"""Close all netCDF files that have been opened.
Includes the input file being read, any external files, and any
temporary flattened files.
:Returns:
`None`
**Examples:**
>>> r.file_close()
"""
for nc in self.read_vars["datasets"]:
nc.close()
# Close temporary flattened files
for flat_file in self.read_vars["flat_files"]:
flat_file.close()
# Close the original grouped file (v1.8.8.1)
if "nc_grouped" in self.read_vars:
self.read_vars["nc_grouped"].close()
def file_open(self, filename, flatten=True, verbose=None):
"""Open the netCDf file for reading.
If the file has hierarchical groups then a flattened version of it
is returned, and the original grouped file remains open.
.. versionadded:: (cfdm) 1.7.0
        :Parameters:
filename: `str`
As for the *filename* parameter for initialising a
`netCDF.Dataset` instance.
flatten: `bool`, optional
If False then do not flatten a grouped file. Ignored if
the file has no groups.
.. versionadded:: (cfdm) 1.8.6
:Returns:
`netCDF4.Dataset`
A `netCDF4.Dataset` object for the file.
**Examples:**
>>> r.file_open('file.nc')
"""
try:
nc = netCDF4.Dataset(filename, "r")
except RuntimeError as error:
raise RuntimeError(f"{error}: {filename}")
# ------------------------------------------------------------
# If the file has a group structure then flatten it (CF>=1.8)
# ------------------------------------------------------------
g = self.read_vars
if flatten and nc.groups:
# Create a diskless, non-persistent container for the
# flattened file
flat_file = tempfile.NamedTemporaryFile(
mode="wb",
dir=tempfile.gettempdir(),
prefix="cfdm_flat_",
suffix=".nc",
delete=True,
)
flat_nc = netCDF4.Dataset(
flat_file, "w", diskless=True, persist=False
)
flat_nc.set_fill_off()
# Flatten the file
netcdf_flattener.flatten(
nc, flat_nc, lax_mode=True, _copy_data=False
)
# Store the original grouped file. This is primarily
# because the unlimited dimensions in the flattened
# dataset have size 0, since it contains no
# data. (v1.8.8.1)
g["nc_grouped"] = nc
nc = flat_nc
g["has_groups"] = True
g["flat_files"].append(flat_file)
g["nc"] = nc
return nc
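    # The following helper shells out to the `ncgen` utility to convert CDL
    # text to a temporary netCDF-4 file; the effective command is roughly
    # (paths illustrative):
    #     ncgen -knc4 -o /tmp/cfdm_XXXXXX.nc file.cdl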
@classmethod
def cdl_to_netcdf(cls, filename):
"""Create a temporary netCDF-4 file from a CDL text file.
:Parameters:
filename: `str`
The name of the CDL file.
:Returns:
`str`
The name of the new netCDF file.
"""
x = tempfile.NamedTemporaryFile(
mode="wb", dir=tempfile.gettempdir(), prefix="cfdm_", suffix=".nc"
)
tmpfile = x.name
# ----------------------------------------------------------------
# Need to cache the TemporaryFile object so that it doesn't get
# deleted too soon
# ----------------------------------------------------------------
_cached_temporary_files[tmpfile] = x
try:
subprocess.run(
["ncgen", "-knc4", "-o", tmpfile, filename], check=True
)
except subprocess.CalledProcessError as error:
msg = str(error)
if msg.startswith(
"Command '['ncgen', '-knc4', '-o'"
) and msg.endswith("returned non-zero exit status 1."):
raise ValueError(
"The CDL provided is invalid so cannot be converted "
"to netCDF."
)
else:
raise
return tmpfile
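    # The magic numbers tested below appear to correspond to the classic
    # netCDF signatures "CDF\x01", "CDF\x02" and "CDF\x05" and the HDF5
    # signature, read as a native-endian 4-byte integer (allowing for either
    # byte order).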
@classmethod
def is_netcdf_file(cls, filename):
"""Return `True` if the file is a netCDF file.
Note that the file type is determined by inspecting the file's
        contents and any file suffix is not considered.
:Parameters:
filename: `str`
The name of the file.
:Returns:
`bool`
`True` if the file is netCDF, otherwise `False`
**Examples:**
>>> {{package}}.{{class}}.is_netcdf_file('file.nc')
True
"""
# Assume that URLs are in netCDF format
if filename.startswith("http://"):
return True
# Read the magic number
try:
fh = open(filename, "rb")
magic_number = struct.unpack("=L", fh.read(4))[0]
except Exception:
magic_number = None
try:
fh.close()
except Exception:
pass
if magic_number in (
21382211,
1128547841,
1178880137,
38159427,
88491075,
):
return True
else:
return False
    @classmethod
    def is_cdl_file(cls, filename):
"""True if the file is in CDL format.
Return True if the file is a CDL text representation of a
netCDF file.
Note that the file type is determined by inspecting the file's
        contents and any file suffix is not considered. The file is
assumed to be a CDL file if it is a text file that starts with
"netcdf ".
        .. versionadded:: 1.7.8
:Parameters:
filename: `str`
The name of the file.
:Returns:
`bool`
`True` if the file is CDL, otherwise `False`
**Examples:**
>>> {{package}}.{{class}}.is_cdl_file('file.nc')
False
"""
# Read the magic number
cdl = False
try:
fh = open(filename, "rt")
except UnicodeDecodeError:
pass
except Exception:
pass
else:
try:
line = fh.readline()
# Match comment and blank lines at the top of the file
while re.match(r"^\s*//|^\s*$", line):
line = fh.readline()
if line.startswith("netcdf "):
cdl = True
except UnicodeDecodeError:
pass
try:
fh.close()
except Exception:
pass
return cdl
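    # Default fill values are taken from netCDF4.default_fillvals, keyed by
    # the two-character dtype code (e.g. "f8" for 64-bit floats).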
def default_netCDF_fill_value(self, ncvar):
"""The default netCDF fill value for a variable.
:Parameters:
ncvar: `str`
The netCDF variable name of the variable.
:Returns:
The default fill value for the netCDF variable.
**Examples:**
>>> n.default_netCDF_fill_value('ua')
9.969209968386869e+36
"""
data_type = self.read_vars["variables"][ncvar].dtype.str[-2:]
return netCDF4.default_fillvals[data_type]
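    # Minimal usage sketch (illustrative; `implementation` stands for a CFDM
    # implementation object, normally supplied by the top-level read
    # function):
    #     r = NetCDFRead(implementation)
    #     fields = r.read("file.nc", warnings=False)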
@_manage_log_level_via_verbosity
def read(
self,
filename,
extra=None,
default_version=None,
external=None,
extra_read_vars=None,
_scan_only=False,
verbose=None,
mask=True,
warnings=True,
warn_valid=False,
):
"""Reads a netCDF dataset from file or OPenDAP URL.
Read fields from a netCDF file on disk or from an OPeNDAP
server location.
The file may be big or little endian.
NetCDF dimension names are stored in the `ncdim` attributes of the
field's DomainAxis objects and netCDF variable names are stored in
the `ncvar` attributes of the field and its components
(coordinates, coordinate bounds, cell measures and coordinate
references, domain ancillaries, field ancillaries).
:Parameters:
filename: `str`
                The file name or OPeNDAP URL of the dataset.
Relative paths are allowed, and standard tilde and shell
parameter expansions are applied to the string.
*Parameter example:*
The file ``file.nc`` in the user's home directory could
be described by any of the following:
``'$HOME/file.nc'``, ``'${HOME}/file.nc'``,
``'~/file.nc'``, ``'~/tmp/../file.nc'``.
extra: sequence of `str`, optional
Create extra, independent fields from the particular types
of metadata constructs. The *extra* parameter may be one,
or a sequence, of:
========================== ================================
*extra* Metadata constructs
========================== ================================
``'field_ancillary'`` Field ancillary constructs
``'domain_ancillary'`` Domain ancillary constructs
``'dimension_coordinate'`` Dimension coordinate constructs
``'auxiliary_coordinate'`` Auxiliary coordinate constructs
``'cell_measure'`` Cell measure constructs
========================== ================================
*Parameter example:*
To create fields from auxiliary coordinate constructs:
``extra='auxiliary_coordinate'`` or
``extra=['auxiliary_coordinate']``.
*Parameter example:*
To create fields from domain ancillary and cell measure
constructs: ``extra=['domain_ancillary',
'cell_measure']``.
warnings: `bool`, optional
If False then do not print warnings when an output field
construct is incomplete due to "structural
non-CF-compliance" of the dataset. By default such
warnings are displayed.
Structural non-CF-compliance occurs when it is not
possible to unambiguously map an element of the netCDF
                dataset to an element of the CF data model. Other types of
                non-CF-compliance are not checked; for example, whether or
not controlled vocabularies have been adhered to is not
checked.
mask: `bool`, optional
If False then do not mask by convention when reading the
data of field or metadata constructs from disk. By default
data is masked by convention.
The masking by convention of a netCDF array depends on the
values of any of the netCDF variable attributes
                ``_FillValue``, ``missing_value``, ``valid_min``,
``valid_max``, ``valid_range``. See the CF conventions for
details.
.. versionadded:: (cfdm) 1.8.2
warn_valid: `bool`, optional
If True then print a warning for the presence of
``valid_min``, ``valid_max`` or ``valid_range`` properties
on field constructs and metadata constructs that have
                data. By default no such warning is printed.
"Out-of-range" data values in the file, as defined by any
of these properties, are by default automatically masked,
which may not be as intended. See the *mask* parameter for
turning off all automatic masking.
.. versionadded:: (cfdm) 1.8.3
:Returns:
`list`
The fields in the file.
"""
# ------------------------------------------------------------
# Initialise netCDF read parameters
# ------------------------------------------------------------
self.read_vars = {
"new_dimensions": {},
"formula_terms": {},
"compression": {},
# Verbose?
"verbose": verbose,
# Warnings?
"warnings": warnings,
"dataset_compliance": {None: {"non-compliance": {}}},
"component_report": {},
"auxiliary_coordinate": {},
"cell_measure": {},
"dimension_coordinate": {},
"domain_ancillary": {},
"domain_ancillary_key": None,
"field_ancillary": {},
"coordinates": {},
"bounds": {},
# --------------------------------------------------------
# Geometry containers, keyed by their netCDF geometry
# container variable names.
# --------------------------------------------------------
"geometries": {},
# Map data variables to their geometry variable names
"variable_geometry": {},
"do_not_create_field": set(),
"references": {},
"referencers": {},
# --------------------------------------------------------
# External variables
# --------------------------------------------------------
# Variables listed by the global external_variables
# attribute
"external_variables": set(),
# External variables that are actually referenced from
# within the parent file
"referenced_external_variables": set(),
# --------------------------------------------------------
# Coordinate references
# --------------------------------------------------------
# Grid mapping attributes that describe horizontal datum
"datum_parameters": self.cf_datum_parameters(),
# Vertical coordinate reference constructs, keyed by the
# netCDF variable name of their parent parametric vertical
# coordinate variable.
#
# E.g. {'ocean_s_coordinate':
# <CoordinateReference: ocean_s_coordinate>}
"vertical_crs": {},
#
"version": {},
# Auto mask?
"mask": bool(mask),
# Warn for the presence of valid_[min|max|range]
# attributes?
"warn_valid": bool(warn_valid),
"valid_properties": set(("valid_min", "valid_max", "valid_range")),
# Assume a priori that the dataset does not have a group
# structure
"has_groups": False,
# Keep a list of flattened file names
"flat_files": [],
}
g = self.read_vars
# Set versions
for version in ("1.6", "1.7", "1.8", "1.9"):
g["version"][version] = LooseVersion(version)
# ------------------------------------------------------------
# Add custom read vars
# ------------------------------------------------------------
if extra_read_vars:
g.update(deepcopy(extra_read_vars))
# ----------------------------------------------------------------
# Parse field parameter
# ----------------------------------------------------------------
g["get_constructs"] = {
"auxiliary_coordinate": self.implementation.get_auxiliary_coordinates,
"cell_measure": self.implementation.get_cell_measures,
"dimension_coordinate": self.implementation.get_dimension_coordinates,
"domain_ancillary": self.implementation.get_domain_ancillaries,
"field_ancillary": self.implementation.get_field_ancillaries,
}
# Parse the 'external' keyword parameter
if external:
if isinstance(external, str):
external = (external,)
else:
external = ()
g["external_files"] = set(external)
# Parse 'extra' keyword parameter
if extra:
if isinstance(extra, str):
extra = (extra,)
for f in extra:
if f not in g["get_constructs"]:
raise ValueError(
f"Can't read: Bad parameter value: extra={extra!r}"
)
g["extra"] = extra
filename = os.path.expanduser(os.path.expandvars(filename))
if os.path.isdir(filename):
raise IOError(f"Can't read directory {filename}")
if not os.path.isfile(filename):
raise IOError(f"Can't read non-existent file {filename}")
g["filename"] = filename
# ------------------------------------------------------------
# Open the netCDF file to be read
# ------------------------------------------------------------
nc = self.file_open(filename, flatten=True, verbose=None)
logger.info(f"Reading netCDF file: {filename}\n") # pragma: no cover
if is_log_level_debug(logger):
logger.debug(
f" Input netCDF dataset:\n {nc}\n"
) # pragma: no cover
# ----------------------------------------------------------------
# Put the file's global attributes into the global
# 'global_attributes' dictionary
# ----------------------------------------------------------------
global_attributes = {}
for attr in map(str, nc.ncattrs()):
try:
value = nc.getncattr(attr)
if isinstance(value, str):
try:
global_attributes[attr] = str(value)
except UnicodeEncodeError:
global_attributes[attr] = value.encode(errors="ignore")
else:
global_attributes[attr] = value
except UnicodeDecodeError:
pass
g["global_attributes"] = global_attributes
if is_log_level_debug(logger):
logger.debug(
f" Global attributes:\n {g['global_attributes']}"
) # pragma: no cover
# ------------------------------------------------------------
# Find the CF version for the file
# ------------------------------------------------------------
Conventions = g["global_attributes"].get("Conventions", "")
all_conventions = re.split(",", Conventions)
if all_conventions[0] == Conventions:
all_conventions = re.split(r"\s+", Conventions)
file_version = None
for c in all_conventions:
if not re.match(r"^CF-\d", c):
continue
file_version = re.sub("^CF-", "", c)
if not file_version:
if default_version is not None:
# Assume the default version provided by the user
file_version = default_version
else:
# Assume the file has the same version of the CFDM
# implementation
file_version = self.implementation.get_cf_version()
g["file_version"] = LooseVersion(file_version)
# Set minimum versions
for vn in ("1.6", "1.7", "1.8", "1.9"):
g["CF>=" + vn] = g["file_version"] >= g["version"][vn]
# ------------------------------------------------------------
# Create a dictionary keyed by netCDF variable names where
# each key's value is a dictionary of that variable's netCDF
# attributes. E.g. attributes['tas']['units']='K'
# ------------------------------------------------------------
variable_attributes = {}
variable_dimensions = {}
variable_dataset = {}
variable_filename = {}
variables = {}
variable_groups = {}
variable_group_attributes = {}
variable_basename = {}
variable_grouped_dataset = {}
dimension_groups = {}
dimension_basename = {}
dimension_isunlimited = {}
# ------------------------------------------------------------
# For grouped files (CF>=1.8) map:
#
# * each flattened variable name to its absolute path
# * each flattened dimension name to its absolute path
# * each group to its group attributes
#
# ------------------------------------------------------------
has_groups = g["has_groups"]
flattener_variables = {}
flattener_dimensions = {}
flattener_attributes = {}
if has_groups:
flattener_name_mapping_variables = getattr(
nc, "__flattener_name_mapping_variables", None
)
if flattener_name_mapping_variables is not None:
if isinstance(flattener_name_mapping_variables, str):
flattener_name_mapping_variables = [
flattener_name_mapping_variables
]
flattener_variables = dict(
tuple(x.split(": "))
for x in flattener_name_mapping_variables
)
flattener_name_mapping_dimensions = getattr(
nc, "__flattener_name_mapping_dimensions", None
)
if flattener_name_mapping_dimensions is not None:
if isinstance(flattener_name_mapping_dimensions, str):
flattener_name_mapping_dimensions = [
flattener_name_mapping_dimensions
]
flattener_dimensions = dict(
tuple(x.split(": "))
for x in flattener_name_mapping_dimensions
)
# Remove a leading / (slash) from dimensions in the
# root group
for key, value in flattener_dimensions.items():
if value.startswith("/") and value.count("/") == 1:
flattener_dimensions[key] = value[1:]
flattener_name_mapping_attributes = getattr(
nc, "__flattener_name_mapping_attributes", None
)
if flattener_name_mapping_attributes is not None:
if isinstance(flattener_name_mapping_attributes, str):
flattener_name_mapping_attributes = [
flattener_name_mapping_attributes
]
flattener_attributes = dict(
tuple(x.split(": "))
for x in flattener_name_mapping_attributes
)
# Remove group attributes from the global attributes,
# and vice versa.
for flat_attr in flattener_attributes.copy():
attr = flattener_attributes.pop(flat_attr)
x = attr.split("/")
groups = x[1:-1]
if groups:
g["global_attributes"].pop(flat_attr)
group_attr = x[-1]
flattener_attributes.setdefault(tuple(groups), {})[
group_attr
] = nc.getncattr(flat_attr)
# Remove flattener attributes from the global attributes
for attr in (
"__flattener_name_mapping_variables",
"__flattener_name_mapping_dimensions",
"__flattener_name_mapping_attributes",
):
g["global_attributes"].pop(attr, None)
for ncvar in nc.variables:
ncvar_basename = ncvar
groups = ()
group_attributes = {}
variable = nc.variables[ncvar]
# --------------------------------------------------------
# Specify the group structure for each variable (CF>=1.8)
# TODO
# If the file only has the root group then this dictionary
# will be empty. Variables in the root group when there
# are sub-groups will have dictionary values of None.
# --------------------------------------------------------
if has_groups:
# Replace the flattened variable name with its
# absolute path.
ncvar_flat = ncvar
ncvar = flattener_variables[ncvar]
groups = tuple(ncvar.split("/")[1:-1])
if groups:
# This variable is in a group. Remove the group
# structure that was prepended to the netCDF
# variable name by the netCDF flattener.
ncvar_basename = re.sub(
f"^{_flattener_separator.join(groups)}{_flattener_separator}",
"",
ncvar_flat,
)
# ------------------------------------------------
# Group attributes. Note that, currently,
                    # sub-group attributes supersede all parent group
# attributes (but not global attributes).
# ------------------------------------------------
group_attributes = {}
for i in range(1, len(groups) + 1):
hierarchy = groups[:i]
if hierarchy not in flattener_attributes:
continue
group_attributes.update(
flattener_attributes[hierarchy]
)
else:
# Remove the leading / from the absolute netCDF
# variable path
ncvar = ncvar[1:]
flattener_variables[ncvar] = ncvar
variable_grouped_dataset[ncvar] = g["nc_grouped"]
variable_attributes[ncvar] = {}
for attr in map(str, variable.ncattrs()):
try:
variable_attributes[ncvar][attr] = variable.getncattr(attr)
if isinstance(variable_attributes[ncvar][attr], str):
try:
variable_attributes[ncvar][attr] = str(
variable_attributes[ncvar][attr]
)
except UnicodeEncodeError:
variable_attributes[ncvar][
attr
] = variable_attributes[ncvar][attr].encode(
errors="ignore"
)
except UnicodeDecodeError:
pass
variable_dimensions[ncvar] = tuple(variable.dimensions)
variable_dataset[ncvar] = nc
variable_filename[ncvar] = g["filename"]
variables[ncvar] = variable
variable_basename[ncvar] = ncvar_basename
variable_groups[ncvar] = groups
variable_group_attributes[ncvar] = group_attributes
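        # At this point, for a grouped file, a variable such as
        # "/forecasts/lon" is keyed by its absolute path, with
        # variable_basename giving "lon" and variable_groups giving
        # ("forecasts",) (illustrative values).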
        # Populate the dimension_groups and dimension_basename
# dictionaries
for ncdim in nc.dimensions:
            ncdim_org = ncdim
            ncdim_basename = ncdim
            groups = ()
if has_groups:
# Replace the flattened variable name with its
# absolute path.
ncdim_flat = ncdim
ncdim = flattener_dimensions[ncdim_flat]
groups = tuple(ncdim.split("/")[1:-1])
if groups:
# This dimension is in a group.
ncdim_basename = re.sub(
"^{_flattener_separator.join(groups)}{_flattener_separator}",
"",
ncdim_flat,
)
dimension_groups[ncdim] = groups
dimension_basename[ncdim] = ncdim_basename
dimension_isunlimited[ncdim] = nc.dimensions[
ncdim_org
].isunlimited()
if has_groups:
variable_dimensions = {
name: tuple([flattener_dimensions[ncdim] for ncdim in value])
for name, value in variable_dimensions.items()
}
if is_log_level_debug(logger):
logger.debug(
" General read variables:\n"
" read_vars['variable_dimensions'] =\n"
f" {variable_dimensions}"
) # pragma: no cover
# The netCDF attributes for each variable
#
# E.g. {'grid_lon': {'standard_name': 'grid_longitude'}}
g["variable_attributes"] = variable_attributes
# The netCDF dimensions for each variable
#
# E.g. {'grid_lon_bounds': ('grid_longitude', 'bounds2')}
g["variable_dimensions"] = variable_dimensions
# The netCDF4 dataset object for each variable
g["variable_dataset"] = variable_dataset
        # The original grouped dataset for each variable (empty if the
# original dataset is not grouped) v1.8.8.1
g["variable_grouped_dataset"] = variable_grouped_dataset
        # The name of the file containing each variable
g["variable_filename"] = variable_filename
# The netCDF4 variable object for each variable
g["variables"] = variables
# The netCDF4 dataset objects that have been opened (i.e. the
        # parent file and any external files)
g["datasets"] = [nc]
        # The names of the variables in the parent file
# (i.e. excluding any external variables)
g["internal_variables"] = set(variables)
# The netCDF dimensions of the parent file
internal_dimension_sizes = {}
for name, dimension in nc.dimensions.items():
if (
has_groups
and dimension_isunlimited[flattener_dimensions[name]]
):
# For grouped datasets, get the unlimited dimension
# size from the original grouped dataset, because
# unlimited dimensions have size 0 in the flattened
# dataset (because it contains no data) (v1.8.8.1)
group, ncdim = self._netCDF4_group(
g["nc_grouped"], flattener_dimensions[name]
)
internal_dimension_sizes[name] = group.dimensions[ncdim].size
else:
internal_dimension_sizes[name] = dimension.size
if g["has_groups"]:
internal_dimension_sizes = {
flattener_dimensions[name]: value
for name, value in internal_dimension_sizes.items()
}
g["internal_dimension_sizes"] = internal_dimension_sizes
# The group structure for each variable. Variables in the root
# group have a group structure of ().
#
# E.g. {'lat': (),
        # '/forecasts/lon': ('forecasts',),
        # '/forecasts/model/t': ('forecasts', 'model')}
g["variable_groups"] = variable_groups
# The group attributes that apply to each variable
#
# E.g. {'latitude': {},
# 'eastward_wind': {'model': 'climate1'}}
g["variable_group_attributes"] = variable_group_attributes
# Mapped components of a flattened version of the netCDF file
g["flattener_variables"] = flattener_variables
g["flattener_dimensions"] = flattener_dimensions
g["flattener_attributes"] = flattener_attributes
# The basename of each variable. I.e. the dimension name
# without its prefixed group structure.
#
# E.g. {'lat': 'lat',
# '/forecasts/lon': 'lon',
# '/forecasts/model/t': 't'}
g["variable_basename"] = variable_basename
# The unlimited status of each dimension
#
# E.g. {'/forecast/lat': False, 'bounds2': False, 'lon':
# False}
g["dimension_isunlimited"] = dimension_isunlimited
# The group structure for each dimension. Dimensions in the
# root group have a group structure of ().
#
# E.g. {'lat': (),
        # '/forecasts/lon': ('forecasts',),
        # '/forecasts/model/t': ('forecasts', 'model')}
g["dimension_groups"] = dimension_groups
# The basename of each dimension. I.e. the dimension name
# without its prefixed group structure.
#
# E.g. {'lat': 'lat',
# '/forecasts/lon': 'lon',
# '/forecasts/model/t': 't'}
g["dimension_basename"] = dimension_basename
if is_log_level_debug(logger):
logger.debug(
" read_vars['dimension_isunlimited'] =\n"
f" {g['dimension_isunlimited']}\n"
" read_vars['internal_dimension_sizes'] =\n"
f" {g['internal_dimension_sizes']}\n"
" Groups read vars:\n"
" read_vars['variable_groups'] =\n"
f" {g['variable_groups']}\n"
" read_vars['variable_basename'] =\n"
f" {variable_basename}\n"
" read_vars['dimension_groups'] =\n"
f" {g['dimension_groups']}\n"
" read_vars['dimension_basename'] =\n"
f" {g['dimension_basename']}\n"
" read_vars['flattener_variables'] =\n"
f" {g['flattener_variables']}\n"
" read_vars['flattener_dimensions'] =\n"
f" {g['flattener_dimensions']}\n"
" read_vars['flattener_attributes'] =\n"
f" {g['flattener_attributes']}\n"
f" netCDF dimensions: {internal_dimension_sizes}"
) # pragma: no cover
# ------------------------------------------------------------
# List variables
#
# Identify and parse all list variables
# ------------------------------------------------------------
for ncvar, dimensions in variable_dimensions.items():
if dimensions != (ncvar,):
continue
# This variable is a Unidata coordinate variable
compress = variable_attributes[ncvar].get("compress")
if compress is None:
continue
# This variable is a list variable for gathering
# arrays
self._parse_compression_gathered(ncvar, compress)
# Do not attempt to create a field from a list
# variable
g["do_not_create_field"].add(ncvar)
# ------------------------------------------------------------
# DSG variables (CF>=1.6)
#
# Identify and parse all DSG count and DSG index variables
# ------------------------------------------------------------
if g["CF>=1.6"]:
featureType = g["global_attributes"].get("featureType")
if featureType is not None:
g["featureType"] = featureType
sample_dimension = None
for ncvar, attributes in variable_attributes.items():
if "sample_dimension" not in attributes:
continue
# ------------------------------------------------
# This variable is a count variable for DSG
# contiguous ragged arrays
# ------------------------------------------------
sample_dimension = attributes["sample_dimension"]
if has_groups:
sample_dimension = g["flattener_dimensions"].get(
sample_dimension, sample_dimension
)
cf_compliant = self._check_sample_dimension(
ncvar, sample_dimension
)
if not cf_compliant:
sample_dimension = None
else:
self._parse_ragged_contiguous_compression(
ncvar, sample_dimension
)
# Do not attempt to create a field from a
# count variable
g["do_not_create_field"].add(ncvar)
instance_dimension = None
for ncvar, attributes in variable_attributes.items():
if "instance_dimension" not in attributes:
continue
# ------------------------------------------------
# This variable is an index variable for DSG
# indexed ragged arrays
# ------------------------------------------------
instance_dimension = attributes["instance_dimension"]
if has_groups:
instance_dimension = g["flattener_dimensions"].get(
instance_dimension, instance_dimension
)
cf_compliant = self._check_instance_dimension(
ncvar, instance_dimension
)
if not cf_compliant:
instance_dimension = None
else:
self._parse_indexed_compression(
ncvar, instance_dimension
)
# Do not attempt to create a field from a
# index variable
g["do_not_create_field"].add(ncvar)
if (
sample_dimension is not None
and instance_dimension is not None
):
# ------------------------------------------------
# There are DSG indexed contiguous ragged arrays
# ------------------------------------------------
self._parse_indexed_contiguous_compression(
sample_dimension, instance_dimension
)
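        # Illustrative (hypothetical) case: a timeSeriesProfile file with a
        # count variable carrying sample_dimension = "obs" and an index
        # variable carrying instance_dimension = "station" reaches the
        # indexed-contiguous branch above.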
# ------------------------------------------------------------
# Identify and parse all geometry container variables
# (CF>=1.8)
# ------------------------------------------------------------
if g["CF>=1.8"]:
for ncvar, attributes in variable_attributes.items():
if "geometry" not in attributes:
# This data variable does not have a geometry
# container
continue
geometry_ncvar = self._parse_geometry(
ncvar, variable_attributes
)
if not geometry_ncvar:
# The geometry container has already been parsed,
# or a sufficiently compliant geometry container
# could not be found.
continue
# Do not attempt to create a field construct from a
# node coordinate variable
g["do_not_create_field"].add(geometry_ncvar)
if is_log_level_debug(logger):
logger.debug(
" Compression read vars:\n"
" read_vars['compression'] =\n"
f" {g['compression']}"
) # pragma: no cover
# ------------------------------------------------------------
# Parse external variables (CF>=1.7)
# ------------------------------------------------------------
if g["CF>=1.7"]:
netcdf_external_variables = global_attributes.pop(
"external_variables", None
)
parsed_external_variables = self._split_string_by_white_space(
None, netcdf_external_variables
)
parsed_external_variables = self._check_external_variables(
netcdf_external_variables, parsed_external_variables
)
g["external_variables"] = set(parsed_external_variables)
# Now that all of the variables have been scanned, customize
# the read parameters.
self._customize_read_vars()
if _scan_only:
return self.read_vars
# ------------------------------------------------------------
# Get external variables (CF>=1.7)
# ------------------------------------------------------------
if g["CF>=1.7"]:
logger.info(
f" External variables: {g['external_variables']}\n"
f" External files : {g['external_files']}"
) # pragma: no cover
if g["external_files"] and g["external_variables"]:
self._get_variables_from_external_files(
netcdf_external_variables
)
# ------------------------------------------------------------
# Create a field from every netCDF variable (apart from
# special variables that have already been identified as such)
# ------------------------------------------------------------
all_fields = OrderedDict()
for ncvar in g["variables"]:
if ncvar not in g["do_not_create_field"]:
all_fields[ncvar] = self._create_field(ncvar)
# ------------------------------------------------------------
# Check for unreferenced external variables (CF>=1.7)
# ------------------------------------------------------------
if g["CF>=1.7"]:
unreferenced_external_variables = g[
"external_variables"
].difference(g["referenced_external_variables"])
for ncvar in unreferenced_external_variables:
self._add_message(
None,
ncvar,
message=("External variable", "is not referenced in file"),
attribute={
"external_variables": netcdf_external_variables
},
)
if is_log_level_debug(logger):
logger.debug(
" Reference read vars:\n"
" read_vars['references'] =\n"
f" {g['references']}\n"
" read_vars['referencers'] =\n"
f" {g['referencers']}"
) # pragma: no cover
# ------------------------------------------------------------
# Discard fields created from netCDF variables that are
# referenced by other netCDF variables
# ------------------------------------------------------------
fields = OrderedDict()
for ncvar, f in all_fields.items():
if self._is_unreferenced(ncvar):
fields[ncvar] = f
referenced_variables = [
ncvar
for ncvar in sorted(all_fields)
if not self._is_unreferenced(ncvar)
]
unreferenced_variables = [
ncvar
for ncvar in sorted(all_fields)
if self._is_unreferenced(ncvar)
]
for ncvar in referenced_variables[:]:
if all(
referencer in referenced_variables
for referencer in g["referencers"][ncvar]
):
referenced_variables.remove(ncvar)
unreferenced_variables.append(ncvar)
fields[ncvar] = all_fields[ncvar]
logger.info(
" Referenced netCDF variables:\n "
+ "\n ".join(referenced_variables)
) # pragma: no cover
if g["do_not_create_field"]:
logger.info(
" "
+ "\n ".join(
[ncvar for ncvar in sorted(g["do_not_create_field"])]
)
) # pragma: no cover
logger.info(
" Unreferenced netCDF variables:\n "
+ "\n ".join(unreferenced_variables)
) # pragma: no cover
# ------------------------------------------------------------
# If requested, reinstate fields created from netCDF variables
# that are referenced by other netCDF variables.
# ------------------------------------------------------------
self_referenced = {}
if g["extra"]:
fields0 = list(fields.values())
for construct_type in g["extra"]:
for f in fields0:
for construct in g["get_constructs"][construct_type](
f
).values():
ncvar = self.implementation.nc_get_variable(construct)
if ncvar not in all_fields:
continue
if ncvar not in fields:
fields[ncvar] = all_fields[ncvar]
else:
self_referenced[ncvar] = all_fields[ncvar]
if not self_referenced:
items = fields.items()
else:
items = tuple(fields.items()) + tuple(self_referenced.items())
out = [x[1] for x in sorted(items)]
if warnings:
for x in out:
qq = x.dataset_compliance()
if qq:
logger.warning(
f"WARNING: {x.__class__.__name__} incomplete due to "
f"non-CF-compliant dataset. Report:\n{qq}"
) # pragma: no cover
if warn_valid:
# --------------------------------------------------------
            # Warn for the presence of 'valid_min', 'valid_max' or
# 'valid_range' properties. (Introduced at v1.8.3)
# --------------------------------------------------------
for f in out:
# Check field constructs
self._check_valid(f, f)
# Check constructs with data
for c in self.implementation.get_constructs(
f, data=True
).values():
self._check_valid(f, c)
# ------------------------------------------------------------
# Close all opened netCDF files
# ------------------------------------------------------------
self.file_close()
# ------------------------------------------------------------
# Return the fields
# ------------------------------------------------------------
return out
def _check_valid(self, field, construct):
"""Warns when valid_[min|max|range] properties exist on data.
Issue a warning if a construct with data has
valid_[min|max|range] properties.
.. versionadded:: (cfdm) 1.8.3
:Parameters:
field: `Field`
The parent field construct.
construct: Construct or Bounds
The construct that may have valid_[min|max|range]
properties. May also be the parent field construct or
Bounds.
:Returns:
`None`
"""
# Check the bounds, if any.
if self.implementation.has_bounds(construct):
bounds = self.implementation.get_bounds(construct)
self._check_valid(field, bounds)
x = sorted(
self.read_vars["valid_properties"].intersection(
self.implementation.get_properties(construct)
)
)
if not x:
return
# Still here?
if self.implementation.is_field(construct):
construct = ""
else:
construct = f" {construct!r} with"
message = (
f"WARNING: {field!r} has {construct} {', '.join(x)} "
"{self._plural(x, 'property')}. "
)
print(message)
def _plural(self, x, singular):
"""Pluralises a singular word if *x* is not of length one.
Return the plural of a word if *x* has zero elements or more
than one element, otherwise return the word unchanged.
:Parameters:
x: sequence
singular: `str`
                The word in its singular form.
:Returns:
`str`
The word in its singular or plural form.
**Examples:**
>>> n._plural([1, 2], 'property')
'properties'
>>> n._plural([1], 'property')
'property'
>>> n._plural([], 'property')
'properties'
"""
if len(x) == 1:
return singular
if singular[-1] == "y":
return singular[:-1] + "ies"
raise ValueError(f"Can't pluralise {singular}")
def _set_default_FillValue(self, construct, ncvar):
"""Ensure there is a fill value recorded on the construct.
The motivation for this is that masking can later be
applied manually on the construct after the masking has
been turned off.
.. versionadded:: (cfdm) 1.8.3
"""
_FillValue = self.implementation.get_property(
construct, "_FillValue", None
)
if _FillValue is None:
self.implementation.set_properties(
construct,
{"_FillValue": self.default_netCDF_fill_value(ncvar)},
)
def _customize_read_vars(self):
"""Customize the read parameters.
This method is primarily aimed at providing a customization
entry point for subclasses.
.. versionadded:: (cfdm) 1.7.3
"""
pass
def _get_variables_from_external_files(self, netcdf_external_variables):
"""Get external variables from external files.
        .. versionadded:: (cfdm) 1.7.0
:Parameters:
netcdf_external_variables: `str`
The un-parsed netCDF external_variables attribute in the
parent file.
                *Parameter example:*
``external_variables='areacello'``
:Returns:
`None`
"""
attribute = {"external_variables": netcdf_external_variables}
read_vars = self.read_vars.copy()
verbose = read_vars["verbose"]
external_variables = read_vars["external_variables"]
external_files = read_vars["external_files"]
datasets = read_vars["datasets"]
parent_dimension_sizes = read_vars["internal_dimension_sizes"]
keys = (
"variable_attributes",
"variable_dimensions",
"variable_dataset",
"variable_filename",
"variable_groups",
"variable_group_attributes",
"variable_basename",
"variables",
)
found = []
for external_file in external_files:
logger.info(
"\nScanning external file:\n-----------------------"
) # pragma: no cover
external_read_vars = self.read(
external_file, _scan_only=True, verbose=verbose
)
logger.info(
"Finished scanning external file\n"
) # pragma: no cover
# Reset self.read_vars
self.read_vars = read_vars
datasets.append(external_read_vars["nc"])
for ncvar in external_variables.copy():
if ncvar not in external_read_vars["internal_variables"]:
# The external variable name is not in this
# external file
continue
if ncvar in found:
# Error: The external variable exists in more than
# one external file
external_variables.add(ncvar)
for key in keys:
self.read_vars[key].pop(ncvar)
self._add_message(
None,
ncvar,
message=(
"External variable",
"exists in multiple external files",
),
attribute=attribute,
)
continue
# Still here? Then the external variable exists in
# this external file
found.append(ncvar)
# Check that the external variable dimensions exist in
# parent file, with the same sizes.
ok = True
for d in external_read_vars["variable_dimensions"][ncvar]:
size = parent_dimension_sizes.get(d)
if size is None:
ok = False
self._add_message(
None,
ncvar,
message=(
"External variable dimension",
"does not exist in file",
),
attribute=attribute,
)
elif (
external_read_vars["internal_dimension_sizes"][d]
!= size
):
ok = False
self._add_message(
None,
ncvar,
message=(
"External variable dimension",
"has incorrect size",
),
attribute=attribute,
)
else:
continue
if ok:
# Update the read parameters so that this external
# variable looks like it is an internal variable
for key in keys:
self.read_vars[key][ncvar] = external_read_vars[key][
ncvar
]
# Remove this ncvar from the set of external variables
external_variables.remove(ncvar)
def _parse_compression_gathered(self, ncvar, compress):
"""Parse a list variable for compressing arrays by gathering."""
g = self.read_vars
logger.info(
f" List variable: compress = {compress}"
) # pragma: no cover
gathered_ncdimension = g["variable_dimensions"][ncvar][0]
parsed_compress = self._split_string_by_white_space(
ncvar, compress, variables=True
)
cf_compliant = self._check_compress(ncvar, compress, parsed_compress)
if not cf_compliant:
return
list_variable = self._create_List(ncvar)
g["compression"][gathered_ncdimension] = {
"gathered": {
"list_variable": list_variable,
"implied_ncdimensions": parsed_compress,
"sample_dimension": gathered_ncdimension,
}
}
def _parse_ragged_contiguous_compression(self, ncvar, sample_dimension):
"""Parse a count variable for DSG contiguous ragged arrays.
:Parameters:
ncvar: `str`
The netCDF variable name of the count variable (section
9.3.3).
sample_dimension: `str`
The netCDF dimension name of the sample dimension (section
9.3.3).
:Returns:
`str`
The made-up netCDF dimension name of the element dimension.
"""
g = self.read_vars
logger.info(
f" count variable: sample_dimension = {sample_dimension}"
) # pragma: no cover
instance_dimension = g["variable_dimensions"][ncvar][0]
elements_per_instance = self._create_Count(
ncvar=ncvar, ncdim=instance_dimension
)
# Make up a netCDF dimension name for the element dimension
featureType = g["featureType"].lower()
if featureType in ("timeseries", "trajectory", "profile"):
element_dimension = featureType
elif featureType == "timeseriesprofile":
element_dimension = "profile"
elif featureType == "trajectoryprofile":
element_dimension = "profile"
else:
element_dimension = "element"
logger.info(
f" featureType = {g['featureType']}"
) # pragma: no cover
element_dimension = self._set_ragged_contiguous_parameters(
elements_per_instance=elements_per_instance,
sample_dimension=sample_dimension,
element_dimension=element_dimension,
instance_dimension=instance_dimension,
)
return element_dimension
def _parse_indexed_compression(self, ncvar, instance_dimension):
"""Parse an index variable for DSG indexed ragged arrays.
The CF-netCDF index variable contains the zero-based index of the
feature to which each element belongs. It is identifiable by the
presence of an attribute, "instance_dimension", which names the
dimension of the instance variables. For those indices of the
sample dimension into which data have not yet been written, the
index variable should be pre-filled with missing values.
:Parameters:
ncvar: `str`
The netCDF variable name of the index variable.
instance_dimension: `str`
The netCDF dimension name of the instance dimension.
:Returns:
`str`
An invented netCDF name for the element dimension,
e.g. ``'timeseriesprofile'``.
"""
g = self.read_vars
# Read the data of the index variable
ncdim = g["variable_dimensions"][ncvar][0]
index = self._create_Index(ncvar, ncdim=ncdim)
# Make up a netCDF dimension name for the element dimension
featureType = g["featureType"].lower()
if featureType in ("timeseries", "trajectory", "profile"):
element_dimension = featureType
elif featureType == "timeseriesprofile":
element_dimension = "timeseries"
elif featureType == "trajectoryprofile":
element_dimension = "trajectory"
else:
element_dimension = "element"
logger.info(
f" featureType = {g['featureType']}"
) # pragma: no cover
element_dimension = self._set_ragged_indexed_parameters(
index=index,
indexed_sample_dimension=g["variable_dimensions"][ncvar][0],
element_dimension=element_dimension,
instance_dimension=instance_dimension,
)
return element_dimension
def _parse_indexed_contiguous_compression(
self, sample_dimension, instance_dimension
):
"""Parse an index variable for indexed contiguous ragged arrays.
:Parameters:
sample_dimension: `str`
The netCDF dimension name of the sample dimension.
instance_dimension: `str`
The netCDF dimension name of the instance dimension. (The
implied element dimension between the instance and sample
dimensions has a size equal to the maximum number of
sub-features in any instance.)
"""
g = self.read_vars
profile_dimension = g["compression"][sample_dimension][
"ragged_contiguous"
]["profile_dimension"]
if is_log_level_debug(logger):
logger.debug(
" Pre-processing indexed and contiguous compression "
f"for instance dimension: {instance_dimension}\n"
f" sample_dimension : {sample_dimension}\n"
f" instance_dimension: {instance_dimension}\n"
f" profile_dimension : {profile_dimension}"
) # pragma: no cover
contiguous = g["compression"][sample_dimension]["ragged_contiguous"]
indexed = g["compression"][profile_dimension]["ragged_indexed"]
# The indices of the sample dimension which define the start
# positions of each instance's profiles
profile_indices = indexed["index_variable"]
# profiles_per_instance is a numpy array
profiles_per_instance = indexed["elements_per_instance"]
elements_per_profile = contiguous["count_variable"]
instance_dimension_size = indexed["instance_dimension_size"]
element_dimension_1_size = int(profiles_per_instance.max())
element_dimension_2_size = int(
self.implementation.get_data_maximum(elements_per_profile)
)
g["compression"][sample_dimension]["ragged_indexed_contiguous"] = {
"count_variable": elements_per_profile,
"index_variable": profile_indices,
"implied_ncdimensions": (
instance_dimension,
indexed["element_dimension"],
contiguous["element_dimension"],
),
"instance_dimension_size": instance_dimension_size,
"element_dimension_1_size": element_dimension_1_size,
"element_dimension_2_size": element_dimension_2_size,
}
del g["compression"][sample_dimension]["ragged_contiguous"]
if is_log_level_debug(logger):
logger.debug(
f" Created read_vars['compression'][{sample_dimension!r}]"
"['ragged_indexed_contiguous']\n"
f" Implied dimensions: {sample_dimension} -> "
f"{g['compression'][sample_dimension]['ragged_indexed_contiguous']['implied_ncdimensions']}\n"
" Removed "
f"read_vars['compression'][{sample_dimension!r}]['ragged_contiguous']"
) # pragma: no cover
def _parse_geometry(self, parent_ncvar, attributes):
"""Parse a geometry container variable.
.. versionadded:: (cfdm) 1.8.0
:Parameters:
parent_ncvar: `str`
The netCDF variable name of the parent data variable.
attributes: `dict`
All attributes of *all* netCDF variables, keyed by netCDF
variable name.
:Returns:
`str` or `None`
The new geometry netCDF variable name, or `None` if a)
the container has already been parsed or b) a
sufficiently compliant geometry container could not be
found.
"""
g = self.read_vars
geometry_attribute = attributes[parent_ncvar]["geometry"]
parsed_geometry = self._split_string_by_white_space(
parent_ncvar, geometry_attribute, variables=True
)
cf_compliant = self._check_geometry_attribute(
parent_ncvar, geometry_attribute, parsed_geometry
)
if not cf_compliant:
return
geometry_ncvar = parsed_geometry[0]
if geometry_ncvar in g["geometries"]:
# We've already parsed this geometry container, so record
# the fact that this parent netCDF variable has this
# geometry variable and return.
g["variable_geometry"][parent_ncvar] = geometry_ncvar
return
logger.info(
f"    Geometry container = {geometry_ncvar!r}\n"
f"    netCDF attributes: {attributes[geometry_ncvar]}"
) # pragma: no cover
geometry_type = attributes[geometry_ncvar].get("geometry_type")
g["geometries"][geometry_ncvar] = {"geometry_type": geometry_type}
node_coordinates = attributes[geometry_ncvar].get("node_coordinates")
node_count = attributes[geometry_ncvar].get("node_count")
part_node_count = attributes[geometry_ncvar].get("part_node_count")
interior_ring = attributes[geometry_ncvar].get("interior_ring")
parsed_node_coordinates = self._split_string_by_white_space(
geometry_ncvar, node_coordinates, variables=True
)
parsed_interior_ring = self._split_string_by_white_space(
geometry_ncvar, interior_ring, variables=True
)
parsed_node_count = self._split_string_by_white_space(
geometry_ncvar, node_count, variables=True
)
parsed_part_node_count = self._split_string_by_white_space(
geometry_ncvar, part_node_count, variables=True
)
logger.info(
f" parsed_node_coordinates = {parsed_node_coordinates}\n"
f" parsed_interior_ring = {parsed_interior_ring}\n"
f" parsed_node_count = {parsed_node_count}\n"
f" parsed_part_node_count = {parsed_part_node_count}"
) # pragma: no cover
cf_compliant = True
if interior_ring is not None and part_node_count is None:
attribute = {
parent_ncvar
+ ":geometry": attributes[parent_ncvar]["geometry"]
}
self._add_message(
parent_ncvar,
geometry_ncvar,
message=("part_node_count attribute", "is missing"),
attribute=attribute,
)
cf_compliant = False
cf_compliant = cf_compliant & self._check_node_coordinates(
parent_ncvar,
geometry_ncvar,
node_coordinates,
parsed_node_coordinates,
)
cf_compliant = cf_compliant & self._check_node_count(
parent_ncvar, geometry_ncvar, node_count, parsed_node_count
)
cf_compliant = cf_compliant & self._check_part_node_count(
parent_ncvar,
geometry_ncvar,
part_node_count,
parsed_part_node_count,
)
cf_compliant = cf_compliant & self._check_interior_ring(
parent_ncvar, geometry_ncvar, interior_ring, parsed_interior_ring
)
if not cf_compliant:
return
part_dimension = None
# Find the netCDF dimension for the total number of nodes
node_dimension = g["variable_dimensions"][parsed_node_coordinates[0]][
0
]
logger.info(
f" node_dimension = {node_dimension!r}"
) # pragma: no cover
if node_count is None:
# --------------------------------------------------------
# There is no node_count variable, so all geometries must
# be size 1 point geometries => we can create a node_count
# variable in this case.
# --------------------------------------------------------
nodes_per_geometry = self.implementation.initialise_Count()
size = g["nc"].dimensions[node_dimension].size
ones = self.implementation.initialise_Data(
array=numpy.ones((size,), dtype="int32"), copy=False
)
self.implementation.set_data(nodes_per_geometry, data=ones)
# --------------------------------------------------------
# The cell dimension cannot be taken from the node_count
# variable (because it doesn't exist), so it has to be
# taken from one of the node_coordinate variables instead.
# --------------------------------------------------------
geometry_dimension = g["variable_dimensions"][
parsed_node_coordinates[0]
][0]
else:
# Find the netCDF dimension for the total number of cells
node_count = parsed_node_count[0]
geometry_dimension = g["variable_dimensions"][node_count][0]
nodes_per_geometry = self._create_Count(
ncvar=node_count, ncdim=geometry_dimension
)
# --------------------------------------------------------
# Create a node count variable (which does not contain any
# data)
# --------------------------------------------------------
nc = self._create_NodeCount(ncvar=node_count)
g["geometries"][geometry_ncvar]["node_count"] = nc
# Do not attempt to create a field construct from a
# netCDF node count variable
g["do_not_create_field"].add(node_count)
# Record the netCDF node dimension as the sample dimension of
# the count variable
self.implementation.nc_set_sample_dimension(
nodes_per_geometry, self._ncdim_abspath(node_dimension)
)
if part_node_count is None:
# --------------------------------------------------------
# There is no part_node_count variable, i.e. each cell has
# exactly one part.
#
# => we can treat the nodes as a contiguous ragged array
# --------------------------------------------------------
self._set_ragged_contiguous_parameters(
elements_per_instance=nodes_per_geometry,
sample_dimension=node_dimension,
element_dimension="node",
instance_dimension=geometry_dimension,
)
else:
# --------------------------------------------------------
# There is a part node count variable.
#
# => we must treat the nodes as an indexed contiguous
# ragged array
# --------------------------------------------------------
part_node_count = parsed_part_node_count[0]
# Do not attempt to create a field construct from a
# netCDF part node count variable
g["do_not_create_field"].add(part_node_count)
part_dimension = g["variable_dimensions"][part_node_count][0]
g["geometries"][geometry_ncvar]["part_dimension"] = part_dimension
parts = self._create_Count(
ncvar=part_node_count, ncdim=part_dimension
)
total_number_of_parts = self.implementation.get_data_size(parts)
parts_data = self.implementation.get_data(parts)
nodes_per_geometry_data = self.implementation.get_data(
nodes_per_geometry
)
index = self.implementation.initialise_Index()
self.implementation.set_data(index, data=parts_data)
instance_index = 0
i = 0
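# Worked example (assumed data, not from the file being read):
# if nodes_per_geometry_data is [3, 5] and parts_data is
# [3, 2, 3], the loop below assigns index data [0, 1, 1], i.e.
# the first part belongs to geometry 0 and the remaining two
# parts belong to geometry 1.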
for cell_no in range(
self.implementation.get_data_size(nodes_per_geometry)
):
n_nodes_in_this_cell = int(nodes_per_geometry_data[cell_no])
# Initialise n_nodes, a running count of how many nodes
# there are in this geometry
n_nodes = 0
for k in range(i, total_number_of_parts):
index.data[k] = instance_index
n_nodes += int(parts_data[k])
if n_nodes >= n_nodes_in_this_cell:
instance_index += 1
i += k + 1
break
self._set_ragged_contiguous_parameters(
elements_per_instance=parts,
sample_dimension=node_dimension,
element_dimension="node",
instance_dimension=part_dimension,
)
indexed_sample_dimension = g["variable_dimensions"][
part_node_count
][0]
self._set_ragged_indexed_parameters(
index=index,
indexed_sample_dimension=indexed_sample_dimension,
element_dimension="part",
instance_dimension=geometry_dimension,
)
self._parse_indexed_contiguous_compression(
sample_dimension=node_dimension,
instance_dimension=geometry_dimension,
)
# --------------------------------------------------------
# Create a part node count variable (which does not
# contain any data)
# --------------------------------------------------------
pnc = self._create_PartNodeCount(
ncvar=part_node_count, ncdim=part_dimension
)
g["geometries"][geometry_ncvar]["part_node_count"] = pnc
# Do not attempt to create a field construct from a
# netCDF part node count variable
g["do_not_create_field"].add(part_node_count)
# --------------------------------------------------------
# Create an interior ring variable (do this after setting
# up the indexed ragged array compression parameters).
# --------------------------------------------------------
if parsed_interior_ring:
interior_ring = parsed_interior_ring[0]
part_dimension = g["variable_dimensions"][interior_ring][0]
i_r = self._create_InteriorRing(
ncvar=interior_ring, ncdim=part_dimension
)
g["geometries"][geometry_ncvar]["interior_ring"] = i_r
# Record that this netCDF interior ring variable spans
# a compressed dimension
g["compression"][indexed_sample_dimension].setdefault(
"netCDF_variables", set()
).update(parsed_interior_ring)
# Do not attempt to create a field from an
# interior ring variable
g["do_not_create_field"].add(interior_ring)
# Record which netCDF node variables span the compressed
# dimension
g["compression"][node_dimension].setdefault(
"netCDF_variables", set()
).update(parsed_node_coordinates)
# Do not attempt to create field constructs from netCDF node
# coordinate variables
g["do_not_create_field"].update(parsed_node_coordinates)
g["geometries"][geometry_ncvar].update(
{
"node_coordinates": parsed_node_coordinates,
"geometry_dimension": geometry_dimension,
"node_dimension": node_dimension,
}
)
# Record the fact that this parent netCDF variable has a
# geometry variable
g["variable_geometry"][parent_ncvar] = geometry_ncvar
return geometry_ncvar
def _set_ragged_contiguous_parameters(
self,
elements_per_instance=None,
sample_dimension=None,
element_dimension=None,
instance_dimension=None,
):
"""Set the DSG ragged contiguous compression global attributes.
.. versionadded:: (cfdm) 1.7.0
:Parameters:
elements_per_instance: `Count`
sample_dimension: `str`
element_dimension: `str`
instance_dimension: `str`
:Returns:
`str`
The element dimension, possibly modified to make sure that it
is unique.
"""
g = self.read_vars
instance_dimension_size = self.implementation.get_data_size(
elements_per_instance
)
element_dimension_size = int(
self.implementation.get_data_maximum(elements_per_instance)
)
# Make sure that the element dimension name is unique
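# (e.g. a requested element dimension name "timeseries" becomes
# "timeseries_1" if "timeseries" is already used by a file
# dimension, a new dimension or a variable)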
base = element_dimension
n = 0
while (
element_dimension in g["internal_dimension_sizes"]
or element_dimension in g["new_dimensions"]
or element_dimension in g["variables"]
):
n += 1
element_dimension = f"{base}_{n}"
g["new_dimensions"][element_dimension] = element_dimension_size
g["compression"].setdefault(sample_dimension, {})[
"ragged_contiguous"
] = {
"count_variable": elements_per_instance,
"implied_ncdimensions": (instance_dimension, element_dimension),
"profile_dimension": instance_dimension,
"element_dimension": element_dimension,
"element_dimension_size": element_dimension_size,
"instance_dimension_size": instance_dimension_size,
}
return element_dimension
def _set_ragged_indexed_parameters(
self,
index=None,
indexed_sample_dimension=None,
element_dimension=None,
instance_dimension=None,
):
"""Set the DSG ragged indexed compression global attributes.
.. versionadded:: (cfdm) 1.7.0
:Parameters:
index: `Index`
element_dimension: `str`
instance_dimension: `str`
:Returns:
`str`
The element dimension, possibly modified to make sure that it
is unique.
"""
g = self.read_vars
(_, count) = numpy.unique(index.data.array, return_counts=True)
# The number of elements per instance. For example, if the index
# variable data were [0]*7 + [1]*5 + [2]*7 then the
# elements_per_instance array would be [7, 5, 7].
elements_per_instance = count # self._create_Data(array=count)
instance_dimension_size = g["internal_dimension_sizes"][
instance_dimension
]
element_dimension_size = int(elements_per_instance.max())
base = element_dimension
n = 0
while (
element_dimension in g["internal_dimension_sizes"]
or element_dimension in g["new_dimensions"]
or element_dimension in g["variables"]
):
n += 1
element_dimension = f"{base}_{n}"
g["compression"].setdefault(indexed_sample_dimension, {})[
"ragged_indexed"
] = {
"elements_per_instance": elements_per_instance,
"index_variable": index,
"implied_ncdimensions": (instance_dimension, element_dimension),
"element_dimension": element_dimension,
"instance_dimension_size": instance_dimension_size,
"element_dimension_size": element_dimension_size,
}
g["new_dimensions"][element_dimension] = element_dimension_size
if is_log_level_debug(logger):
logger.debug(
" Created "
f"read_vars['compression'][{indexed_sample_dimension!r}]['ragged_indexed']"
) # pragma: no cover
return element_dimension
def _check_external_variables(
self, external_variables, parsed_external_variables
):
"""Check that named external variables do not exist in the file.
.. versionadded:: (cfdm) 1.7.0
:Parameters:
external_variables: `str`
The external_variables attribute as found in the file.
parsed_external_variables: `list`
The external_variables attribute parsed into a list of
external variable names.
:Returns:
`list`
The external variable names, less those which are also netCDF
variables in the file.
"""
g = self.read_vars
attribute = {"external_variables": external_variables}
message = ("External variable", "exists in the file")
out = []
for ncvar in parsed_external_variables:
if ncvar not in g["internal_variables"]:
out.append(ncvar)
else:
self._add_message(
None, ncvar, message=message, attribute=attribute
)
return out
def _check_formula_terms(
self, field_ncvar, coord_ncvar, formula_terms, z_ncdim=None
):
"""Check formula_terms for CF-compliance.
.. versionadded:: (cfdm) 1.7.0
:Parameters:
field_ncvar: `str`
coord_ncvar: `str`
formula_terms: `str`
A CF-netCDF formula_terms attribute.
"""
# ============================================================
# CF-1.7 7.1. Cell Boundaries
#
# If a parametric coordinate variable with a formula_terms
# attribute (section 4.3.2) also has a bounds attribute, its
# boundary variable must have a formula_terms attribute
# too. In this case the same terms would appear in both (as
# specified in Appendix D), since the transformation from the
# parametric coordinate values to physical space is realised
# through the same formula. For any term that depends on the
# vertical dimension, however, the variable names appearing in
# the formula terms would differ from those found in the
# formula_terms attribute of the coordinate variable itself
# because the boundary variables for formula terms are
# two-dimensional while the formula terms themselves are
# one-dimensional.
#
# Whenever a formula_terms attribute is attached to a boundary
# variable, the formula terms may additionally be identified
# using a second method: variables appearing in the vertical
# coordinates' formula_terms may be declared to be coordinate,
# scalar coordinate or auxiliary coordinate variables, and
# those coordinates may have bounds attributes that identify
# their boundary variables. In that case, the bounds attribute
# of a formula terms variable must be consistent with the
# formula_terms attribute of the boundary variable. Software
# digesting legacy datasets (constructed prior to version 1.7
# of this standard) may have to rely in some cases on the
# first method of identifying the formula term variables and
# in other cases, on the second. Starting from version 1.7,
# however, the first method will be sufficient.
# ============================================================
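# Illustrative example (typical CF usage, not taken from this
# module): a parametric vertical coordinate "lev" might carry
#   lev:formula_terms = "ap: ap b: b ps: ps"
# while its boundary variable carries
#   lev_bnds:formula_terms = "ap: ap_bnds b: b_bnds ps: ps"
# i.e. the same terms appear in both, with the terms that depend
# on the vertical dimension pointing at two-dimensional bounds
# variables.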
g = self.read_vars
attribute = {coord_ncvar + ":formula_terms": formula_terms}
g["formula_terms"].setdefault(coord_ncvar, {"coord": {}, "bounds": {}})
parsed_formula_terms = self._parse_x(coord_ncvar, formula_terms)
incorrectly_formatted = (
"formula_terms attribute",
"is incorrectly formatted",
)
if not parsed_formula_terms:
self._add_message(
field_ncvar,
coord_ncvar,
message=incorrectly_formatted,
attribute=attribute,
)
return False
self._ncdimensions(field_ncvar)
for x in parsed_formula_terms:
term, values = list(x.items())[0]
g["formula_terms"][coord_ncvar]["coord"][term] = None
if len(values) != 1:
self._add_message(
field_ncvar,
coord_ncvar,
message=incorrectly_formatted,
attribute=attribute,
)
continue
ncvar = values[0]
if ncvar not in g["internal_variables"]:
ncvar, message = self._check_missing_variable(
ncvar, "Formula terms variable"
)
self._add_message(
field_ncvar, ncvar, message=message, attribute=attribute
)
continue
g["formula_terms"][coord_ncvar]["coord"][term] = ncvar
bounds_ncvar = g["variable_attributes"][coord_ncvar].get("bounds")
if bounds_ncvar is None:
# --------------------------------------------------------
# Parametric Z coordinate does not have bounds
# --------------------------------------------------------
for term in g["formula_terms"][coord_ncvar]["coord"]:
g["formula_terms"][coord_ncvar]["bounds"][term] = None
else:
# --------------------------------------------------------
# Parametric Z coordinate has bounds
# --------------------------------------------------------
bounds_formula_terms = g["variable_attributes"][bounds_ncvar].get(
"formula_terms"
)
if bounds_formula_terms is not None:
# ----------------------------------------------------
# Parametric Z coordinate has bounds, and the bounds
# variable has a formula_terms attribute
# ----------------------------------------------------
bounds_attribute = {
bounds_ncvar + ":formula_terms": bounds_formula_terms
}
parsed_bounds_formula_terms = self._parse_x(
bounds_ncvar, bounds_formula_terms
)
if not parsed_bounds_formula_terms:
self._add_message(
field_ncvar,
bounds_ncvar,
message=(
"Bounds formula_terms attribute",
"is incorrectly formatted",
),
attribute=attribute,
variable=coord_ncvar,
)
for x in parsed_bounds_formula_terms:
term, values = list(x.items())[0]
g["formula_terms"][coord_ncvar]["bounds"][term] = None
if len(values) != 1:
self._add_message(
field_ncvar,
bounds_ncvar,
message=(
"Bounds formula_terms attribute",
"is incorrectly formatted",
),
attribute=bounds_attribute,
variable=coord_ncvar,
)
continue
ncvar = values[0]
if ncvar not in g["internal_variables"]:
ncvar, message = self._check_missing_variable(
ncvar, "Bounds formula terms variable"
)
self._add_message(
field_ncvar,
ncvar,
message=message,
attribute=bounds_attribute,
variable=coord_ncvar,
)
continue
if term not in g["formula_terms"][coord_ncvar]["coord"]:
self._add_message(
field_ncvar,
bounds_ncvar,
message=(
"Bounds formula_terms attribute",
"has incompatible terms",
),
attribute=bounds_attribute,
variable=coord_ncvar,
)
continue
parent_ncvar = g["formula_terms"][coord_ncvar]["coord"][
term
]
d_ncdims = g["variable_dimensions"][parent_ncvar]
dimensions = g["variable_dimensions"][ncvar]
if z_ncdim not in d_ncdims:
if ncvar != parent_ncvar:
self._add_message(
field_ncvar,
bounds_ncvar,
message=(
"Bounds formula terms variable",
"that does not span the vertical "
"dimension is inconsistent with the "
"formula_terms of the parametric "
"coordinate variable",
),
attribute=bounds_attribute,
variable=coord_ncvar,
)
continue
elif len(dimensions) != len(d_ncdims) + 1:
self._add_message(
field_ncvar,
bounds_ncvar,
message=(
"Bounds formula terms variable",
"spans incorrect dimensions",
),
attribute=bounds_attribute,
dimensions=dimensions,
variable=coord_ncvar,
)
continue
# WRONG - need to account for char arrays:
elif d_ncdims != dimensions[:-1]:
self._add_message(
field_ncvar,
bounds_ncvar,
message=(
"Bounds formula terms variable",
"spans incorrect dimensions",
),
attribute=bounds_attribute,
dimensions=dimensions,
variable=coord_ncvar,
)
continue
# Still here?
g["formula_terms"][coord_ncvar]["bounds"][term] = ncvar
if set(g["formula_terms"][coord_ncvar]["coord"]) != set(
g["formula_terms"][coord_ncvar]["bounds"]
):
self._add_message(
field_ncvar,
bounds_ncvar,
message=(
"Bounds formula_terms attribute",
"has incompatible terms",
),
attribute=bounds_attribute,
variable=coord_ncvar,
)
else:
# ----------------------------------------------------
# Parametric Z coordinate has bounds, but the bounds
# variable does not have a formula_terms attribute =>
# Infer the formula terms bounds variables from the
# coordinates
# ----------------------------------------------------
for term, ncvar in g["formula_terms"][coord_ncvar][
"coord"
].items():
g["formula_terms"][coord_ncvar]["bounds"][term] = None
if z_ncdim not in self._ncdimensions(ncvar):
g["formula_terms"][coord_ncvar]["bounds"][term] = ncvar
continue
is_coordinate_with_bounds = False
for c_ncvar in g["coordinates"][field_ncvar]:
if ncvar != c_ncvar:
continue
is_coordinate_with_bounds = True
if z_ncdim not in g["variable_dimensions"][c_ncvar]:
# Coordinates do not span the Z dimension
g["formula_terms"][coord_ncvar]["bounds"][
term
] = ncvar
else:
# Coordinates span the Z dimension
b = g["bounds"][field_ncvar].get(ncvar)
if b is not None:
g["formula_terms"][coord_ncvar]["bounds"][
term
] = b
else:
is_coordinate_with_bounds = False
break
if not is_coordinate_with_bounds:
self._add_message(
field_ncvar,
ncvar,
message=(
"Formula terms variable",
"that spans the vertical dimension "
"has no bounds",
),
attribute=attribute,
variable=coord_ncvar,
)
def _check_missing_variable(self, ncvar, message0):
"""Return the name of a missing variable with a message.
.. versionadded:: (cfdm) 1.8.6.0
:Parameters:
ncvar: `str`
message0: `str`
:Returns:
`str`, `tuple`
The (possibly modified) netCDF variable name, and the
appropriate full message about it being missing.
"""
if self.read_vars["has_groups"]:
message = (message0, "is not locatable in the group hierarchy")
if ncvar.startswith("REF_NOT_FOUND:_"):
ncvar = ncvar.replace("REF_NOT_FOUND:_", "", 1)
else:
message = (message0, "is not in file")
return ncvar, message
def _create_field(self, field_ncvar):
"""Create a field for a given netCDF variable.
.. versionadded:: (cfdm) 1.7.0
:Parameters:
field_ncvar: `str`
The name of the netCDF variable to be turned into a field.
:Returns:
Field construct
"""
g = self.read_vars
# Reset 'domain_ancillary_key'
g["domain_ancillary_key"] = {}
dimensions = g["variable_dimensions"][field_ncvar]
g["dataset_compliance"][field_ncvar] = {
"CF version": self.implementation.get_cf_version(),
"dimensions": dimensions,
"non-compliance": {},
}
logger.info(
" Converting netCDF variable "
f"{field_ncvar}({', '.join(dimensions)}) to a Field:"
) # pragma: no cover
# ------------------------------------------------------------
# Combine the global and group properties with the data
# variable properties, giving precedence to those of the data
# variable and then those of any groups.
# ------------------------------------------------------------
field_properties = g["global_attributes"].copy()
if g["has_groups"]:
field_properties.update(
g["variable_group_attributes"][field_ncvar]
)
field_properties.update(g["variable_attributes"][field_ncvar])
if is_log_level_debug(logger):
logger.debug(
" netCDF attributes:\n"
f" {field_properties}"
) # pragma: no cover
# Take cell_methods out of the data variable's properties
# since it will need special processing once the domain has
# been defined
cell_methods_string = field_properties.pop("cell_methods", None)
# Take add_offset and scale_factor out of the data variable's
# properties since they will be dealt with by the variable's
# Data object. Make sure we note that they were there so we
# can adjust the field's data type accordingly.
values = [
field_properties.pop(k, None)
for k in ("add_offset", "scale_factor")
]
unpacked_dtype = values != [None, None]
if unpacked_dtype:
try:
values.remove(None)
except ValueError:
pass
unpacked_dtype = numpy.result_type(*values)
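# (Illustrative, not part of the original code: per the netCDF
# packing conventions, int16 data stored with a float64
# scale_factor unpacks to float64; unpacked_dtype records the
# result type of the packing attributes so that the field's data
# type can be adjusted accordingly.)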
# Initialise node_coordinates_as_bounds
g["node_coordinates_as_bounds"] = set()
# ------------------------------------------------------------
# Initialise the field with properties
# ------------------------------------------------------------
f = self.implementation.initialise_Field()
self.implementation.set_properties(f, field_properties, copy=True)
if not g["mask"]:
self._set_default_FillValue(f, field_ncvar)
# Store the field's netCDF variable name
self.implementation.nc_set_variable(f, field_ncvar)
# Store the field's netCDF global attributes
x = g["global_attributes"].copy()
for k, v in g["global_attributes"].items():
if (
k not in g["variable_attributes"][field_ncvar]
and k not in g["variable_group_attributes"][field_ncvar]
):
x[k] = None
self.implementation.nc_set_global_attributes(f, x)
# ------------------------------------------------------------
# Store the data variable's group-level attributes
# ------------------------------------------------------------
if g["has_groups"]:
x = g["variable_group_attributes"][field_ncvar].copy()
for k, v in g["variable_group_attributes"][field_ncvar].items():
if k not in g["variable_attributes"][field_ncvar]:
x[k] = None
self.implementation.nc_set_group_attributes(f, x)
# ------------------------------------------------------------
# Remove the field construct's "geometry" property, saving its
# value
# ------------------------------------------------------------
if g["CF>=1.8"]:
geometry = self.implementation.del_property(f, "geometry", None)
if geometry is not None:
self.implementation.nc_set_geometry_variable(f, geometry)
# Map netCDF dimension names to domain axis names.
#
# For example: {'lat': 'domainaxis0', 'time': 'domainaxis1'}
ncdim_to_axis = {}
g["ncdim_to_axis"] = ncdim_to_axis
ncscalar_to_axis = {}
# Map netCDF variable names to internal identifiers
#
# For example: {'time': 'dimensioncoordinate1'}
ncvar_to_key = {}
data_axes = []
# ------------------------------------------------------------
# Add axes and non-scalar dimension coordinates to the field
# ------------------------------------------------------------
field_ncdimensions = self._ncdimensions(field_ncvar)
field_groups = g["variable_groups"][field_ncvar]
for ncdim in field_ncdimensions:
ncvar, method = self._find_coordinate_variable(
field_ncvar, field_groups, ncdim
)
if ncvar is not None:
# There is a Unidata coordinate variable for this
# dimension, so create a domain axis and dimension
# coordinate
if ncvar in g["dimension_coordinate"]:
coord = self._copy_construct(
"dimension_coordinate", field_ncvar, ncvar
)
else:
coord = self._create_dimension_coordinate(
field_ncvar, ncvar, f
)
g["dimension_coordinate"][ncvar] = coord
size = self.implementation.get_construct_data_size(coord)
domain_axis = self._create_domain_axis(size, ncdim)
logger.detail(
f" [a] Inserting {domain_axis.__class__.__name__} sith size {size}"
) # pragma: no cover
axis = self.implementation.set_domain_axis(
field=f, construct=domain_axis, copy=False
)
logger.detail(
f" [b] Inserting {coord.__class__.__name__}{method}"
) # pragma: no cover
dim = self.implementation.set_dimension_coordinate(
field=f, construct=coord, axes=[axis], copy=False
)
self._reference(ncvar, field_ncvar)
if coord.has_bounds():
bounds = self.implementation.get_bounds(coord)
self._reference(
self.implementation.nc_get_variable(bounds),
field_ncvar,
)
# Set unlimited status of axis
# if nc.dimensions[ncdim].isunlimited():
if g["dimension_isunlimited"][ncdim]:
self.implementation.nc_set_unlimited_axis(f, axis)
ncvar_to_key[ncvar] = dim
g["coordinates"].setdefault(field_ncvar, []).append(ncvar)
else:
# There is no dimension coordinate for this dimension,
# so just create a domain axis with the correct size.
if ncdim in g["new_dimensions"]:
size = g["new_dimensions"][ncdim]
else:
size = g["internal_dimension_sizes"][ncdim]
domain_axis = self._create_domain_axis(size, ncdim)
logger.detail(
f" [c] Inserting {domain_axis.__class__.__name__} with size {size}"
) # pragma: no cover
axis = self.implementation.set_domain_axis(
field=f, construct=domain_axis, copy=False
)
# Set unlimited status of axis
try:
# if nc.dimensions[ncdim].isunlimited():
if g["dimension_isunlimited"][ncdim]:
self.implementation.nc_set_unlimited_axis(f, axis)
except KeyError:
# This dimension is not in the netCDF file (as
# might be the case for an element dimension
# implied by a ragged array).
pass
# Update data dimension name and set dimension size
data_axes.append(axis)
ncdim_to_axis[ncdim] = axis
data = self._create_data(field_ncvar, f, unpacked_dtype=unpacked_dtype)
logger.detail(
f" [d] Inserting {data.__class__.__name__}{data.shape}"
) # pragma: no cover
self.implementation.set_data(f, data, axes=data_axes, copy=False)
# ----------------------------------------------------------------
# Add scalar dimension coordinates and auxiliary coordinates to
# the field
# ----------------------------------------------------------------
coordinates = self.implementation.del_property(f, "coordinates", None)
if coordinates is not None:
parsed_coordinates = self._split_string_by_white_space(
field_ncvar, coordinates, variables=True
)
for ncvar in parsed_coordinates:
# Skip dimension coordinates which are in the list
if ncvar in field_ncdimensions:
continue
cf_compliant = self._check_auxiliary_scalar_coordinate(
field_ncvar, ncvar, coordinates
)
if not cf_compliant:
continue
# Set dimensions for this variable
dimensions = self._get_domain_axes(ncvar)
if ncvar in g["auxiliary_coordinate"]:
coord = g["auxiliary_coordinate"][ncvar].copy()
else:
coord = self._create_auxiliary_coordinate(
field_ncvar, ncvar, f
)
g["auxiliary_coordinate"][ncvar] = coord
# --------------------------------------------------------
# Turn a scalar coordinate variable into a 1-d coordinate construct
# --------------------------------------------------------
is_scalar_dimension_coordinate = False
scalar = False
if not dimensions:
scalar = True
if self._is_char_or_string(ncvar):
# String-valued scalar coordinate. Turn it into a
# 1-d auxiliary coordinate construct.
domain_axis = self._create_domain_axis(1)
logger.detail(
" [d] Inserting {domain_axis.__class__.__name__} with size 1"
) # pragma: no cover
dim = self.implementation.set_domain_axis(
f, domain_axis
)
dimensions = [dim]
coord = self.implementation.construct_insert_dimension(
construct=coord, position=0
)
g["auxiliary_coordinate"][ncvar] = coord
else:
# Numeric valued scalar coordinate
is_scalar_dimension_coordinate = True
if is_scalar_dimension_coordinate:
# Insert a domain axis and dimension coordinate
# derived from a numeric scalar auxiliary
# coordinate.
# First turn the scalar auxiliary coordinate into
# a 1-d auxiliary coordinate construct
coord = self.implementation.construct_insert_dimension(
construct=coord, position=0
)
# Now turn the 1-d size 1 auxiliary coordinate
# into a dimension coordinate
coord = self.implementation.initialise_DimensionCoordinate_from_AuxiliaryCoordinate(
auxiliary_coordinate=coord, copy=False
)
size = self.implementation.get_construct_data_size(coord)
domain_axis = self._create_domain_axis(size)
logger.detail(
f" [e] Inserting {domain_axis.__class__.__name__} with size {size}"
) # pragma: no cover
axis = self.implementation.set_domain_axis(
field=f, construct=domain_axis, copy=False
)
logger.detail(
f" [e] Inserting {coord.__class__.__name__}"
) # pragma: no cover
dim = self.implementation.set_dimension_coordinate(
f, coord, axes=[axis], copy=False
)
self._reference(ncvar, field_ncvar)
if self.implementation.has_bounds(coord):
bounds = self.implementation.get_bounds(coord)
self._reference(
self.implementation.nc_get_variable(bounds),
field_ncvar,
)
dimensions = [axis]
ncvar_to_key[ncvar] = dim
g["dimension_coordinate"][ncvar] = coord
del g["auxiliary_coordinate"][ncvar]
else:
# Insert auxiliary coordinate
logger.detail(
f" [f] Inserting {coord.__class__.__name__}"
) # pragma: no cover
aux = self.implementation.set_auxiliary_coordinate(
f, coord, axes=dimensions, copy=False
)
self._reference(ncvar, field_ncvar)
if self.implementation.has_bounds(coord):
bounds = self.implementation.get_bounds(coord)
self._reference(
self.implementation.nc_get_variable(bounds),
field_ncvar,
)
ncvar_to_key[ncvar] = aux
if scalar:
ncscalar_to_axis[ncvar] = dimensions[0]
# ------------------------------------------------------------
# Add auxiliary coordinate constructs from geometry node
# coordinates that are not already bounds of existing
# auxiliary coordinate constructs (CF>=1.8)
# ------------------------------------------------------------
geometry = self._get_geometry(field_ncvar)
if geometry is not None:
for node_ncvar in geometry["node_coordinates"]:
found = any(
[
self.implementation.get_bounds_ncvar(a) == node_ncvar
for a in self.implementation.get_auxiliary_coordinates(
f
).values()
]
)
if found:
continue
#
if node_ncvar in g["auxiliary_coordinate"]:
coord = g["auxiliary_coordinate"][node_ncvar].copy()
else:
coord = self._create_auxiliary_coordinate(
field_ncvar=field_ncvar,
ncvar=None,
f=f,
bounds_ncvar=node_ncvar,
nodes=True,
)
geometry_type = geometry["geometry_type"]
if geometry_type is not None:
self.implementation.set_geometry(coord, geometry_type)
g["auxiliary_coordinate"][node_ncvar] = coord
# Insert auxiliary coordinate
logger.detail(
f" [f] Inserting {coord.__class__.__name__}"
) # pragma: no cover
# TODO check that geometry_dimension is a dimension of
# the data variable
geometry_dimension = geometry["geometry_dimension"]
if geometry_dimension not in g["ncdim_to_axis"]:
raise ValueError(
f"Geometry dimension {geometry_dimension!r} is not in "
f"read_vars['ncdim_to_axis']: {g['ncdim_to_axis']}"
)
aux = self.implementation.set_auxiliary_coordinate(
f,
coord,
axes=(g["ncdim_to_axis"][geometry_dimension],),
copy=False,
)
self._reference(node_ncvar, field_ncvar)
ncvar_to_key[node_ncvar] = aux
# ------------------------------------------------------------
# Add coordinate reference constructs from formula_terms
# properties
# ------------------------------------------------------------
for key, coord in self.implementation.get_coordinates(field=f).items():
coord_ncvar = self.implementation.nc_get_variable(coord)
if coord_ncvar is None:
# This might be the case if the coordinate construct
# just contains geometry nodes
continue
formula_terms = g["variable_attributes"][coord_ncvar].get(
"formula_terms"
)
if formula_terms is None:
# This coordinate does not have a formula_terms
# attribute
continue
if coord_ncvar not in g["formula_terms"]:
self._check_formula_terms(
field_ncvar,
coord_ncvar,
formula_terms,
z_ncdim=g["variable_dimensions"][coord_ncvar][0],
)
ok = True
domain_ancillaries = []
for term, ncvar in g["formula_terms"][coord_ncvar][
"coord"
].items():
if ncvar is None:
continue
# Set dimensions
axes = self._get_domain_axes(ncvar)
if ncvar in g["domain_ancillary"]:
domain_anc = self._copy_construct(
"domain_ancillary", field_ncvar, ncvar
)
else:
bounds = g["formula_terms"][coord_ncvar]["bounds"].get(
term
)
if bounds == ncvar:
bounds = None
domain_anc = self._create_domain_ancillary(
field_ncvar, ncvar, f, bounds_ncvar=bounds
)
if len(axes) == len(self._ncdimensions(ncvar)):
domain_ancillaries.append((ncvar, domain_anc, axes))
else:
# The domain ancillary variable spans a dimension
# that is not spanned by its parent data variable
self._add_message(
field_ncvar,
ncvar,
message=(
"Formula terms variable",
"spans incorrect dimensions",
),
attribute={
coord_ncvar + ":formula_terms": formula_terms
},
dimensions=g["variable_dimensions"][ncvar],
)
ok = False
if not ok:
# Move on to the next coordinate
continue
# Still here? Create a formula terms coordinate reference.
for ncvar, domain_anc, axes in domain_ancillaries:
logger.detail(
f" [g] Inserting {domain_anc.__class__.__name__}"
) # pragma: no cover
da_key = self.implementation.set_domain_ancillary(
field=f, construct=domain_anc, axes=axes, copy=False
)
self._reference(ncvar, field_ncvar)
if self.implementation.has_bounds(domain_anc):
bounds = self.implementation.get_bounds(domain_anc)
self._reference(
self.implementation.nc_get_variable(bounds),
field_ncvar,
)
if ncvar not in ncvar_to_key:
ncvar_to_key[ncvar] = da_key
g["domain_ancillary"][ncvar] = domain_anc
g["domain_ancillary_key"][ncvar] = da_key
coordinate_reference = self._create_formula_terms_ref(
f, key, coord, g["formula_terms"][coord_ncvar]["coord"]
)
self.implementation.set_coordinate_reference(
field=f, construct=coordinate_reference, copy=False
)
logger.detail(
f" [l] Inserting {coordinate_reference.__class__.__name__}"
) # pragma: no cover
g["vertical_crs"][key] = coordinate_reference
# ------------------------------------------------------------
# Add grid mapping coordinate references (do this after
# formula terms)
# ------------------------------------------------------------
grid_mapping = self.implementation.del_property(
f, "grid_mapping", None
)
if grid_mapping is not None:
parsed_grid_mapping = self._parse_grid_mapping(
field_ncvar, grid_mapping
)
cf_compliant = self._check_grid_mapping(
field_ncvar, grid_mapping, parsed_grid_mapping
)
if not cf_compliant:
logger.warning(
f" Bad grid_mapping: {grid_mapping!r}"
) # pragma: no cover
else:
for x in parsed_grid_mapping:
grid_mapping_ncvar, coordinates = list(x.items())[0]
parameters = g["variable_attributes"][
grid_mapping_ncvar
].copy()
# Convert netCDF variable names to internal identifiers
coordinates = [
ncvar_to_key[ncvar]
for ncvar in coordinates
if ncvar in ncvar_to_key
]
# ------------------------------------------------
# Find the datum and coordinate conversion for the
# grid mapping
# ------------------------------------------------
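# (Illustrative, not part of the original code: parameters such
# as 'earth_radius' or 'semi_major_axis' are treated as datum
# parameters, whereas 'grid_mapping_name' and projection
# parameters such as 'grid_north_pole_latitude' go into the
# coordinate conversion. The split is driven by
# g["datum_parameters"].)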
datum_parameters = {}
coordinate_conversion_parameters = {}
for parameter, value in parameters.items():
if parameter in g["datum_parameters"]:
datum_parameters[parameter] = value
else:
coordinate_conversion_parameters[parameter] = value
datum = self.implementation.initialise_Datum(
parameters=datum_parameters
)
coordinate_conversion = (
self.implementation.initialise_CoordinateConversion(
parameters=coordinate_conversion_parameters
)
)
create_new = True
if not coordinates:
# DCH ALERT
# what to do about duplicate standard names? TODO
name = parameters.get("grid_mapping_name", None)
for (
n
) in self.cf_coordinate_reference_coordinates().get(
name, ()
):
for (
key,
coord,
) in self.implementation.get_coordinates(
field=f
).items():
if n == self.implementation.get_property(
coord, "standard_name", None
):
coordinates.append(key)
# Add the datum to already existing vertical
# coordinate references
for vcr in g["vertical_crs"].values():
self.implementation.set_datum(
coordinate_reference=vcr, datum=datum
)
else:
for vcoord, vcr in g["vertical_crs"].items():
if vcoord in coordinates:
# Add the datum to an already existing
# vertical coordinate reference
logger.detail(
f" [k] Inserting {datum.__class__.__name__} into {vcr.__class__.__name__}"
) # pragma: no cover
self.implementation.set_datum(
coordinate_reference=vcr, datum=datum
)
coordinates.remove(vcoord)
create_new = bool(coordinates)
if create_new:
coordref = (
self.implementation.initialise_CoordinateReference()
)
self.implementation.set_datum(
coordinate_reference=coordref, datum=datum
)
self.implementation.set_coordinate_conversion(
coordinate_reference=coordref,
coordinate_conversion=coordinate_conversion,
)
self.implementation.set_coordinate_reference_coordinates(
coordref, coordinates
)
self.implementation.nc_set_variable(
coordref, grid_mapping_ncvar
)
key = self.implementation.set_coordinate_reference(
field=f, construct=coordref, copy=False
)
logger.detail(
f" [l] Inserting {coordref.__class__.__name__}"
) # pragma: no cover
self._reference(grid_mapping_ncvar, field_ncvar)
ncvar_to_key[grid_mapping_ncvar] = key
# ------------------------------------------------------------
# Add cell measures to the field
# ------------------------------------------------------------
measures = self.implementation.del_property(f, "cell_measures", None)
if measures is not None:
parsed_cell_measures = self._parse_x(field_ncvar, measures)
cf_compliant = self._check_cell_measures(
field_ncvar, measures, parsed_cell_measures
)
if cf_compliant:
for x in parsed_cell_measures:
measure, ncvars = list(x.items())[0]
ncvar = ncvars[0]
# Set the domain axes for the cell measure
axes = self._get_domain_axes(ncvar, allow_external=True)
if ncvar in g["cell_measure"]:
# Copy the cell measure from one that already
# exists
cell = g["cell_measure"][ncvar].copy()
else:
cell = self._create_cell_measure(measure, ncvar)
g["cell_measure"][ncvar] = cell
logger.detail(
f" [h] Inserting {cell.__class__.__name__}"
) # pragma: no cover
key = self.implementation.set_cell_measure(
field=f, construct=cell, axes=axes, copy=False
)
# Count a reference to the cell measure ...
if ncvar != field_ncvar:
# ... but only if it is not the same as its
# parent data variable (introduced at v1.8.6).
self._reference(ncvar, field_ncvar)
ncvar_to_key[ncvar] = key
if ncvar in g["external_variables"]:
g["referenced_external_variables"].add(ncvar)
# ------------------------------------------------------------
# Add cell methods to the field
# ------------------------------------------------------------
if cell_methods_string is not None:
name_to_axis = ncdim_to_axis.copy()
name_to_axis.update(ncscalar_to_axis)
cell_methods = self._parse_cell_methods(
cell_methods_string, field_ncvar
)
for properties in cell_methods:
axes = properties.pop("axes")
if g["has_groups"]:
# Replace flattened names with absolute names
axes = [
g["flattener_dimensions"].get(
axis, g["flattener_variables"].get(axis, axis)
)
for axis in axes
]
# Replace names with domain axis keys
axes = [name_to_axis.get(axis, axis) for axis in axes]
method = properties.pop("method", None)
cell_method = self._create_cell_method(
axes, method, properties
)
logger.detail(
f" [i] Inserting {method!r} {cell_method.__class__.__name__}"
) # pragma: no cover
self.implementation.set_cell_method(
field=f, construct=cell_method, copy=False
)
# ------------------------------------------------------------
# Add field ancillaries to the field
# ------------------------------------------------------------
ancillary_variables = self.implementation.del_property(
f, "ancillary_variables", None
)
if ancillary_variables is not None:
parsed_ancillary_variables = self._split_string_by_white_space(
field_ncvar, ancillary_variables, variables=True
)
cf_compliant = self._check_ancillary_variables(
field_ncvar, ancillary_variables, parsed_ancillary_variables
)
if not cf_compliant:
pass
else:
for ncvar in parsed_ancillary_variables:
# Set dimensions
axes = self._get_domain_axes(ncvar)
if ncvar in g["field_ancillary"]:
field_anc = g["field_ancillary"][ncvar].copy()
else:
field_anc = self._create_field_ancillary(ncvar)
g["field_ancillary"][ncvar] = field_anc
# Insert the field ancillary
logger.detail(
f" [j] Inserting {field_anc.__class__.__name__}"
) # pragma: no cover
key = self.implementation.set_field_ancillary(
field=f, construct=field_anc, axes=axes, copy=False
)
self._reference(ncvar, field_ncvar)
ncvar_to_key[ncvar] = key
# Add the structural read report to the field
dataset_compliance = g["dataset_compliance"][field_ncvar]
components = dataset_compliance["non-compliance"]
if components:
dataset_compliance = {field_ncvar: dataset_compliance}
else:
dataset_compliance = {}
self.implementation.set_dataset_compliance(f, dataset_compliance)
# Return the finished field
return f
def _find_coordinate_variable(self, field_ncvar, field_groups, ncdim):
"""Find a coordinate variable for a data-dimension combination.
Find a Unidata coordinate variable for a particular CF-netCDF
data variable and netCDF dimension combination.
.. versionadded:: (cfdm) 1.8.6
:Parameters:
field_ncvar: `str`
field_groups: `tuple`
ncdim: `str`
:Returns:
(`str`, `str`) or (`None`, `str`)
The second item is a message saying how the coordinate
variable was discovered.
"""
g = self.read_vars
ncdim_groups = g["dimension_groups"].get(ncdim, ())
n_ncdim_groups = len(ncdim_groups)
if g["variable_dimensions"].get(ncdim) == (ncdim,):
# There is a Unidata coordinate variable for this
# dimension, so create a domain axis and dimension
# coordinate
return ncdim, ""
if not g["has_groups"]:
# This file has no group structure and there is no
# coordinate variable for this dimension
return None, ""
# ------------------------------------------------------------
# File has groups. Look for a coordinate variable by proximal
# and lateral search techniques
# ------------------------------------------------------------
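# Illustrative example (hypothetical group structure): for a data
# variable /forecast/model/tas spanning dimension /forecast/lat, a
# variable /forecast/lat is a proximal candidate (its group is an
# ancestor of the data variable's group), whereas
# /forecast/other/lat would be a lateral candidate.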
proximal_candidates = {}
lateral_candidates = {}
for ncvar, ncdims in g["variable_dimensions"].items():
if ncvar == field_ncvar:
# A data variable can not be its own coordinate
# variable
continue
if ncdims != (ncdim,):
# This variable does not span the correct dimension
continue
if g["variable_basename"][ncvar] != g["dimension_basename"][ncdim]:
# This variable does not have the same basename as the
# dimension. E.g. if ncdim is '/forecast/lon' and
# ncvar is '/forecast/model/lat' then their basenames
# are 'lon' and 'lat' respectively.
continue
ncvar_groups = g["variable_groups"][ncvar]
if ncvar_groups[:n_ncdim_groups] != ncdim_groups:
# The variable's group is not the same as, nor a
# subgroup of, the local apex group.
continue
if field_groups[: len(ncvar_groups)] == ncvar_groups:
# Group is acceptable for proximal search
proximal_candidates[ncvar] = ncvar_groups
else:
# Group is acceptable for lateral search
lateral_candidates[ncvar] = ncvar_groups
if proximal_candidates:
# Choose the coordinate variable closest to the field by
# proximal search
ncvars = [
k
for k in sorted(
proximal_candidates.items(),
reverse=True,
key=lambda item: len(item[1]),
)
]
ncvar = ncvars[0][0]
return ncvar, " (found by proximal search)"
if lateral_candidates:
# Choose the coordinate variable that is closest to the local
# apex group by lateral search. If more than one such
# variable exists at the same depth then the lateral search
# has failed.
ncvars = [
k
for k in sorted(
lateral_candidates.items(), key=lambda item: len(item[1])
)
]
ncvar, group = ncvars[0]
if len(lateral_candidates) == 1:
# There is a unique coordinate variable found by
# lateral search that is closest to the local apex
# group
return ncvar, " (found by lateral search)"
else:
group2 = ncvars[1][1]
if len(group) < len(group2):
# There is a unique coordinate variable found by
# lateral search that is closest to the local apex
# group
return ncvar, " (found by lateral search)"
# Two coordinate variables found by lateral search are
# the same distance from the local apex group
lateral_candidates = []
if lateral_candidates:
self._add_message(
field_ncvar,
field_ncvar,
message=(
"Multiple coordinate variable candidates",
"identified by lateral search",
),
dimensions=g["variable_dimensions"][field_ncvar],
)
return None, ""
def _is_char_or_string(self, ncvar):
"""True if the netCDf variable has string or char datatype.
.. versionadded:: (cfdm) 1.8.0
:Parameters:
ncvar: `str`
The name of the netCDF variable.
:Returns:
`bool`
**Examples:**
>>> n._is_char_or_string('regions')
True
"""
datatype = self.read_vars["variables"][ncvar].dtype
return datatype == str or datatype.kind in "SU"
def _is_char(self, ncvar):
"""Return True if the netCDf variable has char datatype.
.. versionadded:: (cfdm) 1.7.0
:Parameters:
ncvar: `str`
The name of the netCDF variable.
:Returns:
`bool`
**Examples:**
>>> n._is_char('regions')
True
"""
datatype = self.read_vars["variables"][ncvar].dtype
return datatype != str and datatype.kind in "SU"
def _get_geometry(self, field_ncvar, return_ncvar=False):
"""Return a geometry container for this field construct.
.. versionadded:: (cfdm) 1.8.0
:Parameters:
field_ncvar: `str`
The netCDF variable name for the field construct.
return_ncvar: `bool`
If True then return the netCDF variable name of the
geometry instead.
:Returns:
`dict` or `str` or `None`
A `dict` containing geometry container information, or the
netCDF geometry container name. If there is no geometry
container for this data variable, or if the dataset
version is CF<=1.7, then `None` is returned.
"""
g = self.read_vars
if g["CF>=1.8"]:
geometry_ncvar = g["variable_geometry"].get(field_ncvar)
if return_ncvar:
if geometry_ncvar in g["geometries"]:
return geometry_ncvar
return
return g["geometries"].get(geometry_ncvar)
def _add_message(
self,
field_ncvar,
ncvar,
message=None,
attribute=None,
dimensions=None,
variable=None,
conformance=None,
):
"""Stores and logs a message about an issue with a field.
.. versionadded:: (cfdm) 1.7.0
:Parameters:
field_ncvar: `str`
The netCDF variable name of the field.
*Parameter example:*
``field_ncvar='tas'``
ncvar: `str`
The netCDF variable name of the field component that has
the problem.
*Parameter example:*
``ncvar='rotated_latitude_longitude'``
message: (`str`, `str`), optional
attribute: `dict`, optional
The name and value of the netCDF attribute that has a problem.
*Parameter example:*
``attribute={'tas:cell_measures': 'area: areacella'}``
dimensions: sequence of `str`, optional
The netCDF dimensions of the variable that has a problem.
*Parameter example:*
``dimensions=('lat', 'lon')``
variable: `str`, optional
"""
g = self.read_vars
if message is not None:
try:
code = self._code0[message[0]] * 1000 + self._code1[message[1]]
except KeyError:
code = None
message = " ".join(message)
else:
code = None
d = {
"code": code,
"attribute": attribute,
"reason": message,
}
if dimensions is not None:
d["dimensions"] = dimensions
if variable is None:
variable = ncvar
g["dataset_compliance"][field_ncvar]["non-compliance"].setdefault(
ncvar, []
).append(d)
e = g["component_report"].setdefault(variable, {})
e.setdefault(ncvar, []).append(d)
if dimensions is None: # pragma: no cover
dimensions = "" # pragma: no cover
else: # pragma: no cover
dimensions = "(" + ", ".join(dimensions) + ")" # pragma: no cover
logger.info(
" Error processing netCDF variable "
f"{ncvar}{dimensions}: {d['reason']}"
) # pragma: no cover
return d
def _get_domain_axes(self, ncvar, allow_external=False):
"""Find a domain axis identifier for the variable's dimensions.
Return the domain axis identifiers that correspond to a
netCDF variable's netCDF dimensions.
.. versionadded:: (cfdm) 1.7.0
:Parameters:
ncvar: `str`
The netCDF variable name.
allow_external: `bool`
If True and *ncvar* is an external variable then return an
empty list.
:Returns:
`list`
**Examples:**
>>> r._get_domain_axes('areacello')
['domainaxis0', 'domainaxis1']
>>> r._get_domain_axes('areacello', allow_external=True)
[]
"""
g = self.read_vars
if allow_external and ncvar in g["external_variables"]:
axes = []
else:
ncdim_to_axis = g["ncdim_to_axis"]
ncdimensions = self._ncdimensions(ncvar)
axes = [
ncdim_to_axis[ncdim]
for ncdim in ncdimensions
if ncdim in ncdim_to_axis
]
return axes
def _ncdim_abspath(self, ncdim):
"""Return the absolute path of the netCDF dimension name.
If the file has no groups, then the netCDF dimension is returned
unchanged.
.. versionadded:: (cfdm) 1.8.6
:Parameters:
ncdim: `str` or `None`
The (flattened) netCDF dimension name.
:Returns:
`str` or `None`
The (absolute path of the) netCDF dimension name.
"""
g = self.read_vars
if ncdim is None or not g["has_groups"]:
return ncdim
# Replace the netCDF dimension name with its full group
# path. E.g. if dimension 'time' is in group '/forecast' then
# it will be renamed '/forecast/time'. (CF>=1.8)
return g["flattener_dimensions"].get(ncdim, ncdim)
def _create_auxiliary_coordinate(
self, field_ncvar, ncvar, f, bounds_ncvar=None, nodes=False
):
"""Create an auxiliary coordinate construct.
.. versionadded:: (cfdm) 1.7.0
:Parameters:
field_ncvar: `str`
The netCDF variable name of the parent field construct.
ncvar: `str` or `None`
The netCDF name of the variable. See the *nodes*
parameter.
f: field construct
The parent field construct.
bounds_ncvar: `str`, optional
The netCDF variable name of the coordinate bounds.
nodes: `bool`
Set to True if and only if the coordinate construct
is to be created with only bounds from a node coordinates
variable, whose netCDF name is given by *bounds_ncvar*. In
this case *ncvar* must be `None`.
:Returns:
The auxiliary coordinate construct.
"""
return self._create_bounded_construct(
field_ncvar=field_ncvar,
ncvar=ncvar,
f=f,
auxiliary=True,
bounds_ncvar=bounds_ncvar,
nodes=nodes,
)
def _create_dimension_coordinate(
self, field_ncvar, ncvar, f, bounds_ncvar=None
):
"""Create a dimension coordinate construct.
.. versionadded:: (cfdm) 1.7.0
:Parameters:
field_ncvar: `str`
The netCDF variable name of the parent field construct.
ncvar: `str`
The netCDF name of the variable.
f: field construct
The parent field construct.
bounds_ncvar: `str`, optional
The netCDF variable name of the coordinate bounds.
:Returns:
The dimension coordinate construct.
"""
return self._create_bounded_construct(
field_ncvar=field_ncvar,
ncvar=ncvar,
f=f,
dimension=True,
bounds_ncvar=bounds_ncvar,
)
def _create_domain_ancillary(
self, field_ncvar, ncvar, f, bounds_ncvar=None
):
"""Create a domain ancillary construct object.
.. versionadded:: (cfdm) 1.7.0
:Returns:
The domain ancillary construct.
"""
return self._create_bounded_construct(
field_ncvar=field_ncvar,
ncvar=ncvar,
f=f,
domain_ancillary=True,
bounds_ncvar=bounds_ncvar,
)
def _create_bounded_construct(
self,
field_ncvar,
ncvar,
f,
dimension=False,
auxiliary=False,
domain_ancillary=False,
bounds_ncvar=None,
has_coordinates=True,
nodes=False,
):
"""Create a variable which might have bounds.
.. versionadded:: (cfdm) 1.7.0
:Parameters:
ncvar: `str` or `None`
The netCDF name of the variable. See the *nodes*
parameter.
f: `Field`
The parent field construct.
dimension: `bool`, optional
If True then a dimension coordinate construct is created.
auxiliary: `bool`, optional
If True then an auxiliary coordinate construct is created.
domain_ancillary: `bool`, optional
If True then a domain ancillary construct is created.
nodes: `bool`
Set to True if and only if the coordinate construct
is to be created with only bounds from a node coordinates
variable, whose netCDF name is given by *bounds_ncvar*. In
this case *ncvar* must be `None`.
:Returns:
`DimensionCoordinate` or `AuxiliaryCoordinate` or `DomainAncillary`
The new construct.
"""
g = self.read_vars
nc = g["nc"]
g["bounds"][field_ncvar] = {}
g["coordinates"][field_ncvar] = []
if ncvar is not None:
properties = g["variable_attributes"][ncvar].copy()
properties.pop("formula_terms", None)
else:
properties = {}
# ------------------------------------------------------------
# Look for a geometry container
# ------------------------------------------------------------
geometry = self._get_geometry(field_ncvar)
attribute = "bounds" # TODO Bad default? consider if bounds != None
# If there are bounds then find the name of the attribute that
# names them, and the netCDF variable name of the bounds.
if bounds_ncvar is None:
bounds_ncvar = properties.pop("bounds", None)
if bounds_ncvar is None:
bounds_ncvar = properties.pop("climatology", None)
if bounds_ncvar is not None:
attribute = "climatology"
elif geometry:
bounds_ncvar = properties.pop("nodes", None)
if bounds_ncvar is not None:
attribute = "nodes"
elif nodes:
attribute = "nodes"
if dimension:
properties.pop("compress", None)
c = self.implementation.initialise_DimensionCoordinate()
elif auxiliary:
c = self.implementation.initialise_AuxiliaryCoordinate()
elif domain_ancillary:
c = self.implementation.initialise_DomainAncillary()
else:
raise ValueError(
"Must set exactly one of the dimension, auxiliary or "
"domain_ancillary parameters to True"
)
self.implementation.set_properties(c, properties)
if not g["mask"]:
self._set_default_FillValue(c, ncvar)
data = None
if has_coordinates and ncvar is not None:
data = self._create_data(ncvar, c)
self.implementation.set_data(c, data, copy=False)
# ------------------------------------------------------------
# Add any bounds
# ------------------------------------------------------------
if bounds_ncvar:
if g["has_groups"]:
# Replace a flattened name with an absolute name
bounds_ncvar = g["flattener_variables"].get(
bounds_ncvar, bounds_ncvar
)
if attribute == "nodes":
# Check geometry node coordinate bounds
cf_compliant = self._check_geometry_node_coordinates(
field_ncvar, bounds_ncvar, geometry
)
else:
# Check "normal" boounds
cf_compliant = self._check_bounds(
field_ncvar, ncvar, attribute, bounds_ncvar
)
if not cf_compliant:
bounds_ncvar = None
if bounds_ncvar:
bounds = self.implementation.initialise_Bounds()
bounds_properties = g["variable_attributes"][bounds_ncvar].copy()
bounds_properties.pop("formula_terms", None)
self.implementation.set_properties(bounds, bounds_properties)
if not g["mask"]:
self._set_default_FillValue(bounds, bounds_ncvar)
bounds_data = self._create_data(
bounds_ncvar, bounds, parent_ncvar=ncvar
)
self.implementation.set_data(bounds, bounds_data, copy=False)
# Store the netCDF variable name
self.implementation.nc_set_variable(bounds, bounds_ncvar)
# Store the netCDF bounds dimension name
bounds_ncdim = self._ncdim_abspath(
g["variable_dimensions"][bounds_ncvar][-1]
)
# Set the netCDF trailing bounds dimension name. (But not
# if it is a dimension of its parent coordinate
# variable. This can sometimes happen if the bounds are
# node coordinates.)
if bounds_ncdim not in g["variable_dimensions"].get(ncvar, ()):
self.implementation.nc_set_dimension(bounds, bounds_ncdim)
# Set the bounds on the parent construct
error = self.implementation.set_bounds(c, bounds, copy=False)
if error:
logger.warning(f"WARNING: {error}")
if not domain_ancillary:
g["bounds"][field_ncvar][ncvar] = bounds_ncvar
# --------------------------------------------------------
# Geometries
# --------------------------------------------------------
if (
geometry is not None
and bounds_ncvar in geometry["node_coordinates"]
):
# Record the netCDF node dimension name
count = self.implementation.get_count(bounds)
node_ncdim = self.implementation.nc_get_sample_dimension(count)
self.implementation.nc_set_dimension(
bounds, self._ncdim_abspath(node_ncdim)
)
geometry_type = geometry["geometry_type"]
if geometry_type is not None:
self.implementation.set_geometry(c, geometry_type)
g["node_coordinates_as_bounds"].add(bounds_ncvar)
if self.implementation.get_data_ndim(bounds) == 2:
# Insert a size 1 part dimension
bounds = self.implementation.bounds_insert_dimension(
bounds=bounds, position=1
)
self.implementation.set_bounds(c, bounds, copy=False)
# Add a node count variable
nc = geometry.get("node_count")
if nc is not None:
self.implementation.set_node_count(parent=c, node_count=nc)
# Add a part node count variable
pnc = geometry.get("part_node_count")
if pnc is not None:
self.implementation.set_part_node_count(
parent=c, part_node_count=pnc
)
# Add an interior ring variable
interior_ring = geometry.get("interior_ring")
if interior_ring is not None:
self.implementation.set_interior_ring(
parent=c, interior_ring=interior_ring
)
# Store the netCDF variable name
self.implementation.nc_set_variable(c, ncvar)
if not domain_ancillary:
g["coordinates"][field_ncvar].append(ncvar)
# ---------------------------------------------------------
# Return the bounded variable
# ---------------------------------------------------------
return c
def _create_cell_measure(self, measure, ncvar):
"""Create a cell measure object.
.. versionadded:: (cfdm) 1.7.0
:Parameters:
measure: `str`
The cell measure.
*Parameter example:*
``measure='area'``
ncvar: `str`
The netCDF name of the cell measure variable.
*Parameter example:*
``ncvar='areacello'``
:Returns:
`CellMeasure`
The new item.
"""
g = self.read_vars
# Initialise the cell measure construct
cell_measure = self.implementation.initialise_CellMeasure(
measure=measure
)
# Store the netCDF variable name
self.implementation.nc_set_variable(cell_measure, ncvar)
if ncvar in g["external_variables"]:
# The cell measure variable is in an unknown external file
self.implementation.nc_set_external(construct=cell_measure)
else:
# The cell measure variable is in this file or in a known
# external file
self.implementation.set_properties(
cell_measure, g["variable_attributes"][ncvar]
)
if not g["mask"]:
self._set_default_FillValue(cell_measure, ncvar)
data = self._create_data(ncvar, cell_measure)
self.implementation.set_data(cell_measure, data, copy=False)
return cell_measure
def _create_Count(self, ncvar, ncdim):
"""Create a count variable.
.. versionadded:: (cfdm) 1.7.0
:Parameters:
ncvar: `str`
The name of the netCDF count variable.
*Parameter example:*
``ncvar='row_size'``
ncdim: `str`
The name of the count variable's netCDF dimension.
*Parameter example:*
``ncdim='profile'``
:Returns:
Count variable instance
"""
g = self.read_vars
# Initialise the count variable
variable = self.implementation.initialise_Count()
# Set the CF properties
properties = g["variable_attributes"][ncvar]
sample_ncdim = properties.pop("sample_dimension", None)
self.implementation.set_properties(variable, properties)
if not g["mask"]:
self._set_default_FillValue(variable, ncvar)
# Set the netCDF variable name
self.implementation.nc_set_variable(variable, ncvar)
# Set the netCDF sample dimension name
if sample_ncdim is not None:
self.implementation.nc_set_sample_dimension(
variable, self._ncdim_abspath(sample_ncdim)
)
# Set the name of the netCDF dimension spanned by the variable
# (which, for indexed contiguous ragged arrays, will not be the
# same as the netCDF instance dimension)
self.implementation.nc_set_dimension(
variable, self._ncdim_abspath(ncdim)
)
data = self._create_data(ncvar, variable, uncompress_override=True)
self.implementation.set_data(variable, data, copy=False)
return variable
def _create_Index(self, ncvar, ncdim):
"""Create an index variable.
.. versionadded:: (cfdm) 1.7.0
:Parameters:
ncvar: `str`
The name of the netCDF index variable.
*Parameter example:*
``ncvar='landpoints'``
ncdim: `str`
The name of the index variable's netCDF dimension.
*Parameter example:*
``ncdim='profile'``
:Returns:
Index variable instance
"""
g = self.read_vars
# Initialise the index variable
variable = self.implementation.initialise_Index()
# Set the CF properties
properties = g["variable_attributes"][ncvar]
properties.pop("instance_dimension", None)
self.implementation.set_properties(variable, properties)
if not g["mask"]:
self._set_default_FillValue(variable, ncvar)
# Set the netCDF variable name
self.implementation.nc_set_variable(variable, ncvar)
# Set the netCDF sample dimension name
sample_ncdim = ncdim
self.implementation.nc_set_sample_dimension(
variable, self._ncdim_abspath(sample_ncdim)
)
# Set the name of the netCDF dimension spanned by the variable
# (which, for indexed contiguous ragged arrays, will not be
# the same as the netCDF sample dimension)
self.implementation.nc_set_dimension(
variable, self._ncdim_abspath(ncdim)
)
# Set the data
data = self._create_data(ncvar, variable, uncompress_override=True)
self.implementation.set_data(variable, data, copy=False)
return variable
def _create_InteriorRing(self, ncvar, ncdim):
"""Create an interior ring variable.
.. versionadded:: (cfdm) 1.8.0
:Parameters:
ncvar: `str`
The name of the netCDF interior ring variable.
*Parameter example:*
``ncvar='interior_ring'``
ncdim: `str`
The name of the part dimension.
*Parameter example:*
``ncdim='part'``
:Returns:
Interior ring variable instance
"""
g = self.read_vars
# Initialise the interior ring variable
variable = self.implementation.initialise_InteriorRing()
# Store the netCDF variable name
self.implementation.nc_set_variable(variable, ncvar)
self.implementation.nc_set_dimension(
variable, self._ncdim_abspath(ncdim)
)
properties = g["variable_attributes"][ncvar]
self.implementation.set_properties(variable, properties)
if not g["mask"]:
self._set_default_FillValue(variable, ncvar)
data = self._create_data(ncvar, variable)
self.implementation.set_data(variable, data, copy=False)
return variable
def _create_List(self, ncvar):
"""Create a netCDF list variable (List).
.. versionadded:: (cfdm) 1.7.0
:Parameters:
ncvar: `str`
The name of the netCDF list variable.
*Parameter example:*
``ncvar='landpoints'``
:Returns:
`List`
"""
# Initialise the list variable
variable = self.implementation.initialise_List()
# Store the netCDF variable name
self.implementation.nc_set_variable(variable, ncvar)
properties = self.read_vars["variable_attributes"][ncvar]
properties.pop("compress", None)
self.implementation.set_properties(variable, properties)
if not self.read_vars["mask"]:
self._set_default_FillValue(variable, ncvar)
data = self._create_data(ncvar, variable, uncompress_override=True)
self.implementation.set_data(variable, data, copy=False)
return variable
def _create_NodeCount(self, ncvar):
"""Create a node count variable.
.. versionadded:: (cfdm) 1.8.0
:Parameters:
ncvar: `str`
The netCDF node count variable name.
*Parameter example:*
``ncvar='node_count'``
:Returns:
Node count variable instance
"""
g = self.read_vars
# Initialise the interior ring variable
variable = self.implementation.initialise_NodeCount()
# Store the netCDF variable name
self.implementation.nc_set_variable(variable, ncvar)
properties = g["variable_attributes"][ncvar]
self.implementation.set_properties(variable, properties)
if not g["mask"]:
self._set_default_FillValue(variable, ncvar)
return variable
def _create_PartNodeCount(self, ncvar, ncdim):
"""Create a part node count variable.
.. versionadded:: (cfdm) 1.8.0
:Parameters:
ncvar: `str`
The name of the netCDF part node count variable.
*Parameter example:*
``ncvar='part_node_count'``
ncdim: `str`
The name of the part dimension.
*Parameter example:*
``ncdim='part'``
:Returns:
Part node count variable instance
"""
g = self.read_vars
# Initialise the interior ring variable
variable = self.implementation.initialise_PartNodeCount()
# Store the netCDF variable name
self.implementation.nc_set_variable(variable, ncvar)
self.implementation.nc_set_dimension(
variable, self._ncdim_abspath(ncdim)
)
properties = g["variable_attributes"][ncvar]
self.implementation.set_properties(variable, properties)
if not g["mask"]:
self._set_default_FillValue(variable, ncvar)
return variable
def _create_cell_method(self, axes, method, qualifiers):
"""Create a cell method object.
.. versionadded:: (cfdm) 1.7.0
:Parameters:
axes: `tuple`
method: `str`
qualifiers: `dict`
:Returns:
`CellMethod`
"""
return self.implementation.initialise_CellMethod(
axes=axes, method=method, qualifiers=qualifiers
)
def _create_netcdfarray(self, ncvar, unpacked_dtype=False):
"""Set the Data attribute of a variable.
.. versionadded:: (cfdm) 1.7.0
:Parameters:
ncvar: `str`
unpacked_dtype: `False` or `numpy.dtype`, optional
:Returns:
`NetCDFArray`
"""
g = self.read_vars
if g["has_groups"]:
# Get the variable from the original grouped file. This is
# primarily so that unlimited dimensions don't come out
# with size 0 (v1.8.8.1)
group, name = self._netCDF4_group(
g["variable_grouped_dataset"][ncvar], ncvar
)
# path = ncvar.split('/')
# for group_name in path[1:-1]:
# group = group[group_name]
variable = group.variables.get(name)
else:
variable = g["variables"].get(ncvar)
if variable is None:
return None
dtype = variable.dtype
if dtype is str:
# netCDF string types have a dtype of `str`, which needs
# to be reset as a numpy.dtype, but we can't know which one
# without reading the data, so set it to None for now.
dtype = None
if dtype is not None and unpacked_dtype is not False:
dtype = numpy.result_type(dtype, unpacked_dtype)
ndim = variable.ndim
shape = variable.shape
size = variable.size
if size < 2:
size = int(size)
if self._is_char(ncvar) and ndim >= 1:
# Has a trailing string-length dimension
strlen = shape[-1]
shape = shape[:-1]
size /= strlen
ndim -= 1
dtype = numpy.dtype(f"S{strlen}")
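# Illustrative example (hypothetical variable): a char variable stored in
# the file with shape (12, 64) and dtype S1 is presented here with shape
# (12,), size 12 and dtype S64, because the trailing string-length
# dimension is folded into the element type.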
filename = g["variable_filename"][ncvar]
# Find the group that this variable is in. The group will be
# None if the variable is in the root group.
if g["has_groups"]:
group = g["variable_groups"].get(ncvar, ())
if group:
# Make sure that we use the variable name without any
# group structure prepended to it
ncvar = g["variable_basename"][ncvar]
else:
# This variable is in the root group
group = None
# TODO: think about using e.g. '/forecasts/model1' as the value for
# nc_set_variable. What about nc_set_dimension?
return self.implementation.initialise_NetCDFArray(
filename=filename,
ncvar=ncvar,
group=group,
dtype=dtype,
ndim=ndim,
shape=shape,
size=size,
mask=g["mask"],
)
def _create_data(
self,
ncvar,
construct=None,
unpacked_dtype=False,
uncompress_override=None,
parent_ncvar=None,
):
"""Create a data object (Data).
.. versionadded:: (cfdm) 1.7.0
:Parameters:
ncvar: `str`
The name of the netCDF variable that contains the data.
construct: optional
unpacked_dtype: `False` or `numpy.dtype`, optional
uncompress_override: `bool`, optional
:Returns:
`Data`
"""
g = self.read_vars
array = self._create_netcdfarray(ncvar, unpacked_dtype=unpacked_dtype)
if array is None:
return None
units = g["variable_attributes"][ncvar].get("units", None)
calendar = g["variable_attributes"][ncvar].get("calendar", None)
if parent_ncvar is not None:
if units is None:
units = g["variable_attributes"][parent_ncvar].get(
"units", None
)
if calendar is None:
calendar = g["variable_attributes"][parent_ncvar].get(
"calendar", None
)
compression = g["compression"]
dimensions = g["variable_dimensions"][ncvar]
if (
(uncompress_override is not None and uncompress_override)
or not compression
or not set(compression).intersection(dimensions)
):
# --------------------------------------------------------
# The array is not compressed (or not to be uncompressed)
# --------------------------------------------------------
pass
else:
# --------------------------------------------------------
# The array is compressed
# --------------------------------------------------------
# Loop round the dimensions of data variable, as they
# appear in the netCDF file
for ncdim in dimensions:
if ncdim in compression:
# This dimension represents two or more compressed
# dimensions
c = compression[ncdim]
if ncvar not in c.get("netCDF_variables", (ncvar,)):
# This variable is not compressed, even though
# it spans a dimension that is compressed for
# some other variables. For example, this sort
# of situation may arise with simple
# geometries.
continue
if "gathered" in c:
# --------------------------------------------
# Compression by gathering. Note the
# uncompressed dimensions exist as internal
# dimensions.
# --------------------------------------------
c = c["gathered"]
uncompressed_shape = tuple(
[
g["internal_dimension_sizes"][dim]
for dim in self._ncdimensions(ncvar)
]
)
compressed_dimension = g["variable_dimensions"][
ncvar
].index(c["sample_dimension"])
array = self._create_gathered_array(
gathered_array=self._create_Data(array),
uncompressed_shape=uncompressed_shape,
compressed_dimension=compressed_dimension,
list_variable=c["list_variable"],
)
elif "ragged_indexed_contiguous" in c:
# --------------------------------------------
# Contiguous indexed ragged array. Check this
# before ragged_indexed and ragged_contiguous
# because both of these will exist for an
# indexed and contiguous array.
# --------------------------------------------
c = c["ragged_indexed_contiguous"]
i = dimensions.index(ncdim)
if i != 0:
raise ValueError(
"Data can only be created when the netCDF "
"dimension spanned by the data variable is the "
"left-most dimension in the ragged array."
)
uncompressed_shape = list(array.shape)
uncompressed_shape[i : i + 1] = [
c["instance_dimension_size"],
c["element_dimension_1_size"],
c["element_dimension_2_size"],
]
uncompressed_shape = tuple(uncompressed_shape)
array = self._create_ragged_indexed_contiguous_array(
ragged_indexed_contiguous_array=self._create_Data(
array
),
uncompressed_shape=uncompressed_shape,
count_variable=c["count_variable"],
index_variable=c["index_variable"],
)
elif "ragged_contiguous" in c:
# --------------------------------------------
# Contiguous ragged array
# --------------------------------------------
c = c["ragged_contiguous"]
i = dimensions.index(ncdim)
if i != 0:
raise ValueError(
"Data can only be created when the netCDF "
"dimension spanned by the data variable is the "
"left-most dimension in the ragged array."
)
uncompressed_shape = list(array.shape)
uncompressed_shape[i : i + 1] = [
c["instance_dimension_size"],
c["element_dimension_size"],
]
uncompressed_shape = tuple(uncompressed_shape)
array = self._create_ragged_contiguous_array(
ragged_contiguous_array=self._create_Data(array),
uncompressed_shape=uncompressed_shape,
count_variable=c["count_variable"],
)
elif "ragged_indexed" in c:
# --------------------------------------------
# Indexed ragged array
# --------------------------------------------
c = c["ragged_indexed"]
i = dimensions.index(ncdim)
if i != 0:
raise ValueError(
"Data can only be created when the netCDF "
"dimension spanned by the data variable is the "
"left-most dimension in the ragged array."
)
uncompressed_shape = list(array.shape)
uncompressed_shape[i : i + 1] = [
c["instance_dimension_size"],
c["element_dimension_size"],
]
uncompressed_shape = tuple(uncompressed_shape)
array = self._create_ragged_indexed_array(
ragged_indexed_array=self._create_Data(array),
uncompressed_shape=uncompressed_shape,
index_variable=c["index_variable"],
)
else:
raise ValueError(
f"Bad compression vibes. c.keys()={list(c.keys())}"
)
return self._create_Data(array, units=units, calendar=calendar)
def _create_domain_axis(self, size, ncdim=None):
"""Create a domain axis construct.
.. versionadded:: (cfdm) 1.7.0
:Parameters:
size: `int`
ncdim: `str`, optional
:Returns:
Domain axis construct
"""
domain_axis = self.implementation.initialise_DomainAxis(size=size)
self.implementation.nc_set_dimension(
domain_axis, self._ncdim_abspath(ncdim)
)
return domain_axis
def _create_field_ancillary(self, ncvar):
"""Create a field ancillary construct.
.. versionadded:: (cfdm) 1.7.0
:Parameters:
ncvar: `str`
The netCDF name of the field ancillary variable.
:Returns:
Field ancillary construct
"""
# Create a field ancillary object
field_ancillary = self.implementation.initialise_FieldAncillary()
# Insert properties
self.implementation.set_properties(
field_ancillary,
self.read_vars["variable_attributes"][ncvar],
copy=True,
)
if not self.read_vars["mask"]:
self._set_default_FillValue(field_ancillary, ncvar)
# Insert data
data = self._create_data(ncvar, field_ancillary)
self.implementation.set_data(field_ancillary, data, copy=False)
# Store the netCDF variable name
self.implementation.nc_set_variable(field_ancillary, ncvar)
return field_ancillary
def _parse_cell_methods(self, cell_methods_string, field_ncvar=None):
"""Parse a CF cell_methods string.
.. versionadded:: (cfdm) 1.7.0
:Parameters:
cell_methods_string: `str`
A CF cell methods string.
:Returns:
`list` of `dict`
**Examples:**
>>> c = parse_cell_methods('t: minimum within years '
...     't: mean over ENSO years')
"""
if field_ncvar:
attribute = {field_ncvar + ":cell_methods": cell_methods_string}
incorrect_interval = (
"Cell method interval",
"is incorrectly formatted",
)
out = []
if not cell_methods_string:
return out
# ------------------------------------------------------------
# Split the cell_methods string into a list of strings ready
# for parsing. For example:
#
# 'lat: mean (interval: 1 hour)'
#
# would be split up into:
#
# ['lat:', 'mean', '(', 'interval:', '1', 'hour', ')']
# ------------------------------------------------------------
cell_methods = re.sub(r"\((?=[^\s])", "( ", cell_methods_string)
cell_methods = re.sub(r"(?<=[^\s])\)", " )", cell_methods).split()
while cell_methods:
cm = {}
axes = []
while cell_methods:
if not cell_methods[0].endswith(":"):
break
# TODO Check that "name" ends with colon? How? ('lat: mean
# (area-weighted) or lat: mean (interval: 1 degree_north comment:
# area-weighted)')
axis = cell_methods.pop(0)[:-1]
axes.append(axis)
cm["axes"] = axes
if not cell_methods:
out.append(cm)
break
# Method
cm["method"] = cell_methods.pop(0)
if not cell_methods:
out.append(cm)
break
# Climatological statistics, and statistics which apply to
# portions of cells
while cell_methods[0] in ("within", "where", "over"):
attr = cell_methods.pop(0)
cm[attr] = cell_methods.pop(0)
if not cell_methods:
break
if not cell_methods:
out.append(cm)
break
# interval and comment
intervals = []
if cell_methods[0].endswith("("):
cell_methods.pop(0)
if not (re.search(r"^(interval|comment):$", cell_methods[0])):
cell_methods.insert(0, "comment:")
while not re.search(r"^\)$", cell_methods[0]):
term = cell_methods.pop(0)[:-1]
if term == "interval":
interval = cell_methods.pop(0)
if cell_methods[0] != ")":
units = cell_methods.pop(0)
else:
units = None
try:
parsed_interval = literal_eval(interval)
except (SyntaxError, ValueError):
if not field_ncvar:
raise ValueError(incorrect_interval)
self._add_message(
field_ncvar,
field_ncvar,
message=incorrect_interval,
)
return []
try:
data = self.implementation.initialise_Data(
array=parsed_interval, units=units, copy=False
)
except Exception:
if not field_ncvar:
raise ValueError(incorrect_interval)
self._add_message(
field_ncvar,
field_ncvar,
message=incorrect_interval,
attribute=attribute,
)
return []
intervals.append(data)
continue
if term == "comment":
comment = []
while cell_methods:
if cell_methods[0].endswith(")"):
break
if cell_methods[0].endswith(":"):
break
comment.append(cell_methods.pop(0))
cm["comment"] = " ".join(comment)
if cell_methods[0].endswith(")"):
cell_methods.pop(0)
n_intervals = len(intervals)
if n_intervals > 1 and n_intervals != len(axes):
if not field_ncvar:
raise ValueError(incorrect_interval)
self._add_message(
field_ncvar,
field_ncvar,
message=incorrect_interval,
attribute=attribute,
)
return []
if intervals:
cm["interval"] = intervals
out.append(cm)
return out
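# Illustrative sketch of the parsed structure (hypothetical input, values
# unverified): the string 'time: mean (interval: 1 hour)' would be returned
# as something like
# [{'axes': ['time'], 'method': 'mean', 'interval': [<Data: 1 hour>]}].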
def _create_formula_terms_ref(self, f, key, coord, formula_terms):
"""Create a formula terms coordinate reference.
Specifically, create a coordinate reference of a netCDF
formula terms attribute.
.. versionadded:: (cfdm) 1.7.0
If the coordinate object has properties 'standard_name' or
'computed_standard_name' then they are copied to coordinate
conversion parameters.
:Parameters:
f: `Field`
key: `str`
The internal identifier of the coordinate.
coord: `Coordinate`
formula_terms: `dict`
The formula_terms attribute value from the netCDF file.
*Parameter example:*
``formula_terms={'a':'a','b':'b','orog':'surface_altitude'}``
:Returns:
`CoordinateReference`
"""
g = self.read_vars
domain_ancillaries = {}
parameters = {}
for term, ncvar in formula_terms.items():
# The term's value is a domain ancillary of the field, so
# we put its identifier into the coordinate reference.
domain_ancillaries[term] = g["domain_ancillary_key"].get(ncvar)
for name in ("standard_name", "computed_standard_name"):
value = self.implementation.get_property(coord, name, None)
if value is not None:
parameters[name] = value
datum_parameters = {}
coordinate_conversion_parameters = {}
for x, value in parameters.items():
if x in g["datum_parameters"]:
datum_parameters[x] = value
else:
coordinate_conversion_parameters[x] = value
datum = self.implementation.initialise_Datum(
parameters=datum_parameters
)
coordinate_conversion = (
self.implementation.initialise_CoordinateConversion(
parameters=coordinate_conversion_parameters,
domain_ancillaries=domain_ancillaries,
)
)
coordref = self.implementation.initialise_CoordinateReference()
self.implementation.set_coordinate_reference_coordinates(
coordinate_reference=coordref, coordinates=[key]
)
self.implementation.set_datum(
coordinate_reference=coordref, datum=datum
)
self.implementation.set_coordinate_conversion(
coordinate_reference=coordref,
coordinate_conversion=coordinate_conversion,
)
return coordref
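# Illustrative sketch (terms taken from the docstring example above): with
# formula_terms={'a': 'a', 'b': 'b', 'orog': 'surface_altitude'}, each term
# is mapped to the domain ancillary key already created for its netCDF
# variable, while any 'standard_name'/'computed_standard_name' property of
# the coordinate becomes either a datum parameter or a coordinate conversion
# parameter, depending on whether it appears in g["datum_parameters"].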
def _ncdimensions(self, ncvar):
"""Lists the netCDF dimensions associated with a variable.
If the variable has been compressed then the *implied
uncompressed* dimensions are returned.
.. versionadded:: (cfdm) 1.7.0
:Parameters:
ncvar: `str`
The netCDF variable name.
:Returns:
`list`
The list of netCDF dimension names spanned by the netCDF
variable.
**Examples:**
>>> n._ncdimensions('humidity')
['time', 'lat', 'lon']
"""
g = self.read_vars
variable = g["variables"][ncvar]
ncdimensions = list(g["variable_dimensions"][ncvar])
if self._is_char(ncvar) and variable.ndim >= 1:
# Remove the trailing string-length dimension
ncdimensions.pop()
# Check for dimensions which have been compressed. If there
# are any, then return the netCDF dimensions for the
# uncompressed variable.
compression = g["compression"]
if compression and set(compression).intersection(ncdimensions):
for ncdim in ncdimensions:
if ncdim in compression:
c = compression[ncdim]
if ncvar not in c.get("netCDF_variables", (ncvar,)):
# This variable is not compressed, even though
# it spans a dimension that is compressed for
# some other variables. For example, this sort
# of situation may arise with simple
# geometries.
continue
i = ncdimensions.index(ncdim)
if "gathered" in c:
# Compression by gathering
ncdimensions[i : i + 1] = c["gathered"][
"implied_ncdimensions"
]
elif "ragged_indexed_contiguous" in c:
# Indexed contiguous ragged array.
#
# Check this before ragged_indexed and
# ragged_contiguous because both of these will
# exist for an array that is both indexed and
# contiguous.
ncdimensions[i : i + 1] = c[
"ragged_indexed_contiguous"
]["implied_ncdimensions"]
elif "ragged_contiguous" in c:
# Contiguous ragged array
ncdimensions[i : i + 1] = c["ragged_contiguous"][
"implied_ncdimensions"
]
elif "ragged_indexed" in c:
# Indexed ragged array
ncdimensions[i : i + 1] = c["ragged_indexed"][
"implied_ncdimensions"
]
break
return list(map(str, ncdimensions))
def _create_gathered_array(
self,
gathered_array=None,
uncompressed_shape=None,
compressed_dimension=None,
list_variable=None,
):
"""Creates Data for a compressed-by-gathering netCDF variable.
Specifically, a `Data` object is created.
.. versionadded:: (cfdm) 1.7.0
:Parameters:
gathered_array: `NetCDFArray`
list_variable: `List`
:Returns:
`Data`
"""
uncompressed_ndim = len(uncompressed_shape)
uncompressed_size = int(reduce(operator.mul, uncompressed_shape, 1))
return self.implementation.initialise_GatheredArray(
compressed_array=gathered_array,
ndim=uncompressed_ndim,
shape=uncompressed_shape,
size=uncompressed_size,
compressed_dimension=compressed_dimension,
list_variable=list_variable,
)
def _create_ragged_contiguous_array(
self,
ragged_contiguous_array,
uncompressed_shape=None,
count_variable=None,
):
"""Creates Data for a contiguous ragged array variable.
Creates a `Data` object for a compressed-by-contiguous-ragged-
array netCDF variable.
.. versionadded:: (cfdm) 1.7.0
:Parameters:
ragged_contiguous_array: `Data`
uncompressed_shape: `tuple`
count_variable: `Count`
:Returns:
`Data`
"""
uncompressed_ndim = len(uncompressed_shape)
uncompressed_size = int(reduce(operator.mul, uncompressed_shape, 1))
return self.implementation.initialise_RaggedContiguousArray(
compressed_array=ragged_contiguous_array,
ndim=uncompressed_ndim,
shape=uncompressed_shape,
size=uncompressed_size,
count_variable=count_variable,
)
def _create_ragged_indexed_array(
self,
ragged_indexed_array,
uncompressed_shape=None,
index_variable=None,
):
"""Creates Data for an indexed ragged array variable.
.. versionadded:: (cfdm) 1.7.0
:Returns:
`Data`
"""
uncompressed_ndim = len(uncompressed_shape)
uncompressed_size = int(reduce(operator.mul, uncompressed_shape, 1))
return self.implementation.initialise_RaggedIndexedArray(
compressed_array=ragged_indexed_array,
ndim=uncompressed_ndim,
shape=uncompressed_shape,
size=uncompressed_size,
index_variable=index_variable,
)
def _create_ragged_indexed_contiguous_array(
self,
ragged_indexed_contiguous_array,
uncompressed_shape=None,
count_variable=None,
index_variable=None,
):
"""Creates Data for an indexed contiguous ragged array variable.
.. versionadded:: (cfdm) 1.7.0
:Returns:
`Data`
"""
uncompressed_ndim = len(uncompressed_shape)
uncompressed_size = int(reduce(operator.mul, uncompressed_shape, 1))
return self.implementation.initialise_RaggedIndexedContiguousArray(
compressed_array=ragged_indexed_contiguous_array,
ndim=uncompressed_ndim,
shape=uncompressed_shape,
size=uncompressed_size,
count_variable=count_variable,
index_variable=index_variable,
)
def _create_Data(
self, array=None, units=None, calendar=None, ncvar=None, **kwargs
):
"""Create a Data object.
.. versionadded:: (cfdm) 1.7.0
:Parameters:
ncvar: `str`
The netCDF variable from which to get units and calendar.
"""
data = self.implementation.initialise_Data(
array=array, units=units, calendar=calendar, copy=False, **kwargs
)
return data
def _copy_construct(self, construct_type, field_ncvar, ncvar):
"""Return a copy of an existing construct.
.. versionadded:: (cfdm) 1.7.0
:Parameters:
construct_type: `str`
E.g. 'dimension_coordinate'
field_ncvar: `str`
The netCDF variable name of the field that will contain the
copy of the construct.
ncvar: `str`
The netCDF variable name of the construct.
:Returns:
A copy of the construct.
"""
g = self.read_vars
component_report = g["component_report"].get(ncvar)
if component_report is not None:
for var, report in component_report.items():
g["dataset_compliance"][field_ncvar][
"non-compliance"
].setdefault(var, []).extend(report)
return self.implementation.copy_construct(g[construct_type][ncvar])
# ================================================================
# Methods for checking CF compliance
#
# These methods (whose names all start with "_check") check the
# minimum required for mapping the file to CFDM structural
# elements. General CF compliance is not checked (e.g. whether or
# not grid mapping variable has a grid_mapping_name attribute).
# ================================================================
def _check_bounds(
self, field_ncvar, parent_ncvar, attribute, bounds_ncvar
):
"""Check a bounds variable spans the correct dimensions.
.. versionadded:: (cfdm) 1.7.0
Checks that
* The bounds variable has exactly one more dimension than the
parent variable
* The bounds variable's dimensions, other than the trailing
dimension, are the same, and in the same order, as the parent
variable's dimensions.
:Parameters:
field_ncvar: `str`
The netCDF variable name of the parent field construct.
parent_ncvar: `str`
The netCDF variable name of the parent variable.
bounds_ncvar: `str`
The netCDF variable name of the bounds.
:Returns:
`bool`
"""
attribute = {parent_ncvar + ":" + attribute: bounds_ncvar}
incorrect_dimensions = (
"Bounds variable",
"spans incorrect dimensions",
)
g = self.read_vars
if bounds_ncvar not in g["internal_variables"]:
bounds_ncvar, message = self._check_missing_variable(
bounds_ncvar, "Bounds variable"
)
self._add_message(
field_ncvar,
bounds_ncvar,
message=message,
attribute=attribute,
variable=parent_ncvar,
)
return False
ok = True
c_ncdims = self._ncdimensions(parent_ncvar)
b_ncdims = self._ncdimensions(bounds_ncvar)
if len(b_ncdims) == len(c_ncdims) + 1:
if c_ncdims != b_ncdims[:-1]:
self._add_message(
field_ncvar,
bounds_ncvar,
message=incorrect_dimensions,
attribute=attribute,
dimensions=g["variable_dimensions"][bounds_ncvar],
variable=parent_ncvar,
)
ok = False
else:
self._add_message(
field_ncvar,
bounds_ncvar,
message=incorrect_dimensions,
attribute=attribute,
dimensions=g["variable_dimensions"][bounds_ncvar],
variable=parent_ncvar,
)
ok = False
return ok
def _check_geometry_node_coordinates(
self, field_ncvar, node_ncvar, geometry
):
"""Check a geometry node corodinate variable.
.. versionadded:: (cfdm) 1.8.6
:Parameters:
field_ncvar: `str`
The netCDF variable name of the parent data variable.
node_ncvar: `str`
The netCDF variable name of the node coordinate variable.
geometry: `dict`
:Returns:
`bool`
"""
g = self.read_vars
geometry_ncvar = g["variable_geometry"].get(field_ncvar)
attribute = {
field_ncvar
+ ":"
+ geometry_ncvar: " ".join(geometry["node_coordinates"])
}
if node_ncvar not in g["internal_variables"]:
node_ncvar, message = self._check_missing_variable(
node_ncvar, "Node coordinate variable"
)
self._add_message(
field_ncvar,
node_ncvar,
message=message,
attribute=attribute,
variable=field_ncvar,
)
return False
ok = True
if node_ncvar not in geometry.get("node_coordinates", ()):
self._add_message(
field_ncvar,
node_ncvar,
message=(
"Node coordinate variable",
"not in node_coordinates",
),
attribute=attribute,
variable=field_ncvar,
)
ok = False
return ok
def _check_cell_measures(self, field_ncvar, string, parsed_string):
"""Checks requirements.
* 7.2.requirement.1
* 7.2.requirement.3
* 7.2.requirement.4
.. versionadded:: (cfdm) 1.7.0
:Parameters:
field_ncvar: `str`
string: `str`
The value of the netCDF cell_measures attribute.
parsed_string: `list`
:Returns:
`bool`
"""
attribute = {field_ncvar + ":cell_measures": string}
incorrectly_formatted = (
"cell_measures attribute",
"is incorrectly formatted",
)
incorrect_dimensions = (
"Cell measures variable",
"spans incorrect dimensions",
)
missing_variable = (
"Cell measures variable",
"is not in file nor referenced by the external_variables "
"global attribute",
)
g = self.read_vars
if not parsed_string:
self._add_message(
field_ncvar,
field_ncvar,
message=incorrectly_formatted,
attribute=attribute,
conformance="7.2.requirement.1",
)
return False
parent_dimensions = self._ncdimensions(field_ncvar)
external_variables = g["external_variables"]
ok = True
for x in parsed_string:
measure, values = list(x.items())[0]
if len(values) != 1:
self._add_message(
field_ncvar,
field_ncvar,
message=incorrectly_formatted,
attribute=attribute,
conformance="7.2.requirement.1",
)
ok = False
continue
ncvar = values[0]
unknown_external = ncvar in external_variables
# Check that the variable exists in the file, or if not
# that it is listed in the 'external_variables' global
# file attribute
if not unknown_external and ncvar not in g["variables"]:
self._add_message(
field_ncvar,
ncvar,
message=missing_variable,
attribute=attribute,
conformance="7.2.requirement.3",
)
ok = False
continue
if not unknown_external:
dimensions = self._ncdimensions(ncvar)
if not unknown_external and not self._dimensions_are_subset(
ncvar, dimensions, parent_dimensions
):
# The cell measure variable's dimensions do NOT span a
# subset of the parent variable's dimensions.
self._add_message(
field_ncvar,
ncvar,
message=incorrect_dimensions,
attribute=attribute,
dimensions=g["variable_dimensions"][ncvar],
conformance="7.2.requirement.4",
)
ok = False
return ok
def _check_geometry_attribute(self, field_ncvar, string, parsed_string):
"""Checks requirements.
.. versionadded:: (cfdm) 1.8.0
:Parameters:
field_ncvar: `str`
string: `str`
The value of the netCDF geometry attribute.
parsed_string: `list`
:Returns:
`bool`
"""
attribute = {field_ncvar + ":geometry": string}
incorrectly_formatted = (
"geometry attribute",
"is incorrectly formatted",
)
g = self.read_vars
if len(parsed_string) != 1:
self._add_message(
field_ncvar,
field_ncvar,
message=incorrectly_formatted,
attribute=attribute,
conformance="?",
)
return False
for ncvar in parsed_string:
# Check that the variable exists in the file, or if not
# that it is listed in the 'external_variables' global
# file attribute
if ncvar not in g["variables"]:
ncvar, message = self._check_missing_variable(
ncvar, "Geometry variable"
)
self._add_message(
field_ncvar,
ncvar,
message=message,
attribute=attribute,
conformance="?",
)
return False
return True
def _check_ancillary_variables(self, field_ncvar, string, parsed_string):
"""Checks requirements.
:Parameters:
field_ncvar: `str`
ancillary_variables: `str`
The value of the netCDF ancillary_variables attribute.
parsed_ancillary_variables: `list`
:Returns:
`bool`
"""
attribute = {field_ncvar + ":ancillary_variables": string}
incorrectly_formatted = (
"ancillary_variables attribute",
"is incorrectly formatted",
)
incorrect_dimensions = (
"Ancillary variable",
"spans incorrect dimensions",
)
g = self.read_vars
if not parsed_string:
d = self._add_message(
field_ncvar,
field_ncvar,
message=incorrectly_formatted,
attribute=attribute,
)
# Though an error of sorts, set as debug level message;
# read not terminated
if is_log_level_debug(logger):
logger.debug(
f" Error processing netCDF variable {field_ncvar}: {d['reason']}"
) # pragma: no cover
return False
parent_dimensions = self._ncdimensions(field_ncvar)
ok = True
for ncvar in parsed_string:
# Check that the variable exists in the file
if ncvar not in g["internal_variables"]:
ncvar, message = self._check_missing_variable(
ncvar, "Ancillary variable"
)
self._add_message(
field_ncvar, ncvar, message=message, attribute=attribute
)
return False
if not self._dimensions_are_subset(
ncvar, self._ncdimensions(ncvar), parent_dimensions
):
# The ancillary variable's dimensions do NOT span a
# subset of the parent variable's dimensions
self._add_message(
field_ncvar,
ncvar,
message=incorrect_dimensions,
attribute=attribute,
dimensions=g["variable_dimensions"][ncvar],
)
ok = False
return ok
def _check_auxiliary_scalar_coordinate(
self, field_ncvar, coord_ncvar, string
):
"""Checks requirements.
* 5.requirement.5
* 5.requirement.6
:Parameters:
field_ncvar: `str`
:Returns:
`bool`
"""
attribute = {field_ncvar + ":coordinates": string}
incorrect_dimensions = (
"Auxiliary/scalar coordinate variable",
"spans incorrect dimensions",
)
g = self.read_vars
if coord_ncvar not in g["internal_variables"]:
coord_ncvar, message = self._check_missing_variable(
coord_ncvar, "Auxiliary/scalar coordinate variable"
)
self._add_message(
field_ncvar,
coord_ncvar,
message=message,
attribute=attribute,
conformance="5.requirement.5",
)
return False
# Check that the variable's dimensions span a subset of the
# parent variable's dimensions (allowing for char variables
# with a trailing dimension)
if not self._dimensions_are_subset(
coord_ncvar,
self._ncdimensions(coord_ncvar),
self._ncdimensions(field_ncvar),
):
self._add_message(
field_ncvar,
coord_ncvar,
message=incorrect_dimensions,
attribute=attribute,
dimensions=g["variable_dimensions"][coord_ncvar],
conformance="5.requirement.6",
)
return False
return True
def _dimensions_are_subset(self, ncvar, dimensions, parent_dimensions):
"""True if dimensions are a subset of the parent dimensions."""
if not set(dimensions).issubset(parent_dimensions):
if not (
self._is_char(ncvar)
and set(dimensions[:-1]).issubset(parent_dimensions)
):
return False
return True
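# Illustrative example (hypothetical names): for a char variable, dimensions
# ('time', 'strlen8') count as a subset of parent dimensions
# ('time', 'lat', 'lon'), because the trailing string-length dimension is
# ignored by the check above.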
def _check_grid_mapping(
self, field_ncvar, grid_mapping, parsed_grid_mapping
):
"""Checks requirements.
* 5.6.requirement.1
* 5.6.requirement.2
* 5.6.requirement.3
:Parameters:
field_ncvar: `str`
grid_mapping: `str`
parsed_grid_mapping: `dict`
:Returns:
`bool`
"""
attribute = {field_ncvar + ":grid_mapping": grid_mapping}
incorrectly_formatted = (
"grid_mapping attribute",
"is incorrectly formatted",
)
g = self.read_vars
if not parsed_grid_mapping:
self._add_message(
field_ncvar,
field_ncvar,
message=incorrectly_formatted,
attribute=attribute,
conformance="5.6.requirement.1",
)
return False
ok = True
for x in parsed_grid_mapping:
grid_mapping_ncvar, values = list(x.items())[0]
if grid_mapping_ncvar not in g["internal_variables"]:
ok = False
grid_mapping_ncvar, message = self._check_missing_variable(
grid_mapping_ncvar, "Grid mapping variable"
)
self._add_message(
field_ncvar,
grid_mapping_ncvar,
message=message,
attribute=attribute,
conformance="5.6.requirement.2",
)
for coord_ncvar in values:
if coord_ncvar not in g["internal_variables"]:
ok = False
coord_ncvar, message = self._check_missing_variable(
coord_ncvar, "Grid mapping coordinate variable"
)
self._add_message(
field_ncvar,
coord_ncvar,
message=message,
attribute=attribute,
conformance="5.6.requirement.3",
)
if not ok:
return False
return True
def _check_compress(self, parent_ncvar, compress, parsed_compress):
"""Check a compressed dimension is valid and in the file."""
attribute = {parent_ncvar + ":compress": compress}
incorrectly_formatted = (
"compress attribute",
"is incorrectly formatted",
)
missing_dimension = ("Compressed dimension", "is not in file")
if not parsed_compress:
self._add_message(
None,
parent_ncvar,
message=incorrectly_formatted,
attribute=attribute,
)
return False
ok = True
dimensions = self.read_vars["internal_dimension_sizes"]
for ncdim in parsed_compress:
if ncdim not in dimensions:
self._add_message(
None,
parent_ncvar,
message=missing_dimension,
attribute=attribute,
)
ok = False
return ok
def _check_node_coordinates(
self,
field_ncvar,
geometry_ncvar,
node_coordinates,
parsed_node_coordinates,
):
"""Check node coordinate variables are valid and in the file."""
attribute = {geometry_ncvar + ":node_coordinates": node_coordinates}
g = self.read_vars
incorrectly_formatted = (
"node_coordinates attribute",
"is incorrectly formatted",
)
missing_attribute = ("node_coordinates attribute", "is missing")
if node_coordinates is None:
self._add_message(
field_ncvar,
geometry_ncvar,
message=missing_attribute,
attribute=attribute,
)
return False
if not parsed_node_coordinates:
# There should be at least one node coordinate variable
self._add_message(
field_ncvar,
geometry_ncvar,
message=incorrectly_formatted,
attribute=attribute,
)
return False
ok = True
for ncvar in parsed_node_coordinates:
# Check that the node coordinate variable exists in the
# file
if ncvar not in g["internal_variables"]:
ncvar, message = self._check_missing_variable(
ncvar,
"Node coordinate variable",
)
self._add_message(
field_ncvar, ncvar, message=message, attribute=attribute
)
ok = False
return ok
def _check_node_count(
self, field_ncvar, geometry_ncvar, node_count, parsed_node_count
):
"""Check node count variable is valid and exists in the file."""
attribute = {geometry_ncvar + ":node_count": node_count}
g = self.read_vars
if node_count is None:
return True
incorrectly_formatted = (
"node_count attribute",
"is incorrectly formatted",
)
if len(parsed_node_count) != 1:
self._add_message(
field_ncvar,
geometry_ncvar,
message=incorrectly_formatted,
attribute=attribute,
)
return False
ok = True
for ncvar in parsed_node_count:
# Check that the node count variable exists in the file
if ncvar not in g["internal_variables"]:
ncvar, message = self._check_missing_variable(
ncvar,
"Node count variable",
)
self._add_message(
field_ncvar, ncvar, message=message, attribute=attribute
)
ok = False
return ok
def _check_part_node_count(
self,
field_ncvar,
geometry_ncvar,
part_node_count,
parsed_part_node_count,
):
"""Check part node count variable is valid and in the file."""
if part_node_count is None:
return True
attribute = {geometry_ncvar + ":part_node_count": part_node_count}
g = self.read_vars
incorrectly_formatted = (
"part_node_count attribute",
"is incorrectly formatted",
)
if len(parsed_part_node_count) != 1:
self._add_message(
field_ncvar,
geometry_ncvar,
message=incorrectly_formatted,
attribute=attribute,
)
return False
ok = True
for ncvar in parsed_part_node_count:
# Check that the variable exists in the file
if ncvar not in g["internal_variables"]:
ncvar, message = self._check_missing_variable(
ncvar,
"Part node count variable",
)
self._add_message(
field_ncvar, ncvar, message=message, attribute=attribute
)
ok = False
return ok
def _check_interior_ring(
self, field_ncvar, geometry_ncvar, interior_ring, parsed_interior_ring
):
"""Check all interior ring variables exist in the file.
:Returns:
`bool`
"""
if interior_ring is None:
return True
attribute = {geometry_ncvar + ":interior_ring": interior_ring}
g = self.read_vars
incorrectly_formatted = (
"interior_ring attribute",
"is incorrectly formatted",
)
if not parsed_interior_ring:
self._add_message(
field_ncvar,
geometry_ncvar,
message=incorrectly_formatted,
attribute=attribute,
)
return False
ok = True
if len(parsed_interior_ring) != 1:
self._add_message(
field_ncvar,
geometry_ncvar,
message=incorrectly_formatted,
attribute=attribute,
)
return False
for ncvar in parsed_interior_ring:
# Check that the variable exists in the file
if ncvar not in g["internal_variables"]:
ncvar, message = self._check_missing_variable(
ncvar, "Interior ring variable"
)
self._add_message(
field_ncvar, ncvar, message=message, attribute=attribute
)
ok = False
return ok
def _check_instance_dimension(self, parent_ncvar, instance_dimension):
"""Check that the instance dimension name is a netCDF dimension.
.. versionadded:: (cfdm) 1.7.0
CF-1.7 Appendix A
* instance_dimension: An attribute which identifies an index
variable and names the instance dimension to
which it applies. The index variable
indicates that the indexed ragged array
representation is being used for a
collection of features.
"""
attribute = {parent_ncvar + ":instance_dimension": instance_dimension}
missing_dimension = ("Instance dimension", "is not in file")
if (
instance_dimension
not in self.read_vars["internal_dimension_sizes"]
):
self._add_message(
None,
parent_ncvar,
message=missing_dimension,
attribute=attribute,
)
return False
return True
def _check_sample_dimension(self, parent_ncvar, sample_dimension):
"""Check that the sample dimension name is a netCDF dimension.
.. versionadded:: (cfdm) 1.7.0
CF-1.7 Appendix A
* sample_dimension: An attribute which identifies a count variable
and names the sample dimension to which it
applies. The count variable indicates that the
contiguous ragged array representation is
being used for a collection of features.
"""
return sample_dimension in self.read_vars["internal_dimension_sizes"]
def _split_string_by_white_space(
self, parent_ncvar, string, variables=False
):
"""Split a string by white space.
.. versionadded:: (cfdm) 1.7.0
:Parameters:
parent_ncvar: `str`
Not used
string: `str` or `None`
variables: `bool`
If True then *string* contains internal netCDF variable
names. (Not sure yet what to do about external names.)
.. versionadded:: (cfdm) 1.8.6
:Returns:
`list`
"""
if string is None:
return []
try:
out = string.split()
except AttributeError:
return []
else:
if variables and out and self.read_vars["has_groups"]:
mapping = self.read_vars["flattener_variables"]
out = [mapping[ncvar] for ncvar in out]
return out
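# Illustrative examples: 'areacello volumecello' -> ['areacello',
# 'volumecello']; None, or any value without a split() method, -> [].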
def _parse_grid_mapping(self, parent_ncvar, string):
"""Parse a netCDF grid_mapping attribute.
.. versionadded:: (cfdm) 1.7.0
"""
g = self.read_vars
out = []
if g["CF>=1.7"]:
# The grid mapping attribute may point to a single netCDF
# variable OR to multiple variables with associated
# coordinate variables (CF>=1.7)
out = self._parse_x(parent_ncvar, string, keys_are_variables=True)
else:
# The grid mapping attribute may only point to a single
# netCDF variable (CF<=1.6)
out = self._split_string_by_white_space(
parent_ncvar, string, variables=True
)
if len(out) == 1:
out = [{out[0]: []}]
return out
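# Illustrative examples (hypothetical names): under CF>=1.7 the string
# 'rotated_latitude_longitude: rlat rlon' parses to
# [{'rotated_latitude_longitude': ['rlat', 'rlon']}]; under CF<=1.6 the
# string 'crs' parses to [{'crs': []}].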
def _parse_x(self, parent_ncvar, string, keys_are_variables=False):
"""Parse CF-netCDF strings.
Handling of CF-compliant strings:
---------------------------------
'area: areacello' ->
[{'area': ['areacello']}]
'area: areacello volume: volumecello' ->
[{'area': ['areacello']}, {'volume': ['volumecello']}]
'rotated_latitude_longitude' ->
[{'rotated_latitude_longitude': []}]
'rotated_latitude_longitude: x y latitude_longitude: lat lon' ->
[{'rotated_latitude_longitude': ['x', 'y']},
{'latitude_longitude': ['lat', 'lon']}]
'rotated_latitude_longitude: x latitude_longitude: lat lon' ->
[{'rotated_latitude_longitude': ['x']},
{'latitude_longitude': ['lat', 'lon']}]
'a: A b: B orog: OROG' ->
[{'a': ['A']}, {'b': ['B']}, {'orog': ['OROG']}]
Handling of non-CF-compliant strings:
-------------------------------------
'area' ->
[{'area': []}]
'a: b: B orog: OROG' ->
[]
'rotated_latitude_longitude:' ->
[]
'rotated_latitude_longitude zzz' ->
[]
.. versionadded:: (cfdm) 1.7.0
"""
# ============================================================
# Thanks to Alan Iwi for creating these regular expressions
# ============================================================
def subst(s):
"""Substitutes WORD and SEP tokens for regular expressions.
All WORD tokens are replaced by the expression for a word
and all SEP tokens are replaced by the expression for a
separator (whitespace or the end of the string).
"""
return s.replace("WORD", r"[A-Za-z0-9_#]+").replace(
"SEP", r"(\s+|$)"
)
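# For example, subst("(?P<value>WORD)SEP") expands to the pattern
# r"(?P<value>[A-Za-z0-9_#]+)(\s+|$)".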
out = []
pat_value = subst("(?P<value>WORD)SEP")
pat_values = f"({pat_value})+"
pat_mapping = subst(
f"(?P<mapping_name>WORD):SEP(?P<values>{pat_values})"
)
pat_mapping_list = f"({pat_mapping})+"
pat_all = subst(
f"((?P<sole_mapping>WORD)|(?P<mapping_list>{pat_mapping_list}))$"
)
m = re.match(pat_all, string)
if m is None:
return []
sole_mapping = m.group("sole_mapping")
if sole_mapping:
out.append({sole_mapping: []})
else:
mapping_list = m.group("mapping_list")
for mapping in re.finditer(pat_mapping, mapping_list):
term = mapping.group("mapping_name")
values = [
value.group("value")
for value in re.finditer(
pat_value, mapping.group("values")
)
]
out.append({term: values})
# If there are groups then replace flattened variable names
# with absolute path names (CF>=1.8)
g = self.read_vars
if g["has_groups"]:
for x in out:
for key, value in x.copy().items():
if keys_are_variables:
del x[key]
key = g["flattener_variables"][key]
x[key] = [
g["flattener_variables"][ncvar] for ncvar in value
]
return out
def _netCDF4_group(self, nc, name):
"""Return the group of a variable or dimension in the dataset.
Given a dataset and a variable or dimension name, return the
group object for the name, and the name within the group.
.. versionadded:: 1.8.8.1
:Parameters:
nc: `netCDF4._netCDF4.Dataset` or `netCDF4._netCDF4.Group`
name: `str`
:Returns:
`netCDF4._netCDF4.Dataset` or `netCDF4._netCDF4.Group`, `str`
**Examples:**
>>> group, name = n._netCDF4_group(nc, 'time')
>>> group.name, name
('/', 'time')
>>> group, name = n._netCDF4_group(nc, '/surfacelayer/Z')
>>> group.name, name
('surfacelayer', 'Z')
"""
group = nc
path = name.split("/")
for group_name in path[1:-1]:
group = group[group_name]
return group, path[-1]
| 34.579395 | 117 | 0.500257 |
c8edf1306c9f16ae280491481a21235dfb47c289 | 106 | py | Python | common/database/tables/__init__.py | WaffleHacks/wafflebot | e18b7e8b7ceed97f942673a4e90f7196c2542ce0 | ["MIT"] | 2 | 2021-04-24T05:52:40.000Z | 2021-06-25T23:24:08.000Z | common/database/tables/__init__.py | WaffleHacks/wafflebot | e18b7e8b7ceed97f942673a4e90f7196c2542ce0 | ["MIT"] | 6 | 2021-06-13T01:50:50.000Z | 2021-09-05T17:30:49.000Z | common/database/tables/__init__.py | WaffleHacks/wafflebot | e18b7e8b7ceed97f942673a4e90f7196c2542ce0 | ["MIT"] | null | null | null |
from .announcement import Announcement
from .canned_response import CannedResponse
from .user import User
| 26.5 | 43 | 0.858491 |
a9ac1f069a77f0a18835404ec2701c97ffbef500 | 5,134 | py | Python | RestPy/ixnetwork_restpy/testplatform/sessions/ixnetwork/vport/protocols/lisp/router/mapservercacheinfo/remotelocators/remotelocators.py | ralfjon/IxNetwork | c0c834fbc465af69c12fd6b7cee4628baba7fff1 | ["MIT"] | null | null | null | RestPy/ixnetwork_restpy/testplatform/sessions/ixnetwork/vport/protocols/lisp/router/mapservercacheinfo/remotelocators/remotelocators.py | ralfjon/IxNetwork | c0c834fbc465af69c12fd6b7cee4628baba7fff1 | ["MIT"] | null | null | null | RestPy/ixnetwork_restpy/testplatform/sessions/ixnetwork/vport/protocols/lisp/router/mapservercacheinfo/remotelocators/remotelocators.py | ralfjon/IxNetwork | c0c834fbc465af69c12fd6b7cee4628baba7fff1 | ["MIT"] | null | null | null |
# Copyright 1997 - 2018 by IXIA Keysight
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from ixnetwork_restpy.base import Base
from ixnetwork_restpy.files import Files
class RemoteLocators(Base):
"""The RemoteLocators class encapsulates a system managed remoteLocators node in the ixnetwork hierarchy.
An instance of the class can be obtained by accessing the RemoteLocators property from a parent instance.
The internal properties list will be empty when the property is accessed and is populated from the server by using the find method.
"""
_SDM_NAME = 'remoteLocators'
def __init__(self, parent):
super(RemoteLocators, self).__init__(parent)
@property
def MPriority(self):
"""It gives details about the m priority (Read-Only)
Returns:
number
"""
return self._get_attribute('mPriority')
@property
def MWeight(self):
"""It gives details about the m weight (Read-Only)
Returns:
number
"""
return self._get_attribute('mWeight')
@property
def Priority(self):
"""It gives details about the priority (Read-Only)
Returns:
number
"""
return self._get_attribute('priority')
@property
def RemoteLocator(self):
"""It gives details about the remote locators (Read-Only)
Returns:
str
"""
return self._get_attribute('remoteLocator')
@property
def RemoteLocatorAfi(self):
"""It gives details about the remote locators Afi (Read-Only)
Returns:
str
"""
return self._get_attribute('remoteLocatorAfi')
@property
def RlocFlagL(self):
"""It gives details about the rLoc Flag L (Read-Only)
Returns:
bool
"""
return self._get_attribute('rlocFlagL')
@property
def RlocFlagP(self):
"""It gives details about the rLoc FlagP (Read-Only)
Returns:
bool
"""
return self._get_attribute('rlocFlagP')
@property
def RlocFlagR(self):
"""If True, It gives details about the rLoc Flag R (Read-Only)
Returns:
bool
"""
return self._get_attribute('rlocFlagR')
@property
def Weight(self):
"""It gives details about the weight (Read-Only)
Returns:
number
"""
return self._get_attribute('weight')
def find(self, MPriority=None, MWeight=None, Priority=None, RemoteLocator=None, RemoteLocatorAfi=None, RlocFlagL=None, RlocFlagP=None, RlocFlagR=None, Weight=None):
"""Finds and retrieves remoteLocators data from the server.
All named parameters support regex and can be used to selectively retrieve remoteLocators data from the server.
By default the find method takes no parameters and will retrieve all remoteLocators data from the server.
Args:
MPriority (number): It gives details about the m priority (Read-Only)
MWeight (number): It gives details about the m weight (Read-Only)
Priority (number): It gives details about the priority (Read-Only)
RemoteLocator (str): It gives details about the remote locators (Read-Only)
RemoteLocatorAfi (str): It gives details about the remote locators Afi (Read-Only)
RlocFlagL (bool): It gives details about the rLoc Flag L (Read-Only)
RlocFlagP (bool): It gives details about the rLoc FlagP (Read-Only)
            RlocFlagR (bool): If True, it gives details about the rLoc Flag R (Read-Only)
Weight (number): It gives details about the weight (Read-Only)
Returns:
self: This instance with matching remoteLocators data retrieved from the server available through an iterator or index
Raises:
ServerError: The server has encountered an uncategorized error condition
"""
return self._select(locals())
def read(self, href):
"""Retrieves a single instance of remoteLocators data from the server.
Args:
href (str): An href to the instance to be retrieved
Returns:
self: This instance with the remoteLocators data from the server available through an iterator or index
Raises:
NotFoundError: The requested resource does not exist on the server
ServerError: The server has encountered an uncategorized error condition
"""
return self._read(href)
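# --- Usage sketch (illustrative only, not part of the generated SDM class) ---
# A minimal example of how remoteLocators data is typically retrieved through
# the RemoteLocators property of a parent node, as described in the class
# docstring above. The `map_server_cache_info` parameter is an assumption:
# any parent instance exposing the RemoteLocators property would do.
def _example_list_remote_locators(map_server_cache_info):
    # find() with no arguments retrieves all remoteLocators data from the server
    remote_locators = map_server_cache_info.RemoteLocators.find()
    # the returned instance is iterable, per the find() docstring
    for locator in remote_locators:
        print(locator.RemoteLocator, locator.Priority, locator.Weight)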
| 32.493671
| 166
| 0.724581
|
26d661f0fb773dbf82f582bbe856af5297831ac6
| 116
|
py
|
Python
|
mergecounts/utils/__init__.py
|
adthrasher/merge-counts
|
87f41bec1c7fd3600e1fff52722ca6cc28509c0c
|
[
"MIT"
] | 1
|
2022-03-10T02:09:47.000Z
|
2022-03-10T02:09:47.000Z
|
mergecounts/utils/__init__.py
|
adthrasher/merge-counts
|
87f41bec1c7fd3600e1fff52722ca6cc28509c0c
|
[
"MIT"
] | 3
|
2020-08-13T18:39:41.000Z
|
2021-06-18T15:12:21.000Z
|
mergecounts/utils/__init__.py
|
adthrasher/merge-counts
|
87f41bec1c7fd3600e1fff52722ca6cc28509c0c
|
[
"MIT"
] | 1
|
2021-02-18T16:20:58.000Z
|
2021-02-18T16:20:58.000Z
|
"""Utility methods for use in the merge-counts command line tool."""
from . import args, cache, dx, errors, matrix
| 29
| 68
| 0.724138
|
baf0922cf8f7b5ccc4f9e168e6a9a62427e2d991
| 390
|
py
|
Python
|
get_some_food/foodlist/migrations/0008_shoppinglistitem_due_date.py
|
asergeenko/get_some_food
|
a9cfc776193287d2f375437420e985961688d6ed
|
[
"MIT"
] | null | null | null |
get_some_food/foodlist/migrations/0008_shoppinglistitem_due_date.py
|
asergeenko/get_some_food
|
a9cfc776193287d2f375437420e985961688d6ed
|
[
"MIT"
] | null | null | null |
get_some_food/foodlist/migrations/0008_shoppinglistitem_due_date.py
|
asergeenko/get_some_food
|
a9cfc776193287d2f375437420e985961688d6ed
|
[
"MIT"
] | null | null | null |
# Generated by Django 3.1 on 2021-11-30 19:08
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('foodlist', '0007_auto_20211129_1903'),
]
operations = [
migrations.AddField(
model_name='shoppinglistitem',
name='due_date',
field=models.DateField(null=True),
),
]
| 21.666667
| 48
| 0.607692
|
2a92c0f74f36e89f882afd686d5d090fc30d2847
| 3,189
|
py
|
Python
|
data/p2DJ/New/program/qiskit/class/startQiskit_Class212.py
|
UCLA-SEAL/QDiff
|
d968cbc47fe926b7f88b4adf10490f1edd6f8819
|
[
"BSD-3-Clause"
] | null | null | null |
data/p2DJ/New/program/qiskit/class/startQiskit_Class212.py
|
UCLA-SEAL/QDiff
|
d968cbc47fe926b7f88b4adf10490f1edd6f8819
|
[
"BSD-3-Clause"
] | null | null | null |
data/p2DJ/New/program/qiskit/class/startQiskit_Class212.py
|
UCLA-SEAL/QDiff
|
d968cbc47fe926b7f88b4adf10490f1edd6f8819
|
[
"BSD-3-Clause"
] | null | null | null |
# qubit number=2
# total number=12
import cirq
import qiskit
from qiskit import IBMQ
from qiskit import QuantumCircuit, QuantumRegister, ClassicalRegister
from qiskit import BasicAer, execute, transpile
from pprint import pprint
from qiskit.test.mock import FakeVigo
from math import log2,floor, sqrt, pi
import numpy as np
import networkx as nx
def build_oracle(n: int, f) -> QuantumCircuit:
# implement the oracle O_f^\pm
# NOTE: use U1 gate (P gate) with \lambda = 180 ==> CZ gate
# or multi_control_Z_gate (issue #127)
controls = QuantumRegister(n, "ofc")
target = QuantumRegister(1, "oft")
oracle = QuantumCircuit(controls, target, name="Of")
for i in range(2 ** n):
rep = np.binary_repr(i, n)
if f(rep) == "1":
for j in range(n):
if rep[j] == "0":
oracle.x(controls[j])
oracle.mct(controls, target[0], None, mode='noancilla')
for j in range(n):
if rep[j] == "0":
oracle.x(controls[j])
# oracle.barrier()
# oracle.draw('mpl', filename='circuit/deutsch-oracle.png')
return oracle
def make_circuit(n:int,f) -> QuantumCircuit:
# circuit begin
input_qubit = QuantumRegister(n, "qc")
target = QuantumRegister(1, "qt")
prog = QuantumCircuit(input_qubit, target)
# inverse last one (can be omitted if using O_f^\pm)
prog.x(target)
# apply H to get superposition
for i in range(n):
prog.h(input_qubit[i])
prog.h(input_qubit[1]) # number=1
prog.h(input_qubit[1]) # number=6
prog.cz(input_qubit[0],input_qubit[1]) # number=7
prog.h(input_qubit[1]) # number=9
prog.h(input_qubit[1]) # number=8
prog.h(target)
prog.barrier()
# apply oracle O_f
oracle = build_oracle(n, f)
prog.append(
oracle.to_gate(),
[input_qubit[i] for i in range(n)] + [target])
# apply H back (QFT on Z_2^n)
for i in range(n):
prog.h(input_qubit[i])
prog.barrier()
# measure
prog.y(input_qubit[1]) # number=2
prog.cx(input_qubit[0],input_qubit[1]) # number=4
prog.y(input_qubit[1]) # number=3
prog.cx(input_qubit[1],input_qubit[0]) # number=10
prog.cx(input_qubit[1],input_qubit[0]) # number=11
# circuit end
return prog
if __name__ == '__main__':
n = 2
f = lambda rep: rep[-1]
# f = lambda rep: "1" if rep[0:2] == "01" or rep[0:2] == "10" else "0"
# f = lambda rep: "0"
prog = make_circuit(n, f)
    sample_shot = 2800
backend = BasicAer.get_backend('statevector_simulator')
circuit1 = transpile(prog,FakeVigo())
circuit1.x(qubit=3)
circuit1.x(qubit=3)
prog = circuit1
info = execute(prog, backend=backend).result().get_statevector()
qubits = round(log2(len(info)))
info = {
np.binary_repr(i, qubits): round((info[i]*(info[i].conjugate())).real,3)
for i in range(2 ** qubits)
}
writefile = open("../data/startQiskit_Class212.csv","w")
print(info,file=writefile)
print("results end", file=writefile)
print(circuit1.depth(),file=writefile)
print(circuit1,file=writefile)
writefile.close()
| 28.473214
| 80
| 0.620571
|
7eb9e52d8ee17e8dcbb04d41525c658c43b68025
| 5,593
|
py
|
Python
|
python/igsdk/bluetooth5/vsp_central.py
|
berezovskyi-oleksandr/igsdk
|
f0a770f4562fd821cd2f5d6ed00fb6fab65c80b9
|
[
"MIT"
] | null | null | null |
python/igsdk/bluetooth5/vsp_central.py
|
berezovskyi-oleksandr/igsdk
|
f0a770f4562fd821cd2f5d6ed00fb6fab65c80b9
|
[
"MIT"
] | 2
|
2019-07-11T20:05:10.000Z
|
2020-04-14T17:22:02.000Z
|
python/igsdk/bluetooth5/vsp_central.py
|
berezovskyi-oleksandr/igsdk
|
f0a770f4562fd821cd2f5d6ed00fb6fab65c80b9
|
[
"MIT"
] | 8
|
2020-04-06T11:07:10.000Z
|
2022-03-12T11:04:58.000Z
|
#
# vsp_central
#
# Python class for BLE VSP central role
#
# This implements the core functionality of the central
# role using VSP (virtual serial port) communication with
# a remote peripheral (such as a sensor). This class
# works along with the central smartBASIC application
# running on the Bluetooth 5 co-processor (BL654) on the
# Sentrius IG60.
#
import threading
from ..modbus.serial_queue import SerialQueue
import logging
import time
import json, json.decoder
class VSPCentral(threading.Thread):
ADDR_CFG_INDEX = 3000
LE_BANDWIDTH_INDEX = 214
def __init__(self, port, baudrate, vsp_recv_cb=None, central_app='central', msg_timeout=0.5):
self.logger = logging.getLogger(__name__)
self.queue = SerialQueue(port, baudrate, timeout=1)
self.running = False
self.vsp_recv_cb = vsp_recv_cb
self.central_app = central_app
self.msg_timeout = msg_timeout
threading.Thread.__init__(self)
def receive_start(self):
"""Start receiving serial data from the Bluetooth module
"""
self.queue.receive_start()
def receive_stop(self):
"""Stop receiving serial data from the Bluetooth module
"""
self.queue.receive_stop()
def vsp_start(self):
"""Start the VSP application and data passthrough on the Bluetooth module
"""
self.running = True
self.start()
self.at_cmd(self.central_app, timeout=0)
def vsp_stop(self):
"""Stop the VSP application
"""
self.queue.send_break()
self.queue.receive_flush()
self.running = False
self.queue.await_cancel()
pass
def run(self):
"""Main thread routine - await and verify payloads
"""
while self.running:
self.logger.debug('Awaiting incoming message.')
msg = self.queue.await_msg()
if msg:
decoded = msg.decode('utf-8')
stripped = decoded.strip()
if stripped.startswith('##'):
# Control message from smartBASIC application
self.logger.info(stripped)
else:
# Handle payload completion by awaiting additional messages
while True:
msg = self.queue.await_msg(self.msg_timeout)
if msg is not None:
decoded = decoded + msg.decode('utf-8')
else:
break
self.logger.debug('Received message: {}'.format(decoded))
if self.vsp_recv_cb:
self.vsp_recv_cb(decoded)
self.logger.debug('Message receive stopped.')
def vsp_send(self, message):
"""Send a message via the Bluetooth co-processor.
"""
self.logger.debug('Sending message: {}'.format(message))
self.queue.send_msg(message.encode('utf-8'))
def at_cmd(self, request, timeout=5):
"""Send an AT command, await and parse the response
"""
self.queue.send_msg((request+'\r').encode('utf-8'))
lines = []
done = False
while not done:
r = self.queue.await_msg(timeout)
if r is not None:
resp = r.decode('utf-8')
# Decode each response line into a tuple with the result code
# followed by an array of response arguments
for l in resp.strip().split('\r'):
a = l.replace('\t', ' ').split(' ')
result = int(a[0])
lines.append((result, a[1:]))
# If the last result is 0 or 1, we are done
if result == 0 or result == 1:
done = True
else:
done = True
return lines
def at_cfg_set(self, index, val):
"""Write a configuration key into the module
"""
resp = self.at_cmd('AT+CFG {} {}'.format(index, val))
if len(resp) > 0:
result, output = resp[0]
if result != 0:
self.logger.warn('Attempt to write key failed: {}'.format(result))
else:
            self.logger.warn('Unexpected or missing response when writing key.')
def at_cfg_get(self, index):
"""Read a configuration key from the module
"""
resp = self.at_cmd('AT+CFG {} ?'.format(index))
if len(resp) > 0:
result, output = resp[0]
if result == 27:
return int(output[0], 0)
else:
self.logger.warn('Attempt to read key failed: {}'.format(result))
else:
self.logger.warn('Unexpected or missing response when reading key.')
return None
def set_peripheral_address(self, address):
"""Set the remote peripheral MAC address by programming
configuration keys on the BL654
"""
self.logger.debug('Setting peripheral address: {}'.format(address))
addr = address.replace(':', '')
self.at_cfg_set(self.ADDR_CFG_INDEX, int(addr[0:2], 16)) # Byte 0
self.at_cfg_set(self.ADDR_CFG_INDEX + 1, int(addr[2:8], 16)) # Bytes 1-3
self.at_cfg_set(self.ADDR_CFG_INDEX + 2, int(addr[8:14], 16)) # Bytes 4-6
def set_le_bandwidth(self, value):
"""Set the LE bandwidth configuration key on the BL654
"""
self.logger.debug('Setting LE bandwidth: {}'.format(value))
self.at_cfg_set(self.LE_BANDWIDTH_INDEX, value)
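# --- Usage sketch (illustrative only, based on the methods defined above) ---
# One plausible way to drive VSPCentral end to end. The serial port path, baud
# rate, peripheral MAC address and callback are assumptions for illustration;
# real values depend on the IG60 deployment.
def _example_vsp_session():
    def on_message(payload):
        # payload is the decoded message string received from the peripheral
        print('received: {}'.format(payload))
    central = VSPCentral('/dev/ttyS2', 115200, vsp_recv_cb=on_message)
    central.receive_start()
    central.set_peripheral_address('01:23:45:67:89:AB')
    central.vsp_start()
    central.vsp_send('hello peripheral')
    central.vsp_stop()
    central.receive_stop()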
| 36.555556
| 97
| 0.568389
|
17d23bf952e935e3acf9cd41d8d1ac8c2ae4aedd
| 2,052
|
py
|
Python
|
whatsapp-bot/whatsapp-bot.py
|
geekymeeky/PyAutomate
|
de83238eddb88cd8f35f7cdaa339a5be31578c11
|
[
"MIT"
] | null | null | null |
whatsapp-bot/whatsapp-bot.py
|
geekymeeky/PyAutomate
|
de83238eddb88cd8f35f7cdaa339a5be31578c11
|
[
"MIT"
] | null | null | null |
whatsapp-bot/whatsapp-bot.py
|
geekymeeky/PyAutomate
|
de83238eddb88cd8f35f7cdaa339a5be31578c11
|
[
"MIT"
] | null | null | null |
from selenium import webdriver
from pandas import read_csv
from datetime import datetime
from time import sleep
def loadText():
"""[loads the text from 'message.txt' file]
"""
with open('message.txt', 'r') as text_file:
message = text_file.read().replace('\n', ' ')
return message
def loadContacts():
"""[gets a list of phone numbers from 'phoneNo.csv' file]
"""
data = read_csv('phoneNo.csv')
userNums = []
for number in data['Phone No.']:
userNums.append(str(number))
return(userNums)
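# --- Input format sketch (assumption, inferred from loadText/loadContacts) ---
# message.txt holds the text to send; newlines are replaced with spaces.
# phoneNo.csv is expected to contain a 'Phone No.' column, for example:
#   Phone No.
#   919876543210
#   14155550123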
def sendMessage(userNums, message):
"""[summary]
Args:
userNums ([list]): [List of Phone Numbers]
message ([str]): [Message to be sent]
"""
for number in userNums:
url = f'https://web.whatsapp.com/send?phone={number}'
driver.get(url)
sleep(5)
messageBox = driver.find_element_by_xpath(xpaths['xMessageBox'])
messageBox.send_keys(message)
send = driver.find_element_by_xpath(xpaths['xSend'])
send.click()
sleep(1)
def scheduler():
"""[Schedule the message]
"""
year = int(input('Enter a year:'))
month = int(input('Enter a month:'))
day = int(input('Enter a day:'))
hour = int(input('Enter hours:'))
    minute = int(input('Enter minutes:'))
scheduleDateTime = datetime(year, month, day, hour, minute)
timeDiff = (scheduleDateTime-datetime.now())
sleep(timeDiff.total_seconds())
# Xpaths to locate elements
xpaths = {
"xMessageBox": '//div[@spellcheck="true"]',
"xSend": '//span[@data-icon="send"]',
}
if __name__ == "__main__":
response = input('Schedule the message ? [y/n]')
if response.upper() == 'Y':
scheduler()
# Initialize Drivers
driver = webdriver.Chrome()
driver.get("https://web.whatsapp.com")
# Authenticate Login within 15 seconds
print('Scan the QR code to authenticate')
sleep(15)
userNums = loadContacts()
message = loadText()
sendMessage(userNums, message)
driver.close()
pass
| 26.307692
| 72
| 0.622807
|
cda84821487cb96b9f45ee17bb4adcba5d09c708
| 3,107
|
py
|
Python
|
eztda/examples.py
|
ulgenklc/DONUT
|
ce721d558f5f5e739795606091615ed4444cd411
|
[
"MIT"
] | 2
|
2020-06-10T10:16:04.000Z
|
2020-08-06T04:33:37.000Z
|
eztda/examples.py
|
ulgenklc/DONUTDA
|
ce721d558f5f5e739795606091615ed4444cd411
|
[
"MIT"
] | null | null | null |
eztda/examples.py
|
ulgenklc/DONUTDA
|
ce721d558f5f5e739795606091615ed4444cd411
|
[
"MIT"
] | null | null | null |
from eztda import Cell, FilteredComplex, pdiagram, LDAComplex, ImageComplex
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.image as mpimg
def triangle():
"""
    Example Persistent Homology Computation on Filtered Triangle
    The triangle fills in one vertex, edge, and face at a time. The hole that forms when the last edge closes the triangle at degree 5 isn't filled
    in by the face until degree 10, leaving one persistent cycle.
"""
s = """
a
| \\
| \\
| D \\
b ------ c
degrees:
a = 0
b = 1
c = 2
ab = 3
bc = 4
ac = 5 <---- hole forms
D = 10 <---- hole dies
"""
a = Cell(0,0)
b = Cell(0,1)
c = Cell(0,2)
ab = Cell(1,3)
bc = Cell(1,4)
ac = Cell(1,5)
abc = Cell(2,10)
cells = [a,b,c,ab,bc,ac,abc]
# normally we'd let the complex determine the boundary but in this case its
# easier just to tag each cell with its boundary.
a.boundary = []
b.boundary = []
c.boundary = []
ab.boundary = [a,b]
bc.boundary = [b,c]
ac.boundary = [a,c]
abc.boundary = [ab,bc,ac]
f = FilteredComplex(cells)
pairs,cycles= f.persistence(cyclereps=True)
print(s)
print('Persistence pairs\n' + '-'*30 + '\n', pairs)
print('\nCycle Representative\n' + '-'*30 + '\n',cycles)
pdiagram(pairs, dim=1) #h1 diagram
def circle(npoints=100):
"""
Example Persistent Homology Computation on Filtered Circle
The circle is filtered by the Density Adaptive Complex. The Density Adaptive complex is subcomplex of the Delaunay complex on the points. It is parameterized by α∈[0,1] such that when α=0 the complex is the set of points and when α=1 the complex is the full Delaunay complex.
"""
samples = np.random.normal(0,1,(npoints,2))
circle = samples/np.reshape(np.linalg.norm(samples,axis=1), (npoints,1))
noisy_circle = circle+np.random.normal(0,.1,(npoints,2))
dacomp = LDAComplex(noisy_circle)
pairs,cycles = dacomp.persistence(cyclereps=True)
fig,ax = plt.subplots(1,3, figsize=(12,8))
ax[0].scatter(*zip(*noisy_circle))
pdiagram(pairs[1],ax=ax[1])
dacomp.plot_cycle(cycles[1][0],ax=ax[2])
for a in ax:
a.set_aspect('equal')
return fig, ax
def image(img):
"""
Example Persistent Homology Computation on Filtered Image
The image is a grayscale image with bright pixels corresponding to features and dark pixels corresponding to background. The filtered complex is parameterized by a scalar, t, representing a threshold value. A cell enters the complex if its pixel is brighter than max(img)-t so that dark pixels enter last and bright pixels enter first.
"""
#img = mpimg.imread(fn)
#img = np.dot(img[...,:3], [0.299, 0.587, 0.114]) #convert to gray
imgcomp = ImageComplex(img)
pairs,cycles = imgcomp.persistence(cyclereps=True)
fig,ax = plt.subplots(1,2,figsize=(12,6))
ax[0].imshow(img)
imgcomp.plot_cycles([cycles[1][0]],ax=ax[1])
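# --- Usage sketch (not part of the original examples module) ---
# A minimal driver for the examples above; it assumes eztda and matplotlib are
# importable. image() additionally needs a 2-D grayscale array with at least
# one bright loop-like feature, so it is only hinted at here.
if __name__ == '__main__':
    triangle()
    circle(npoints=100)
    # image(some_grayscale_array)  # supply your own 2-D grayscale array here
    plt.show()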
| 32.364583
| 342
| 0.624718
|
34f4d8c542725a12f8e62f759f1ceb85a6744f7d
| 4,778
|
py
|
Python
|
python/paddle/fluid/tests/unittests/test_dist_fleet_grad_clip.py
|
Sand3r-/Paddle
|
1217a521554d63caa1381b8716910d0268dfc22d
|
[
"Apache-2.0"
] | 2
|
2020-02-11T08:53:05.000Z
|
2020-02-20T08:06:25.000Z
|
python/paddle/fluid/tests/unittests/test_dist_fleet_grad_clip.py
|
MaJun-cn/Paddle
|
0ec3a42e9740a5f5066053bb49a923d538eba24a
|
[
"Apache-2.0"
] | null | null | null |
python/paddle/fluid/tests/unittests/test_dist_fleet_grad_clip.py
|
MaJun-cn/Paddle
|
0ec3a42e9740a5f5066053bb49a923d538eba24a
|
[
"Apache-2.0"
] | 4
|
2020-07-27T13:24:03.000Z
|
2020-08-06T08:20:32.000Z
|
# Copyright (c) 2019 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import os
import unittest
import paddle.fluid as fluid
import paddle.fluid.incubate.fleet.base.role_maker as role_maker
from paddle.fluid.incubate.fleet.parameter_server.distribute_transpiler import fleet
from paddle.fluid.transpiler.distribute_transpiler import DistributeTranspilerConfig
from test_dist_fleet_base import TestFleetBase
from dist_simnet_bow import train_network
class TestDistGeoClipByGlobalNormTranspiler(unittest.TestCase):
def test_pserver(self):
role = role_maker.UserDefinedRoleMaker(
current_id=0,
role=role_maker.Role.SERVER,
worker_num=2,
server_endpoints=["127.0.0.1:36011", "127.0.0.1:36012"])
fleet.init(role)
batch_size = 128
is_sparse = True
is_distribute = False
strategy = DistributeTranspilerConfig()
strategy.sync_mode = False
strategy.geo_sgd_mode = True
strategy.geo_sgd_need_push_nums = 5
avg_cost, _, _ = train_network(batch_size, is_distribute, is_sparse)
fluid.clip.set_gradient_clip(
clip=fluid.clip.GradientClipByGlobalNorm(2.0))
optimizer = fluid.optimizer.SGD(0.1)
optimizer = fleet.distributed_optimizer(optimizer, strategy)
optimizer.minimize(avg_cost)
pserver_startup_program = fleet.startup_program
pserver_mian_program = fleet.main_program
class TestDistGeoClipByGlobalNorm(TestFleetBase):
def _setup_config(self):
self._mode = "geo"
self._reader = "dataset"
self._geo_sgd_need_push_nums = 5
self._grad_clip_mode = 3
def check_with_place(self,
model_file,
delta=1e-3,
check_error_log=False,
need_envs={}):
required_envs = {
"PATH": os.getenv("PATH", ""),
"PYTHONPATH": os.getenv("PYTHONPATH", ""),
"LD_LIBRARY_PATH": os.getenv("LD_LIBRARY_PATH", ""),
"FLAGS_rpc_deadline": "5000", # 5sec to fail fast
"http_proxy": ""
}
required_envs.update(need_envs)
tr0_losses, tr1_losses = self._run_cluster(model_file, required_envs)
def test_dist_train(self):
self.check_with_place(
"dist_fleet_ctr.py", delta=1e-5, check_error_log=True)
def _setup_config(self):
self._sync_mode = False
self._grad_clip_mode = 2
def check_with_place(self,
model_file,
delta=1e-3,
check_error_log=False,
need_envs={}):
required_envs = {
"PATH": os.getenv("PATH", ""),
"PYTHONPATH": os.getenv("PYTHONPATH", ""),
"LD_LIBRARY_PATH": os.getenv("LD_LIBRARY_PATH", ""),
"FLAGS_rpc_deadline": "5000", # 5sec to fail fast
"http_proxy": ""
}
required_envs.update(need_envs)
tr0_losses, tr1_losses = self._run_cluster(model_file, required_envs)
def test_dist_train(self):
self.check_with_place(
"dist_fleet_ctr.py", delta=1e-5, check_error_log=True)
class TestDistASyncClipByGlobalNorm(TestFleetBase):
def _setup_config(self):
self._mode = "async"
self._reader = "dataset"
self._grad_clip_mode = 3
def check_with_place(self,
model_file,
delta=1e-3,
check_error_log=False,
need_envs={}):
required_envs = {
"PATH": os.getenv("PATH", ""),
"PYTHONPATH": os.getenv("PYTHONPATH", ""),
"LD_LIBRARY_PATH": os.getenv("LD_LIBRARY_PATH", ""),
"FLAGS_rpc_deadline": "5000", # 5sec to fail fast
"http_proxy": ""
}
required_envs.update(need_envs)
tr0_losses, tr1_losses = self._run_cluster(model_file, required_envs)
def test_dist_train(self):
self.check_with_place(
"dist_fleet_ctr.py", delta=1e-5, check_error_log=True)
if __name__ == "__main__":
unittest.main()
| 34.374101
| 84
| 0.628924
|
1fc996b8869b18696fe1586905dfb711a648c91a
| 165
|
py
|
Python
|
prawdditions/__init__.py
|
praw-dev/prawdditions
|
0a13236c3d3f192399f9aaf6ba811619f22eb345
|
[
"BSD-2-Clause"
] | 6
|
2017-10-09T07:55:47.000Z
|
2021-12-16T06:12:31.000Z
|
prawdditions/__init__.py
|
praw-dev/prawdditions
|
0a13236c3d3f192399f9aaf6ba811619f22eb345
|
[
"BSD-2-Clause"
] | 1
|
2021-05-27T17:06:20.000Z
|
2021-06-08T07:41:33.000Z
|
prawdditions/__init__.py
|
praw-dev/prawdditions
|
0a13236c3d3f192399f9aaf6ba811619f22eb345
|
[
"BSD-2-Clause"
] | 3
|
2017-06-20T15:59:36.000Z
|
2020-03-16T00:18:36.000Z
|
"""Top level of the PRAWdditions package."""
import prawdditions.patch # NOQA (seems to get rid of any sphinx build errors)
from .const import __version__ # NOQA
| 33
| 79
| 0.751515
|
5387d8b134e25805babfbeb5e933a7a27442205c
| 201
|
py
|
Python
|
xivdb/src/jobs/job.py
|
luther38/XivDbWeb
|
204d066232c04dce0ea5a03ec55f160cfbc62659
|
[
"MIT"
] | null | null | null |
xivdb/src/jobs/job.py
|
luther38/XivDbWeb
|
204d066232c04dce0ea5a03ec55f160cfbc62659
|
[
"MIT"
] | null | null | null |
xivdb/src/jobs/job.py
|
luther38/XivDbWeb
|
204d066232c04dce0ea5a03ec55f160cfbc62659
|
[
"MIT"
] | null | null | null |
class Job():
"""
    This is the base class for all jobs, used to standardize them.
"""
def __init__(self) -> None:
pass
def runJob(self):
raise NotImplementedError
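# --- Usage sketch (illustrative only) ---
# Shows the intended pattern: concrete jobs subclass Job and implement
# runJob(). The PrintJob name and body are assumptions for illustration.
class PrintJob(Job):
    def runJob(self):
        print('running PrintJob')
if __name__ == '__main__':
    PrintJob().runJob()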
| 15.461538
| 66
| 0.58209
|
2a18685f1910615e1f210f6d090d35971be7dad8
| 51,290
|
py
|
Python
|
neutron/plugins/ml2/drivers/ovn/mech_driver/mech_driver.py
|
cloudification-io/neutron
|
e17464ad11bec9555ee1ac51159b7e15711050c5
|
[
"Apache-2.0"
] | 1
|
2020-01-29T17:06:17.000Z
|
2020-01-29T17:06:17.000Z
|
neutron/plugins/ml2/drivers/ovn/mech_driver/mech_driver.py
|
cloudification-io/neutron
|
e17464ad11bec9555ee1ac51159b7e15711050c5
|
[
"Apache-2.0"
] | null | null | null |
neutron/plugins/ml2/drivers/ovn/mech_driver/mech_driver.py
|
cloudification-io/neutron
|
e17464ad11bec9555ee1ac51159b7e15711050c5
|
[
"Apache-2.0"
] | null | null | null |
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
import atexit
import copy
import functools
import operator
import signal
import threading
import types
from neutron_lib.api.definitions import portbindings
from neutron_lib.callbacks import events
from neutron_lib.callbacks import registry
from neutron_lib.callbacks import resources
from neutron_lib import constants as const
from neutron_lib import context as n_context
from neutron_lib import exceptions as n_exc
from neutron_lib.plugins import directory
from neutron_lib.plugins.ml2 import api
from neutron_lib.services.qos import constants as qos_consts
from oslo_config import cfg
from oslo_db import exception as os_db_exc
from oslo_log import log
from oslo_utils import timeutils
from ovsdbapp.backend.ovs_idl import idlutils
from neutron._i18n import _
from neutron.common.ovn import acl as ovn_acl
from neutron.common.ovn import constants as ovn_const
from neutron.common.ovn import utils as ovn_utils
from neutron.common import utils as n_utils
from neutron.conf.plugins.ml2.drivers.ovn import ovn_conf
from neutron.db import ovn_hash_ring_db
from neutron.db import ovn_revision_numbers_db
from neutron.db import provisioning_blocks
from neutron.plugins.ml2 import db as ml2_db
from neutron.plugins.ml2.drivers.ovn.mech_driver.ovsdb import impl_idl_ovn
from neutron.plugins.ml2.drivers.ovn.mech_driver.ovsdb import maintenance
from neutron.plugins.ml2.drivers.ovn.mech_driver.ovsdb import ovn_client
from neutron.plugins.ml2.drivers.ovn.mech_driver.ovsdb import ovn_db_sync
from neutron.plugins.ml2.drivers.ovn.mech_driver.ovsdb import worker
from neutron.services.qos.drivers.ovn import driver as qos_driver
from neutron.services.segments import db as segment_service_db
from neutron.services.trunk.drivers.ovn import trunk_driver
LOG = log.getLogger(__name__)
METADATA_READY_WAIT_TIMEOUT = 15
AGENTS = {}
class MetadataServiceReadyWaitTimeoutException(Exception):
pass
class OVNPortUpdateError(n_exc.BadRequest):
pass
class OVNMechanismDriver(api.MechanismDriver):
"""OVN ML2 mechanism driver
A mechanism driver is called on the creation, update, and deletion
of networks and ports. For every event, there are two methods that
get called - one within the database transaction (method suffix of
_precommit), one right afterwards (method suffix of _postcommit).
Exceptions raised by methods called inside the transaction can
rollback, but should not make any blocking calls (for example,
REST requests to an outside controller). Methods called after
transaction commits can make blocking external calls, though these
will block the entire process. Exceptions raised in calls after
the transaction commits may cause the associated resource to be
deleted.
Because rollback outside of the transaction is not done in the
update network/port case, all data validation must be done within
methods that are part of the database transaction.
"""
supported_qos_rule_types = [qos_consts.RULE_TYPE_BANDWIDTH_LIMIT]
def initialize(self):
"""Perform driver initialization.
Called after all drivers have been loaded and the database has
been initialized. No abstract methods defined below will be
called prior to this method being called.
"""
LOG.info("Starting OVNMechanismDriver")
self._nb_ovn = None
self._sb_ovn = None
self._plugin_property = None
self._ovn_client_inst = None
self._maintenance_thread = None
self.node_uuid = None
self.hash_ring_group = ovn_const.HASH_RING_ML2_GROUP
self.sg_enabled = ovn_acl.is_sg_enabled()
# NOTE(lucasagomes): _clean_hash_ring() must be called before
# self.subscribe() to avoid processes racing when adding or
# deleting nodes from the Hash Ring during service initialization
self._clean_hash_ring()
self._post_fork_event = threading.Event()
if cfg.CONF.SECURITYGROUP.firewall_driver:
LOG.warning('Firewall driver configuration is ignored')
self._setup_vif_port_bindings()
self.subscribe()
self.qos_driver = qos_driver.OVNQosNotificationDriver.create(self)
self.trunk_driver = trunk_driver.OVNTrunkDriver.create(self)
@property
def _plugin(self):
if self._plugin_property is None:
self._plugin_property = directory.get_plugin()
return self._plugin_property
@property
def _ovn_client(self):
if self._ovn_client_inst is None:
if not(self._nb_ovn and self._sb_ovn):
# Wait until the post_fork_initialize method has finished and
# IDLs have been correctly setup.
self._post_fork_event.wait()
self._ovn_client_inst = ovn_client.OVNClient(self._nb_ovn,
self._sb_ovn)
return self._ovn_client_inst
def _setup_vif_port_bindings(self):
self.supported_vnic_types = [portbindings.VNIC_NORMAL,
portbindings.VNIC_DIRECT]
self.vif_details = {
portbindings.VIF_TYPE_OVS: {
portbindings.CAP_PORT_FILTER: self.sg_enabled
},
portbindings.VIF_TYPE_VHOST_USER: {
portbindings.CAP_PORT_FILTER: False,
portbindings.VHOST_USER_MODE:
portbindings.VHOST_USER_MODE_SERVER,
portbindings.VHOST_USER_OVS_PLUG: True
},
portbindings.VIF_DETAILS_CONNECTIVITY:
portbindings.CONNECTIVITY_L2,
}
def subscribe(self):
registry.subscribe(self.pre_fork_initialize,
resources.PROCESS,
events.BEFORE_SPAWN)
registry.subscribe(self.post_fork_initialize,
resources.PROCESS,
events.AFTER_INIT)
registry.subscribe(self._add_segment_host_mapping_for_segment,
resources.SEGMENT,
events.AFTER_CREATE)
# Handle security group/rule notifications
if self.sg_enabled:
registry.subscribe(self._create_security_group_precommit,
resources.SECURITY_GROUP,
events.PRECOMMIT_CREATE)
registry.subscribe(self._update_security_group,
resources.SECURITY_GROUP,
events.AFTER_UPDATE)
registry.subscribe(self._create_security_group,
resources.SECURITY_GROUP,
events.AFTER_CREATE)
registry.subscribe(self._delete_security_group,
resources.SECURITY_GROUP,
events.AFTER_DELETE)
registry.subscribe(self._create_sg_rule_precommit,
resources.SECURITY_GROUP_RULE,
events.PRECOMMIT_CREATE)
registry.subscribe(self._process_sg_rule_notification,
resources.SECURITY_GROUP_RULE,
events.AFTER_CREATE)
registry.subscribe(self._process_sg_rule_notification,
resources.SECURITY_GROUP_RULE,
events.BEFORE_DELETE)
def _clean_hash_ring(self, *args, **kwargs):
admin_context = n_context.get_admin_context()
ovn_hash_ring_db.remove_nodes_from_host(admin_context,
self.hash_ring_group)
def pre_fork_initialize(self, resource, event, trigger, payload=None):
"""Pre-initialize the ML2/OVN driver."""
atexit.register(self._clean_hash_ring)
signal.signal(signal.SIGTERM, self._clean_hash_ring)
def post_fork_initialize(self, resource, event, trigger, payload=None):
# NOTE(rtheis): This will initialize all workers (API, RPC,
# plugin service and OVN) with OVN IDL connections.
self._post_fork_event.clear()
self._ovn_client_inst = None
is_maintenance = (ovn_utils.get_method_class(trigger) ==
worker.MaintenanceWorker)
if not is_maintenance:
admin_context = n_context.get_admin_context()
self.node_uuid = ovn_hash_ring_db.add_node(admin_context,
self.hash_ring_group)
self._nb_ovn, self._sb_ovn = impl_idl_ovn.get_ovn_idls(
self, trigger, binding_events=not is_maintenance)
# AGENTS must be populated after fork so if ovn-controller is stopped
# before a worker handles a get_agents request, we still show agents
populate_agents(self)
# Override agents API methods
self.patch_plugin_merge("get_agents", get_agents)
self.patch_plugin_choose("get_agent", get_agent)
self.patch_plugin_choose("update_agent", update_agent)
self.patch_plugin_choose("delete_agent", delete_agent)
# Now IDL connections can be safely used.
self._post_fork_event.set()
if is_maintenance:
            # Call the synchronization task if it's the maintenance worker
            # This syncs the neutron DB to the OVN-NB DB only in inconsistent states
self.nb_synchronizer = ovn_db_sync.OvnNbSynchronizer(
self._plugin,
self._nb_ovn,
self._sb_ovn,
ovn_conf.get_ovn_neutron_sync_mode(),
self
)
self.nb_synchronizer.sync()
            # This syncs the neutron DB to the OVN-SB DB only in inconsistent states
self.sb_synchronizer = ovn_db_sync.OvnSbSynchronizer(
self._plugin,
self._sb_ovn,
self
)
self.sb_synchronizer.sync()
self._maintenance_thread = maintenance.MaintenanceThread()
self._maintenance_thread.add_periodics(
maintenance.DBInconsistenciesPeriodics(self._ovn_client))
self._maintenance_thread.add_periodics(
maintenance.HashRingHealthCheckPeriodics(
self.hash_ring_group))
self._maintenance_thread.start()
def _create_security_group_precommit(self, resource, event, trigger,
**kwargs):
ovn_revision_numbers_db.create_initial_revision(
kwargs['context'], kwargs['security_group']['id'],
ovn_const.TYPE_SECURITY_GROUPS)
def _create_security_group(self, resource, event, trigger,
security_group, **kwargs):
self._ovn_client.create_security_group(security_group)
def _delete_security_group(self, resource, event, trigger,
security_group_id, **kwargs):
self._ovn_client.delete_security_group(kwargs['context'],
security_group_id)
def _update_security_group(self, resource, event, trigger,
security_group, **kwargs):
# OVN doesn't care about updates to security groups, only if they
# exist or not. We are bumping the revision number here so it
# doesn't show as inconsistent to the maintenance periodic task
ovn_revision_numbers_db.bump_revision(
kwargs['context'], security_group, ovn_const.TYPE_SECURITY_GROUPS)
def _create_sg_rule_precommit(self, resource, event, trigger, **kwargs):
sg_rule = kwargs.get('security_group_rule')
context = kwargs.get('context')
ovn_revision_numbers_db.create_initial_revision(
context, sg_rule['id'], ovn_const.TYPE_SECURITY_GROUP_RULES)
def _process_sg_rule_notification(
self, resource, event, trigger, **kwargs):
if event == events.AFTER_CREATE:
self._ovn_client.create_security_group_rule(
kwargs.get('security_group_rule'))
elif event == events.BEFORE_DELETE:
sg_rule = self._plugin.get_security_group_rule(
kwargs['context'], kwargs.get('security_group_rule_id'))
self._ovn_client.delete_security_group_rule(
kwargs['context'],
sg_rule)
def _is_network_type_supported(self, network_type):
return (network_type in [const.TYPE_LOCAL,
const.TYPE_FLAT,
const.TYPE_GENEVE,
const.TYPE_VLAN])
def _validate_network_segments(self, network_segments):
for network_segment in network_segments:
network_type = network_segment['network_type']
segmentation_id = network_segment['segmentation_id']
physical_network = network_segment['physical_network']
LOG.debug('Validating network segment with '
'type %(network_type)s, '
'segmentation ID %(segmentation_id)s, '
'physical network %(physical_network)s',
{'network_type': network_type,
'segmentation_id': segmentation_id,
'physical_network': physical_network})
if not self._is_network_type_supported(network_type):
msg = _('Network type %s is not supported') % network_type
raise n_exc.InvalidInput(error_message=msg)
def create_network_precommit(self, context):
"""Allocate resources for a new network.
:param context: NetworkContext instance describing the new
network.
Create a new network, allocating resources as necessary in the
database. Called inside transaction context on session. Call
cannot block. Raising an exception will result in a rollback
of the current transaction.
"""
self._validate_network_segments(context.network_segments)
ovn_revision_numbers_db.create_initial_revision(
context._plugin_context, context.current['id'],
ovn_const.TYPE_NETWORKS)
def create_network_postcommit(self, context):
"""Create a network.
:param context: NetworkContext instance describing the new
network.
Called after the transaction commits. Call can block, though
will block the entire process so care should be taken to not
drastically affect performance. Raising an exception will
cause the deletion of the resource.
"""
network = context.current
self._ovn_client.create_network(network)
def update_network_precommit(self, context):
"""Update resources of a network.
:param context: NetworkContext instance describing the new
state of the network, as well as the original state prior
to the update_network call.
Update values of a network, updating the associated resources
in the database. Called inside transaction context on session.
Raising an exception will result in rollback of the
transaction.
update_network_precommit is called for all changes to the
network state. It is up to the mechanism driver to ignore
state or state changes that it does not know or care about.
"""
self._validate_network_segments(context.network_segments)
def update_network_postcommit(self, context):
"""Update a network.
:param context: NetworkContext instance describing the new
state of the network, as well as the original state prior
to the update_network call.
Called after the transaction commits. Call can block, though
will block the entire process so care should be taken to not
drastically affect performance. Raising an exception will
cause the deletion of the resource.
update_network_postcommit is called for all changes to the
network state. It is up to the mechanism driver to ignore
state or state changes that it does not know or care about.
"""
# FIXME(lucasagomes): We can delete this conditional after
# https://bugs.launchpad.net/neutron/+bug/1739798 is fixed.
if context._plugin_context.session.is_active:
return
self._ovn_client.update_network(context.current)
def delete_network_postcommit(self, context):
"""Delete a network.
:param context: NetworkContext instance describing the current
state of the network, prior to the call to delete it.
Called after the transaction commits. Call can block, though
will block the entire process so care should be taken to not
drastically affect performance. Runtime errors are not
expected, and will not prevent the resource from being
deleted.
"""
self._ovn_client.delete_network(
context._plugin_context,
context.current['id'])
def create_subnet_precommit(self, context):
ovn_revision_numbers_db.create_initial_revision(
context._plugin_context, context.current['id'],
ovn_const.TYPE_SUBNETS)
def create_subnet_postcommit(self, context):
self._ovn_client.create_subnet(context.current,
context.network.current)
def update_subnet_postcommit(self, context):
self._ovn_client.update_subnet(
context.current, context.network.current)
def delete_subnet_postcommit(self, context):
self._ovn_client.delete_subnet(context.current['id'])
def create_port_precommit(self, context):
"""Allocate resources for a new port.
:param context: PortContext instance describing the port.
Create a new port, allocating resources as necessary in the
database. Called inside transaction context on session. Call
cannot block. Raising an exception will result in a rollback
of the current transaction.
"""
port = context.current
if ovn_utils.is_lsp_ignored(port):
return
ovn_utils.validate_and_get_data_from_binding_profile(port)
if self._is_port_provisioning_required(port, context.host):
self._insert_port_provisioning_block(context._plugin_context,
port['id'])
ovn_revision_numbers_db.create_initial_revision(
context._plugin_context, port['id'], ovn_const.TYPE_PORTS)
# in the case of router ports we also need to
# track the creation and update of the LRP OVN objects
if ovn_utils.is_lsp_router_port(port):
ovn_revision_numbers_db.create_initial_revision(
context._plugin_context, port['id'],
ovn_const.TYPE_ROUTER_PORTS)
def _is_port_provisioning_required(self, port, host, original_host=None):
vnic_type = port.get(portbindings.VNIC_TYPE, portbindings.VNIC_NORMAL)
if vnic_type not in self.supported_vnic_types:
LOG.debug('No provisioning block for port %(port_id)s due to '
'unsupported vnic_type: %(vnic_type)s',
{'port_id': port['id'], 'vnic_type': vnic_type})
return False
if port['status'] == const.PORT_STATUS_ACTIVE:
LOG.debug('No provisioning block for port %s since it is active',
port['id'])
return False
if not host:
LOG.debug('No provisioning block for port %s since it does not '
'have a host', port['id'])
return False
if host == original_host:
LOG.debug('No provisioning block for port %s since host unchanged',
port['id'])
return False
if not self._sb_ovn.chassis_exists(host):
LOG.debug('No provisioning block for port %(port_id)s since no '
'OVN chassis for host: %(host)s',
{'port_id': port['id'], 'host': host})
return False
return True
def _insert_port_provisioning_block(self, context, port_id):
# Insert a provisioning block to prevent the port from
# transitioning to active until OVN reports back that
# the port is up.
provisioning_blocks.add_provisioning_component(
context, port_id, resources.PORT,
provisioning_blocks.L2_AGENT_ENTITY
)
def _notify_dhcp_updated(self, port_id):
"""Notifies Neutron that the DHCP has been update for port."""
if provisioning_blocks.is_object_blocked(
n_context.get_admin_context(), port_id, resources.PORT):
provisioning_blocks.provisioning_complete(
n_context.get_admin_context(), port_id, resources.PORT,
provisioning_blocks.DHCP_ENTITY)
def _validate_ignored_port(self, port, original_port):
if ovn_utils.is_lsp_ignored(port):
if not ovn_utils.is_lsp_ignored(original_port):
# From not ignored port to ignored port
msg = (_('Updating device_owner to %(device_owner)s for port '
'%(port_id)s is not supported') %
{'device_owner': port['device_owner'],
'port_id': port['id']})
raise OVNPortUpdateError(resource='port', msg=msg)
elif ovn_utils.is_lsp_ignored(original_port):
# From ignored port to not ignored port
msg = (_('Updating device_owner for port %(port_id)s owned by '
'%(device_owner)s is not supported') %
{'port_id': port['id'],
'device_owner': original_port['device_owner']})
raise OVNPortUpdateError(resource='port', msg=msg)
def create_port_postcommit(self, context):
"""Create a port.
:param context: PortContext instance describing the port.
Called after the transaction completes. Call can block, though
will block the entire process so care should be taken to not
drastically affect performance. Raising an exception will
result in the deletion of the resource.
"""
port = copy.deepcopy(context.current)
port['network'] = context.network.current
self._ovn_client.create_port(port)
self._notify_dhcp_updated(port['id'])
def update_port_precommit(self, context):
"""Update resources of a port.
:param context: PortContext instance describing the new
state of the port, as well as the original state prior
to the update_port call.
Called inside transaction context on session to complete a
port update as defined by this mechanism driver. Raising an
exception will result in rollback of the transaction.
update_port_precommit is called for all changes to the port
state. It is up to the mechanism driver to ignore state or
state changes that it does not know or care about.
"""
port = context.current
original_port = context.original
self._validate_ignored_port(port, original_port)
ovn_utils.validate_and_get_data_from_binding_profile(port)
if self._is_port_provisioning_required(port, context.host,
context.original_host):
self._insert_port_provisioning_block(context._plugin_context,
port['id'])
if ovn_utils.is_lsp_router_port(port):
# handle the case when an existing port is added to a
# logical router so we need to track the creation of the lrp
if not ovn_utils.is_lsp_router_port(original_port):
ovn_revision_numbers_db.create_initial_revision(
context._plugin_context, port['id'],
ovn_const.TYPE_ROUTER_PORTS, may_exist=True)
def update_port_postcommit(self, context):
"""Update a port.
:param context: PortContext instance describing the new
state of the port, as well as the original state prior
to the update_port call.
Called after the transaction completes. Call can block, though
will block the entire process so care should be taken to not
drastically affect performance. Raising an exception will
result in the deletion of the resource.
update_port_postcommit is called for all changes to the port
state. It is up to the mechanism driver to ignore state or
state changes that it does not know or care about.
"""
port = copy.deepcopy(context.current)
port['network'] = context.network.current
original_port = copy.deepcopy(context.original)
original_port['network'] = context.network.current
# NOTE(mjozefcz): Check if port is in migration state. If so update
# the port status from DOWN to UP in order to generate 'fake'
# vif-interface-plugged event. This workaround is needed to
# perform live-migration with live_migration_wait_for_vif_plug=True.
if ((port['status'] == const.PORT_STATUS_DOWN and
ovn_const.MIGRATING_ATTR in port[portbindings.PROFILE].keys() and
port[portbindings.VIF_TYPE] == portbindings.VIF_TYPE_OVS)):
admin_context = n_context.get_admin_context()
LOG.info("Setting port %s status from DOWN to UP in order "
"to emit vif-interface-plugged event.",
port['id'])
self._plugin.update_port_status(admin_context, port['id'],
const.PORT_STATUS_ACTIVE)
            # The revision has been changed. In the meantime the
            # port-update event already updated the OVN configuration,
            # so there is no need to update it again here. Anyway it
            # would fail because OVN has the port with a bigger revision.
return
self._ovn_client.update_port(port, port_object=original_port)
self._notify_dhcp_updated(port['id'])
def delete_port_postcommit(self, context):
"""Delete a port.
:param context: PortContext instance describing the current
state of the port, prior to the call to delete it.
Called after the transaction completes. Call can block, though
will block the entire process so care should be taken to not
drastically affect performance. Runtime errors are not
expected, and will not prevent the resource from being
deleted.
"""
port = copy.deepcopy(context.current)
port['network'] = context.network.current
# FIXME(lucasagomes): PortContext does not have a session, therefore
# we need to use the _plugin_context attribute.
self._ovn_client.delete_port(context._plugin_context, port['id'],
port_object=port)
def bind_port(self, context):
"""Attempt to bind a port.
:param context: PortContext instance describing the port
This method is called outside any transaction to attempt to
establish a port binding using this mechanism driver. Bindings
may be created at each of multiple levels of a hierarchical
network, and are established from the top level downward. At
each level, the mechanism driver determines whether it can
bind to any of the network segments in the
context.segments_to_bind property, based on the value of the
context.host property, any relevant port or network
attributes, and its own knowledge of the network topology. At
the top level, context.segments_to_bind contains the static
segments of the port's network. At each lower level of
binding, it contains static or dynamic segments supplied by
the driver that bound at the level above. If the driver is
able to complete the binding of the port to any segment in
context.segments_to_bind, it must call context.set_binding
with the binding details. If it can partially bind the port,
it must call context.continue_binding with the network
segments to be used to bind at the next lower level.
If the binding results are committed after bind_port returns,
they will be seen by all mechanism drivers as
update_port_precommit and update_port_postcommit calls. But if
some other thread or process concurrently binds or updates the
port, these binding results will not be committed, and
update_port_precommit and update_port_postcommit will not be
called on the mechanism drivers with these results. Because
binding results can be discarded rather than committed,
drivers should avoid making persistent state changes in
bind_port, or else must ensure that such state changes are
eventually cleaned up.
Implementing this method explicitly declares the mechanism
driver as having the intention to bind ports. This is inspected
by the QoS service to identify the available QoS rules you
can use with ports.
"""
port = context.current
vnic_type = port.get(portbindings.VNIC_TYPE, portbindings.VNIC_NORMAL)
if vnic_type not in self.supported_vnic_types:
LOG.debug('Refusing to bind port %(port_id)s due to unsupported '
'vnic_type: %(vnic_type)s',
{'port_id': port['id'], 'vnic_type': vnic_type})
return
profile = port.get(portbindings.PROFILE)
capabilities = []
if profile:
capabilities = profile.get('capabilities', [])
if (vnic_type == portbindings.VNIC_DIRECT and
'switchdev' not in capabilities):
LOG.debug("Refusing to bind port due to unsupported vnic_type: %s "
"with no switchdev capability", portbindings.VNIC_DIRECT)
return
# OVN chassis information is needed to ensure a valid port bind.
# Collect port binding data and refuse binding if the OVN chassis
# cannot be found.
chassis_physnets = []
try:
datapath_type, iface_types, chassis_physnets = (
self._sb_ovn.get_chassis_data_for_ml2_bind_port(context.host))
iface_types = iface_types.split(',') if iface_types else []
except RuntimeError:
LOG.debug('Refusing to bind port %(port_id)s due to '
'no OVN chassis for host: %(host)s',
{'port_id': port['id'], 'host': context.host})
return
for segment_to_bind in context.segments_to_bind:
network_type = segment_to_bind['network_type']
segmentation_id = segment_to_bind['segmentation_id']
physical_network = segment_to_bind['physical_network']
LOG.debug('Attempting to bind port %(port_id)s on host %(host)s '
'for network segment with type %(network_type)s, '
'segmentation ID %(segmentation_id)s, '
'physical network %(physical_network)s',
{'port_id': port['id'],
'host': context.host,
'network_type': network_type,
'segmentation_id': segmentation_id,
'physical_network': physical_network})
# TODO(rtheis): This scenario is only valid on an upgrade from
# neutron ML2 OVS since invalid network types are prevented during
# network creation and update. The upgrade should convert invalid
# network types. Once bug/1621879 is fixed, refuse to bind
# ports with unsupported network types.
if not self._is_network_type_supported(network_type):
LOG.info('Upgrade allowing bind port %(port_id)s with '
'unsupported network type: %(network_type)s',
{'port_id': port['id'],
'network_type': network_type})
if ((network_type in ['flat', 'vlan']) and
(physical_network not in chassis_physnets)):
LOG.info('Refusing to bind port %(port_id)s on '
'host %(host)s due to the OVN chassis '
'bridge mapping physical networks '
'%(chassis_physnets)s not supporting '
'physical network: %(physical_network)s',
{'port_id': port['id'],
'host': context.host,
'chassis_physnets': chassis_physnets,
'physical_network': physical_network})
else:
if (datapath_type == ovn_const.CHASSIS_DATAPATH_NETDEV and
ovn_const.CHASSIS_IFACE_DPDKVHOSTUSER in iface_types):
vhost_user_socket = ovn_utils.ovn_vhu_sockpath(
ovn_conf.get_ovn_vhost_sock_dir(), port['id'])
vif_type = portbindings.VIF_TYPE_VHOST_USER
port[portbindings.VIF_DETAILS].update({
portbindings.VHOST_USER_SOCKET: vhost_user_socket})
vif_details = dict(self.vif_details[vif_type])
vif_details[portbindings.VHOST_USER_SOCKET] = (
vhost_user_socket)
else:
vif_type = portbindings.VIF_TYPE_OVS
vif_details = self.vif_details[vif_type]
context.set_binding(segment_to_bind[api.ID], vif_type,
vif_details)
break
def get_workers(self):
"""Get any worker instances that should have their own process
Any driver that needs to run processes separate from the API or RPC
workers, can return a sequence of worker instances.
"""
# See doc/source/design/ovn_worker.rst for more details.
return [worker.MaintenanceWorker()]
def _update_dnat_entry_if_needed(self, port_id, up=True):
"""Update DNAT entry if using distributed floating ips."""
if not ovn_conf.is_ovn_distributed_floating_ip():
return
if not self._nb_ovn:
self._nb_ovn = self._ovn_client._nb_idl
nat = self._nb_ovn.db_find('NAT',
('logical_port', '=', port_id),
('type', '=', 'dnat_and_snat')).execute()
if not nat:
return
# We take first entry as one port can only have one FIP
nat = nat[0]
        # If the external_id doesn't exist, let's create it at this point.
# TODO(dalvarez): Remove this code in T cycle when we're sure that
# all DNAT entries have the external_id.
if not nat['external_ids'].get(ovn_const.OVN_FIP_EXT_MAC_KEY):
self._nb_ovn.db_set('NAT', nat['_uuid'],
('external_ids',
{ovn_const.OVN_FIP_EXT_MAC_KEY:
nat['external_mac']})).execute()
if up:
mac = nat['external_ids'][ovn_const.OVN_FIP_EXT_MAC_KEY]
LOG.debug("Setting external_mac of port %s to %s",
port_id, mac)
self._nb_ovn.db_set(
'NAT', nat['_uuid'],
('external_mac', mac)).execute(check_error=True)
else:
LOG.debug("Clearing up external_mac of port %s", port_id)
self._nb_ovn.db_clear(
'NAT', nat['_uuid'], 'external_mac').execute(check_error=True)
def _should_notify_nova(self, db_port):
# NOTE(twilson) It is possible for a test to override a config option
# after the plugin has been initialized so the nova_notifier attribute
# is not set on the plugin
return (cfg.CONF.notify_nova_on_port_status_changes and
hasattr(self._plugin, 'nova_notifier') and
db_port.device_owner.startswith(
const.DEVICE_OWNER_COMPUTE_PREFIX))
def set_port_status_up(self, port_id):
# Port provisioning is complete now that OVN has reported that the
# port is up. Any provisioning block (possibly added during port
# creation or when OVN reports that the port is down) must be removed.
LOG.info("OVN reports status up for port: %s", port_id)
self._update_dnat_entry_if_needed(port_id)
self._wait_for_metadata_provisioned_if_needed(port_id)
provisioning_blocks.provisioning_complete(
n_context.get_admin_context(),
port_id,
resources.PORT,
provisioning_blocks.L2_AGENT_ENTITY)
admin_context = n_context.get_admin_context()
try:
            # NOTE(lucasagomes): Router ports in OVN are never bound
            # to a host given their decentralized nature. Calling
            # provisioning_complete() - as above - doesn't do it for us
            # because the router ports are unbound, so for OVN we are
            # forcing the status here. Maybe it's something that we can
            # change in core Neutron in the future.
db_port = ml2_db.get_port(admin_context, port_id)
if not db_port:
return
if db_port.device_owner in (const.DEVICE_OWNER_ROUTER_INTF,
const.DEVICE_OWNER_DVR_INTERFACE,
const.DEVICE_OWNER_ROUTER_HA_INTF):
self._plugin.update_port_status(admin_context, port_id,
const.PORT_STATUS_ACTIVE)
elif self._should_notify_nova(db_port):
self._plugin.nova_notifier.notify_port_active_direct(db_port)
except (os_db_exc.DBReferenceError, n_exc.PortNotFound):
LOG.debug('Port not found during OVN status up report: %s',
port_id)
def set_port_status_down(self, port_id):
# Port provisioning is required now that OVN has reported that the
# port is down. Insert a provisioning block and mark the port down
# in neutron. The block is inserted before the port status update
# to prevent another entity from bypassing the block with its own
# port status update.
LOG.info("OVN reports status down for port: %s", port_id)
self._update_dnat_entry_if_needed(port_id, False)
admin_context = n_context.get_admin_context()
try:
db_port = ml2_db.get_port(admin_context, port_id)
if not db_port:
return
self._insert_port_provisioning_block(admin_context, port_id)
self._plugin.update_port_status(admin_context, port_id,
const.PORT_STATUS_DOWN)
if self._should_notify_nova(db_port):
self._plugin.nova_notifier.record_port_status_changed(
db_port, const.PORT_STATUS_ACTIVE, const.PORT_STATUS_DOWN,
None)
self._plugin.nova_notifier.send_port_status(
None, None, db_port)
except (os_db_exc.DBReferenceError, n_exc.PortNotFound):
LOG.debug("Port not found during OVN status down report: %s",
port_id)
def delete_mac_binding_entries(self, external_ip):
"""Delete all MAC_Binding entries associated to this IP address"""
mac_binds = self._sb_ovn.db_find_rows(
'MAC_Binding', ('ip', '=', external_ip)).execute() or []
for entry in mac_binds:
self._sb_ovn.db_destroy('MAC_Binding', entry.uuid).execute()
def update_segment_host_mapping(self, host, phy_nets):
"""Update SegmentHostMapping in DB"""
if not host:
return
ctx = n_context.get_admin_context()
segments = segment_service_db.get_segments_with_phys_nets(
ctx, phy_nets)
available_seg_ids = {
segment['id'] for segment in segments
if segment['network_type'] in ('flat', 'vlan')}
segment_service_db.update_segment_host_mapping(
ctx, host, available_seg_ids)
def _add_segment_host_mapping_for_segment(self, resource, event, trigger,
context, segment):
phynet = segment.physical_network
if not phynet:
return
host_phynets_map = self._sb_ovn.get_chassis_hostname_and_physnets()
hosts = {host for host, phynets in host_phynets_map.items()
if phynet in phynets}
segment_service_db.map_segment_to_hosts(context, segment.id, hosts)
def _wait_for_metadata_provisioned_if_needed(self, port_id):
"""Wait for metadata service to be provisioned.
        Wait until the metadata service has been set up for this port in the
        chassis where it resides. If metadata is disabled or DHCP is not enabled for its
subnets, this function will return right away.
"""
if ovn_conf.is_ovn_metadata_enabled() and self._sb_ovn:
# Wait until metadata service has been setup for this port in the
# chassis it resides.
result = (
self._sb_ovn.get_logical_port_chassis_and_datapath(port_id))
if not result:
LOG.warning("Logical port %s doesn't exist in OVN", port_id)
return
chassis, datapath = result
if not chassis:
LOG.warning("Logical port %s is not bound to a "
"chassis", port_id)
return
# Check if the port belongs to some IPv4 subnet with DHCP enabled.
context = n_context.get_admin_context()
port = self._plugin.get_port(context, port_id)
port_subnet_ids = set(
ip['subnet_id'] for ip in port['fixed_ips'] if
n_utils.get_ip_version(ip['ip_address']) == const.IP_VERSION_4)
if not port_subnet_ids:
# The port doesn't belong to any IPv4 subnet
return
subnets = self._plugin.get_subnets(context, filters=dict(
network_id=[port['network_id']], ip_version=[4],
enable_dhcp=True))
subnet_ids = set(
s['id'] for s in subnets if s['id'] in port_subnet_ids)
if not subnet_ids:
return
try:
n_utils.wait_until_true(
lambda: datapath in
self._sb_ovn.get_chassis_metadata_networks(chassis),
timeout=METADATA_READY_WAIT_TIMEOUT,
exception=MetadataServiceReadyWaitTimeoutException)
except MetadataServiceReadyWaitTimeoutException:
# If we reach this point it means that metadata agent didn't
# provision the datapath for this port on its chassis. Either
# the agent is not running or it crashed. We'll complete the
# provisioning block though.
LOG.warning("Metadata service is not ready for port %s, check"
" networking-ovn-metadata-agent status/logs.",
port_id)
def agent_alive(self, chassis, type_):
nb_cfg = chassis.nb_cfg
key = ovn_const.OVN_LIVENESS_CHECK_EXT_ID_KEY
if type_ == ovn_const.OVN_METADATA_AGENT:
nb_cfg = int(chassis.external_ids.get(
ovn_const.OVN_AGENT_METADATA_SB_CFG_KEY, 0))
key = ovn_const.METADATA_LIVENESS_CHECK_EXT_ID_KEY
try:
updated_at = timeutils.parse_isotime(chassis.external_ids[key])
except KeyError:
updated_at = timeutils.utcnow(with_timezone=True)
# Allow a maximum of 1 difference between expected and read values
# to avoid false positives.
if self._nb_ovn.nb_global.nb_cfg - nb_cfg <= 1:
# update the time of our successful check
value = timeutils.utcnow(with_timezone=True).isoformat()
self._sb_ovn.db_set('Chassis', chassis.uuid,
('external_ids', {key: value})).execute(
check_error=True)
return True
now = timeutils.utcnow(with_timezone=True)
if (now - updated_at).total_seconds() < cfg.CONF.agent_down_time:
            # nb_cfg lags, but the agent has not yet exceeded agent_down_time
return True
return False
def _format_agent_info(self, chassis, binary, agent_id, type_,
description, alive):
return {
'binary': binary,
'host': chassis.hostname,
'heartbeat_timestamp': timeutils.utcnow(),
'availability_zone': 'n/a',
'topic': 'n/a',
'description': description,
'configurations': {
'chassis_name': chassis.name,
'bridge-mappings':
chassis.external_ids.get('ovn-bridge-mappings', '')},
'start_flag': True,
'agent_type': type_,
'id': agent_id,
'alive': alive,
'admin_state_up': True}
def agents_from_chassis(self, chassis):
agent_dict = {}
# Check for ovn-controller / ovn-controller gateway
agent_type = ovn_const.OVN_CONTROLLER_AGENT
# Only the chassis name stays consistent after ovn-controller restart
agent_id = chassis.name
if ('enable-chassis-as-gw' in
chassis.external_ids.get('ovn-cms-options', [])):
agent_type = ovn_const.OVN_CONTROLLER_GW_AGENT
alive = self.agent_alive(chassis, agent_type)
description = chassis.external_ids.get(
ovn_const.OVN_AGENT_DESC_KEY, '')
agent_dict[agent_id] = self._format_agent_info(
chassis, 'ovn-controller', agent_id, agent_type, description,
alive)
# Check for the metadata agent
metadata_agent_id = chassis.external_ids.get(
ovn_const.OVN_AGENT_METADATA_ID_KEY)
if metadata_agent_id:
agent_type = ovn_const.OVN_METADATA_AGENT
alive = self.agent_alive(chassis, agent_type)
description = chassis.external_ids.get(
ovn_const.OVN_AGENT_METADATA_DESC_KEY, '')
agent_dict[metadata_agent_id] = self._format_agent_info(
chassis, 'networking-ovn-metadata-agent',
metadata_agent_id, agent_type, description, alive)
return agent_dict
def patch_plugin_merge(self, method_name, new_fn, op=operator.add):
old_method = getattr(self._plugin, method_name)
@functools.wraps(old_method)
def fn(slf, *args, **kwargs):
new_method = types.MethodType(new_fn, self._plugin)
results = old_method(*args, **kwargs)
return op(results, new_method(*args, _driver=self, **kwargs))
setattr(self._plugin, method_name, types.MethodType(fn, self._plugin))
def patch_plugin_choose(self, method_name, new_fn):
old_method = getattr(self._plugin, method_name)
@functools.wraps(old_method)
def fn(slf, *args, **kwargs):
new_method = types.MethodType(new_fn, self._plugin)
try:
return new_method(*args, _driver=self, **kwargs)
except KeyError:
return old_method(*args, **kwargs)
setattr(self._plugin, method_name, types.MethodType(fn, self._plugin))
def ping_chassis(self):
"""Update NB_Global.nb_cfg so that Chassis.nb_cfg will increment"""
with self._nb_ovn.create_transaction(check_error=True,
bump_nb_cfg=True) as txn:
txn.add(self._nb_ovn.check_liveness())
def populate_agents(driver):
for ch in driver._sb_ovn.tables['Chassis'].rows.values():
# update the cache, rows are hashed on uuid but it is the name that
# stays consistent across ovn-controller restarts
AGENTS.update({ch.name: ch})
def get_agents(self, context, filters=None, fields=None, _driver=None):
_driver.ping_chassis()
filters = filters or {}
agent_list = []
populate_agents(_driver)
for ch in AGENTS.values():
for agent in _driver.agents_from_chassis(ch).values():
if all(agent[k] in v for k, v in filters.items()):
agent_list.append(agent)
return agent_list
def get_agent(self, context, id, fields=None, _driver=None):
chassis = None
try:
        # look up the Chassis by *name*, which is what the agent id holds
chassis = _driver._sb_ovn.lookup('Chassis', id)
except idlutils.RowNotFound:
        # If the chassis is not found by name, check for the metadata agent ID
for ch in _driver._sb_ovn.tables['Chassis'].rows.values():
metadata_agent_id = ch.external_ids.get(
ovn_const.OVN_AGENT_METADATA_ID_KEY)
if id == metadata_agent_id:
chassis = ch
break
else:
raise n_exc.agent.AgentNotFound(id=id)
return _driver.agents_from_chassis(chassis)[id]
def update_agent(self, context, id, agent, _driver=None):
ovn_agent = get_agent(self, None, id, _driver=_driver)
chassis_name = ovn_agent['configurations']['chassis_name']
agent_type = ovn_agent['agent_type']
agent = agent['agent']
    # neutron-client always passes admin_state_up while the openstack client
    # does not. We only fall through to raising when admin_state_up is being
    # set to False; in every other case the end state is already fine.
if not agent.get('admin_state_up', True):
pass
elif 'description' in agent:
_driver._sb_ovn.set_chassis_neutron_description(
chassis_name, agent['description'],
agent_type).execute(check_error=True)
return agent
else:
# admin_state_up=True w/o description
return agent
raise n_exc.BadRequest(resource='agent',
msg='OVN agent status cannot be updated')
def delete_agent(self, context, id, _driver=None):
get_agent(self, None, id, _driver=_driver)
raise n_exc.BadRequest(resource='agent',
msg='OVN agents cannot be deleted')
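# Hedged illustration (not part of the driver above): patch_plugin_choose()
# replaces a plugin method with an OVN-aware version and falls back to the
# original implementation on KeyError.  The self-contained toy below reproduces
# only that control flow with a made-up DummyPlugin; all names are illustrative.
if __name__ == '__main__':
    import functools
    import types
    class DummyPlugin(object):
        def get_agent(self, agent_id):
            return {'id': agent_id, 'source': 'original plugin'}
    def ovn_get_agent(self, agent_id, _driver=None):
        ovn_agents = {'chassis-1': {'id': 'chassis-1', 'source': 'ovn override'}}
        return ovn_agents[agent_id]  # KeyError -> fall back to the original
    plugin = DummyPlugin()
    old_method = plugin.get_agent
    @functools.wraps(old_method)
    def patched(slf, agent_id):
        new_method = types.MethodType(ovn_get_agent, plugin)
        try:
            return new_method(agent_id, _driver=None)
        except KeyError:
            return old_method(agent_id)
    plugin.get_agent = types.MethodType(patched, plugin)
    print(plugin.get_agent('chassis-1'))   # handled by the OVN override
    print(plugin.get_agent('unknown-id'))  # falls back to the original plugin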
| 45.109938
| 79
| 0.632131
|
e135833c9b4bd886d10f86e27db08e84ad6df151
| 1,315
|
py
|
Python
|
app/core/tests/tests_admin.py
|
DiegoTGJ/django-udemy-recipe
|
0db83f064664db76d2cca240b88b5261165716d2
|
[
"MIT"
] | null | null | null |
app/core/tests/tests_admin.py
|
DiegoTGJ/django-udemy-recipe
|
0db83f064664db76d2cca240b88b5261165716d2
|
[
"MIT"
] | null | null | null |
app/core/tests/tests_admin.py
|
DiegoTGJ/django-udemy-recipe
|
0db83f064664db76d2cca240b88b5261165716d2
|
[
"MIT"
] | null | null | null |
from django.test import TestCase, Client
from django.contrib.auth import get_user_model
from django.urls import reverse
class adminSiteTests(TestCase):
def setUp(self):
self.client = Client()
self.admin_user = get_user_model().objects.create_superuser(
email='admin@test.com',
password='password123'
)
self.client.force_login(self.admin_user)
self.user = get_user_model().objects.create_user(
email='test@test.com',
password='password123',
name='Nombre prueba'
)
def test_users_listed(self):
""" Test that users are listed on user page """
url = reverse('admin:core_user_changelist')
res = self.client.get(url)
self.assertContains(res, self.user.name)
self.assertContains(res, self.user.email)
def test_user_page_change(self):
"""Test that the user edit page works"""
url = reverse('admin:core_user_change', args=[self.user.id])
res = self.client.get(url)
self.assertEqual(res.status_code, 200)
def test_create_user_page(self):
""" Test that the create user page works """
url = reverse('admin:core_user_add')
res = self.client.get(url)
self.assertEqual(res.status_code, 200)
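# Hedged illustration (comments only): the reverse() names used above resolve
# to the stock Django admin URLs, roughly /admin/core/user/ for the changelist,
# /admin/core/user/<id>/change/ for the edit page and /admin/core/user/add/ for
# the add page.  A minimal admin registration that would satisfy the listing
# assertions might look like the sketch below; the project's real admin.py may
# differ, and list_display is only an assumption based on test_users_listed.
#
#   from django.contrib import admin
#   from core import models
#
#   class UserAdmin(admin.ModelAdmin):
#       ordering = ['id']
#       list_display = ['email', 'name']
#
#   admin.site.register(models.User, UserAdmin)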
| 29.886364
| 68
| 0.635741
|
13f25f2e7a128497195a492b1b7986ba9ad43c78
| 671
|
py
|
Python
|
lib/core/functions.py
|
vasgaowei/ts-cam-voc
|
58fcb1d3b2667331d9a16810123d4b2fba23c3e1
|
[
"MIT"
] | null | null | null |
lib/core/functions.py
|
vasgaowei/ts-cam-voc
|
58fcb1d3b2667331d9a16810123d4b2fba23c3e1
|
[
"MIT"
] | null | null | null |
lib/core/functions.py
|
vasgaowei/ts-cam-voc
|
58fcb1d3b2667331d9a16810123d4b2fba23c3e1
|
[
"MIT"
] | null | null | null |
import os
import torch.backends.cudnn as cudnn
import numpy as np
from utils import fix_random_seed, backup_codes, rm
def prepare_env(cfg):
# fix random seed
fix_random_seed(cfg.BASIC.SEED)
# cudnn
    cudnn.benchmark = cfg.CUDNN.BENCHMARK  # benchmark mode lets cuDNN auto-tune algorithms, which can improve speed
    cudnn.deterministic = cfg.CUDNN.DETERMINISTIC  # trade some speed for reproducible results
    cudnn.enabled = cfg.CUDNN.ENABLE  # master switch for the cuDNN backend
# backup codes
if cfg.BASIC.BACKUP_CODES:
backup_dir = os.path.join(cfg.BASIC.SAVE_DIR, 'backup')
rm(backup_dir)
backup_codes(cfg.BASIC.ROOT_DIR, backup_dir, cfg.BASIC.BACKUP_LIST)
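# Hedged sketch (fix_random_seed lives in the repo's utils module and is not
# shown here): it is assumed to seed the usual RNG sources so that, together
# with cudnn.deterministic above, runs become repeatable.  A typical minimal
# version, runnable on its own:
if __name__ == '__main__':
    import random
    import torch
    def fix_random_seed_sketch(seed):
        # Seed Python, NumPy and PyTorch (CPU and every visible GPU) alike.
        random.seed(seed)
        np.random.seed(seed)
        torch.manual_seed(seed)
        torch.cuda.manual_seed_all(seed)
    fix_random_seed_sketch(0)
    # With deterministic=True and benchmark=False, cuDNN picks convolution
    # algorithms deterministically, trading some speed for reproducibility.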
| 29.173913
| 111
| 0.724292
|
ffa72ab0a9b5ab5bf4c2c9e8df1049062b66040e
| 3,967
|
py
|
Python
|
myanonamouse/views.py
|
OdinGitDat/WhatManager2
|
ddbce0fa1ff4e1fc44bfa726c4f7eace4adbe8a9
|
[
"MIT"
] | 1
|
2018-03-23T13:11:30.000Z
|
2018-03-23T13:11:30.000Z
|
myanonamouse/views.py
|
SavageCore/WhatManager2
|
5a0fc5af8c9168fa199124aaa2aa56495dc0a40b
|
[
"MIT"
] | null | null | null |
myanonamouse/views.py
|
SavageCore/WhatManager2
|
5a0fc5af8c9168fa199124aaa2aa56495dc0a40b
|
[
"MIT"
] | null | null | null |
import traceback
import time
from django.contrib.auth.decorators import login_required, user_passes_test
from django.http.response import HttpResponse
from WhatManager2.utils import json_return_method
from home.models import ReplicaSet, LogEntry, TorrentAlreadyAddedException
from myanonamouse import trans_sync, manage_mam
from myanonamouse.models import MAMTorrent, MAMTransTorrent
from myanonamouse.utils import MAMClient
@login_required
@user_passes_test(lambda u: u.is_superuser is True)
@json_return_method
def sync(request):
start_time = time.time()
try:
master = ReplicaSet.get_myanonamouse_master()
trans_sync.sync_all_instances_db(master)
except Exception as ex:
tb = traceback.format_exc()
LogEntry.add(request.user, u'error', u'Error syncing MyAnonaMouse master DB: {0}({1})'
.format(type(ex).__name__, ex), tb)
return {
'success': False,
'error': unicode(ex),
'traceback': tb
}
time_taken = time.time() - start_time
LogEntry.add(request.user, u'info',
u'Completed MyAnonaMouse sync in {0:.3f}s.'
.format(time_taken))
return {
'success': True
}
@login_required
@user_passes_test(lambda u: u.is_superuser is True)
@json_return_method
def add_torrent(request, torrent_id):
mam_client = MAMClient.get()
try:
m_torrent = manage_mam.add_mam_torrent(torrent_id, mam_client=mam_client)
mam_torrent = m_torrent.mam_torrent
LogEntry.add(request.user, u'action', u'Added {0} to {1}'
.format(m_torrent, m_torrent.instance))
return {
'success': True,
'title': mam_torrent.title,
}
except TorrentAlreadyAddedException:
mam_torrent = None
try:
mam_torrent = MAMTorrent.objects.get(id=torrent_id)
except MAMTorrent.DoesNotExist:
pass
LogEntry.add(request.user, u'info',
u'Tried adding MyAnonaMouse torrent_id={0}, already added.'.format(torrent_id))
return {
'success': False,
'error_code': u'already_added',
'error': u'Already added.',
'title': (mam_torrent.title if mam_torrent
else '<<< Unable to find torrent >>>'),
}
except Exception as ex:
tb = traceback.format_exc()
LogEntry.add(request.user, u'error',
u'Tried adding MyAnonaMouse torrent_id={0}. Error: {1}'
.format(torrent_id, unicode(ex)), tb)
return {
'success': False,
'error': unicode(ex),
'traceback': tb,
}
@login_required
@json_return_method
def torrents_info(request):
include_info_hash = 'info_hash' in request.GET
def get_response(r_id, r_torrent):
response = {'id': r_id}
if r_torrent is None:
response['status'] = 'missing'
else:
if include_info_hash:
response['info_hash'] = r_torrent.info_hash
if r_torrent.torrent_done == 1:
response['status'] = 'downloaded'
else:
response['status'] = 'downloading'
response['progress'] = r_torrent.torrent_done
return response
ids = [int(i) for i in request.GET['ids'].split(',')]
torrents = MAMTransTorrent.objects.filter(mam_torrent_id__in=ids)
torrents = {t.mam_torrent_id: t for t in torrents}
for torrent in torrents.itervalues():
if torrent.torrent_done < 1:
torrent.sync_t_torrent()
return [get_response(t_id, torrents.get(t_id)) for t_id in ids]
@login_required
@user_passes_test(lambda u: u.is_superuser is True)
def get_torrent_file(request, torrent_id):
torrent = MAMTorrent.objects.get(id=torrent_id)
return HttpResponse(torrent.torrent_file, content_type='application/x-bittorrent')
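# Hedged sketch (comments only): json_return_method is imported from
# WhatManager2.utils and is not shown in this module.  It is assumed to turn
# the dicts returned by the views above into JSON HTTP responses; a minimal
# stand-in with that shape could look like:
#
#   import json
#   from functools import wraps
#   from django.http import HttpResponse
#
#   def json_return_method(view):
#       @wraps(view)
#       def wrapper(request, *args, **kwargs):
#           data = view(request, *args, **kwargs)
#           return HttpResponse(json.dumps(data),
#                               content_type='application/json')
#       return wrapper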
| 33.905983
| 100
| 0.63146
|
bb73ea26ed2834e10e5fe8e36fca26ccd9c68f5e
| 15,359
|
py
|
Python
|
fastseq/optimizer/fairseq/generate.py
|
cep21/fastseq
|
fbdb59ebbc2c08178e0551fbe59dcc27c8992822
|
[
"MIT"
] | null | null | null |
fastseq/optimizer/fairseq/generate.py
|
cep21/fastseq
|
fbdb59ebbc2c08178e0551fbe59dcc27c8992822
|
[
"MIT"
] | null | null | null |
fastseq/optimizer/fairseq/generate.py
|
cep21/fastseq
|
fbdb59ebbc2c08178e0551fbe59dcc27c8992822
|
[
"MIT"
] | null | null | null |
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.
"""Optimize fairseq-generate (v0.9.0)"""
import logging
import sys
from multiprocessing import Queue, JoinableQueue
from torch.multiprocessing import Process
import torch
from fairseq_cli.generate import main
from fairseq import bleu, checkpoint_utils, options, progress_bar, tasks, utils
from fairseq.data import encoders
from fairseq.meters import StopwatchMeter, TimeMeter
from fairseq.options import add_generation_args
from fairseq.utils import apply_to_sample
from fastseq.utils.api_decorator import replace
GENERATE_FINISHED = "done"
POSTPROCESS_FINISHED = None
def move_to_cpu(sample):
def _move_to_cpu(tensor):
        # PyTorch has poor support for half-precision tensors (bfloat16 and
        # float16) on CPU. Move any such tensors to float32.
if tensor.dtype in {torch.bfloat16, torch.float16}:
return tensor.cpu().to(dtype=torch.float32)
else:
return tensor.cpu()
return apply_to_sample(_move_to_cpu, sample)
class IOProcess(Process):
"""
    Single process to handle IO and compute metrics
"""
def __init__(self, args, task, message_queue):
"""
Process to handle IO and compute metrics
Args:
            args (Namespace): parameters for the model and generation
            task (fairseq.tasks.fairseq_task.Fairseq):
                used to load the dict for detokenization
            message_queue (multiprocessing.Queue): queue that stores the output
"""
super(IOProcess, self).__init__()
self.tgt_dict = task.target_dictionary
# Generate and compute BLEU score
if args.sacrebleu:
self.scorer = bleu.SacrebleuScorer()
else:
self.scorer = bleu.Scorer(self.tgt_dict.pad(), self.tgt_dict.eos(),
self.tgt_dict.unk())
self.args = args
self.message_queue = message_queue
self.has_target = False
def run(self):
while True:
msg = self.message_queue.get()
if isinstance(msg, tuple):
t, h = msg
if hasattr(self.scorer, 'add_string'):
self.scorer.add_string(t, h)
else:
self.scorer.add(t, h)
self.has_target = True
elif msg == GENERATE_FINISHED:
if self.has_target:
print('| Generate {} with beam={}: {}'.format(
self.args.gen_subset, self.args.beam,
self.scorer.result_string()))
break
else:
print(msg)
self.message_queue.task_done()
self.message_queue.close()
self.message_queue.join_thread()
class PostProcess(Process):
'''
    Use multiple processes to do the detokenization
'''
def __init__(self, args, task, data_queue, message_queue):
"""
Handle detokenize and belu score computation
Args:
            args (Namespace): parameters for the model and generation
            task (fairseq.tasks.fairseq_task.Fairseq):
                used to load the dict for detokenization
            data_queue (multiprocessing.Queue):
                queue that stores tensor data for detokenization
            message_queue (multiprocessing.Queue): queue that stores the output
"""
super(PostProcess, self).__init__()
# Set dictionaries
try:
self.src_dict = getattr(task, 'source_dictionary', None)
except NotImplementedError:
self.src_dict = None
self.tgt_dict = task.target_dictionary
# Load alignment dictionary for unknown word replacement
# (None if no unknown word replacement, empty if no path to align dictionary)
self.align_dict = utils.load_align_dict(args.replace_unk)
# Generate and compute BLEU score
if args.sacrebleu:
self.scorer = bleu.SacrebleuScorer()
else:
self.scorer = bleu.Scorer(self.tgt_dict.pad(), self.tgt_dict.eos(),
self.tgt_dict.unk())
self.args = args
self.task = task
self.data_queue = data_queue
self.message_queue = message_queue
if args.decode_hypothesis:
self.tokenizer = encoders.build_tokenizer(args)
self.bpe = encoders.build_bpe(args)
def _decode(self, x):
if self.bpe is not None:
x = self.bpe.decode(x)
if self.tokenizer is not None:
x = self.tokenizer.decode(x)
return x
def _detokenize(self, sample, hypos):
""" detokenize and compute BELU """
message_list = []
for i, sample_id in enumerate(sample['id'].tolist()):
has_target = sample['target'] is not None
# Remove padding
src_tokens = utils.strip_pad(
sample['net_input']['src_tokens'][i, :], self.tgt_dict.pad())
target_tokens = None
if has_target:
target_tokens = utils.strip_pad(sample['target'][i, :],
self.tgt_dict.pad()).int()
# Either retrieve the original sentences or regenerate them from tokens.
if self.align_dict is not None:
src_str = self.task.dataset(
self.args.gen_subset).src.get_original_text(sample_id)
target_str = self.task.dataset(
self.args.gen_subset).tgt.get_original_text(sample_id)
else:
if self.src_dict is not None:
src_str = self.src_dict.string(src_tokens,
self.args.remove_bpe)
else:
src_str = ""
if has_target:
target_str = self.tgt_dict.string(
target_tokens, self.args.remove_bpe, escape_unk=True)
if not self.args.quiet:
if self.src_dict is not None:
if self.args.decode_hypothesis:
message_list.append('S-{}\t{}'.format(
sample_id, self._decode(src_str)))
else:
message_list.append('S-{}\t{}'.format(
sample_id, src_str))
if has_target:
if self.args.decode_hypothesis:
message_list.append('T-{}\t{}'.format(
sample_id, self._decode(target_str)))
else:
message_list.append('T-{}\t{}'.format(
sample_id, target_str))
# Process top predictions
for j, hypo in enumerate(hypos[i][:self.args.nbest]):
hypo_tokens, hypo_str, alignment = \
utils.post_process_prediction(
hypo_tokens=hypo['tokens'].int(),
src_str=src_str,
alignment=hypo['alignment'],
align_dict=self.align_dict,
tgt_dict=self.tgt_dict,
remove_bpe=self.args.remove_bpe,
)
if not self.args.quiet:
if self.args.decode_hypothesis:
detok_hypo_str = self._decode(hypo_str)
message_list.append('D-{}\t{}\t{}'.format(
sample_id, hypo['score'], detok_hypo_str))
else:
message_list.append('H-{}\t{}\t{}'.format(
sample_id, hypo['score'], hypo_str))
message_list.append('P-{}\t{}'.format(
sample_id, ' '.join(
map(
lambda x: '{:.4f}'.format(x),
hypo['positional_scores'].tolist(),
))))
if self.args.print_alignment:
message_list.append('A-{}\t{}'.format(
sample_id, ' '.join([
'{}-{}'.format(src_idx, tgt_idx)
for src_idx, tgt_idx in alignment
])))
if self.args.print_step:
message_list.append('I-{}\t{}'.format(
sample_id, hypo['steps']))
if getattr(self.args, 'retain_iter_history', False):
message_list.append("\n".join([
'E-{}_{}\t{}'.format(sample_id, step,
utils.post_process_prediction(
h['tokens'].int(),
                                    src_str, None, None,
self.tgt_dict, None)[1])
for step, h in enumerate(hypo['history'])
]))
# Score only the top hypothesis
if has_target and j == 0:
if (self.align_dict is not None or
self.args.remove_bpe is not None):
# Convert back to tokens for evaluation with unk
# replacement and/or without BPE
target_tokens = self.tgt_dict.encode_line(
target_str, add_if_not_exist=True)
if hasattr(self.scorer, 'add_string'):
self.message_queue.put((target_str, hypo_str))
else:
self.message_queue.put((target_tokens, hypo_tokens))
self.message_queue.put('\n'.join(message_list))
def run(self):
while True:
r = self.data_queue.get()
if r == GENERATE_FINISHED:
self.data_queue.put(POSTPROCESS_FINISHED)
break
elif r is POSTPROCESS_FINISHED:
self.data_queue.put(POSTPROCESS_FINISHED)
break
else:
sample, hypos = r
self._detokenize(sample, hypos)
self.data_queue.close()
self.data_queue.join_thread()
self.message_queue.close()
self.message_queue.join_thread()
self.message_queue.join()
original_add_generation_args = add_generation_args
@replace(add_generation_args)
def add_generation_args_v1(parser):
group = original_add_generation_args(parser)
# fmt: off
group.add_argument(
'--postprocess-workers',
default=1,
type=int,
choices=range(1, 128, 1),
metavar='N',
        help='number of workers for post-processing')
group.add_argument(
'--decode-hypothesis',
action="store_true")
# fmt: on
@replace(main)
def main_v1(args):
assert args.path is not None, '--path required for generation!'
assert not args.sampling or args.nbest == args.beam, \
'--sampling requires --nbest to be equal to --beam'
assert args.replace_unk is None or args.raw_text, \
'--replace-unk requires a raw text dataset (--raw-text)'
utils.import_user_module(args)
if args.max_tokens is None and args.max_sentences is None:
args.max_tokens = 12000
print(args)
use_cuda = torch.cuda.is_available() and not args.cpu
# Load dataset splits
task = tasks.setup_task(args)
task.load_dataset(args.gen_subset)
# Set dictionaries
try:
src_dict = getattr(task, 'source_dictionary', None)
except NotImplementedError:
src_dict = None
tgt_dict = task.target_dictionary
# Load ensemble
print('| loading model(s) from {}'.format(args.path))
models, model_args_ = checkpoint_utils.load_model_ensemble(
args.path.split(':'),
arg_overrides=eval(args.model_overrides),
task=task,
)
# Optimize ensemble for generation
for model in models:
model.make_generation_fast_(
beamable_mm_beam_size=None if args.no_beamable_mm else args.beam,
need_attn=args.print_alignment,
)
if args.fp16:
model.half()
if use_cuda:
model.cuda()
# Load dataset (possibly sharded)
itr = task.get_batch_iterator(
dataset=task.dataset(args.gen_subset),
max_tokens=args.max_tokens,
max_sentences=args.max_sentences,
max_positions=utils.resolve_max_positions(
task.max_positions(),
*[model.max_positions() for model in models]),
ignore_invalid_inputs=args.skip_invalid_size_inputs_valid_test,
required_batch_size_multiple=args.required_batch_size_multiple,
num_shards=args.num_shards,
shard_id=args.shard_id,
num_workers=args.num_workers,
).next_epoch_itr(shuffle=False)
# Initialize generator
gen_timer = StopwatchMeter()
generator = task.build_generator(args)
num_sentences = 0
data_queue = Queue()
message_queue = JoinableQueue()
p_list = []
for i in range(args.postprocess_workers):
p = PostProcess(args, task, data_queue, message_queue)
p_list.append(p)
p.start()
io_process = IOProcess(args, task, message_queue)
io_process.start()
if args.use_el_attn:
task.transpose_enc_dec_kv_proj(models)
with progress_bar.build_progress_bar(args, itr) as t:
wps_meter = TimeMeter()
for sample in t:
cpu_sample = sample
if 'net_input' not in sample:
continue
sample = utils.move_to_cuda(sample) if use_cuda else sample
prefix_tokens = None
if args.prefix_size > 0:
prefix_tokens = sample['target'][:, :args.prefix_size]
gen_timer.start()
try:
hypos = task.inference_step(
generator, models, sample, prefix_tokens)
except:
logging.exception(sys.exc_info()[0])
for p in p_list:
p.terminate()
io_process.terminate()
data_queue.close()
message_queue.close()
sys.exit(1)
num_generated_tokens = sum(len(h[0]['tokens']) for h in hypos)
gen_timer.stop(num_generated_tokens)
hypos = [h[:args.nbest] for h in hypos]
hypos = move_to_cpu(hypos) if use_cuda else hypos
data_queue.put((cpu_sample, hypos))
wps_meter.update(num_generated_tokens)
t.log({'wps': round(wps_meter.avg)})
num_sentences += cpu_sample['nsentences']
data_queue.put(GENERATE_FINISHED)
for p in p_list:
p.join()
    sent_throughput = num_sentences / gen_timer.sum if num_sentences > 0 else 0
    tokens_throughput = 1. / gen_timer.avg if num_sentences > 0 else 0
    message_queue.put(
        '| Translated {} sentences ({} tokens) in {:.1f}s ({:.2f} sentences/s, {:.2f} tokens/s)'.  # pylint: disable=line-too-long
        format(num_sentences, gen_timer.n, gen_timer.sum, sent_throughput,
               tokens_throughput))
message_queue.put(GENERATE_FINISHED)
io_process.join()
return
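# Hedged illustration (standalone toy, unused by the code above): the shutdown
# hand-off in PostProcess.run() relies on the producer sending one
# GENERATE_FINISHED sentinel, which each worker re-puts (as POSTPROCESS_FINISHED)
# before exiting, so every worker eventually drains out.  The demo below
# reproduces only that pattern with plain multiprocessing primitives.
def _sentinel_demo_worker(q, worker_id):
    while True:
        item = q.get()
        if item == 'done' or item is None:
            q.put(None)  # pass the stop signal on to the remaining workers
            break
        print('toy worker', worker_id, 'processed', item)
if __name__ == '__main__':
    import multiprocessing as _mp
    _q = _mp.Queue()
    _workers = [_mp.Process(target=_sentinel_demo_worker, args=(_q, i))
                for i in range(2)]
    for _w in _workers:
        _w.start()
    for _item in range(4):
        _q.put(_item)
    _q.put('done')  # a single sentinel is enough; workers re-propagate it
    for _w in _workers:
        _w.join()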
| 36.656325
| 129
| 0.551924
|
4f5ad602aaaaa89dcb7a419f6ecd28f84db9570b
| 4,625
|
py
|
Python
|
mushroom_rl/utils/preprocessors.py
|
jacarvalho/mushroom-rl
|
ba0a62454d771a1d3cacbec1ea9d71535f476b31
|
[
"MIT"
] | null | null | null |
mushroom_rl/utils/preprocessors.py
|
jacarvalho/mushroom-rl
|
ba0a62454d771a1d3cacbec1ea9d71535f476b31
|
[
"MIT"
] | null | null | null |
mushroom_rl/utils/preprocessors.py
|
jacarvalho/mushroom-rl
|
ba0a62454d771a1d3cacbec1ea9d71535f476b31
|
[
"MIT"
] | null | null | null |
import pickle
import numpy as np
from mushroom_rl.utils.running_stats import RunningStandardization
class StandardizationPreprocessor(object):
"""
Preprocess observations from the environment using a running
standardization.
"""
def __init__(self, mdp_info, clip_obs=10., alpha=1e-32):
"""
Constructor.
Args:
mdp_info (MDPInfo): information of the MDP;
clip_obs (float, 10.): values to clip the normalized observations;
alpha (float, 1e-32): moving average catchup parameter for the
normalization.
"""
self.clip_obs = clip_obs
self.obs_shape = mdp_info.observation_space.shape
self.obs_runstand = RunningStandardization(shape=self.obs_shape,
alpha=alpha)
def __call__(self, obs):
"""
Call function to normalize the observation.
Args:
obs (np.ndarray): observation to be normalized.
Returns:
Normalized observation array with the same shape.
"""
assert obs.shape == self.obs_shape, \
"Values given to running_norm have incorrect shape " \
"(obs shape: {}, expected shape: {})" \
.format(obs.shape, self.obs_shape)
self.obs_runstand.update_stats(obs)
norm_obs = np.clip(
(obs - self.obs_runstand.mean) / self.obs_runstand.std,
-self.clip_obs, self.clip_obs
)
return norm_obs
def get_state(self):
"""
Returns:
A dictionary with the normalization state.
"""
return self.obs_runstand.get_state()
def set_state(self, data):
"""
Set the current normalization state from the data dict.
"""
self.obs_runstand.set_state(data)
def save_state(self, path):
"""
Save the running normalization state to path.
Args:
path (str): path to save the running normalization state.
"""
with open(path, 'wb') as f:
pickle.dump(self.get_state(), f, protocol=3)
def load_state(self, path):
"""
Load the running normalization state from path.
Args:
path (string): path to load the running normalization state from.
"""
with open(path, 'rb') as f:
data = pickle.load(f)
self.set_state(data)
class MinMaxPreprocessor(StandardizationPreprocessor):
"""
    Preprocess observations from the environment using the bounds of the
    environment's observation space. For observations that are not bounded,
    it falls back to the running mean standardization.
"""
def __init__(self, mdp_info, clip_obs=10., alpha=1e-32):
"""
Constructor.
Args:
mdp_info (MDPInfo): information of the MDP;
clip_obs (float, 10.): values to clip the normalized observations;
alpha (float, 1e-32): moving average catchup parameter for the
normalization.
"""
super(MinMaxPreprocessor, self).__init__(mdp_info, clip_obs,
alpha)
obs_low, obs_high = (mdp_info.observation_space.low.copy(),
mdp_info.observation_space.high.copy())
self.stand_obs_mask = np.where(
~(np.isinf(obs_low) | np.isinf(obs_high))
)
assert np.squeeze(self.stand_obs_mask).size > 0, \
"All observations have unlimited range, you should use " \
"StandardizationPreprocessor directly instead."
self.run_norm_obs = len(np.squeeze(self.stand_obs_mask)) != obs_low.shape[0]
self.obs_mean = np.zeros_like(obs_low)
self.obs_delta = np.ones_like(obs_low)
self.obs_mean[self.stand_obs_mask] = (
obs_high[self.stand_obs_mask] + obs_low[self.stand_obs_mask]) / 2.
self.obs_delta[self.stand_obs_mask] = (
obs_high[self.stand_obs_mask] - obs_low[self.stand_obs_mask]) / 2.
def __call__(self, obs):
"""
Call function to normalize the observation.
Args:
obs (np.ndarray): observation to be normalized.
Returns:
Normalized observation array with the same shape.
"""
orig_obs = obs.copy()
if self.run_norm_obs:
obs = super(MinMaxPreprocessor, self).__call__(obs)
obs[self.stand_obs_mask] = \
((orig_obs - self.obs_mean) / self.obs_delta)[self.stand_obs_mask]
return obs
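# Hedged numerical sketch of the min-max path above: bounded dimensions are
# mapped into roughly [-1, 1] with obs_mean = (high + low) / 2 and
# obs_delta = (high - low) / 2, while unbounded dimensions would go through the
# running standardization instead.  Standalone example with made-up bounds:
if __name__ == '__main__':
    low = np.array([0.0, -5.0])
    high = np.array([10.0, 5.0])
    obs = np.array([7.5, 0.0])
    mean = (high + low) / 2.0    # [5.0, 0.0]
    delta = (high - low) / 2.0   # [5.0, 5.0]
    print((obs - mean) / delta)  # [0.5, 0.0], i.e. scaled into [-1, 1]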
| 30.228758
| 84
| 0.59373
|
cc11d75de001b7312ec5417523a06219052ecbd2
| 2,424
|
py
|
Python
|
db/tools.py
|
skriems/cherrypy-recipes
|
730188ce01511e81263b4c9645e2aa43533dbef8
|
[
"MIT"
] | null | null | null |
db/tools.py
|
skriems/cherrypy-recipes
|
730188ce01511e81263b4c9645e2aa43533dbef8
|
[
"MIT"
] | null | null | null |
db/tools.py
|
skriems/cherrypy-recipes
|
730188ce01511e81263b4c9645e2aa43533dbef8
|
[
"MIT"
] | null | null | null |
import cherrypy
class Psycopg2Tool(cherrypy.Tool):
def __init__(self):
super(Psycopg2Tool, self).__init__('on_start_resource',
self.bind_connection,
priority=20)
def _setup(self):
cherrypy.Tool._setup(self)
cherrypy.request.hooks.attach('on_end_resource',
self.commit_connection,
priority=80)
def bind_connection(self):
connection = cherrypy.engine.publish('bind-connection').pop()
cherrypy.request.db = connection
def commit_connection(self):
if not hasattr(cherrypy.request, 'db'):
return
cherrypy.request.db = None
cherrypy.engine.publish('commit-connection')
class SATool(cherrypy.Tool):
def __init__(self):
"""
The SA tool is responsible for associating a SA session
to the SA engine and attaching it to the current request.
        Since we are running in a multithreaded application,
        we use the scoped_session that will create a session
        on a per-thread basis so that you don't have to worry
        about concurrency on the session object itself.
        This tool binds a session to the engine each time
        a request starts and commits/rolls back whenever
        the request terminates.
"""
cherrypy.Tool.__init__(self, 'on_start_resource',
self.bind_session,
priority=20)
def _setup(self):
cherrypy.Tool._setup(self)
cherrypy.request.hooks.attach('on_end_resource',
self.commit_transaction,
priority=80)
def bind_session(self):
"""
Attaches a session to the request's scope by requesting
the SA plugin to bind a session to the SA engine.
"""
session = cherrypy.engine.publish('bind-session').pop()
cherrypy.request.db = session
def commit_transaction(self):
"""
Commits the current transaction or rolls back
if an error occurs. Removes the session handle
from the request's scope.
"""
if not hasattr(cherrypy.request, 'db'):
return
cherrypy.request.db = None
cherrypy.engine.publish('commit-session')
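# Hedged usage sketch (comments only): a tool is registered on the cherrypy
# toolbox and switched on per handler or per application, after which the
# session shows up as cherrypy.request.db inside page handlers.  This assumes a
# companion plugin subscribed on cherrypy.engine that answers the
# 'bind-session' / 'commit-session' channels (not shown in this module), and
# the User model below is purely illustrative.
#
#   cherrypy.tools.db = SATool()
#
#   class Root(object):
#       @cherrypy.expose
#       def index(self):
#           session = cherrypy.request.db  # bound by SATool.bind_session
#           return 'users: %d' % session.query(User).count()
#
#   cherrypy.quickstart(Root(), '/', {'/': {'tools.db.on': True}})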
| 35.130435
| 69
| 0.580033
|
ba6d162379436e53bd8ea2056d362c37012b3824
| 6,608
|
py
|
Python
|
demo/demo_chunkator/tests.py
|
alpaca-technology/django-chunkator
|
10c0b3ce63f6bcb1833ff1518725c68a3d452807
|
[
"MIT"
] | null | null | null |
demo/demo_chunkator/tests.py
|
alpaca-technology/django-chunkator
|
10c0b3ce63f6bcb1833ff1518725c68a3d452807
|
[
"MIT"
] | null | null | null |
demo/demo_chunkator/tests.py
|
alpaca-technology/django-chunkator
|
10c0b3ce63f6bcb1833ff1518725c68a3d452807
|
[
"MIT"
] | null | null | null |
import six
from django.test import TestCase
from chunkator import MissingPkFieldException
from chunkator import chunkator
from chunkator import chunkator_page
from demo_chunkator.models import Book
from demo_chunkator.models import Cover
from demo_chunkator.models import Profile
from demo_chunkator.models import User
class ChunkatorTestCase(TestCase):
def setUp(self):
super(ChunkatorTestCase, self).setUp()
for nb in range(20):
Book.objects.create(
title="Title #{}".format(nb),
author="Author #{}".format(nb)
)
def test_chunks_queryset(self):
# small step
chunks = chunkator(Book.objects.all(), 1)
result = []
for item in chunks:
self.assertTrue(isinstance(item, Book))
result.append(item.pk)
self.assertEqual(len(result), 20)
self.assertEqual(len(result), len(set(result))) # no duplicates
result = []
# larger chunks
chunks = chunkator(Book.objects.all(), 10)
for item in chunks:
self.assertTrue(isinstance(item, Book))
result.append(item.pk)
self.assertEqual(len(result), 20)
self.assertEqual(len(result), len(set(result))) # no duplicates
result = []
# larger than QS chunks
chunks = chunkator(Book.objects.all(), 50)
for item in chunks:
self.assertTrue(isinstance(item, Book), "{}".format(item))
result.append(item.pk)
self.assertEqual(len(result), 20)
self.assertEqual(len(result), len(set(result))) # no duplicates
def test_chunks_numqueries(self):
# Make sure we only run 2 queries
# One for each slice
with self.assertNumQueries(2):
chunks = chunkator(Book.objects.all(), 12)
for item in chunks:
self.assertTrue(isinstance(item, Book))
# Make sure we only run 3 queries
# One for each slice, plus the "empty" one
with self.assertNumQueries(3):
chunks = chunkator(Book.objects.all(), 10)
for item in chunks:
self.assertTrue(isinstance(item, Book))
# If the chunk size is bigger than the number of items, only one query.
with self.assertNumQueries(1):
chunks = chunkator(Book.objects.all(), 1000)
for item in chunks:
self.assertTrue(isinstance(item, Book))
class ChunkatorUUIDTestCase(TestCase):
def setUp(self):
super(ChunkatorUUIDTestCase, self).setUp()
User.objects.create(name='Terry Pratchett')
User.objects.create(name='Iain Banks')
def test_chunk_uuid(self):
result = []
chunks = chunkator(User.objects.all(), 10)
for item in chunks:
self.assertTrue(isinstance(item, User))
result.append(item.pk)
self.assertEqual(len(result), 2)
self.assertEqual(len(result), len(set(result))) # no duplicates
class ChunkatorOrderTestCase(TestCase):
def setUp(self):
super(ChunkatorOrderTestCase, self).setUp()
book = Book.objects.create(
title="The Player of Games",
author="Banks, Iain"
)
Cover.objects.create(book=book, code='player')
book = Book.objects.create(
title="Guards! Guards!",
author="Pratchett, Terry"
)
Cover.objects.create(book=book, code='guards')
user = User.objects.create(name='Wonder Woman')
Profile.objects.create(user=user, avatar='wonderful')
user = User.objects.create(name='Wolverine')
Profile.objects.create(user=user, avatar='wolfy')
def test_order_by_default(self):
items = list(chunkator(Book.objects.all(), 10))
self.assertEqual(items[0].pk, 1)
self.assertEqual(items[1].pk, 2)
uuids = sorted(User.objects.values_list('pk', flat=True))
items = list(chunkator(User.objects.all(), 10))
self.assertEqual(items[0].pk, uuids[0])
self.assertEqual(items[1].pk, uuids[1])
def test_order_by_with_onetoone_pk(self):
items = list(chunkator(Cover.objects.all(), 10))
self.assertEqual(items[0].pk, 1)
self.assertEqual(items[1].pk, 2)
items = list(chunkator(Cover.objects.all(), 10))
self.assertEqual(items[0].pk, 1)
self.assertEqual(items[1].pk, 2)
class ChunkatorValuesTestCase(TestCase):
def setUp(self):
super(ChunkatorValuesTestCase, self).setUp()
User.objects.create(name='Wonder Woman')
User.objects.create(name='Wolverine')
def test_chunk_uuid(self):
result = []
chunks = chunkator(User.objects.all().values("pk", "name"), 10)
for item in chunks:
self.assertTrue(isinstance(item, dict))
result.append(item['pk'])
self.assertEqual(len(result), 2)
self.assertEqual(len(result), len(set(result))) # no duplicates
def test_chunk_missing_pk(self):
with self.assertRaises(MissingPkFieldException):
result = chunkator(User.objects.all().values("name"), 10)
six.next(result)
class ChunkatorWhereTest(TestCase):
def setUp(self):
super(ChunkatorWhereTest, self).setUp()
User.objects.create(name='Wonder Woman')
User.objects.create(name='Wolverine')
User.objects.create(name='ChuckNorris')
def test_query_log(self):
query_log_output = six.StringIO()
qs = User.objects.all()
# We loop here only to dig into the generator and force execution
for item in chunkator(qs, 1, query_log=query_log_output):
_ = item # noqa
contents = query_log_output.getvalue()
query_log_output.close()
queries = contents.split('\n')
self.assertEqual(len(queries), 5, queries)
queries = queries[:4] # the last one is empty string
for query in queries:
# Should be 0 for the first query
# Should occur once for other queries
self.assertTrue(query.count('."uuid" >') <= 1, query)
class ChunkatorPageTest(TestCase):
def setUp(self):
super(ChunkatorPageTest, self).setUp()
for nb in range(3):
Book.objects.create(
title="Title #{}".format(nb),
author="Author #{}".format(nb)
)
def test_chunkator_page(self):
qs = Book.objects.all().values('pk')
pages = list(chunkator_page(qs, 2))
self.assertEqual(pages, [[{'pk': 1}, {'pk': 2}], [{'pk': 3}]])
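# Hedged sketch (comments only) of the keyset pagination these tests exercise:
# chunkator is expected to order by primary key and fetch "pk > last_seen"
# slices, which is why ChunkatorWhereTest finds at most one '."uuid" >' clause
# per logged query.  A stripped-down generator over a queryset might look like
# the following (illustrative; the real implementation lives in the chunkator
# package and also handles .values() querysets and query logging):
#
#   def chunk_sketch(queryset, chunk_size):
#       qs = queryset.order_by('pk')
#       last_pk = None
#       while True:
#           page = qs.filter(pk__gt=last_pk) if last_pk is not None else qs
#           page = list(page[:chunk_size])
#           if not page:
#               return
#           for item in page:
#               yield item
#           last_pk = page[-1].pk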
| 34.962963
| 79
| 0.612137
|
bfecd7931922d03074686e5ef20295a4b2c938bd
| 485
|
py
|
Python
|
disturbance/migrations/0098_auto_20200702_1347.py
|
thakurpriya1990/disturbance
|
47f9ce5ae5f1b02d97ace11f1041e96daf7e4556
|
[
"Apache-2.0"
] | 1
|
2020-06-30T04:47:42.000Z
|
2020-06-30T04:47:42.000Z
|
disturbance/migrations/0098_auto_20200702_1347.py
|
thakurpriya1990/disturbance
|
47f9ce5ae5f1b02d97ace11f1041e96daf7e4556
|
[
"Apache-2.0"
] | 16
|
2020-03-11T08:25:46.000Z
|
2022-03-02T08:14:40.000Z
|
disturbance/migrations/0098_auto_20200702_1347.py
|
thakurpriya1990/disturbance
|
47f9ce5ae5f1b02d97ace11f1041e96daf7e4556
|
[
"Apache-2.0"
] | 9
|
2020-01-30T17:37:38.000Z
|
2021-09-30T02:22:24.000Z
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.8 on 2020-07-02 05:47
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('disturbance', '0097_remove_annualrentalfee_payment_type'),
]
operations = [
migrations.RenameField(
model_name='annualrentalfeeinvoice',
old_name='annual_rent_fee',
new_name='annual_rental_fee',
),
]
| 23.095238
| 68
| 0.651546
|
aad6e136ea5d7053f0aaa852aaf91a6a9eff909a
| 360
|
py
|
Python
|
Testing/If settning.py
|
AndersHam/DAT120_eksempler
|
b9324459ca7fd8323053b328a6a43c0e11bb92a5
|
[
"MIT"
] | null | null | null |
Testing/If settning.py
|
AndersHam/DAT120_eksempler
|
b9324459ca7fd8323053b328a6a43c0e11bb92a5
|
[
"MIT"
] | null | null | null |
Testing/If settning.py
|
AndersHam/DAT120_eksempler
|
b9324459ca7fd8323053b328a6a43c0e11bb92a5
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Tue Sep 7 17:54:46 2021
@author: -anders-
"""
tall_streng = input("Skriv inn et heltall: ")
tallet = int(tall_streng)
if tallet < 0:
print("Tallet er nagativt")
print("")
elif tallet == 0:
print("Tallet er lik 0")
else:
print("Tallet er ikke negativt")
# This is a comment :)
| 18.947368
| 45
| 0.627778
|
2cd0e3d9bb8ceeac6944793ca1e63ce9a0840863
| 683
|
py
|
Python
|
saleor/lib/python3.7/site-packages/django_prices_openexchangerates/management/commands/update_exchange_rates.py
|
cxsper/saleor
|
5566ddcdaf8f72ba872eca869798e66eb9cdae44
|
[
"BSD-3-Clause"
] | 2
|
2019-12-06T15:40:14.000Z
|
2020-07-29T21:30:35.000Z
|
saleor/lib/python3.7/site-packages/django_prices_openexchangerates/management/commands/update_exchange_rates.py
|
cxsper/saleor
|
5566ddcdaf8f72ba872eca869798e66eb9cdae44
|
[
"BSD-3-Clause"
] | 13
|
2020-03-24T17:53:51.000Z
|
2022-02-10T20:01:14.000Z
|
saleor/lib/python3.7/site-packages/django_prices_openexchangerates/management/commands/update_exchange_rates.py
|
cxsper/saleor
|
5566ddcdaf8f72ba872eca869798e66eb9cdae44
|
[
"BSD-3-Clause"
] | null | null | null |
from django.core.management.base import BaseCommand
from ...tasks import update_conversion_rates, create_conversion_dates
class Command(BaseCommand):
def add_arguments(self, parser):
parser.add_argument(
'--all',
action='store_true',
dest='all_currencies',
default=False,
help='Create entries for all currencies')
def handle(self, *args, **options):
if options['all_currencies']:
all_rates = create_conversion_dates()
else:
all_rates = update_conversion_rates()
for conversion_rate in all_rates:
self.stdout.write('%s' % (conversion_rate, ))
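# Hedged usage sketch (comments only): the command is meant to be run
# periodically; --all is assumed to create entries for every supported
# currency instead of only refreshing the existing ones.
#
#   $ python manage.py update_exchange_rates --all
#
#   from django.core.management import call_command
#   call_command('update_exchange_rates', '--all')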
| 29.695652
| 69
| 0.629575
|
d2aaa1020842af416b1b7c9de2199a318f49fa80
| 542
|
py
|
Python
|
vgg16.py
|
matonima/lightweight-classifier
|
35fd188595d771b738ec4e84501e45ca7110fb5a
|
[
"CC0-1.0"
] | null | null | null |
vgg16.py
|
matonima/lightweight-classifier
|
35fd188595d771b738ec4e84501e45ca7110fb5a
|
[
"CC0-1.0"
] | null | null | null |
vgg16.py
|
matonima/lightweight-classifier
|
35fd188595d771b738ec4e84501e45ca7110fb5a
|
[
"CC0-1.0"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
Created on Thu Oct 15 09:36:23 2020
@author: tonim
"""
from tensorflow.keras.applications.vgg16 import VGG16
from tensorflow.keras.preprocessing import image
from tensorflow.keras.applications.vgg16 import preprocess_input
import numpy as np
model = VGG16(include_top=False, weights='imagenet')
img_path = '1.jpg'
img = image.load_img(img_path, grayscale=False, color_mode='rgb', target_size=(224, 224))
x = image.img_to_array(img)
x = np.expand_dims(x, axis=0)
x = preprocess_input(x)
f = model.predict(x)
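# Hedged follow-up sketch: with include_top=False and a 224x224 RGB input, the
# extracted feature map f is expected to have shape (1, 7, 7, 512); a
# lightweight classifier would typically pool it into a single 512-d vector.
pooled = f.reshape(f.shape[0], -1, f.shape[-1]).mean(axis=1)  # (1, 512)
print(f.shape, pooled.shape)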
| 28.526316
| 87
| 0.747232
|
780a06192dcf8c743f72eb577df3ad1d3cadd657
| 68,719
|
py
|
Python
|
Lib/asyncio/base_events.py
|
coderkids3/cpython
|
103e9e54395f5a72403b5c339a7cc89ec3dc83e4
|
[
"PSF-2.0"
] | 1
|
2018-10-05T20:17:22.000Z
|
2018-10-05T20:17:22.000Z
|
Lib/asyncio/base_events.py
|
ukbaranwal/cpython
|
addf8afb43af58b9bf56a0ecfd0f316dd60ac0c3
|
[
"PSF-2.0"
] | 3
|
2017-03-21T19:48:38.000Z
|
2017-03-22T01:06:56.000Z
|
Lib/asyncio/base_events.py
|
ukbaranwal/cpython
|
addf8afb43af58b9bf56a0ecfd0f316dd60ac0c3
|
[
"PSF-2.0"
] | null | null | null |
"""Base implementation of event loop.
The event loop can be broken up into a multiplexer (the part
responsible for notifying us of I/O events) and the event loop proper,
which wraps a multiplexer with functionality for scheduling callbacks,
immediately or at a given time in the future.
Whenever a public API takes a callback, subsequent positional
arguments will be passed to the callback if/when it is called. This
avoids the proliferation of trivial lambdas implementing closures.
Keyword arguments for the callback are not supported; this is a
conscious design decision, leaving the door open for keyword arguments
to modify the meaning of the API call itself.
"""
import collections
import collections.abc
import concurrent.futures
import heapq
import itertools
import logging
import os
import socket
import subprocess
import threading
import time
import traceback
import sys
import warnings
import weakref
try:
import ssl
except ImportError: # pragma: no cover
ssl = None
from . import constants
from . import coroutines
from . import events
from . import exceptions
from . import futures
from . import protocols
from . import sslproto
from . import tasks
from . import transports
from .log import logger
__all__ = 'BaseEventLoop',
# Minimum number of _scheduled timer handles before cleanup of
# cancelled handles is performed.
_MIN_SCHEDULED_TIMER_HANDLES = 100
# Minimum fraction of _scheduled timer handles that are cancelled
# before cleanup of cancelled handles is performed.
_MIN_CANCELLED_TIMER_HANDLES_FRACTION = 0.5
# Exceptions which must not call the exception handler in fatal error
# methods (_fatal_error())
_FATAL_ERROR_IGNORE = (BrokenPipeError,
ConnectionResetError, ConnectionAbortedError)
if ssl is not None:
_FATAL_ERROR_IGNORE = _FATAL_ERROR_IGNORE + (ssl.SSLCertVerificationError,)
_HAS_IPv6 = hasattr(socket, 'AF_INET6')
# Maximum timeout passed to select to avoid OS limitations
MAXIMUM_SELECT_TIMEOUT = 24 * 3600
def _format_handle(handle):
cb = handle._callback
if isinstance(getattr(cb, '__self__', None), tasks.Task):
# format the task
return repr(cb.__self__)
else:
return str(handle)
def _format_pipe(fd):
if fd == subprocess.PIPE:
return '<pipe>'
elif fd == subprocess.STDOUT:
return '<stdout>'
else:
return repr(fd)
def _set_reuseport(sock):
if not hasattr(socket, 'SO_REUSEPORT'):
raise ValueError('reuse_port not supported by socket module')
else:
try:
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1)
except OSError:
raise ValueError('reuse_port not supported by socket module, '
'SO_REUSEPORT defined but not implemented.')
def _ipaddr_info(host, port, family, type, proto):
# Try to skip getaddrinfo if "host" is already an IP. Users might have
# handled name resolution in their own code and pass in resolved IPs.
if not hasattr(socket, 'inet_pton'):
return
if proto not in {0, socket.IPPROTO_TCP, socket.IPPROTO_UDP} or \
host is None:
return None
if type == socket.SOCK_STREAM:
proto = socket.IPPROTO_TCP
elif type == socket.SOCK_DGRAM:
proto = socket.IPPROTO_UDP
else:
return None
if port is None:
port = 0
elif isinstance(port, bytes) and port == b'':
port = 0
elif isinstance(port, str) and port == '':
port = 0
else:
# If port's a service name like "http", don't skip getaddrinfo.
try:
port = int(port)
except (TypeError, ValueError):
return None
if family == socket.AF_UNSPEC:
afs = [socket.AF_INET]
if _HAS_IPv6:
afs.append(socket.AF_INET6)
else:
afs = [family]
if isinstance(host, bytes):
host = host.decode('idna')
if '%' in host:
# Linux's inet_pton doesn't accept an IPv6 zone index after host,
# like '::1%lo0'.
return None
for af in afs:
try:
socket.inet_pton(af, host)
# The host has already been resolved.
if _HAS_IPv6 and af == socket.AF_INET6:
return af, type, proto, '', (host, port, 0, 0)
else:
return af, type, proto, '', (host, port)
except OSError:
pass
# "host" is not an IP address.
return None
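# Hedged illustration (comments only) of the fast path above: a literal IPv4
# host yields a ready-made getaddrinfo-style tuple, e.g.
#   _ipaddr_info('127.0.0.1', 80, socket.AF_UNSPEC, socket.SOCK_STREAM, 0)
#   -> (socket.AF_INET, socket.SOCK_STREAM, socket.IPPROTO_TCP, '',
#       ('127.0.0.1', 80))
# whereas a hostname such as 'example.com' returns None and the caller still
# has to go through getaddrinfo.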
def _run_until_complete_cb(fut):
if not fut.cancelled():
exc = fut.exception()
if isinstance(exc, BaseException) and not isinstance(exc, Exception):
# Issue #22429: run_forever() already finished, no need to
# stop it.
return
futures._get_loop(fut).stop()
class _SendfileFallbackProtocol(protocols.Protocol):
def __init__(self, transp):
if not isinstance(transp, transports._FlowControlMixin):
raise TypeError("transport should be _FlowControlMixin instance")
self._transport = transp
self._proto = transp.get_protocol()
self._should_resume_reading = transp.is_reading()
self._should_resume_writing = transp._protocol_paused
transp.pause_reading()
transp.set_protocol(self)
if self._should_resume_writing:
self._write_ready_fut = self._transport._loop.create_future()
else:
self._write_ready_fut = None
async def drain(self):
if self._transport.is_closing():
raise ConnectionError("Connection closed by peer")
fut = self._write_ready_fut
if fut is None:
return
await fut
def connection_made(self, transport):
raise RuntimeError("Invalid state: "
"connection should have been established already.")
def connection_lost(self, exc):
if self._write_ready_fut is not None:
# Never happens if peer disconnects after sending the whole content
# Thus disconnection is always an exception from user perspective
if exc is None:
self._write_ready_fut.set_exception(
ConnectionError("Connection is closed by peer"))
else:
self._write_ready_fut.set_exception(exc)
self._proto.connection_lost(exc)
def pause_writing(self):
if self._write_ready_fut is not None:
return
self._write_ready_fut = self._transport._loop.create_future()
def resume_writing(self):
if self._write_ready_fut is None:
return
self._write_ready_fut.set_result(False)
self._write_ready_fut = None
def data_received(self, data):
raise RuntimeError("Invalid state: reading should be paused")
def eof_received(self):
raise RuntimeError("Invalid state: reading should be paused")
async def restore(self):
self._transport.set_protocol(self._proto)
if self._should_resume_reading:
self._transport.resume_reading()
if self._write_ready_fut is not None:
# Cancel the future.
# Basically it has no effect because protocol is switched back,
# no code should wait for it anymore.
self._write_ready_fut.cancel()
if self._should_resume_writing:
self._proto.resume_writing()
class Server(events.AbstractServer):
def __init__(self, loop, sockets, protocol_factory, ssl_context, backlog,
ssl_handshake_timeout):
self._loop = loop
self._sockets = sockets
self._active_count = 0
self._waiters = []
self._protocol_factory = protocol_factory
self._backlog = backlog
self._ssl_context = ssl_context
self._ssl_handshake_timeout = ssl_handshake_timeout
self._serving = False
self._serving_forever_fut = None
def __repr__(self):
return f'<{self.__class__.__name__} sockets={self.sockets!r}>'
def _attach(self):
assert self._sockets is not None
self._active_count += 1
def _detach(self):
assert self._active_count > 0
self._active_count -= 1
if self._active_count == 0 and self._sockets is None:
self._wakeup()
def _wakeup(self):
waiters = self._waiters
self._waiters = None
for waiter in waiters:
if not waiter.done():
waiter.set_result(waiter)
def _start_serving(self):
if self._serving:
return
self._serving = True
for sock in self._sockets:
sock.listen(self._backlog)
self._loop._start_serving(
self._protocol_factory, sock, self._ssl_context,
self, self._backlog, self._ssl_handshake_timeout)
def get_loop(self):
return self._loop
def is_serving(self):
return self._serving
@property
def sockets(self):
if self._sockets is None:
return []
return list(self._sockets)
def close(self):
sockets = self._sockets
if sockets is None:
return
self._sockets = None
for sock in sockets:
self._loop._stop_serving(sock)
self._serving = False
if (self._serving_forever_fut is not None and
not self._serving_forever_fut.done()):
self._serving_forever_fut.cancel()
self._serving_forever_fut = None
if self._active_count == 0:
self._wakeup()
async def start_serving(self):
self._start_serving()
# Skip one loop iteration so that all 'loop.add_reader'
# go through.
await tasks.sleep(0, loop=self._loop)
async def serve_forever(self):
if self._serving_forever_fut is not None:
raise RuntimeError(
f'server {self!r} is already being awaited on serve_forever()')
if self._sockets is None:
raise RuntimeError(f'server {self!r} is closed')
self._start_serving()
self._serving_forever_fut = self._loop.create_future()
try:
await self._serving_forever_fut
except exceptions.CancelledError:
try:
self.close()
await self.wait_closed()
finally:
raise
finally:
self._serving_forever_fut = None
async def wait_closed(self):
if self._sockets is None or self._waiters is None:
return
waiter = self._loop.create_future()
self._waiters.append(waiter)
await waiter
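# Hedged usage sketch (comments only) of the Server implemented above, as it is
# normally obtained through the high-level API; handle_echo is a placeholder
# client-connected callback:
#
#   async def main():
#       server = await asyncio.start_server(handle_echo, '127.0.0.1', 8888)
#       async with server:                # close() + wait_closed() on exit
#           await server.serve_forever()  # drives _serving_forever_fut above
#
#   asyncio.run(main())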
class BaseEventLoop(events.AbstractEventLoop):
def __init__(self):
self._timer_cancelled_count = 0
self._closed = False
self._stopping = False
self._ready = collections.deque()
self._scheduled = []
self._default_executor = None
self._internal_fds = 0
# Identifier of the thread running the event loop, or None if the
# event loop is not running
self._thread_id = None
self._clock_resolution = time.get_clock_info('monotonic').resolution
self._exception_handler = None
self.set_debug(coroutines._is_debug_mode())
# In debug mode, if the execution of a callback or a step of a task
# exceed this duration in seconds, the slow callback/task is logged.
self.slow_callback_duration = 0.1
self._current_handle = None
self._task_factory = None
self._coroutine_origin_tracking_enabled = False
self._coroutine_origin_tracking_saved_depth = None
# A weak set of all asynchronous generators that are
# being iterated by the loop.
self._asyncgens = weakref.WeakSet()
# Set to True when `loop.shutdown_asyncgens` is called.
self._asyncgens_shutdown_called = False
def __repr__(self):
return (
f'<{self.__class__.__name__} running={self.is_running()} '
f'closed={self.is_closed()} debug={self.get_debug()}>'
)
def create_future(self):
"""Create a Future object attached to the loop."""
return futures.Future(loop=self)
def create_task(self, coro, *, name=None):
"""Schedule a coroutine object.
Return a task object.
"""
self._check_closed()
if self._task_factory is None:
task = tasks.Task(coro, loop=self, name=name)
if task._source_traceback:
del task._source_traceback[-1]
else:
task = self._task_factory(self, coro)
tasks._set_task_name(task, name)
return task
def set_task_factory(self, factory):
"""Set a task factory that will be used by loop.create_task().
If factory is None the default task factory will be set.
If factory is a callable, it should have a signature matching
'(loop, coro)', where 'loop' will be a reference to the active
event loop, 'coro' will be a coroutine object. The callable
must return a Future.
"""
if factory is not None and not callable(factory):
raise TypeError('task factory must be a callable or None')
self._task_factory = factory
def get_task_factory(self):
"""Return a task factory, or None if the default one is in use."""
return self._task_factory
def _make_socket_transport(self, sock, protocol, waiter=None, *,
extra=None, server=None):
"""Create socket transport."""
raise NotImplementedError
def _make_ssl_transport(
self, rawsock, protocol, sslcontext, waiter=None,
*, server_side=False, server_hostname=None,
extra=None, server=None,
ssl_handshake_timeout=None,
call_connection_made=True):
"""Create SSL transport."""
raise NotImplementedError
def _make_datagram_transport(self, sock, protocol,
address=None, waiter=None, extra=None):
"""Create datagram transport."""
raise NotImplementedError
def _make_read_pipe_transport(self, pipe, protocol, waiter=None,
extra=None):
"""Create read pipe transport."""
raise NotImplementedError
def _make_write_pipe_transport(self, pipe, protocol, waiter=None,
extra=None):
"""Create write pipe transport."""
raise NotImplementedError
async def _make_subprocess_transport(self, protocol, args, shell,
stdin, stdout, stderr, bufsize,
extra=None, **kwargs):
"""Create subprocess transport."""
raise NotImplementedError
def _write_to_self(self):
"""Write a byte to self-pipe, to wake up the event loop.
This may be called from a different thread.
The subclass is responsible for implementing the self-pipe.
"""
raise NotImplementedError
def _process_events(self, event_list):
"""Process selector events."""
raise NotImplementedError
def _check_closed(self):
if self._closed:
raise RuntimeError('Event loop is closed')
def _asyncgen_finalizer_hook(self, agen):
self._asyncgens.discard(agen)
if not self.is_closed():
self.create_task(agen.aclose())
# Wake up the loop if the finalizer was called from
# a different thread.
self._write_to_self()
def _asyncgen_firstiter_hook(self, agen):
if self._asyncgens_shutdown_called:
warnings.warn(
f"asynchronous generator {agen!r} was scheduled after "
f"loop.shutdown_asyncgens() call",
ResourceWarning, source=self)
self._asyncgens.add(agen)
async def shutdown_asyncgens(self):
"""Shutdown all active asynchronous generators."""
self._asyncgens_shutdown_called = True
if not len(self._asyncgens):
# If Python version is <3.6 or we don't have any asynchronous
# generators alive.
return
closing_agens = list(self._asyncgens)
self._asyncgens.clear()
results = await tasks.gather(
*[ag.aclose() for ag in closing_agens],
return_exceptions=True,
loop=self)
for result, agen in zip(results, closing_agens):
if isinstance(result, Exception):
self.call_exception_handler({
'message': f'an error occurred during closing of '
f'asynchronous generator {agen!r}',
'exception': result,
'asyncgen': agen
})
def run_forever(self):
"""Run until stop() is called."""
self._check_closed()
if self.is_running():
raise RuntimeError('This event loop is already running')
if events._get_running_loop() is not None:
raise RuntimeError(
'Cannot run the event loop while another loop is running')
self._set_coroutine_origin_tracking(self._debug)
self._thread_id = threading.get_ident()
old_agen_hooks = sys.get_asyncgen_hooks()
sys.set_asyncgen_hooks(firstiter=self._asyncgen_firstiter_hook,
finalizer=self._asyncgen_finalizer_hook)
try:
events._set_running_loop(self)
while True:
self._run_once()
if self._stopping:
break
finally:
self._stopping = False
self._thread_id = None
events._set_running_loop(None)
self._set_coroutine_origin_tracking(False)
sys.set_asyncgen_hooks(*old_agen_hooks)
def run_until_complete(self, future):
"""Run until the Future is done.
If the argument is a coroutine, it is wrapped in a Task.
WARNING: It would be disastrous to call run_until_complete()
with the same coroutine twice -- it would wrap it in two
different Tasks and that can't be good.
Return the Future's result, or raise its exception.
"""
self._check_closed()
new_task = not futures.isfuture(future)
future = tasks.ensure_future(future, loop=self)
if new_task:
# An exception is raised if the future didn't complete, so there
# is no need to log the "destroy pending task" message
future._log_destroy_pending = False
future.add_done_callback(_run_until_complete_cb)
try:
self.run_forever()
except:
if new_task and future.done() and not future.cancelled():
# The coroutine raised a BaseException. Consume the exception
# to not log a warning, the caller doesn't have access to the
# local task.
future.exception()
raise
finally:
future.remove_done_callback(_run_until_complete_cb)
if not future.done():
raise RuntimeError('Event loop stopped before Future completed.')
return future.result()
def stop(self):
"""Stop running the event loop.
Every callback already scheduled will still run. This simply informs
run_forever to stop looping after a complete iteration.
"""
self._stopping = True
def close(self):
"""Close the event loop.
This clears the queues and shuts down the executor,
but does not wait for the executor to finish.
The event loop must not be running.
"""
if self.is_running():
raise RuntimeError("Cannot close a running event loop")
if self._closed:
return
if self._debug:
logger.debug("Close %r", self)
self._closed = True
self._ready.clear()
self._scheduled.clear()
executor = self._default_executor
if executor is not None:
self._default_executor = None
executor.shutdown(wait=False)
def is_closed(self):
"""Returns True if the event loop was closed."""
return self._closed
def __del__(self):
if not self.is_closed():
warnings.warn(f"unclosed event loop {self!r}", ResourceWarning,
source=self)
if not self.is_running():
self.close()
def is_running(self):
"""Returns True if the event loop is running."""
return (self._thread_id is not None)
def time(self):
"""Return the time according to the event loop's clock.
This is a float expressed in seconds since an epoch, but the
epoch, precision, accuracy and drift are unspecified and may
differ per event loop.
"""
return time.monotonic()
def call_later(self, delay, callback, *args, context=None):
"""Arrange for a callback to be called at a given time.
Return a Handle: an opaque object with a cancel() method that
can be used to cancel the call.
The delay can be an int or float, expressed in seconds. It is
always relative to the current time.
Each callback will be called exactly once. If two callbacks
are scheduled for exactly the same time, it is undefined which
will be called first.
Any positional arguments after the callback will be passed to
the callback when it is called.
"""
timer = self.call_at(self.time() + delay, callback, *args,
context=context)
if timer._source_traceback:
del timer._source_traceback[-1]
return timer
def call_at(self, when, callback, *args, context=None):
"""Like call_later(), but uses an absolute time.
Absolute time corresponds to the event loop's time() method.
"""
self._check_closed()
if self._debug:
self._check_thread()
self._check_callback(callback, 'call_at')
timer = events.TimerHandle(when, callback, args, self, context)
if timer._source_traceback:
del timer._source_traceback[-1]
heapq.heappush(self._scheduled, timer)
timer._scheduled = True
return timer
def call_soon(self, callback, *args, context=None):
"""Arrange for a callback to be called as soon as possible.
This operates as a FIFO queue: callbacks are called in the
order in which they are registered. Each callback will be
called exactly once.
Any positional arguments after the callback will be passed to
the callback when it is called.
"""
self._check_closed()
if self._debug:
self._check_thread()
self._check_callback(callback, 'call_soon')
handle = self._call_soon(callback, args, context)
if handle._source_traceback:
del handle._source_traceback[-1]
return handle
def _check_callback(self, callback, method):
if (coroutines.iscoroutine(callback) or
coroutines.iscoroutinefunction(callback)):
raise TypeError(
f"coroutines cannot be used with {method}()")
if not callable(callback):
raise TypeError(
f'a callable object was expected by {method}(), '
f'got {callback!r}')
def _call_soon(self, callback, args, context):
handle = events.Handle(callback, args, self, context)
if handle._source_traceback:
del handle._source_traceback[-1]
self._ready.append(handle)
return handle
def _check_thread(self):
"""Check that the current thread is the thread running the event loop.
Non-thread-safe methods of this class make this assumption and will
likely behave incorrectly when the assumption is violated.
Should only be called when (self._debug == True). The caller is
responsible for checking this condition for performance reasons.
"""
if self._thread_id is None:
return
thread_id = threading.get_ident()
if thread_id != self._thread_id:
raise RuntimeError(
"Non-thread-safe operation invoked on an event loop other "
"than the current one")
def call_soon_threadsafe(self, callback, *args, context=None):
"""Like call_soon(), but thread-safe."""
self._check_closed()
if self._debug:
self._check_callback(callback, 'call_soon_threadsafe')
handle = self._call_soon(callback, args, context)
if handle._source_traceback:
del handle._source_traceback[-1]
self._write_to_self()
return handle
def run_in_executor(self, executor, func, *args):
self._check_closed()
if self._debug:
self._check_callback(func, 'run_in_executor')
if executor is None:
executor = self._default_executor
if executor is None:
executor = concurrent.futures.ThreadPoolExecutor()
self._default_executor = executor
return futures.wrap_future(
executor.submit(func, *args), loop=self)
def set_default_executor(self, executor):
if not isinstance(executor, concurrent.futures.ThreadPoolExecutor):
warnings.warn(
'Using the default executor that is not an instance of '
'ThreadPoolExecutor is deprecated and will be prohibited '
'in Python 3.9',
DeprecationWarning, 2)
self._default_executor = executor
def _getaddrinfo_debug(self, host, port, family, type, proto, flags):
msg = [f"{host}:{port!r}"]
if family:
msg.append(f'family={family!r}')
if type:
msg.append(f'type={type!r}')
if proto:
msg.append(f'proto={proto!r}')
if flags:
msg.append(f'flags={flags!r}')
msg = ', '.join(msg)
logger.debug('Get address info %s', msg)
t0 = self.time()
addrinfo = socket.getaddrinfo(host, port, family, type, proto, flags)
dt = self.time() - t0
msg = f'Getting address info {msg} took {dt * 1e3:.3f}ms: {addrinfo!r}'
if dt >= self.slow_callback_duration:
logger.info(msg)
else:
logger.debug(msg)
return addrinfo
async def getaddrinfo(self, host, port, *,
family=0, type=0, proto=0, flags=0):
if self._debug:
getaddr_func = self._getaddrinfo_debug
else:
getaddr_func = socket.getaddrinfo
return await self.run_in_executor(
None, getaddr_func, host, port, family, type, proto, flags)
async def getnameinfo(self, sockaddr, flags=0):
return await self.run_in_executor(
None, socket.getnameinfo, sockaddr, flags)
async def sock_sendfile(self, sock, file, offset=0, count=None,
*, fallback=True):
if self._debug and sock.gettimeout() != 0:
raise ValueError("the socket must be non-blocking")
self._check_sendfile_params(sock, file, offset, count)
try:
return await self._sock_sendfile_native(sock, file,
offset, count)
except exceptions.SendfileNotAvailableError as exc:
if not fallback:
raise
return await self._sock_sendfile_fallback(sock, file,
offset, count)
async def _sock_sendfile_native(self, sock, file, offset, count):
# NB: sendfile syscall is not supported for SSL sockets and
# non-mmap files even if sendfile is supported by OS
raise exceptions.SendfileNotAvailableError(
f"syscall sendfile is not available for socket {sock!r} "
"and file {file!r} combination")
async def _sock_sendfile_fallback(self, sock, file, offset, count):
if offset:
file.seek(offset)
blocksize = (
min(count, constants.SENDFILE_FALLBACK_READBUFFER_SIZE)
if count else constants.SENDFILE_FALLBACK_READBUFFER_SIZE
)
buf = bytearray(blocksize)
total_sent = 0
try:
while True:
if count:
blocksize = min(count - total_sent, blocksize)
if blocksize <= 0:
break
view = memoryview(buf)[:blocksize]
read = await self.run_in_executor(None, file.readinto, view)
if not read:
break # EOF
await self.sock_sendall(sock, view)
total_sent += read
return total_sent
finally:
if total_sent > 0 and hasattr(file, 'seek'):
file.seek(offset + total_sent)
def _check_sendfile_params(self, sock, file, offset, count):
if 'b' not in getattr(file, 'mode', 'b'):
raise ValueError("file should be opened in binary mode")
if not sock.type == socket.SOCK_STREAM:
raise ValueError("only SOCK_STREAM type sockets are supported")
if count is not None:
if not isinstance(count, int):
raise TypeError(
"count must be a positive integer (got {!r})".format(count))
if count <= 0:
raise ValueError(
"count must be a positive integer (got {!r})".format(count))
if not isinstance(offset, int):
raise TypeError(
"offset must be a non-negative integer (got {!r})".format(
offset))
if offset < 0:
raise ValueError(
"offset must be a non-negative integer (got {!r})".format(
offset))
async def create_connection(
self, protocol_factory, host=None, port=None,
*, ssl=None, family=0,
proto=0, flags=0, sock=None,
local_addr=None, server_hostname=None,
ssl_handshake_timeout=None):
"""Connect to a TCP server.
Create a streaming transport connection to a given Internet host and
port: socket family AF_INET or socket.AF_INET6 depending on host (or
family if specified), socket type SOCK_STREAM. protocol_factory must be
a callable returning a protocol instance.
This method is a coroutine which will try to establish the connection
in the background. When successful, the coroutine returns a
(transport, protocol) pair.
"""
if server_hostname is not None and not ssl:
raise ValueError('server_hostname is only meaningful with ssl')
if server_hostname is None and ssl:
# Use host as default for server_hostname. It is an error
# if host is empty or not set, e.g. when an
# already-connected socket was passed or when only a port
# is given. To avoid this error, you can pass
# server_hostname='' -- this will bypass the hostname
# check. (This also means that if host is a numeric
# IP/IPv6 address, we will attempt to verify that exact
# address; this will probably fail, but it is possible to
# create a certificate for a specific IP address, so we
# don't judge it here.)
if not host:
raise ValueError('You must set server_hostname '
'when using ssl without a host')
server_hostname = host
if ssl_handshake_timeout is not None and not ssl:
raise ValueError(
'ssl_handshake_timeout is only meaningful with ssl')
if host is not None or port is not None:
if sock is not None:
raise ValueError(
'host/port and sock can not be specified at the same time')
infos = await self._ensure_resolved(
(host, port), family=family,
type=socket.SOCK_STREAM, proto=proto, flags=flags, loop=self)
if not infos:
raise OSError('getaddrinfo() returned empty list')
if local_addr is not None:
laddr_infos = await self._ensure_resolved(
local_addr, family=family,
type=socket.SOCK_STREAM, proto=proto,
flags=flags, loop=self)
if not laddr_infos:
raise OSError('getaddrinfo() returned empty list')
exceptions = []
for family, type, proto, cname, address in infos:
try:
sock = socket.socket(family=family, type=type, proto=proto)
sock.setblocking(False)
if local_addr is not None:
for _, _, _, _, laddr in laddr_infos:
try:
sock.bind(laddr)
break
except OSError as exc:
msg = (
f'error while attempting to bind on '
f'address {laddr!r}: '
f'{exc.strerror.lower()}'
)
exc = OSError(exc.errno, msg)
exceptions.append(exc)
else:
sock.close()
sock = None
continue
if self._debug:
logger.debug("connect %r to %r", sock, address)
await self.sock_connect(sock, address)
except OSError as exc:
if sock is not None:
sock.close()
exceptions.append(exc)
except:
if sock is not None:
sock.close()
raise
else:
break
else:
if len(exceptions) == 1:
raise exceptions[0]
else:
# If they all have the same str(), raise one.
model = str(exceptions[0])
if all(str(exc) == model for exc in exceptions):
raise exceptions[0]
# Raise a combined exception so the user can see all
# the various error messages.
raise OSError('Multiple exceptions: {}'.format(
', '.join(str(exc) for exc in exceptions)))
else:
if sock is None:
raise ValueError(
'host and port were not specified and no sock was specified')
if sock.type != socket.SOCK_STREAM:
# We allow AF_INET, AF_INET6, AF_UNIX as long as they
# are SOCK_STREAM.
# We support passing AF_UNIX sockets even though we have
# a dedicated API for that: create_unix_connection.
# Disallowing AF_UNIX in this method, breaks backwards
# compatibility.
raise ValueError(
f'A Stream Socket was expected, got {sock!r}')
transport, protocol = await self._create_connection_transport(
sock, protocol_factory, ssl, server_hostname,
ssl_handshake_timeout=ssl_handshake_timeout)
if self._debug:
# Get the socket from the transport because SSL transport closes
# the old socket and creates a new SSL socket
sock = transport.get_extra_info('socket')
logger.debug("%r connected to %s:%r: (%r, %r)",
sock, host, port, transport, protocol)
return transport, protocol
async def _create_connection_transport(
self, sock, protocol_factory, ssl,
server_hostname, server_side=False,
ssl_handshake_timeout=None):
sock.setblocking(False)
protocol = protocol_factory()
waiter = self.create_future()
if ssl:
sslcontext = None if isinstance(ssl, bool) else ssl
transport = self._make_ssl_transport(
sock, protocol, sslcontext, waiter,
server_side=server_side, server_hostname=server_hostname,
ssl_handshake_timeout=ssl_handshake_timeout)
else:
transport = self._make_socket_transport(sock, protocol, waiter)
try:
await waiter
except:
transport.close()
raise
return transport, protocol
async def sendfile(self, transport, file, offset=0, count=None,
*, fallback=True):
"""Send a file to transport.
Return the total number of bytes which were sent.
The method uses high-performance os.sendfile if available.
file must be a regular file object opened in binary mode.
offset tells from where to start reading the file. If specified,
count is the total number of bytes to transmit as opposed to
sending the file until EOF is reached. File position is updated on
return, and also in case of error, in which case file.tell()
can be used to figure out the number of bytes
which were sent.
fallback set to True makes asyncio manually read and send
the file when the platform does not support the sendfile syscall
(e.g. Windows or SSL socket on Unix).
Raise SendfileNotAvailableError if the system does not support
sendfile syscall and fallback is False.
"""
if transport.is_closing():
raise RuntimeError("Transport is closing")
mode = getattr(transport, '_sendfile_compatible',
constants._SendfileMode.UNSUPPORTED)
if mode is constants._SendfileMode.UNSUPPORTED:
raise RuntimeError(
f"sendfile is not supported for transport {transport!r}")
if mode is constants._SendfileMode.TRY_NATIVE:
try:
return await self._sendfile_native(transport, file,
offset, count)
except exceptions.SendfileNotAvailableError as exc:
if not fallback:
raise
if not fallback:
raise RuntimeError(
f"fallback is disabled and native sendfile is not "
f"supported for transport {transport!r}")
return await self._sendfile_fallback(transport, file,
offset, count)
async def _sendfile_native(self, transp, file, offset, count):
raise exceptions.SendfileNotAvailableError(
"sendfile syscall is not supported")
async def _sendfile_fallback(self, transp, file, offset, count):
if offset:
file.seek(offset)
blocksize = min(count, 16384) if count else 16384
buf = bytearray(blocksize)
total_sent = 0
proto = _SendfileFallbackProtocol(transp)
try:
while True:
if count:
blocksize = min(count - total_sent, blocksize)
if blocksize <= 0:
return total_sent
view = memoryview(buf)[:blocksize]
read = file.readinto(view)
if not read:
return total_sent # EOF
await proto.drain()
transp.write(view)
total_sent += read
finally:
if total_sent > 0 and hasattr(file, 'seek'):
file.seek(offset + total_sent)
await proto.restore()
async def start_tls(self, transport, protocol, sslcontext, *,
server_side=False,
server_hostname=None,
ssl_handshake_timeout=None):
"""Upgrade transport to TLS.
Return a new transport that *protocol* should start using
immediately.
"""
if ssl is None:
raise RuntimeError('Python ssl module is not available')
if not isinstance(sslcontext, ssl.SSLContext):
raise TypeError(
f'sslcontext is expected to be an instance of ssl.SSLContext, '
f'got {sslcontext!r}')
if not getattr(transport, '_start_tls_compatible', False):
raise TypeError(
f'transport {transport!r} is not supported by start_tls()')
waiter = self.create_future()
ssl_protocol = sslproto.SSLProtocol(
self, protocol, sslcontext, waiter,
server_side, server_hostname,
ssl_handshake_timeout=ssl_handshake_timeout,
call_connection_made=False)
# Pause early so that "ssl_protocol.data_received()" doesn't
# have a chance to get called before "ssl_protocol.connection_made()".
transport.pause_reading()
transport.set_protocol(ssl_protocol)
conmade_cb = self.call_soon(ssl_protocol.connection_made, transport)
resume_cb = self.call_soon(transport.resume_reading)
try:
await waiter
except Exception:
transport.close()
conmade_cb.cancel()
resume_cb.cancel()
raise
return ssl_protocol._app_transport
async def create_datagram_endpoint(self, protocol_factory,
local_addr=None, remote_addr=None, *,
family=0, proto=0, flags=0,
reuse_address=None, reuse_port=None,
allow_broadcast=None, sock=None):
"""Create datagram connection."""
if sock is not None:
if sock.type != socket.SOCK_DGRAM:
raise ValueError(
f'A UDP Socket was expected, got {sock!r}')
if (local_addr or remote_addr or
family or proto or flags or
reuse_address or reuse_port or allow_broadcast):
# show the problematic kwargs in exception msg
opts = dict(local_addr=local_addr, remote_addr=remote_addr,
family=family, proto=proto, flags=flags,
reuse_address=reuse_address, reuse_port=reuse_port,
allow_broadcast=allow_broadcast)
problems = ', '.join(f'{k}={v}' for k, v in opts.items() if v)
raise ValueError(
f'socket modifier keyword arguments can not be used '
f'when sock is specified. ({problems})')
sock.setblocking(False)
r_addr = None
else:
if not (local_addr or remote_addr):
if family == 0:
raise ValueError('unexpected address family')
addr_pairs_info = (((family, proto), (None, None)),)
elif hasattr(socket, 'AF_UNIX') and family == socket.AF_UNIX:
for addr in (local_addr, remote_addr):
if addr is not None and not isinstance(addr, str):
raise TypeError('string is expected')
addr_pairs_info = (((family, proto),
(local_addr, remote_addr)), )
else:
# join address by (family, protocol)
addr_infos = collections.OrderedDict()
for idx, addr in ((0, local_addr), (1, remote_addr)):
if addr is not None:
assert isinstance(addr, tuple) and len(addr) == 2, (
'2-tuple is expected')
infos = await self._ensure_resolved(
addr, family=family, type=socket.SOCK_DGRAM,
proto=proto, flags=flags, loop=self)
if not infos:
raise OSError('getaddrinfo() returned empty list')
for fam, _, pro, _, address in infos:
key = (fam, pro)
if key not in addr_infos:
addr_infos[key] = [None, None]
addr_infos[key][idx] = address
# each addr has to have info for each (family, proto) pair
addr_pairs_info = [
(key, addr_pair) for key, addr_pair in addr_infos.items()
if not ((local_addr and addr_pair[0] is None) or
(remote_addr and addr_pair[1] is None))]
if not addr_pairs_info:
raise ValueError('can not get address information')
exceptions = []
if reuse_address is None:
reuse_address = os.name == 'posix' and sys.platform != 'cygwin'
for ((family, proto),
(local_address, remote_address)) in addr_pairs_info:
sock = None
r_addr = None
try:
sock = socket.socket(
family=family, type=socket.SOCK_DGRAM, proto=proto)
if reuse_address:
sock.setsockopt(
socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
if reuse_port:
_set_reuseport(sock)
if allow_broadcast:
sock.setsockopt(
socket.SOL_SOCKET, socket.SO_BROADCAST, 1)
sock.setblocking(False)
if local_addr:
sock.bind(local_address)
if remote_addr:
await self.sock_connect(sock, remote_address)
r_addr = remote_address
except OSError as exc:
if sock is not None:
sock.close()
exceptions.append(exc)
except:
if sock is not None:
sock.close()
raise
else:
break
else:
raise exceptions[0]
protocol = protocol_factory()
waiter = self.create_future()
transport = self._make_datagram_transport(
sock, protocol, r_addr, waiter)
if self._debug:
if local_addr:
logger.info("Datagram endpoint local_addr=%r remote_addr=%r "
"created: (%r, %r)",
local_addr, remote_addr, transport, protocol)
else:
logger.debug("Datagram endpoint remote_addr=%r created: "
"(%r, %r)",
remote_addr, transport, protocol)
try:
await waiter
except:
transport.close()
raise
return transport, protocol
async def _ensure_resolved(self, address, *,
family=0, type=socket.SOCK_STREAM,
proto=0, flags=0, loop):
host, port = address[:2]
info = _ipaddr_info(host, port, family, type, proto)
if info is not None:
# "host" is already a resolved IP.
return [info]
else:
return await loop.getaddrinfo(host, port, family=family, type=type,
proto=proto, flags=flags)
async def _create_server_getaddrinfo(self, host, port, family, flags):
infos = await self._ensure_resolved((host, port), family=family,
type=socket.SOCK_STREAM,
flags=flags, loop=self)
if not infos:
raise OSError(f'getaddrinfo({host!r}) returned empty list')
return infos
async def create_server(
self, protocol_factory, host=None, port=None,
*,
family=socket.AF_UNSPEC,
flags=socket.AI_PASSIVE,
sock=None,
backlog=100,
ssl=None,
reuse_address=None,
reuse_port=None,
ssl_handshake_timeout=None,
start_serving=True):
"""Create a TCP server.
The host parameter can be a string; in that case the TCP server is
bound to host and port.
The host parameter can also be a sequence of strings and in that case
the TCP server is bound to all hosts of the sequence. If a host
appears multiple times (possibly indirectly e.g. when hostnames
resolve to the same IP address), the server is only bound once to that
host.
Return a Server object which can be used to stop the service.
This method is a coroutine.
"""
if isinstance(ssl, bool):
raise TypeError('ssl argument must be an SSLContext or None')
if ssl_handshake_timeout is not None and ssl is None:
raise ValueError(
'ssl_handshake_timeout is only meaningful with ssl')
if host is not None or port is not None:
if sock is not None:
raise ValueError(
'host/port and sock can not be specified at the same time')
if reuse_address is None:
reuse_address = os.name == 'posix' and sys.platform != 'cygwin'
sockets = []
if host == '':
hosts = [None]
elif (isinstance(host, str) or
not isinstance(host, collections.abc.Iterable)):
hosts = [host]
else:
hosts = host
fs = [self._create_server_getaddrinfo(host, port, family=family,
flags=flags)
for host in hosts]
infos = await tasks.gather(*fs, loop=self)
infos = set(itertools.chain.from_iterable(infos))
completed = False
try:
for res in infos:
af, socktype, proto, canonname, sa = res
try:
sock = socket.socket(af, socktype, proto)
except socket.error:
# Assume it's a bad family/type/protocol combination.
if self._debug:
logger.warning('create_server() failed to create '
'socket.socket(%r, %r, %r)',
af, socktype, proto, exc_info=True)
continue
sockets.append(sock)
if reuse_address:
sock.setsockopt(
socket.SOL_SOCKET, socket.SO_REUSEADDR, True)
if reuse_port:
_set_reuseport(sock)
# Disable IPv4/IPv6 dual stack support (enabled by
# default on Linux) which makes a single socket
# listen on both address families.
if (_HAS_IPv6 and
af == socket.AF_INET6 and
hasattr(socket, 'IPPROTO_IPV6')):
sock.setsockopt(socket.IPPROTO_IPV6,
socket.IPV6_V6ONLY,
True)
try:
sock.bind(sa)
except OSError as err:
raise OSError(err.errno, 'error while attempting '
'to bind on address %r: %s'
% (sa, err.strerror.lower())) from None
completed = True
finally:
if not completed:
for sock in sockets:
sock.close()
else:
if sock is None:
raise ValueError('Neither host/port nor sock were specified')
if sock.type != socket.SOCK_STREAM:
raise ValueError(f'A Stream Socket was expected, got {sock!r}')
sockets = [sock]
for sock in sockets:
sock.setblocking(False)
server = Server(self, sockets, protocol_factory,
ssl, backlog, ssl_handshake_timeout)
if start_serving:
server._start_serving()
# Skip one loop iteration so that all 'loop.add_reader'
# go through.
await tasks.sleep(0, loop=self)
if self._debug:
logger.info("%r is serving", server)
return server
async def connect_accepted_socket(
self, protocol_factory, sock,
*, ssl=None,
ssl_handshake_timeout=None):
"""Handle an accepted connection.
This is used by servers that accept connections outside of
asyncio but that use asyncio to handle connections.
This method is a coroutine. When completed, the coroutine
returns a (transport, protocol) pair.
"""
if sock.type != socket.SOCK_STREAM:
raise ValueError(f'A Stream Socket was expected, got {sock!r}')
if ssl_handshake_timeout is not None and not ssl:
raise ValueError(
'ssl_handshake_timeout is only meaningful with ssl')
transport, protocol = await self._create_connection_transport(
sock, protocol_factory, ssl, '', server_side=True,
ssl_handshake_timeout=ssl_handshake_timeout)
if self._debug:
# Get the socket from the transport because SSL transport closes
# the old socket and creates a new SSL socket
sock = transport.get_extra_info('socket')
logger.debug("%r handled: (%r, %r)", sock, transport, protocol)
return transport, protocol
async def connect_read_pipe(self, protocol_factory, pipe):
protocol = protocol_factory()
waiter = self.create_future()
transport = self._make_read_pipe_transport(pipe, protocol, waiter)
try:
await waiter
except:
transport.close()
raise
if self._debug:
logger.debug('Read pipe %r connected: (%r, %r)',
pipe.fileno(), transport, protocol)
return transport, protocol
async def connect_write_pipe(self, protocol_factory, pipe):
protocol = protocol_factory()
waiter = self.create_future()
transport = self._make_write_pipe_transport(pipe, protocol, waiter)
try:
await waiter
except:
transport.close()
raise
if self._debug:
logger.debug('Write pipe %r connected: (%r, %r)',
pipe.fileno(), transport, protocol)
return transport, protocol
def _log_subprocess(self, msg, stdin, stdout, stderr):
info = [msg]
if stdin is not None:
info.append(f'stdin={_format_pipe(stdin)}')
if stdout is not None and stderr == subprocess.STDOUT:
info.append(f'stdout=stderr={_format_pipe(stdout)}')
else:
if stdout is not None:
info.append(f'stdout={_format_pipe(stdout)}')
if stderr is not None:
info.append(f'stderr={_format_pipe(stderr)}')
logger.debug(' '.join(info))
async def subprocess_shell(self, protocol_factory, cmd, *,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
universal_newlines=False,
shell=True, bufsize=0,
**kwargs):
if not isinstance(cmd, (bytes, str)):
raise ValueError("cmd must be a string")
if universal_newlines:
raise ValueError("universal_newlines must be False")
if not shell:
raise ValueError("shell must be True")
if bufsize != 0:
raise ValueError("bufsize must be 0")
protocol = protocol_factory()
debug_log = None
if self._debug:
# don't log parameters: they may contain sensitive information
# (password) and may be too long
debug_log = 'run shell command %r' % cmd
self._log_subprocess(debug_log, stdin, stdout, stderr)
transport = await self._make_subprocess_transport(
protocol, cmd, True, stdin, stdout, stderr, bufsize, **kwargs)
if self._debug and debug_log is not None:
logger.info('%s: %r', debug_log, transport)
return transport, protocol
async def subprocess_exec(self, protocol_factory, program, *args,
stdin=subprocess.PIPE, stdout=subprocess.PIPE,
stderr=subprocess.PIPE, universal_newlines=False,
shell=False, bufsize=0, **kwargs):
if universal_newlines:
raise ValueError("universal_newlines must be False")
if shell:
raise ValueError("shell must be False")
if bufsize != 0:
raise ValueError("bufsize must be 0")
popen_args = (program,) + args
for arg in popen_args:
if not isinstance(arg, (str, bytes)):
raise TypeError(
f"program arguments must be a bytes or text string, "
f"not {type(arg).__name__}")
protocol = protocol_factory()
debug_log = None
if self._debug:
# don't log parameters: they may contain sensitive information
# (password) and may be too long
debug_log = f'execute program {program!r}'
self._log_subprocess(debug_log, stdin, stdout, stderr)
transport = await self._make_subprocess_transport(
protocol, popen_args, False, stdin, stdout, stderr,
bufsize, **kwargs)
if self._debug and debug_log is not None:
logger.info('%s: %r', debug_log, transport)
return transport, protocol
def get_exception_handler(self):
"""Return an exception handler, or None if the default one is in use.
"""
return self._exception_handler
def set_exception_handler(self, handler):
"""Set handler as the new event loop exception handler.
If handler is None, the default exception handler will
be set.
If handler is a callable object, it should have a
signature matching '(loop, context)', where 'loop'
will be a reference to the active event loop, 'context'
will be a dict object (see `call_exception_handler()`
documentation for details about context).
"""
if handler is not None and not callable(handler):
raise TypeError(f'A callable object or None is expected, '
f'got {handler!r}')
self._exception_handler = handler
def default_exception_handler(self, context):
"""Default exception handler.
This is called when an exception occurs and no exception
handler is set, and can be called by a custom exception
handler that wants to defer to the default behavior.
This default handler logs the error message and other
context-dependent information. In debug mode, a truncated
stack trace is also appended showing where the given object
(e.g. a handle or future or task) was created, if any.
The context parameter has the same meaning as in
`call_exception_handler()`.
"""
message = context.get('message')
if not message:
message = 'Unhandled exception in event loop'
exception = context.get('exception')
if exception is not None:
exc_info = (type(exception), exception, exception.__traceback__)
else:
exc_info = False
if ('source_traceback' not in context and
self._current_handle is not None and
self._current_handle._source_traceback):
context['handle_traceback'] = \
self._current_handle._source_traceback
log_lines = [message]
for key in sorted(context):
if key in {'message', 'exception'}:
continue
value = context[key]
if key == 'source_traceback':
tb = ''.join(traceback.format_list(value))
value = 'Object created at (most recent call last):\n'
value += tb.rstrip()
elif key == 'handle_traceback':
tb = ''.join(traceback.format_list(value))
value = 'Handle created at (most recent call last):\n'
value += tb.rstrip()
else:
value = repr(value)
log_lines.append(f'{key}: {value}')
logger.error('\n'.join(log_lines), exc_info=exc_info)
def call_exception_handler(self, context):
"""Call the current event loop's exception handler.
The context argument is a dict containing the following keys:
- 'message': Error message;
- 'exception' (optional): Exception object;
- 'future' (optional): Future instance;
- 'task' (optional): Task instance;
- 'handle' (optional): Handle instance;
- 'protocol' (optional): Protocol instance;
- 'transport' (optional): Transport instance;
- 'socket' (optional): Socket instance;
- 'asyncgen' (optional): Asynchronous generator that caused
the exception.
New keys may be introduced in the future.
Note: do not overload this method in an event loop subclass.
For custom exception handling, use the
`set_exception_handler()` method.
"""
if self._exception_handler is None:
try:
self.default_exception_handler(context)
except Exception:
# Second protection layer for unexpected errors
# in the default implementation, as well as for subclassed
# event loops with overloaded "default_exception_handler".
logger.error('Exception in default exception handler',
exc_info=True)
else:
try:
self._exception_handler(self, context)
except Exception as exc:
# Exception in the user set custom exception handler.
try:
# Let's try default handler.
self.default_exception_handler({
'message': 'Unhandled error in exception handler',
'exception': exc,
'context': context,
})
except Exception:
# Guard 'default_exception_handler' in case it is
# overloaded.
logger.error('Exception in default exception handler '
'while handling an unexpected error '
'in custom exception handler',
exc_info=True)
def _add_callback(self, handle):
"""Add a Handle to _scheduled (TimerHandle) or _ready."""
assert isinstance(handle, events.Handle), 'A Handle is required here'
if handle._cancelled:
return
assert not isinstance(handle, events.TimerHandle)
self._ready.append(handle)
def _add_callback_signalsafe(self, handle):
"""Like _add_callback() but called from a signal handler."""
self._add_callback(handle)
self._write_to_self()
def _timer_handle_cancelled(self, handle):
"""Notification that a TimerHandle has been cancelled."""
if handle._scheduled:
self._timer_cancelled_count += 1
def _run_once(self):
"""Run one full iteration of the event loop.
This calls all currently ready callbacks, polls for I/O,
schedules the resulting callbacks, and finally schedules
'call_later' callbacks.
"""
sched_count = len(self._scheduled)
if (sched_count > _MIN_SCHEDULED_TIMER_HANDLES and
self._timer_cancelled_count / sched_count >
_MIN_CANCELLED_TIMER_HANDLES_FRACTION):
# Remove delayed calls that were cancelled if their number
# is too high
new_scheduled = []
for handle in self._scheduled:
if handle._cancelled:
handle._scheduled = False
else:
new_scheduled.append(handle)
heapq.heapify(new_scheduled)
self._scheduled = new_scheduled
self._timer_cancelled_count = 0
else:
# Remove delayed calls that were cancelled from head of queue.
while self._scheduled and self._scheduled[0]._cancelled:
self._timer_cancelled_count -= 1
handle = heapq.heappop(self._scheduled)
handle._scheduled = False
timeout = None
if self._ready or self._stopping:
timeout = 0
elif self._scheduled:
# Compute the desired timeout.
when = self._scheduled[0]._when
timeout = min(max(0, when - self.time()), MAXIMUM_SELECT_TIMEOUT)
event_list = self._selector.select(timeout)
self._process_events(event_list)
# Handle 'later' callbacks that are ready.
end_time = self.time() + self._clock_resolution
while self._scheduled:
handle = self._scheduled[0]
if handle._when >= end_time:
break
handle = heapq.heappop(self._scheduled)
handle._scheduled = False
self._ready.append(handle)
# This is the only place where callbacks are actually *called*.
# All other places just add them to ready.
# Note: We run all currently scheduled callbacks, but not any
# callbacks scheduled by callbacks run this time around --
# they will be run the next time (after another I/O poll).
# Use an idiom that is thread-safe without using locks.
ntodo = len(self._ready)
for i in range(ntodo):
handle = self._ready.popleft()
if handle._cancelled:
continue
if self._debug:
try:
self._current_handle = handle
t0 = self.time()
handle._run()
dt = self.time() - t0
if dt >= self.slow_callback_duration:
logger.warning('Executing %s took %.3f seconds',
_format_handle(handle), dt)
finally:
self._current_handle = None
else:
handle._run()
handle = None # Needed to break cycles when an exception occurs.
def _set_coroutine_origin_tracking(self, enabled):
if bool(enabled) == bool(self._coroutine_origin_tracking_enabled):
return
if enabled:
self._coroutine_origin_tracking_saved_depth = (
sys.get_coroutine_origin_tracking_depth())
sys.set_coroutine_origin_tracking_depth(
constants.DEBUG_STACK_DEPTH)
else:
sys.set_coroutine_origin_tracking_depth(
self._coroutine_origin_tracking_saved_depth)
self._coroutine_origin_tracking_enabled = enabled
def get_debug(self):
return self._debug
def set_debug(self, enabled):
self._debug = enabled
if self.is_running():
self.call_soon_threadsafe(self._set_coroutine_origin_tracking, enabled)
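The event-loop file above documents several of its public scheduling and execution APIs (run_until_complete, call_later, run_in_executor, shutdown_asyncgens) only through docstrings. The short sketch below is not part of that file; it shows one plausible way those calls fit together through the public asyncio entry points. The blocking_io helper and the 0.05 second delay are invented for illustration.

import asyncio
import time


def blocking_io():
    # Runs in the default ThreadPoolExecutor that run_in_executor() creates lazily.
    time.sleep(0.1)
    return "io done"


async def main(loop):
    # Schedule a plain callback roughly 50 ms from now, relative to loop.time().
    loop.call_later(0.05, print, "timer fired")
    # Off-load a blocking call to the default executor and await its result.
    result = await loop.run_in_executor(None, blocking_io)
    print(result)


loop = asyncio.new_event_loop()
try:
    # Wraps the coroutine in a Task and runs the loop until it completes.
    loop.run_until_complete(main(loop))
finally:
    # Finalize any live async generators, then close the loop.
    loop.run_until_complete(loop.shutdown_asyncgens())
    loop.close()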
| avg_line_length: 38.519619 | max_line_length: 83 | alphanum_fraction: 0.572113 |

| hexsha: ffe9e60d5fe6f8ad55dec5f0f516007ad7d3483a | size: 2,255 | ext: py | lang: Python |
| max_stars_repo_path: code/ASR/Adapter/balanced_sampler.py | max_stars_repo_name: lw0517/transferlearning | max_stars_repo_head_hexsha: 230df8850b09f896a799ad865072c0164f45fadc | max_stars_repo_licenses: ["MIT"] | max_stars_count: 9,657 | max_stars_repo_stars_event_min_datetime: 2017-05-01T03:29:35.000Z | max_stars_repo_stars_event_max_datetime: 2022-03-31T21:25:30.000Z |
| max_issues_repo_path: code/ASR/Adapter/balanced_sampler.py | max_issues_repo_name: xiaohuihui-com/transferlearning | max_issues_repo_head_hexsha: 17583db86db19709ff483a24590f0d5b88e25fe5 | max_issues_repo_licenses: ["MIT"] | max_issues_count: 262 | max_issues_repo_issues_event_min_datetime: 2017-09-16T09:33:02.000Z | max_issues_repo_issues_event_max_datetime: 2022-03-30T05:08:45.000Z |
| max_forks_repo_path: code/ASR/Adapter/balanced_sampler.py | max_forks_repo_name: xiaohuihui-com/transferlearning | max_forks_repo_head_hexsha: 17583db86db19709ff483a24590f0d5b88e25fe5 | max_forks_repo_licenses: ["MIT"] | max_forks_count: 3,273 | max_forks_repo_forks_event_min_datetime: 2017-05-01T06:28:31.000Z | max_forks_repo_forks_event_max_datetime: 2022-03-31T09:57:48.000Z |
import torch
import torch.utils.data
import random
import collections
import logging
import numpy as np
from torch.utils.data.sampler import BatchSampler, WeightedRandomSampler
# https://github.com/khornlund/pytorch-balanced-sampler
class BalancedBatchSampler(torch.utils.data.sampler.Sampler):
'''
https://github.com/galatolofederico/pytorch-balanced-batch/blob/master/sampler.py
'''
def __init__(self, dataset, labels=None):
self.labels = labels
self.dataset = collections.defaultdict(list)
self.balanced_max = 0
# Save all the indices for all the classes
for idx in range(0, len(dataset)):
label = self._get_label(dataset, idx)
#break
self.dataset[label].append(idx)
self.balanced_max = max(self.balanced_max, len(self.dataset[label]))
#len(self.dataset[label]) if len(self.dataset[label]) > self.balanced_max else self.balanced_max
# Oversample the classes with fewer elements than the max
for label in self.dataset:
while len(self.dataset[label]) < self.balanced_max:
self.dataset[label].append(random.choice(self.dataset[label]))
self.keys = list(self.dataset.keys())
logging.warning(self.keys)
self.currentkey = 0
self.indices = [-1] * len(self.keys)
def __iter__(self):
while self.indices[self.currentkey] < self.balanced_max - 1:
self.indices[self.currentkey] += 1
yield self.dataset[self.keys[self.currentkey]][self.indices[self.currentkey]]
self.currentkey = (self.currentkey + 1) % len(self.keys)
self.indices = [-1] * len(self.keys)
def _get_label(self, dataset, idx):
#logging.warning(len(dataset))
# logging.warning(dataset[idx])
return dataset[idx][0][1]['category']#[1]['output'][0]['token'].split(' ')[1]
# def _get_label(self, dataset, idx, labels = None):
# if self.labels is not None:
# return self.labels[idx].item()
# else:
# raise Exception("You should pass the tensor of labels to the constructor as second argument")
def __len__(self):
return self.balanced_max * len(self.keys)
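BalancedBatchSampler above oversamples minority classes up to the size of the largest class and yields plain indices, so it plugs into a DataLoader through the sampler argument (despite its name it is not a batch sampler). The sketch below is a minimal, hypothetical usage, not part of the repository file: ToyDataset and its item layout are invented so that _get_label() can read item[0][1]['category'].

import torch
from torch.utils.data import DataLoader, Dataset


class ToyDataset(Dataset):
    def __init__(self):
        # 3 samples of class "a" and 1 of class "b", so "b" gets oversampled to 3.
        self.items = [("a", 0.1), ("a", 0.2), ("a", 0.3), ("b", 0.4)]

    def __len__(self):
        return len(self.items)

    def __getitem__(self, idx):
        label, value = self.items[idx]
        # Mimic the (key, {'category': ...}) structure that _get_label() indexes.
        return (f"utt{idx}", {"category": label}), torch.tensor(value)


dataset = ToyDataset()
sampler = BalancedBatchSampler(dataset)
# The sampler yields single indices, hence sampler= rather than batch_sampler=.
loader = DataLoader(dataset, batch_size=2, sampler=sampler)
for (_, meta), feats in loader:
    print(meta["category"], feats)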
| avg_line_length: 40.267857 | max_line_length: 108 | alphanum_fraction: 0.644789 |

| hexsha: a713c1ead95cc87d472a75c70e163ec1253f42be | size: 369 | ext: py | lang: Python |
| max_stars_repo_path: Programas de exercicios/exercicio060.py | max_stars_repo_name: Elvis-Almeida/Python | max_stars_repo_head_hexsha: 91f1427f2aea37f7019b185dc6cb77edc8babc03 | max_stars_repo_licenses: ["MIT"] | max_stars_count: null | max_stars_repo_stars_event_min_datetime: null | max_stars_repo_stars_event_max_datetime: null |
| max_issues_repo_path: Programas de exercicios/exercicio060.py | max_issues_repo_name: Elvis-Almeida/Python | max_issues_repo_head_hexsha: 91f1427f2aea37f7019b185dc6cb77edc8babc03 | max_issues_repo_licenses: ["MIT"] | max_issues_count: null | max_issues_repo_issues_event_min_datetime: null | max_issues_repo_issues_event_max_datetime: null |
| max_forks_repo_path: Programas de exercicios/exercicio060.py | max_forks_repo_name: Elvis-Almeida/Python | max_forks_repo_head_hexsha: 91f1427f2aea37f7019b185dc6cb77edc8babc03 | max_forks_repo_licenses: ["MIT"] | max_forks_count: null | max_forks_repo_forks_event_min_datetime: null | max_forks_repo_forks_event_max_datetime: null |
# computing the factorial of a number read from the keyboard
'''import math
n = int(input('Enter a value: '))
print('Its factorial is {}'.format(math.factorial(n)))'''
n1 = int(input('Enter a value: '))
n = n1
r = 1   # running product
c = n1  # counter used only for printing the terms
print('Computing...')
print('{}! = '.format(n1), end='')
while n > 0:
    print(c, end=' ')
    print('x ' if c > 1 else '= ', end='')
    c -= 1
    r = r * n
    n = n - 1
print(r)
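The exercise above builds the factorial as a running product while printing each term. As a quick cross-check, not part of the exercise file, the same product can be compared against math.factorial for a few values:

import math

for value in (0, 1, 5, 7):
    product = 1
    # Multiply value, value-1, ..., 1 exactly as the loop above does.
    for k in range(value, 0, -1):
        product *= k
    assert product == math.factorial(value)
    print(f"{value}! = {product}")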
| avg_line_length: 20.5 | max_line_length: 55 | alphanum_fraction: 0.531165 |

| hexsha: 64259f54be4b16e5e924d53fb3c8f8705ab12bd6 | size: 20,889 | ext: py | lang: Python |
| max_stars_repo_path: sdk/databoxedge/azure-mgmt-databoxedge/azure/mgmt/databoxedge/v2019_03_01/aio/operations/_bandwidth_schedules_operations.py | max_stars_repo_name: GoWang/azure-sdk-for-python | max_stars_repo_head_hexsha: f241e3734a50953c2a37c10d2d84eb4c013b3ba0 | max_stars_repo_licenses: ["MIT"] | max_stars_count: 2 | max_stars_repo_stars_event_min_datetime: 2021-03-24T06:26:11.000Z | max_stars_repo_stars_event_max_datetime: 2021-04-18T15:55:59.000Z |
| max_issues_repo_path: sdk/databoxedge/azure-mgmt-databoxedge/azure/mgmt/databoxedge/v2019_03_01/aio/operations/_bandwidth_schedules_operations.py | max_issues_repo_name: GoWang/azure-sdk-for-python | max_issues_repo_head_hexsha: f241e3734a50953c2a37c10d2d84eb4c013b3ba0 | max_issues_repo_licenses: ["MIT"] | max_issues_count: 2 | max_issues_repo_issues_event_min_datetime: 2021-11-03T06:10:36.000Z | max_issues_repo_issues_event_max_datetime: 2021-12-01T06:29:39.000Z |
| max_forks_repo_path: sdk/databoxedge/azure-mgmt-databoxedge/azure/mgmt/databoxedge/v2019_03_01/aio/operations/_bandwidth_schedules_operations.py | max_forks_repo_name: GoWang/azure-sdk-for-python | max_forks_repo_head_hexsha: f241e3734a50953c2a37c10d2d84eb4c013b3ba0 | max_forks_repo_licenses: ["MIT"] | max_forks_count: 1 | max_forks_repo_forks_event_min_datetime: 2021-05-19T02:55:10.000Z | max_forks_repo_forks_event_max_datetime: 2021-05-19T02:55:10.000Z |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union
import warnings
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
from ... import models as _models
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class BandwidthSchedulesOperations:
"""BandwidthSchedulesOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.databoxedge.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer) -> None:
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
def list_by_data_box_edge_device(
self,
device_name: str,
resource_group_name: str,
**kwargs
) -> AsyncIterable["_models.BandwidthSchedulesList"]:
"""Gets all the bandwidth schedules for a data box edge/gateway device.
:param device_name: The device name.
:type device_name: str
:param resource_group_name: The resource group name.
:type resource_group_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either BandwidthSchedulesList or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.databoxedge.models.BandwidthSchedulesList]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.BandwidthSchedulesList"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-03-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list_by_data_box_edge_device.metadata['url'] # type: ignore
path_format_arguments = {
'deviceName': self._serialize.url("device_name", device_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('BandwidthSchedulesList', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list_by_data_box_edge_device.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataBoxEdge/dataBoxEdgeDevices/{deviceName}/bandwidthSchedules'} # type: ignore
async def get(
self,
device_name: str,
name: str,
resource_group_name: str,
**kwargs
) -> "_models.BandwidthSchedule":
"""Gets the properties of the specified bandwidth schedule.
:param device_name: The device name.
:type device_name: str
:param name: The bandwidth schedule name.
:type name: str
:param resource_group_name: The resource group name.
:type resource_group_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: BandwidthSchedule, or the result of cls(response)
:rtype: ~azure.mgmt.databoxedge.models.BandwidthSchedule
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.BandwidthSchedule"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-03-01"
accept = "application/json"
# Construct URL
url = self.get.metadata['url'] # type: ignore
path_format_arguments = {
'deviceName': self._serialize.url("device_name", device_name, 'str'),
'name': self._serialize.url("name", name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('BandwidthSchedule', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataBoxEdge/dataBoxEdgeDevices/{deviceName}/bandwidthSchedules/{name}'} # type: ignore
async def _create_or_update_initial(
self,
device_name: str,
name: str,
resource_group_name: str,
parameters: "_models.BandwidthSchedule",
**kwargs
) -> Optional["_models.BandwidthSchedule"]:
cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.BandwidthSchedule"]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-03-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self._create_or_update_initial.metadata['url'] # type: ignore
path_format_arguments = {
'deviceName': self._serialize.url("device_name", device_name, 'str'),
'name': self._serialize.url("name", name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'BandwidthSchedule')
body_content_kwargs['content'] = body_content
request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('BandwidthSchedule', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataBoxEdge/dataBoxEdgeDevices/{deviceName}/bandwidthSchedules/{name}'} # type: ignore
async def begin_create_or_update(
self,
device_name: str,
name: str,
resource_group_name: str,
parameters: "_models.BandwidthSchedule",
**kwargs
) -> AsyncLROPoller["_models.BandwidthSchedule"]:
"""Creates or updates a bandwidth schedule.
:param device_name: The device name.
:type device_name: str
:param name: The bandwidth schedule name which needs to be added/updated.
:type name: str
:param resource_group_name: The resource group name.
:type resource_group_name: str
:param parameters: The bandwidth schedule to be added or updated.
:type parameters: ~azure.mgmt.databoxedge.models.BandwidthSchedule
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either BandwidthSchedule or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.databoxedge.models.BandwidthSchedule]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.BandwidthSchedule"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._create_or_update_initial(
device_name=device_name,
name=name,
resource_group_name=resource_group_name,
parameters=parameters,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('BandwidthSchedule', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'deviceName': self._serialize.url("device_name", device_name, 'str'),
'name': self._serialize.url("name", name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
}
if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataBoxEdge/dataBoxEdgeDevices/{deviceName}/bandwidthSchedules/{name}'} # type: ignore
async def _delete_initial(
self,
device_name: str,
name: str,
resource_group_name: str,
**kwargs
) -> None:
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-03-01"
accept = "application/json"
# Construct URL
url = self._delete_initial.metadata['url'] # type: ignore
path_format_arguments = {
'deviceName': self._serialize.url("device_name", device_name, 'str'),
'name': self._serialize.url("name", name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.delete(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataBoxEdge/dataBoxEdgeDevices/{deviceName}/bandwidthSchedules/{name}'} # type: ignore
async def begin_delete(
self,
device_name: str,
name: str,
resource_group_name: str,
**kwargs
) -> AsyncLROPoller[None]:
"""Deletes the specified bandwidth schedule.
:param device_name: The device name.
:type device_name: str
:param name: The bandwidth schedule name.
:type name: str
:param resource_group_name: The resource group name.
:type resource_group_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[None]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._delete_initial(
device_name=device_name,
name=name,
resource_group_name=resource_group_name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
path_format_arguments = {
'deviceName': self._serialize.url("device_name", device_name, 'str'),
'name': self._serialize.url("name", name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
}
if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataBoxEdge/dataBoxEdgeDevices/{deviceName}/bandwidthSchedules/{name}'} # type: ignore
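The generated operations class above is normally reached through the package's management client rather than instantiated directly. The sketch below is an assumed usage pattern, not something stated in the file: the client name DataBoxEdgeManagementClient, the azure-identity credential, and all placeholder values in angle brackets are assumptions for illustration only.

import asyncio

from azure.identity.aio import DefaultAzureCredential
from azure.mgmt.databoxedge.v2019_03_01.aio import DataBoxEdgeManagementClient


async def main():
    credential = DefaultAzureCredential()
    # Assumed constructor shape: (credential, subscription_id).
    client = DataBoxEdgeManagementClient(credential, "<subscription-id>")
    async with client, credential:
        # Page through every bandwidth schedule on the device.
        async for schedule in client.bandwidth_schedules.list_by_data_box_edge_device(
                "<device-name>", "<resource-group>"):
            print(schedule.name)
        # Long-running delete: begin_delete() returns an AsyncLROPoller.
        poller = await client.bandwidth_schedules.begin_delete(
            "<device-name>", "<schedule-name>", "<resource-group>")
        await poller.result()


asyncio.run(main())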
| 48.692308
| 224
| 0.665087
|
bbae1d39c7591eeb0a2994eec8875c86034b20c2
| 3,983
|
py
|
Python
|
packages/meta/setup.py
|
lesperry/Metagenomics
|
a1d8b7d96b32ab83cebe513e889b6ef82f7c1dd6
|
[
"CC-BY-3.0"
] | null | null | null |
packages/meta/setup.py
|
lesperry/Metagenomics
|
a1d8b7d96b32ab83cebe513e889b6ef82f7c1dd6
|
[
"CC-BY-3.0"
] | 2
|
2020-08-19T18:14:59.000Z
|
2020-08-20T01:19:12.000Z
|
packages/meta/setup.py
|
lesperry/Metagenomics
|
a1d8b7d96b32ab83cebe513e889b6ef82f7c1dd6
|
[
"CC-BY-3.0"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import ast
import os
import re
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
SOURCE_DIR = "galaxy"
_version_re = re.compile(r'__version__\s+=\s+(.*)')
with open('%s/project_galaxy_meta.py' % SOURCE_DIR, 'rb') as f:
init_contents = f.read().decode('utf-8')
def get_var(var_name):
pattern = re.compile(r'%s\s+=\s+(.*)' % var_name)
match = pattern.search(init_contents).group(1)
return str(ast.literal_eval(match))
version = get_var("__version__")
PROJECT_NAME = get_var("PROJECT_NAME")
PROJECT_URL = get_var("PROJECT_URL")
PROJECT_AUTHOR = get_var("PROJECT_AUTHOR")
PROJECT_EMAIL = get_var("PROJECT_EMAIL")
PROJECT_DESCRIPTION = get_var("PROJECT_DESCRIPTION")
TEST_DIR = 'tests'
PACKAGES = []
ENTRY_POINTS = '''
[console_scripts]
'''
PACKAGE_DATA = {
# Be sure to update MANIFEST.in for source dist.
}
PACKAGE_DIR = {
SOURCE_DIR: SOURCE_DIR,
}
readme = open('README.rst').read()
history = open('HISTORY.rst').read().replace('.. :changelog:', '')
if os.path.exists("requirements.txt"):
requirements = open("requirements.txt").read().split("\n")
else:
# In tox, it will cover them anyway.
requirements = []
test_requirements = open("test-requirements.txt").read().split("\n")
# TODO:
# package_data={
# 'galaxy': [
# 'config/sample/*',
# 'exceptions/error_codes.json',
# 'datatypes/converters/*.xml',
# 'datatypes/display_applications/configs/*/*.xml',
# 'datatypes/set_metadata_tool.xml',
# 'jobs/runners/util/job_script/CLUSTER_SLOTS_STATEMENT.sh',
# 'jobs/runners/util/job_script/MEMORY_STATEMENT.sh',
# 'jobs/runners/util/job_script/DEFAULT_JOB_FILE_TEMPLATE.sh',
# 'tools/imp_exp/imp_history_from_archive.xml',
# 'tools/imp_exp/exp_history_to_archive.xml',
# 'tools/data_fetch.xml',
# 'model/migrate/migrate.cfg',
# 'dependencies/*.txt',
# 'util/docutils_template.txt',
# ],
# 'tool_shed': [
# 'galaxy_install/migrate/migrate.cfg',
# 'galaxy_install/migrate/scripts/*',
# 'scripts/bootstrap_tool_shed/parse_run_sh_args.sh',
# 'scripts/bootstrap_tool_shed/bootstrap_tool_shed.sh',
# 'scripts/bootstrap_tool_shed/user_info.xml',
# ],
# },
# package_dir={'': 'lib'},
# include_package_data=True,
setup(
name=PROJECT_NAME,
version=version,
description=PROJECT_DESCRIPTION,
long_description=readme + '\n\n' + history,
long_description_content_type='text/x-rst',
author=PROJECT_AUTHOR,
author_email=PROJECT_EMAIL,
url=PROJECT_URL,
packages=PACKAGES,
entry_points=ENTRY_POINTS,
package_data=PACKAGE_DATA,
package_dir=PACKAGE_DIR,
include_package_data=True,
install_requires=requirements,
extras_require={
'postgresql': ['psycopg2-binary'],
},
license="AFL",
zip_safe=False,
keywords='galaxy',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Intended Audience :: Science/Research',
'Environment :: Console',
'License :: OSI Approved :: Academic Free License (AFL)',
'Operating System :: POSIX',
'Topic :: Scientific/Engineering :: Bio-Informatics',
'Topic :: Software Development',
'Topic :: Software Development :: Code Generators',
'Topic :: Software Development :: Testing',
'Natural Language :: English',
"Programming Language :: Python :: 2",
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
],
test_suite=TEST_DIR,
tests_require=test_requirements
)
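# Illustrative note (not part of the original file): the optional PostgreSQL
# driver declared in extras_require above can be installed alongside the
# package with, for example, `pip install ".[postgresql]"`.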
| 31.117188
| 73
| 0.631936
|
e4263d213b40f27791b51277ed6a197f2e76b29b
| 1,158
|
py
|
Python
|
app/covidbed/repository/user.py
|
opencovid19-fr/coordination-dispo-lits
|
d252df23f570f6a83a5c6af36056ed5f2cf02fe0
|
[
"MIT"
] | 2
|
2020-04-22T08:43:00.000Z
|
2020-05-03T17:46:16.000Z
|
app/covidbed/repository/user.py
|
dbaelipro/coordination-dispo-lits
|
d252df23f570f6a83a5c6af36056ed5f2cf02fe0
|
[
"MIT"
] | 8
|
2020-04-07T16:36:51.000Z
|
2020-04-26T10:34:26.000Z
|
app/covidbed/repository/user.py
|
dbaelipro/coordination-dispo-lits
|
d252df23f570f6a83a5c6af36056ed5f2cf02fe0
|
[
"MIT"
] | 4
|
2020-04-18T22:53:33.000Z
|
2021-06-04T09:31:13.000Z
|
from covidbed.model import User, Organization, Platform, FinessEtablissement, Address, Company, OrganizationType, Region
from covidbed.repository.orga import create_organization
def find_users():
return [user.json for user in User.query]
def get_user_by_id(_id):
"""
:param _id:
:return:
"""
return User.query.filter_by(id=_id).first()
def create_user(params, organization=None, platform=None):
"""
:param params:
:return:
"""
assert isinstance(params, dict)
assert organization is None or isinstance(organization, dict)
assert platform is None or isinstance(platform, dict)
if organization:
org = create_organization(**organization)
elif platform:
org = Platform(**platform)
org.save()
else:
org = None
user = User(organization=org, **params)
user.save()
return user
def get_user_by_email(email):
return User.query.filter_by(email=email).first()
def get_or_create_region(**kwargs):
reg = Region.query.filter(Region.code == kwargs["code"]).first()
if not reg:
reg = Region(**kwargs)
reg.save()
return reg
| 23.16
| 120
| 0.668394
|
fa8d8e6d91f50edca9f0574236e634ed27cbdb12
| 30,454
|
py
|
Python
|
synapse/rest/admin/users.py
|
jdreichmann/synapse
|
6fde6aa9c02d35e0a908437ea49b275df9b58427
|
[
"Apache-2.0"
] | 1
|
2020-11-04T14:12:27.000Z
|
2020-11-04T14:12:27.000Z
|
synapse/rest/admin/users.py
|
jdreichmann/synapse
|
6fde6aa9c02d35e0a908437ea49b275df9b58427
|
[
"Apache-2.0"
] | null | null | null |
synapse/rest/admin/users.py
|
jdreichmann/synapse
|
6fde6aa9c02d35e0a908437ea49b275df9b58427
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
# Copyright 2019 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import hashlib
import hmac
import logging
from http import HTTPStatus
from typing import TYPE_CHECKING, Tuple
from synapse.api.constants import UserTypes
from synapse.api.errors import Codes, NotFoundError, SynapseError
from synapse.http.servlet import (
RestServlet,
assert_params_in_dict,
parse_boolean,
parse_integer,
parse_json_object_from_request,
parse_string,
)
from synapse.http.site import SynapseRequest
from synapse.rest.admin._base import (
admin_patterns,
assert_requester_is_admin,
assert_user_is_admin,
historical_admin_path_patterns,
)
from synapse.types import JsonDict, UserID
if TYPE_CHECKING:
from synapse.server import HomeServer
logger = logging.getLogger(__name__)
_GET_PUSHERS_ALLOWED_KEYS = {
"app_display_name",
"app_id",
"data",
"device_display_name",
"kind",
"lang",
"profile_tag",
"pushkey",
}
class UsersRestServlet(RestServlet):
PATTERNS = historical_admin_path_patterns("/users/(?P<user_id>[^/]*)$")
def __init__(self, hs):
self.hs = hs
self.store = hs.get_datastore()
self.auth = hs.get_auth()
self.admin_handler = hs.get_admin_handler()
async def on_GET(self, request, user_id):
target_user = UserID.from_string(user_id)
await assert_requester_is_admin(self.auth, request)
if not self.hs.is_mine(target_user):
raise SynapseError(400, "Can only users a local user")
ret = await self.store.get_users()
return 200, ret
class UsersRestServletV2(RestServlet):
PATTERNS = admin_patterns("/users$", "v2")
"""Get request to list all local users.
This needs user to have administrator access in Synapse.
GET /_synapse/admin/v2/users?from=0&limit=10&guests=false
returns:
200 OK with list of users if success otherwise an error.
The parameters `from` and `limit` are required only for pagination.
By default, a `limit` of 100 is used.
The parameter `user_id` can be used to filter by user id.
The parameter `name` can be used to filter by user id or display name.
The parameter `guests` can be used to exclude guest users.
The parameter `deactivated` can be used to include deactivated users.
"""
def __init__(self, hs):
self.hs = hs
self.store = hs.get_datastore()
self.auth = hs.get_auth()
self.admin_handler = hs.get_admin_handler()
async def on_GET(self, request):
await assert_requester_is_admin(self.auth, request)
start = parse_integer(request, "from", default=0)
limit = parse_integer(request, "limit", default=100)
user_id = parse_string(request, "user_id", default=None)
name = parse_string(request, "name", default=None)
guests = parse_boolean(request, "guests", default=True)
deactivated = parse_boolean(request, "deactivated", default=False)
users, total = await self.store.get_users_paginate(
start, limit, user_id, name, guests, deactivated
)
ret = {"users": users, "total": total}
if len(users) >= limit:
ret["next_token"] = str(start + len(users))
return 200, ret
class UserRestServletV2(RestServlet):
PATTERNS = admin_patterns("/users/(?P<user_id>[^/]+)$", "v2")
"""Get request to list user details.
This needs user to have administrator access in Synapse.
GET /_synapse/admin/v2/users/<user_id>
returns:
200 OK with user details if success otherwise an error.
Put request to allow an administrator to add or modify a user.
This needs user to have administrator access in Synapse.
We use PUT instead of POST since we already know the id of the user
object to create. POST could be used to create guests.
PUT /_synapse/admin/v2/users/<user_id>
{
"password": "secret",
"displayname": "User"
}
returns:
201 OK with new user object if user was created or
200 OK with modified user object if user was modified
otherwise an error.
"""
def __init__(self, hs):
self.hs = hs
self.auth = hs.get_auth()
self.admin_handler = hs.get_admin_handler()
self.store = hs.get_datastore()
self.auth_handler = hs.get_auth_handler()
self.profile_handler = hs.get_profile_handler()
self.set_password_handler = hs.get_set_password_handler()
self.deactivate_account_handler = hs.get_deactivate_account_handler()
self.registration_handler = hs.get_registration_handler()
self.pusher_pool = hs.get_pusherpool()
async def on_GET(self, request, user_id):
await assert_requester_is_admin(self.auth, request)
target_user = UserID.from_string(user_id)
if not self.hs.is_mine(target_user):
raise SynapseError(400, "Can only lookup local users")
ret = await self.admin_handler.get_user(target_user)
if not ret:
raise NotFoundError("User not found")
return 200, ret
async def on_PUT(self, request, user_id):
requester = await self.auth.get_user_by_req(request)
await assert_user_is_admin(self.auth, requester.user)
target_user = UserID.from_string(user_id)
body = parse_json_object_from_request(request)
if not self.hs.is_mine(target_user):
raise SynapseError(400, "This endpoint can only be used with local users")
user = await self.admin_handler.get_user(target_user)
user_id = target_user.to_string()
if user: # modify user
if "displayname" in body:
await self.profile_handler.set_displayname(
target_user, requester, body["displayname"], True
)
if "threepids" in body:
# check for required parameters for each threepid
for threepid in body["threepids"]:
assert_params_in_dict(threepid, ["medium", "address"])
# remove old threepids from user
threepids = await self.store.user_get_threepids(user_id)
for threepid in threepids:
try:
await self.auth_handler.delete_threepid(
user_id, threepid["medium"], threepid["address"], None
)
except Exception:
logger.exception("Failed to remove threepids")
raise SynapseError(500, "Failed to remove threepids")
# add new threepids to user
current_time = self.hs.get_clock().time_msec()
for threepid in body["threepids"]:
await self.auth_handler.add_threepid(
user_id, threepid["medium"], threepid["address"], current_time
)
if "avatar_url" in body and type(body["avatar_url"]) == str:
await self.profile_handler.set_avatar_url(
target_user, requester, body["avatar_url"], True
)
if "admin" in body:
set_admin_to = bool(body["admin"])
if set_admin_to != user["admin"]:
auth_user = requester.user
if target_user == auth_user and not set_admin_to:
raise SynapseError(400, "You may not demote yourself.")
await self.store.set_server_admin(target_user, set_admin_to)
if "password" in body:
if not isinstance(body["password"], str) or len(body["password"]) > 512:
raise SynapseError(400, "Invalid password")
else:
new_password = body["password"]
logout_devices = True
new_password_hash = await self.auth_handler.hash(new_password)
await self.set_password_handler.set_password(
target_user.to_string(),
new_password_hash,
logout_devices,
requester,
)
if "deactivated" in body:
deactivate = body["deactivated"]
if not isinstance(deactivate, bool):
raise SynapseError(
400, "'deactivated' parameter is not of type boolean"
)
if deactivate and not user["deactivated"]:
await self.deactivate_account_handler.deactivate_account(
target_user.to_string(), False
)
elif not deactivate and user["deactivated"]:
if "password" not in body:
raise SynapseError(
400, "Must provide a password to re-activate an account."
)
await self.deactivate_account_handler.activate_account(
target_user.to_string()
)
user = await self.admin_handler.get_user(target_user)
return 200, user
else: # create user
password = body.get("password")
password_hash = None
if password is not None:
if not isinstance(password, str) or len(password) > 512:
raise SynapseError(400, "Invalid password")
password_hash = await self.auth_handler.hash(password)
admin = body.get("admin", None)
user_type = body.get("user_type", None)
displayname = body.get("displayname", None)
if user_type is not None and user_type not in UserTypes.ALL_USER_TYPES:
raise SynapseError(400, "Invalid user type")
user_id = await self.registration_handler.register_user(
localpart=target_user.localpart,
password_hash=password_hash,
admin=bool(admin),
default_display_name=displayname,
user_type=user_type,
by_admin=True,
)
if "threepids" in body:
# check for required parameters for each threepid
for threepid in body["threepids"]:
assert_params_in_dict(threepid, ["medium", "address"])
current_time = self.hs.get_clock().time_msec()
for threepid in body["threepids"]:
await self.auth_handler.add_threepid(
user_id, threepid["medium"], threepid["address"], current_time
)
if (
self.hs.config.email_enable_notifs
and self.hs.config.email_notif_for_new_users
):
await self.pusher_pool.add_pusher(
user_id=user_id,
access_token=None,
kind="email",
app_id="m.email",
app_display_name="Email Notifications",
device_display_name=threepid["address"],
pushkey=threepid["address"],
lang=None, # We don't know a user's language here
data={},
)
if "avatar_url" in body and type(body["avatar_url"]) == str:
await self.profile_handler.set_avatar_url(
user_id, requester, body["avatar_url"], True
)
ret = await self.admin_handler.get_user(target_user)
return 201, ret
class UserRegisterServlet(RestServlet):
"""
Attributes:
NONCE_TIMEOUT (int): Seconds until a generated nonce won't be accepted
nonces (dict[str, int]): The nonces that we will accept. A dict of
nonce to the time it was generated, in int seconds.
"""
PATTERNS = historical_admin_path_patterns("/register")
NONCE_TIMEOUT = 60
def __init__(self, hs):
self.auth_handler = hs.get_auth_handler()
self.reactor = hs.get_reactor()
self.nonces = {}
self.hs = hs
def _clear_old_nonces(self):
"""
Clear out old nonces that are older than NONCE_TIMEOUT.
"""
now = int(self.reactor.seconds())
for k, v in list(self.nonces.items()):
if now - v > self.NONCE_TIMEOUT:
del self.nonces[k]
def on_GET(self, request):
"""
Generate a new nonce.
"""
self._clear_old_nonces()
nonce = self.hs.get_secrets().token_hex(64)
self.nonces[nonce] = int(self.reactor.seconds())
return 200, {"nonce": nonce}
async def on_POST(self, request):
self._clear_old_nonces()
if not self.hs.config.registration_shared_secret:
raise SynapseError(400, "Shared secret registration is not enabled")
body = parse_json_object_from_request(request)
if "nonce" not in body:
raise SynapseError(400, "nonce must be specified", errcode=Codes.BAD_JSON)
nonce = body["nonce"]
if nonce not in self.nonces:
raise SynapseError(400, "unrecognised nonce")
# Delete the nonce, so it can't be reused, even if it's invalid
del self.nonces[nonce]
if "username" not in body:
raise SynapseError(
400, "username must be specified", errcode=Codes.BAD_JSON
)
else:
if not isinstance(body["username"], str) or len(body["username"]) > 512:
raise SynapseError(400, "Invalid username")
username = body["username"].encode("utf-8")
if b"\x00" in username:
raise SynapseError(400, "Invalid username")
if "password" not in body:
raise SynapseError(
400, "password must be specified", errcode=Codes.BAD_JSON
)
else:
password = body["password"]
if not isinstance(password, str) or len(password) > 512:
raise SynapseError(400, "Invalid password")
password_bytes = password.encode("utf-8")
if b"\x00" in password_bytes:
raise SynapseError(400, "Invalid password")
password_hash = await self.auth_handler.hash(password)
admin = body.get("admin", None)
user_type = body.get("user_type", None)
displayname = body.get("displayname", None)
if user_type is not None and user_type not in UserTypes.ALL_USER_TYPES:
raise SynapseError(400, "Invalid user type")
got_mac = body["mac"]
want_mac_builder = hmac.new(
key=self.hs.config.registration_shared_secret.encode(),
digestmod=hashlib.sha1,
)
want_mac_builder.update(nonce.encode("utf8"))
want_mac_builder.update(b"\x00")
want_mac_builder.update(username)
want_mac_builder.update(b"\x00")
want_mac_builder.update(password_bytes)
want_mac_builder.update(b"\x00")
want_mac_builder.update(b"admin" if admin else b"notadmin")
if user_type:
want_mac_builder.update(b"\x00")
want_mac_builder.update(user_type.encode("utf8"))
want_mac = want_mac_builder.hexdigest()
if not hmac.compare_digest(want_mac.encode("ascii"), got_mac.encode("ascii")):
raise SynapseError(403, "HMAC incorrect")
# Reuse the parts of RegisterRestServlet to reduce code duplication
from synapse.rest.client.v2_alpha.register import RegisterRestServlet
register = RegisterRestServlet(self.hs)
user_id = await register.registration_handler.register_user(
localpart=body["username"].lower(),
password_hash=password_hash,
admin=bool(admin),
user_type=user_type,
default_display_name=displayname,
by_admin=True,
)
result = await register._create_registration_details(user_id, body)
return 200, result
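# --- Illustrative helper (not part of Synapse): client-side mirror of the MAC
# check in UserRegisterServlet.on_POST above. The request body's "mac" field
# must be HMAC-SHA1, keyed with the registration shared secret, over the nonce,
# username, password, "admin"/"notadmin" and optional user type, each separated
# by a NUL byte.
def _example_registration_mac(
    shared_secret, nonce, username, password, admin=False, user_type=None
):
    mac = hmac.new(key=shared_secret.encode("utf8"), digestmod=hashlib.sha1)
    mac.update(nonce.encode("utf8"))
    mac.update(b"\x00")
    mac.update(username.encode("utf8"))
    mac.update(b"\x00")
    mac.update(password.encode("utf8"))
    mac.update(b"\x00")
    mac.update(b"admin" if admin else b"notadmin")
    if user_type:
        mac.update(b"\x00")
        mac.update(user_type.encode("utf8"))
    return mac.hexdigest()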
class WhoisRestServlet(RestServlet):
PATTERNS = historical_admin_path_patterns("/whois/(?P<user_id>[^/]*)")
def __init__(self, hs):
self.hs = hs
self.auth = hs.get_auth()
self.admin_handler = hs.get_admin_handler()
async def on_GET(self, request, user_id):
target_user = UserID.from_string(user_id)
requester = await self.auth.get_user_by_req(request)
auth_user = requester.user
if target_user != auth_user:
await assert_user_is_admin(self.auth, auth_user)
if not self.hs.is_mine(target_user):
raise SynapseError(400, "Can only whois a local user")
ret = await self.admin_handler.get_whois(target_user)
return 200, ret
class DeactivateAccountRestServlet(RestServlet):
PATTERNS = historical_admin_path_patterns("/deactivate/(?P<target_user_id>[^/]*)")
def __init__(self, hs):
self._deactivate_account_handler = hs.get_deactivate_account_handler()
self.auth = hs.get_auth()
async def on_POST(self, request, target_user_id):
await assert_requester_is_admin(self.auth, request)
body = parse_json_object_from_request(request, allow_empty_body=True)
erase = body.get("erase", False)
if not isinstance(erase, bool):
raise SynapseError(
HTTPStatus.BAD_REQUEST,
"Param 'erase' must be a boolean, if given",
Codes.BAD_JSON,
)
UserID.from_string(target_user_id)
result = await self._deactivate_account_handler.deactivate_account(
target_user_id, erase
)
if result:
id_server_unbind_result = "success"
else:
id_server_unbind_result = "no-support"
return 200, {"id_server_unbind_result": id_server_unbind_result}
class AccountValidityRenewServlet(RestServlet):
PATTERNS = historical_admin_path_patterns("/account_validity/validity$")
def __init__(self, hs):
"""
Args:
hs (synapse.server.HomeServer): server
"""
self.hs = hs
self.account_activity_handler = hs.get_account_validity_handler()
self.auth = hs.get_auth()
async def on_POST(self, request):
await assert_requester_is_admin(self.auth, request)
body = parse_json_object_from_request(request)
if "user_id" not in body:
raise SynapseError(400, "Missing property 'user_id' in the request body")
expiration_ts = await self.account_activity_handler.renew_account_for_user(
body["user_id"],
body.get("expiration_ts"),
not body.get("enable_renewal_emails", True),
)
res = {"expiration_ts": expiration_ts}
return 200, res
class ResetPasswordRestServlet(RestServlet):
"""Post request to allow an administrator reset password for a user.
This needs user to have administrator access in Synapse.
Example:
http://localhost:8008/_synapse/admin/v1/reset_password/
@user:to_reset_password?access_token=admin_access_token
JsonBodyToSend:
{
"new_password": "secret"
}
Returns:
200 OK with empty object if success otherwise an error.
"""
PATTERNS = historical_admin_path_patterns(
"/reset_password/(?P<target_user_id>[^/]*)"
)
def __init__(self, hs):
self.store = hs.get_datastore()
self.hs = hs
self.auth = hs.get_auth()
self.auth_handler = hs.get_auth_handler()
self._set_password_handler = hs.get_set_password_handler()
async def on_POST(self, request, target_user_id):
"""Post request to allow an administrator reset password for a user.
This needs user to have administrator access in Synapse.
"""
requester = await self.auth.get_user_by_req(request)
await assert_user_is_admin(self.auth, requester.user)
UserID.from_string(target_user_id)
params = parse_json_object_from_request(request)
assert_params_in_dict(params, ["new_password"])
new_password = params["new_password"]
logout_devices = params.get("logout_devices", True)
new_password_hash = await self.auth_handler.hash(new_password)
await self._set_password_handler.set_password(
target_user_id, new_password_hash, logout_devices, requester
)
return 200, {}
class SearchUsersRestServlet(RestServlet):
"""Get request to search user table for specific users according to
search term.
This needs user to have administrator access in Synapse.
Example:
http://localhost:8008/_synapse/admin/v1/search_users/
@admin:user?access_token=admin_access_token&term=alice
Returns:
200 OK with json object {list[dict[str, Any]], count} or empty object.
"""
PATTERNS = historical_admin_path_patterns("/search_users/(?P<target_user_id>[^/]*)")
def __init__(self, hs):
self.hs = hs
self.store = hs.get_datastore()
self.auth = hs.get_auth()
async def on_GET(self, request, target_user_id):
"""Get request to search user table for specific users according to
search term.
        This needs the user to have administrator access in Synapse.
"""
await assert_requester_is_admin(self.auth, request)
target_user = UserID.from_string(target_user_id)
# To allow all users to get the users list
# if not is_admin and target_user != auth_user:
# raise AuthError(403, "You are not a server admin")
if not self.hs.is_mine(target_user):
raise SynapseError(400, "Can only users a local user")
term = parse_string(request, "term", required=True)
logger.info("term: %s ", term)
ret = await self.store.search_users(term)
return 200, ret
class UserAdminServlet(RestServlet):
"""
Get or set whether or not a user is a server administrator.
Note that only local users can be server administrators, and that an
administrator may not demote themselves.
Only server administrators can use this API.
Examples:
* Get
GET /_synapse/admin/v1/users/@nonadmin:example.com/admin
response on success:
{
"admin": false
}
* Set
PUT /_synapse/admin/v1/users/@reivilibre:librepush.net/admin
request body:
{
"admin": true
}
response on success:
{}
"""
PATTERNS = admin_patterns("/users/(?P<user_id>[^/]*)/admin$")
def __init__(self, hs):
self.hs = hs
self.store = hs.get_datastore()
self.auth = hs.get_auth()
async def on_GET(self, request, user_id):
await assert_requester_is_admin(self.auth, request)
target_user = UserID.from_string(user_id)
if not self.hs.is_mine(target_user):
raise SynapseError(400, "Only local users can be admins of this homeserver")
is_admin = await self.store.is_server_admin(target_user)
return 200, {"admin": is_admin}
async def on_PUT(self, request, user_id):
requester = await self.auth.get_user_by_req(request)
await assert_user_is_admin(self.auth, requester.user)
auth_user = requester.user
target_user = UserID.from_string(user_id)
body = parse_json_object_from_request(request)
assert_params_in_dict(body, ["admin"])
if not self.hs.is_mine(target_user):
raise SynapseError(400, "Only local users can be admins of this homeserver")
set_admin_to = bool(body["admin"])
if target_user == auth_user and not set_admin_to:
raise SynapseError(400, "You may not demote yourself.")
await self.store.set_server_admin(target_user, set_admin_to)
return 200, {}
class UserMembershipRestServlet(RestServlet):
"""
    Get the room list of a user.
"""
PATTERNS = admin_patterns("/users/(?P<user_id>[^/]+)/joined_rooms$")
def __init__(self, hs):
self.is_mine = hs.is_mine
self.auth = hs.get_auth()
self.store = hs.get_datastore()
async def on_GET(self, request, user_id):
await assert_requester_is_admin(self.auth, request)
if not self.is_mine(UserID.from_string(user_id)):
raise SynapseError(400, "Can only lookup local users")
user = await self.store.get_user_by_id(user_id)
if user is None:
raise NotFoundError("Unknown user")
room_ids = await self.store.get_rooms_for_user(user_id)
ret = {"joined_rooms": list(room_ids), "total": len(room_ids)}
return 200, ret
class PushersRestServlet(RestServlet):
"""
Gets information about all pushers for a specific `user_id`.
Example:
http://localhost:8008/_synapse/admin/v1/users/
@user:server/pushers
Returns:
pushers: Dictionary containing pushers information.
        total: Number of pushers in the dictionary `pushers`.
"""
PATTERNS = admin_patterns("/users/(?P<user_id>[^/]*)/pushers$")
def __init__(self, hs):
self.is_mine = hs.is_mine
self.store = hs.get_datastore()
self.auth = hs.get_auth()
async def on_GET(
self, request: SynapseRequest, user_id: str
) -> Tuple[int, JsonDict]:
await assert_requester_is_admin(self.auth, request)
if not self.is_mine(UserID.from_string(user_id)):
raise SynapseError(400, "Can only lookup local users")
if not await self.store.get_user_by_id(user_id):
raise NotFoundError("User not found")
pushers = await self.store.get_pushers_by_user_id(user_id)
filtered_pushers = [
{k: v for k, v in p.items() if k in _GET_PUSHERS_ALLOWED_KEYS}
for p in pushers
]
return 200, {"pushers": filtered_pushers, "total": len(filtered_pushers)}
class UserMediaRestServlet(RestServlet):
"""
Gets information about all uploaded local media for a specific `user_id`.
Example:
http://localhost:8008/_synapse/admin/v1/users/
@user:server/media
Args:
The parameters `from` and `limit` are required for pagination.
By default, a `limit` of 100 is used.
Returns:
A list of media and an integer representing the total number of
        media that exist for this user
"""
PATTERNS = admin_patterns("/users/(?P<user_id>[^/]+)/media$")
def __init__(self, hs):
self.is_mine = hs.is_mine
self.auth = hs.get_auth()
self.store = hs.get_datastore()
async def on_GET(
self, request: SynapseRequest, user_id: str
) -> Tuple[int, JsonDict]:
await assert_requester_is_admin(self.auth, request)
if not self.is_mine(UserID.from_string(user_id)):
raise SynapseError(400, "Can only lookup local users")
user = await self.store.get_user_by_id(user_id)
if user is None:
raise NotFoundError("Unknown user")
start = parse_integer(request, "from", default=0)
limit = parse_integer(request, "limit", default=100)
if start < 0:
raise SynapseError(
400,
"Query parameter from must be a string representing a positive integer.",
errcode=Codes.INVALID_PARAM,
)
if limit < 0:
raise SynapseError(
400,
"Query parameter limit must be a string representing a positive integer.",
errcode=Codes.INVALID_PARAM,
)
media, total = await self.store.get_local_media_by_user_paginate(
start, limit, user_id
)
ret = {"media": media, "total": total}
if (start + limit) < total:
ret["next_token"] = start + len(media)
return 200, ret
class UserTokenRestServlet(RestServlet):
"""An admin API for logging in as a user.
Example:
POST /_synapse/admin/v1/users/@test:example.com/login
{}
200 OK
{
"access_token": "<some_token>"
}
"""
PATTERNS = admin_patterns("/users/(?P<user_id>[^/]*)/login$")
def __init__(self, hs: "HomeServer"):
self.hs = hs
self.store = hs.get_datastore()
self.auth = hs.get_auth()
self.auth_handler = hs.get_auth_handler()
async def on_POST(self, request, user_id):
requester = await self.auth.get_user_by_req(request)
await assert_user_is_admin(self.auth, requester.user)
auth_user = requester.user
if not self.hs.is_mine_id(user_id):
raise SynapseError(400, "Only local users can be logged in as")
body = parse_json_object_from_request(request, allow_empty_body=True)
valid_until_ms = body.get("valid_until_ms")
if valid_until_ms and not isinstance(valid_until_ms, int):
raise SynapseError(400, "'valid_until_ms' parameter must be an int")
if auth_user.to_string() == user_id:
raise SynapseError(400, "Cannot use admin API to login as self")
token = await self.auth_handler.get_access_token_for_user_id(
user_id=auth_user.to_string(),
device_id=None,
valid_until_ms=valid_until_ms,
puppets_user_id=user_id,
)
return 200, {"access_token": token}
| 34.489241
| 90
| 0.613187
|
e2864b9c1414f6e7c47f2b95b0830c792b025cc6
| 6,230
|
py
|
Python
|
faker/providers/currency/es_ES/__init__.py
|
jeffwright13/faker
|
9192d5143d5f1b832cc0f44b3f7ee89ca28c975a
|
[
"MIT"
] | 3
|
2019-03-06T03:02:25.000Z
|
2021-11-26T07:30:43.000Z
|
faker/providers/currency/es_ES/__init__.py
|
Haytam222/faker
|
9929afc9c9fd4bd75f2ad4b7eb9c132e67e66ce8
|
[
"MIT"
] | 2
|
2021-05-12T06:25:57.000Z
|
2022-03-01T04:16:03.000Z
|
env/lib/python3.9/site-packages/faker/providers/currency/es_ES/__init__.py
|
simotwo/AbileneParadox-ddd
|
c85961efb37aba43c0d99ed1c36d083507e2b2d3
|
[
"MIT"
] | 2
|
2021-09-22T12:47:59.000Z
|
2021-12-10T08:18:23.000Z
|
from .. import Provider as CurrencyProvider
class Provider(CurrencyProvider):
# Format: (code, name)
currencies = (
("AED", "Dírham de los Emiratos Árabes Unidos"),
("AFN", "Afghaní"),
("ALL", "Lek albanés"),
("AMD", "Dram armenio"),
("ANG", "Florín de las Antillas Holandesas"),
("AOA", "Kwanza angoleño"),
("ARS", "Peso argentino"),
("AUD", "Dólar australiano"),
("AWG", "Florín arubeño"),
("AZN", "Manat azerbaiyano"),
("BAM", "Marco bosnioherzegovino"),
("BBD", "Dólar barbadense"),
("BDT", "Taka bangladesí"),
("BGN", "Lev búlgaro"),
("BHD", "Dinar bahreiní"),
("BIF", "Franco burundés"),
("BMD", "Dólar de Bermudas"),
("BND", "Dólar bruneano"),
("BOB", "Boliviano"),
("BRL", "Real brasileño"),
("BSD", "Dólar bahameño"),
("BTN", "Ngultrum butanés"),
("BWP", "Pula de Botswana"),
("BYR", "Rublio bielurruso"),
("BZD", "Dólar beliceño"),
("CAD", "Dólar canadiense"),
("CDF", "Franco congolés"),
("CHF", "Franco suizo"),
("CLP", "Peso chileno"),
("CNY", "Yuan"),
("COP", "Peso colombiano"),
("CRC", "Colón costarricense"),
("CUC", "Peso cubano convertible"),
("CUP", "Peso subano"),
("CVE", "Escudo de Cabo Verde"),
("CZK", "Corona checa"),
("DJF", "Franco yibutiano"),
("DKK", "Corona danesa"),
("DOP", "Peso dominicano"),
("DZD", "Dinar argelino"),
("EGP", "Libra egipcia"),
("ERN", "Nafka"),
("ETB", "Bir de Etiopía"),
("EUR", "Euro"),
("FJD", "Dólar fiyiano"),
("FKP", "Libra de las islas Falkland"),
("GBP", "Libra esterlina"),
("GEL", "Larí georgiano"),
("GGP", "Libra de Guernsey"),
("GHS", "Cedi"),
("GIP", "Libra de Gibraltar"),
("GMD", "Dalasi"),
("GNF", "Franco guineano"),
("GTQ", "Quetzal guatemalteco"),
("GYD", "Dólar guyanés"),
("HKD", "Dólar hongkonés"),
("HNL", "Lempira hondureño"),
("HRK", "Kuna croata"),
("HTG", "Gourde haitiano"),
("HUF", "Forinto húngaro"),
("IDR", "Rupia indonesia"),
("ILS", "Séquel israelí"),
("NIS", "Nuevo Séquel israelí"),
("IMP", "Libra manesa"),
("INR", "Rupia india"),
("IQD", "Dinar iraquí"),
("IRR", "Rial iraní"),
("ISK", "Corona islandesa"),
("JEP", "Libra de Jersey"),
("JMD", "Dólar jamaicano"),
("JOD", "Dinar jordano"),
("JPY", "Yen japonés"),
("KES", "Chelín keniano"),
("KGS", "Som kirguís"),
("KHR", "Riel camboyano"),
("KMF", "Franco comorense"),
("KPW", "Won norcoreano"),
("KRW", "Krahn Occidental"),
("KWD", "Dinar kuwaití"),
("KYD", "Dólar de las islas Cayman"),
("KZT", "Tenge kazako"),
("LAK", "Kip laosiano"),
("LBP", "Libra libanesa"),
("LKR", "Rupia esrilanquesa"),
("LRD", "Dólar liberiano"),
("LSL", "Loti lesothense"),
("LTL", "Litas lituana"),
("LYD", "Dinar libio"),
("MAD", "Dirham marroquí"),
("MDL", "Leu moldavo"),
("MGA", "Ariary malgache"),
("MKD", "Denar normacedonio"),
("MMK", "Kyat birmano"),
("MNT", "Tugrik mongol"),
("MOP", "Pataca macaense"),
("MRO", "Ouguiya mauritano"),
("MUR", "Rupia mauritana"),
("MVR", "Rupia de Maldivas"),
("MWK", "Kwacha malauí"),
("MXN", "Peso mexicano"),
("MYR", "Ringgit"),
("MZN", "Metical mozambiqueño"),
("NAD", "Dólar namibio"),
("NGN", "Naira nigeriano"),
("NIO", "Córdoba nicaragüense"),
("NOK", "Corona noruega"),
("NPR", "Rupia nepalí"),
("NZD", "Dólar neozelandés"),
("OMR", "Rial omaní"),
("PAB", "Balboa panameño"),
("PEN", "Sol peruano"),
("PGK", "Kina"),
("PHP", "Peso filipino"),
("PKR", "Rupia pakistaní"),
("PLN", "Złoty polaco"),
("PYG", "Guaraní paraguayo"),
("QAR", "Riyal catarí"),
("RON", "Leu rumano"),
("RSD", "Dinar serbio"),
("RUB", "Rublo ruso"),
("RWF", "Franco ruandés"),
("SAR", "Riyal saudí"),
("SBD", "Dólar de las islas Solomon"),
("SCR", "Rupia seychellense"),
("SDG", "Libra sudanesa"),
("SEK", "Corona sueca"),
("SGD", "Dólar de Singapur"),
("SHP", "Libra de Santa Elena"),
("SLL", "Leona"),
("SOS", "Chelín somalí"),
("SPL", "Luigino"),
("SRD", "Dólar surinamés"),
("STD", "Dobra santotomense"),
("SVC", "Colón salvadoreño"),
("SYP", "Libra siria"),
("SZL", "Lilangeni"),
("THB", "Baht tailandés"),
("TJS", "Somoni tayiko"),
("TMT", "Manat turcomano"),
("TND", "Dinar tunecino"),
("TOP", "Pa'anga tongano"),
("TRY", "Lira turca"),
("TTD", "Dólar de Trinidad and Tobago"),
("TVD", "Dólar tuvaluano"),
("TWD", "Nuevo dólar taiwanés"),
("TZS", "Chelín tanzano"),
("UAH", "Grivna ucraniano"),
("UGX", "Chelín ugandés"),
("USD", "Dólar de Estados Unidos"),
("UYU", "Peso uruguayo"),
("UZS", "Soʻm Uzbekistani"),
("VEF", "Bolívar venezolano"),
("VND", "Đồng vietnamita"),
("VUV", "Vanuatu vatu"),
("WST", "Tālā samoano"),
("XAF", "Franco centro africano"),
("XCD", "Dólar del Caribe Oriental"),
("XDR", "Derechos especiales de giro"),
("XOF", "Franco de África occidental"),
("XPF", "Franco CFP"),
("YER", "Rial yemení"),
("ZAR", "Rand sudafricano"),
("ZMW", "Kwacha zambiano"),
("ZWD", "Dólar zimbabuense"),
)
price_formats = ["#,##", "%#,##", "%##,##", "%.###,##", "%#.###,##"]
def pricetag(self):
return (
self.numerify(self.random_element(self.price_formats))
+ "\N{no-break space}\N{euro sign}"
)
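# --- Illustrative usage sketch (run from application code, not this module) ---
# numerify() replaces each "#" in a price format with a random digit and each
# "%" with a non-zero digit, so pricetag() yields strings like "5.678,90 €"
# (the exact digits vary):
#
#     from faker import Faker
#     fake = Faker("es_ES")
#     print(fake.pricetag())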
| 34.611111
| 72
| 0.46565
|
672d594e627779008d915363c19133bfdd508ad9
| 251
|
py
|
Python
|
learningpythonthehardway/ex15.py
|
stephaneAG/Python_tests
|
dc0a8819b4f49f50f17b3ffcf009c082535e1dbe
|
[
"MIT"
] | null | null | null |
learningpythonthehardway/ex15.py
|
stephaneAG/Python_tests
|
dc0a8819b4f49f50f17b3ffcf009c082535e1dbe
|
[
"MIT"
] | null | null | null |
learningpythonthehardway/ex15.py
|
stephaneAG/Python_tests
|
dc0a8819b4f49f50f17b3ffcf009c082535e1dbe
|
[
"MIT"
] | null | null | null |
# to run : ' python ex15.py ex15_sample.txt '
from sys import argv
script, filename = argv
txt = open(filename) # creates a file object
print "Here, the content of the file %r:" % filename
print txt.read() # read from the file object
txt.close()
| 19.307692
| 52
| 0.705179
|
5923d6372f049b3f7bb5eeef4b9b305b5b332995
| 790
|
py
|
Python
|
rootCA.py
|
MNedashkivskyi/text-chat-system
|
3363df44afde24f1b0f1b5fd2f50de5057fbda1d
|
[
"MIT"
] | null | null | null |
rootCA.py
|
MNedashkivskyi/text-chat-system
|
3363df44afde24f1b0f1b5fd2f50de5057fbda1d
|
[
"MIT"
] | null | null | null |
rootCA.py
|
MNedashkivskyi/text-chat-system
|
3363df44afde24f1b0f1b5fd2f50de5057fbda1d
|
[
"MIT"
] | null | null | null |
from OpenSSL import crypto
import os
class RootCA:
def __init__(self, ca_path, crl_path):
self.store = crypto.X509Store()
self.store.set_flags(crypto.X509StoreFlags.CRL_CHECK)
if os.path.exists(ca_path):
self.store.add_cert(crypto.load_certificate(crypto.FILETYPE_PEM, open(ca_path, "rb").read()))
if os.path.exists(crl_path):
self.store.add_crl(crypto.load_crl(crypto.FILETYPE_PEM, open(crl_path, "rb").read()))
def verify_cert(self, cert):
cert = crypto.load_certificate(crypto.FILETYPE_ASN1, cert)
context = crypto.X509StoreContext(self.store, cert)
try:
context.verify_certificate()
return True
except crypto.X509StoreContextError:
return False
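# --- Illustrative usage sketch (file names below are hypothetical) ---
# Build the trust store from a PEM CA certificate and CRL, then verify a peer
# certificate supplied in DER form (FILETYPE_ASN1, as verify_cert expects).
if __name__ == "__main__":
    ca = RootCA("rootCA.pem", "rootCA.crl")
    with open("peer_cert.der", "rb") as f:
        print("certificate valid:", ca.verify_cert(f.read()))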
| 28.214286
| 105
| 0.659494
|
5b24e7ed2febbcfeb2a897ec9a929f926a2ebde3
| 6,927
|
py
|
Python
|
sarveshcli/packages/reminder.py
|
sonkarsr/Tutor
|
0a70bdccefa96c6d83653b563693585bd6009cff
|
[
"MIT"
] | null | null | null |
sarveshcli/packages/reminder.py
|
sonkarsr/Tutor
|
0a70bdccefa96c6d83653b563693585bd6009cff
|
[
"MIT"
] | null | null | null |
sarveshcli/packages/reminder.py
|
sonkarsr/Tutor
|
0a70bdccefa96c6d83653b563693585bd6009cff
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
from datetime import datetime as dt
from uuid import uuid4
from threading import Timer
from fileHandler import write_file, read_file, str2date
from utilities.lexicalSimilarity import score_sentence, compare_sentence
from utilities.textParser import parse_number, parse_date
from utilities.GeneralUtilities import (
error, info, MACOS, IS_MACOS, unsupported
)
if not IS_MACOS:
import notify2
def sort(data):
"""
Sort list of reminders by time (oldest first).
"""
return sorted(data, key=lambda k: (k['time']))
def find_reminder(string):
"""
Find reminder by name.
Search for the given name in the reminderList. A match is determined by similarity
between request and the available reminder names.
"""
nameList = [k['name'] for k in reminderList['items']]
if not len(nameList):
return (-1, [])
index, score, indexList = compare_sentence(nameList, string)
if score < 1.0 and not reminderList['items'][index]['hidden']:
return (index, indexList)
return (-1, [])
def showAlarm(notification, name):
info(name)
notification.show()
def showNotification(name, body):
"""
Show a notification immediately.
"""
notify2.Notification(name, body).show()
def addReminder(name, time, uuid, body='', urgency=0, hidden=True):
"""
Queue reminder.
    Show a notification at the specified time, with the given name as the title and an optional body
    for further information.
    The mandatory uuid is used to identify the reminder and remove it with removeReminder().
    If the reminder should show up in the list printed by 'remind print', hidden (default: True)
    should be set to False. In this case the reminder is requeued at startup. If reminders are
used e.g. with a todo list for due dates, hidden should probably be set to true so that the
list is not cluttered with automatically created data.
If the reminder needs a different priority, it can be set with urgency to critical (=2),
high (=1) or normal (=0, default).
"""
waitTime = time - dt.now()
n = notify2.Notification(name, body)
n.set_urgency(urgency)
timerList[uuid] = Timer(waitTime.total_seconds(), showAlarm, [n, name])
timerList[uuid].start()
newItem = {'name': name, 'time': time, 'hidden': hidden, 'uuid': uuid}
reminderList['items'].append(newItem)
reminderList['items'] = sort(reminderList['items'])
write_file("reminderlist.txt", reminderList)
def removeReminder(uuid):
"""
Remove and cancel previously added reminder identified by the given uuid.
"""
if uuid in timerList:
timerList[uuid].cancel()
timerList.pop(uuid)
for index, e in enumerate(reminderList['items']):
if uuid == e['uuid']:
reminderList['items'].remove(reminderList['items'][index])
break
write_file("reminderlist.txt", reminderList)
actions = {}
def addAction(function, trigger=[], minArgs=0):
"""
Add a new action to the list of all available actions.
:param function: Local function name that should be called when matched
:param trigger: List of trigger words or sentences
:param minArgs: Minimum number of arguments needed for given function
"""
actions[function] = {'trigger': trigger, 'minArgs': minArgs}
addAction("handlerAdd", ["add", "new", "create"], minArgs=1)
def handler_add(data):
skip, time = parse_date(data)
if skip:
addReminder(
name=" ".join(data.split()[skip:]), time=time, hidden=False, uuid=uuid4().hex)
addAction("handlerRemove", ["remove", "delete", "destroy"], minArgs=1)
def handler_remove(data):
skip, number = parse_number(data)
if skip:
index = number - 1
else:
index, index_list = find_reminder(data)
if 0 <= index < len(reminderList['items']):
info("Removed reminder: \"{0}\"".format(
reminderList['items'][index]['name']))
removeReminder(reminderList['items'][index]['uuid'])
else:
error("Could not find selected reminder")
addAction("handlerList", ["list", "print", "show"])
def handler_list(data):
count = 0
for index, en in enumerate(reminderList['items']):
if not en['hidden']:
print("<{0}> {2}: {1}".format(count + 1, en['time'], en['name']))
count += 1
if count == 0:
info("Reminder list is empty. Add a new entry with 'remind add <time> <name>'")
addAction("handlerClear", ["clear"])
def handler_clear(data):
reminderList['items'] = [k for k in reminderList['items'] if k['hidden']]
write_file("reminderlist.txt", reminderList)
@unsupported(platform=MACOS)
def reminder_handler(data):
"""
Handle the command string for reminders.
"""
indices = []
score = 100
action = 0
min_args = 0
# Select the best trigger match from the actions list
for key in actions:
found_match = False
for trigger in actions[key]['trigger']:
new_score, index_list = score_sentence(data, trigger, distance_penalty=0.5, additional_target_penalty=0,
word_match_penalty=0.5)
if found_match and len(index_list) > len(indices):
# A match for this action was already found.
# But this trigger matches more words.
indices = index_list
if new_score < score:
if not found_match:
indices = index_list
min_args = actions[key]['minArgs']
found_match = True
score = new_score
action = key
if not action:
return
data = data.split()
for j in sorted(indices, reverse=True):
del data[j]
if len(data) < min_args:
error("Not enough arguments for specified command {0}".format(action))
return
data = " ".join(data)
globals()[action](data)
@unsupported(platform=MACOS, silent=True)
def reminder_quit():
"""
This function has to be called when shutting down. It terminates all waiting threads.
"""
try:
for index, el in timerList.iteritems():
el.cancel()
except:
for index, el in timerList.items():
el.cancel()
if not IS_MACOS:
timerList = {}
reminderList = read_file("reminderlist.txt", {'items': []})
reminderList['items'] = sort(reminderList['items'])
reminderList['items'] = [
i for i in reminderList['items'] if not i['hidden']]
notify2.init("Jarvis")
for e in reminderList['items']:
e['time'] = str2date(e['time'])
waitTime = e['time'] - dt.now()
n = notify2.Notification(e['name'])
n.set_urgency(0)
timerList[e['uuid']] = Timer(
waitTime.total_seconds(), showAlarm, [n, e['name']])
timerList[e['uuid']].start()
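# --- Illustrative usage sketch (exact date phrasing depends on utilities.textParser) ---
# reminder_handler matches the trigger words registered above, strips them from
# the request and dispatches to the corresponding handler, e.g.:
#     reminder_handler("add tomorrow 10:00 dentist")   # queue a visible reminder
#     reminder_handler("list")                          # print pending reminders
#     reminder_handler("remove dentist")                # cancel it again by name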
| 31.343891
| 116
| 0.63072
|
11519e2067a7d34d10186815c4553f60764dbb33
| 3,384
|
py
|
Python
|
models/modules/custom2.py
|
JRC1995/CapsuleRoutingEncoders
|
a99c666f33a2ea98f60b944df19d699ee3f3b009
|
[
"MIT"
] | null | null | null |
models/modules/custom2.py
|
JRC1995/CapsuleRoutingEncoders
|
a99c666f33a2ea98f60b944df19d699ee3f3b009
|
[
"MIT"
] | null | null | null |
models/modules/custom2.py
|
JRC1995/CapsuleRoutingEncoders
|
a99c666f33a2ea98f60b944df19d699ee3f3b009
|
[
"MIT"
] | null | null | null |
import torch as T
import torch.nn as nn
import torch.optim as optim
import torch.nn.functional as F
import numpy as np
import math
class routing(nn.Module):
def __init__(self, D, n_in, n_out, in_dim, out_dim, device):
super(routing, self).__init__()
self.D = D
self.in_dim = in_dim
self.out_dim = out_dim
self.n_in = n_in
self.n_out = n_out
self.epsilon = 1.0e-8
self.pad_inf = T.tensor(float("-inf")).to(device)
self.zeros = T.tensor(0.0).to(device)
self.Wcap = nn.Parameter(T.randn(n_in, D, in_dim).float().to(device))
self.Bcap = nn.Parameter(T.zeros(n_in, in_dim).float().to(device))
nn.init.xavier_uniform_(self.Wcap.data)
self.bij = T.zeros(1, self.n_in, self.n_out, 1).float().to(device)
self.Wvotes = nn.Parameter(T.randn(n_in, n_out, in_dim, out_dim))
self.Bvotes = nn.Parameter(T.zeros(n_in, n_out, 1, out_dim))
self.score = nn.Linear(out_dim, 1)
self.alpha_score = nn.Linear(out_dim, 1)
self.fe1 = nn.Linear(self.out_dim, self.out_dim)
self.fe2 = nn.Linear(self.out_dim, self.out_dim)
self.fn1 = nn.Linear(self.out_dim, self.out_dim)
self.fn2 = nn.Linear(self.out_dim, self.out_dim)
nn.init.xavier_uniform_(self.Wvotes.data)
def forward(self, x, mask, iters=3):
N, n_in, D = x.size()
attention_mask = T.where(mask == float(0),
self.pad_inf,
self.zeros).view(N, n_in, 1, 1)
Wcap = self.Wcap.view(1, self.n_in, D, self.in_dim)
Bcap = self.Bcap.view(1, self.n_in, self.in_dim)
x = F.gelu(T.matmul(x, Wcap) + Bcap)
x = x.view(N, n_in, self.in_dim)
x = x.view(N, n_in, 1, self.in_dim)
if self.n_in == 1:
Wvotes = self.Wvotes.repeat(n_in, 1, 1, 1)
else:
Wvotes = self.Wvotes
votes_ij = T.einsum('ijdh,...icd->...ijch', Wvotes, x) + self.Bvotes
in_caps = n_in
out_caps = self.n_out
votes_ij = votes_ij.view(N, in_caps, out_caps, self.out_dim)
mask = mask.view(N, in_caps, 1, 1)
votes_ij = votes_ij*mask
fn_votes_ij = self.fn1(votes_ij)
fe_votes_ij = self.fe1(votes_ij)
bij = self.score(votes_ij)
aij = F.softmax(bij, dim=2)*mask
for i in range(iters):
if i != 0:
old_vj = vj.clone()
new_vj = F.gelu(T.sum(aij*votes_ij, dim=1))
alpha = T.sigmoid(self.alpha_score(new_vj))
vj = alpha*new_vj + (1-alpha)*old_vj
else:
vj = F.gelu(T.sum(aij*votes_ij, dim=1))
if i != iters-1:
fe_votes_ij_ = fe_votes_ij.view(N, in_caps, out_caps, self.out_dim)
fn_votes_ij_ = fn_votes_ij.view(N, in_caps, out_caps, self.out_dim)
vj_ = vj.view(N, 1, out_caps, self.out_dim)
E = T.sum(fe_votes_ij_*self.fe2(vj_), dim=-1, keepdim=True)
M = -T.sum(T.abs(fn_votes_ij_-self.fn2(vj_)), dim=-1, keepdim=True)
#E = E - T.mean(E, dim=2, keepdim=True)
#M = M - T.mean(M, dim=2, keepdim=True)
aij = T.tanh(E)*T.sigmoid(M*mask+attention_mask)
vj = vj.view(N, out_caps, self.out_dim)
return vj
| 33.176471
| 83
| 0.559397
|
d0e4e0ed849c84c76009830d68a383cffe6dc6a4
| 1,778
|
py
|
Python
|
allink_core/core_apps/allink_image_svg/cms_plugins.py
|
allink/allink-core
|
cf2727f26192d8dee89d76feb262bc4760f36f5e
|
[
"BSD-3-Clause"
] | 5
|
2017-03-13T08:49:45.000Z
|
2022-03-05T20:05:56.000Z
|
allink_core/core_apps/allink_image_svg/cms_plugins.py
|
allink/allink-core
|
cf2727f26192d8dee89d76feb262bc4760f36f5e
|
[
"BSD-3-Clause"
] | 28
|
2019-10-21T08:32:18.000Z
|
2022-02-10T13:16:38.000Z
|
allink_core/core_apps/allink_image_svg/cms_plugins.py
|
allink/allink-core
|
cf2727f26192d8dee89d76feb262bc4760f36f5e
|
[
"BSD-3-Clause"
] | null | null | null |
from django.utils.translation import ugettext_lazy as _
from django import forms
from cms.plugin_base import CMSPluginBase
from cms.plugin_pool import plugin_pool
from .models import AllinkImageSVGPlugin
from allink_core.core.forms.mixins import AllinkInternalLinkFieldMixin
from allink_core.core.forms.fields import SelectLinkField
class AllinkImageSVGPluginForm(AllinkInternalLinkFieldMixin, forms.ModelForm):
internal_link = SelectLinkField(label='Link Internal', required=False)
class Meta:
model = AllinkImageSVGPlugin
exclude = ('page', 'position', 'placeholder', 'language', 'plugin_type')
def __init__(self, *args, **kwargs):
super(AllinkImageSVGPluginForm, self).__init__(*args, **kwargs)
@plugin_pool.register_plugin
class CMSAllinkImageSVGPlugin(CMSPluginBase):
name = 'SVG Image'
module = 'Generic'
render_template = 'allink_image_svg/content.html'
model = AllinkImageSVGPlugin
form = AllinkImageSVGPluginForm
def get_fieldsets(self, request, obj=None):
fieldsets = [
(None, {
'fields': [
'picture',
('is_inline', 'is_fullwidth')
]
}),
('Link settings', {
'classes': ('collapse',),
'fields': (
'internal_link',
'link_url',
('link_mailto', 'link_phone'),
'link_anchor',
'link_file',
'link_target',
)
}),
]
return fieldsets
def render(self, context, instance, placeholder):
context['instance'] = instance
context['placeholder'] = placeholder
return context
| 30.655172
| 80
| 0.603487
|
c1a50e43e0af4eae0c1e9519cd9a68ad922d9f06
| 2,697
|
py
|
Python
|
scripts/dmhy.py
|
not94/AlfredWorkflow
|
b6dba720c9b1759112d0aa553f58beed0f9daad9
|
[
"MIT"
] | null | null | null |
scripts/dmhy.py
|
not94/AlfredWorkflow
|
b6dba720c9b1759112d0aa553f58beed0f9daad9
|
[
"MIT"
] | null | null | null |
scripts/dmhy.py
|
not94/AlfredWorkflow
|
b6dba720c9b1759112d0aa553f58beed0f9daad9
|
[
"MIT"
] | null | null | null |
# -*- coding:utf-8 -*-
import json
import sys
import os
from urllib.parse import quote
from lib import feedparser
class RSSSpider(object):
# rss url
BASE_URL = "https://www.dmhy.org/topics/rss/rss.xml"
SEARCH_URL = "https://www.dmhy.org/topics/rss/rss.xml?keyword={}"
TEAM_URL = "https://share.dmhy.org/topics/rss/team_id/{}/rss.xml"
CATEGORY_URL = "https://share.dmhy.org/topics/rss/sort_id/{}/rss.xml"
# command enum
COMMAND_KEYWORD = "keyword" # 关键字订阅
COMMAND_TEAM = "team" # 联盟订阅
COMMAND_CATEGORY = "category" # 分类订阅
# command url map
COMMAND_URL_MAP = {
COMMAND_KEYWORD: SEARCH_URL,
COMMAND_TEAM: TEAM_URL,
COMMAND_CATEGORY: CATEGORY_URL
}
def __init__(self, command, arg=None):
self.command = command
self.arg = arg
self._raw = None
def make_complete_url(self):
return self.COMMAND_URL_MAP.\
get(self.command, self.BASE_URL).format(quote(self.arg))
def init_raw_data(self):
if self._raw is None:
url = self.make_complete_url()
self._raw = feedparser.parse(url)
@property
def items(self):
self.init_raw_data()
return self._raw.entries
@staticmethod
def make_title(title):
"""去掉第一个[]"""
begin_marks = ["[", "【"]
end_marks = ["]", "】"]
begin_index = end_index = None
for i, s in enumerate(title):
if s in begin_marks:
begin_index = i
if s in end_marks:
end_index = i
break
if begin_index is not None and end_index is not None:
return title[:begin_index] + title[end_index + 1:]
return title
@staticmethod
def get_item_magnet(item):
for link in item.links:
if link.type == "application/x-bittorrent":
return link.href
return ''
def serialize(self):
return json.dumps(
{
"items": [
{
"title": self.make_title(item.title),
"subtitle": item.title,
"arg": item.link,
"variables": {
"link": item.link,
"magnet": self.get_item_magnet(item)
}
}
for item in self.items
]
},
ensure_ascii=False
)
if __name__ == '__main__':
command = os.environ.get("command", RSSSpider.COMMAND_KEYWORD)
keyword = u"".join(sys.argv[1:])
print(RSSSpider(command, keyword).serialize())
| 28.09375
| 73
| 0.534668
|
5a3b4f707e8b1d013f4b01a0434c903a08371b17
| 1,441
|
py
|
Python
|
recipe/build.py
|
dougalsutherland/cudatoolkit-dev-feedstock
|
4e15984e6801b52e740911f7724951b3fcf6153c
|
[
"BSD-3-Clause"
] | 1
|
2020-07-12T09:18:58.000Z
|
2020-07-12T09:18:58.000Z
|
recipe/build.py
|
dougalsutherland/cudatoolkit-dev-feedstock
|
4e15984e6801b52e740911f7724951b3fcf6153c
|
[
"BSD-3-Clause"
] | null | null | null |
recipe/build.py
|
dougalsutherland/cudatoolkit-dev-feedstock
|
4e15984e6801b52e740911f7724951b3fcf6153c
|
[
"BSD-3-Clause"
] | 1
|
2021-04-17T15:55:18.000Z
|
2021-04-17T15:55:18.000Z
|
from __future__ import print_function
import argparse
import json
import os
import shutil
import stat
from pathlib import Path
def copy_files(src, dst):
def set_chmod(file_name):
        # Add execute permission for others (a minimal chmod +x) within Python
st = os.stat(file_name)
os.chmod(file_name, st.st_mode | stat.S_IXOTH)
try:
if os.path.isfile(src):
shutil.copy(src, dst)
set_chmod(dst)
except FileExistsError:
pass
def _main(args):
prefix_dir_path = Path(os.environ["PREFIX"])
prefix_bin_dir_path = prefix_dir_path / "bin"
recipe_dir_path = Path(os.environ["RECIPE_DIR"])
scripts_dir_path = recipe_dir_path / "scripts"
shutil.copytree(scripts_dir_path, prefix_dir_path / "scripts")
# Copy cudatoolkit-dev-post-install.py to $PREFIX/bin
src = recipe_dir_path / "cudatoolkit-dev-post-install.py"
dst = prefix_bin_dir_path
copy_files(src, dst)
with open(prefix_bin_dir_path / "cudatoolkit-dev-extra-args.json", "w") as f:
f.write(json.dumps(args))
if __name__ == "__main__":
parser = argparse.ArgumentParser(description="Build script for cudatoolkit-dev")
parser.add_argument("release", action="store", type=str)
parser.add_argument("version_build", action="store", type=str)
parser.add_argument("driver_version", action="store", type=str)
results = parser.parse_args()
args = vars(results)
_main(args)
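# Illustrative invocation (argument values are hypothetical; conda-build
# normally provides PREFIX and RECIPE_DIR itself):
#   PREFIX=$CONDA_PREFIX RECIPE_DIR=$(pwd) python build.py 10.1 243 418.87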
| 28.82
| 84
| 0.691881
|
68a0598e12820642f5307fbaebedf80c431b834e
| 395
|
py
|
Python
|
compose/cli/signals.py
|
matthieudelaro/dockernut
|
998f614c6ad018873f3b3aee58841c62e1b160da
|
[
"Apache-2.0"
] | 8,120
|
2016-12-05T06:37:45.000Z
|
2022-03-21T14:45:20.000Z
|
compose/cli/signals.py
|
matthieudelaro/dockernut
|
998f614c6ad018873f3b3aee58841c62e1b160da
|
[
"Apache-2.0"
] | 213
|
2016-12-05T09:57:37.000Z
|
2018-04-05T18:55:14.000Z
|
compose/cli/signals.py
|
matthieudelaro/dockernut
|
998f614c6ad018873f3b3aee58841c62e1b160da
|
[
"Apache-2.0"
] | 1,140
|
2016-12-05T06:50:43.000Z
|
2022-03-23T08:28:32.000Z
|
from __future__ import absolute_import
from __future__ import unicode_literals
import signal
class ShutdownException(Exception):
pass
def shutdown(signal, frame):
raise ShutdownException()
def set_signal_handler(handler):
signal.signal(signal.SIGINT, handler)
signal.signal(signal.SIGTERM, handler)
def set_signal_handler_to_shutdown():
set_signal_handler(shutdown)
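# --- Illustrative usage sketch (not part of Compose itself) ---
# Install the shutdown handler, then turn SIGINT/SIGTERM into a
# ShutdownException the caller can catch for a clean exit.
if __name__ == '__main__':
    import time
    set_signal_handler_to_shutdown()
    try:
        while True:
            time.sleep(1)
    except ShutdownException:
        print('shutting down cleanly')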
| 17.954545
| 42
| 0.789873
|
96c5b6f8cc1f78f23160028a5ae65ef4cc26d8a8
| 2,832
|
py
|
Python
|
main.py
|
jangeador/esp8266
|
27f508a9bcea2912ff86e4cb384e5f1256d9fb2e
|
[
"MIT"
] | null | null | null |
main.py
|
jangeador/esp8266
|
27f508a9bcea2912ff86e4cb384e5f1256d9fb2e
|
[
"MIT"
] | null | null | null |
main.py
|
jangeador/esp8266
|
27f508a9bcea2912ff86e4cb384e5f1256d9fb2e
|
[
"MIT"
] | null | null | null |
import time
import dht
import machine
import network
from umqtt.simple import MQTTClient
from config import CONFIG
from esp8266_i2c_lcd import I2cLcd
client = None
sensor_pin = None
temperature = None
humidity = None
def topic_name(topic):
return b"/".join([b"sensors", CONFIG['client_id'], topic])
def setup_pins():
global sensor_pin
sensor_pin = machine.Pin(CONFIG['sensor_pin'])
def callback(topic, msg):
pass
def connect_and_subscribe():
global client
client = MQTTClient(CONFIG['client_id'], CONFIG['broker'],
user=CONFIG['mqtt_user'],
password=CONFIG['mqtt_password'])
client.set_last_will(topic_name(b'lwt'), 'our lwt')
client.set_callback(callback)
client.connect()
print("Connected to {}".format(CONFIG['broker']))
for topic in (b'config', b'control'):
t = topic_name(topic)
client.subscribe(t)
print("Subscribed to {}".format(t))
def write_to_lcd(msg=["Hello LCD"]):
from machine import I2C
scl = machine.Pin(2)
sda = machine.Pin(5)
i2c = I2C(scl=scl, sda=sda, freq=400000)
# find out the address of the device
devices = i2c.scan()
if len(devices) == 0:
print("no i2c device")
else:
print("i2c devices found: ", len(devices))
for device in devices:
print("Decimal address: ", device, " | Hexa address: ", hex(device))
lcd = I2cLcd(i2c, 0x3f, 4, 20)
lcd.putstr("\n".join(msg))
def check_and_report_temp():
import ujson as json
global temperature, humidity
d = dht.DHT22(sensor_pin)
try:
d.measure()
temperature = (d.temperature() * 9 / 5) + 32
humidity = d.humidity()
th = {'temperature': str(temperature), 'humidity': str(humidity)}
print(th)
client.publish(topic_name(b'DHT'), bytes(json.dumps(th), 'utf-8'))
except OSError as e:
print(e)
def setup():
connect_and_subscribe()
setup_pins()
def main_loop():
i = 0
current_screen = ''
while 1:
client.check_msg()
if i % 60 == 0:
check_and_report_temp()
sta_if = network.WLAN(network.STA_IF)
next_screen = [str(sta_if.ifconfig()[0]),
'{}\u00b0 F'.format(temperature),
'{} % humidity'.format(humidity),
str(CONFIG['client_id'])]
if current_screen != next_screen:
write_to_lcd(msg=next_screen)
current_screen = next_screen
i += 1
time.sleep(1)
def teardown():
try:
client.disconnect()
print("Disconnected.")
except Exception:
print("Couldn't disconnect cleanly.")
if __name__ == '__main__':
# load_config()
setup()
main_loop()
| 24.626087
| 80
| 0.590395
|
07caf64108ce7e3f5f6b6dc727d89f1ab209f0cf
| 13,271
|
py
|
Python
|
log_mito_act/model_306.py
|
LoLab-VU/Bayesian_Inference_of_Network_Dynamics
|
54a5ef7e868be34289836bbbb024a2963c0c9c86
|
[
"MIT"
] | null | null | null |
log_mito_act/model_306.py
|
LoLab-VU/Bayesian_Inference_of_Network_Dynamics
|
54a5ef7e868be34289836bbbb024a2963c0c9c86
|
[
"MIT"
] | null | null | null |
log_mito_act/model_306.py
|
LoLab-VU/Bayesian_Inference_of_Network_Dynamics
|
54a5ef7e868be34289836bbbb024a2963c0c9c86
|
[
"MIT"
] | null | null | null |
# exported from PySB model 'model'
from pysb import Model, Monomer, Parameter, Expression, Compartment, Rule, Observable, Initial, MatchOnce, Annotation, ANY, WILD
Model()
Monomer('Ligand', ['Receptor'])
Monomer('ParpU', ['C3A'])
Monomer('C8A', ['BidU'])
Monomer('BaxM', ['BidM', 'BaxA'])
Monomer('Apop', ['C3pro', 'Xiap'])
Monomer('Fadd', ['Receptor', 'C8pro'])
Monomer('ParpC')
Monomer('Xiap', ['Apop', 'C3A'])
Monomer('C9')
Monomer('C3ub')
Monomer('C8pro', ['Fadd'])
Monomer('C3pro', ['Apop'])
Monomer('CytoCM', ['BaxA'])
Monomer('CytoCC')
Monomer('BaxA', ['BaxM', 'BaxA_1', 'BaxA_2', 'CytoCM'])
Monomer('ApafI')
Monomer('BidU', ['C8A'])
Monomer('BidT')
Monomer('C3A', ['Xiap', 'ParpU'])
Monomer('ApafA')
Monomer('BidM', ['BaxM'])
Monomer('Receptor', ['Ligand', 'Fadd'])
Parameter('bind_0_Ligand_binder_Receptor_binder_target_2kf', 1.0)
Parameter('bind_0_Ligand_binder_Receptor_binder_target_1kr', 1.0)
Parameter('bind_0_Receptor_binder_Fadd_binder_target_2kf', 1.0)
Parameter('bind_0_Receptor_binder_Fadd_binder_target_1kr', 1.0)
Parameter('substrate_binding_0_Fadd_catalyzer_C8pro_substrate_2kf', 1.0)
Parameter('substrate_binding_0_Fadd_catalyzer_C8pro_substrate_1kr', 1.0)
Parameter('catalytic_step_0_Fadd_catalyzer_C8pro_substrate_C8A_product_1kc', 1.0)
Parameter('catalysis_0_C8A_catalyzer_BidU_substrate_BidT_product_2kf', 1.0)
Parameter('catalysis_0_C8A_catalyzer_BidU_substrate_BidT_product_1kr', 1.0)
Parameter('catalysis_1_C8A_catalyzer_BidU_substrate_BidT_product_1kc', 1.0)
Parameter('conversion_0_CytoCC_subunit_d_ApafI_subunit_c_ApafA_complex_2kf', 1.0)
Parameter('conversion_0_CytoCC_subunit_d_ApafI_subunit_c_ApafA_complex_1kr', 1.0)
Parameter('conversion_0_C9_subunit_d_ApafA_subunit_c_Apop_complex_2kf', 1.0)
Parameter('conversion_0_C9_subunit_d_ApafA_subunit_c_Apop_complex_1kr', 1.0)
Parameter('catalysis_0_Apop_catalyzer_C3pro_substrate_C3A_product_2kf', 1.0)
Parameter('catalysis_0_Apop_catalyzer_C3pro_substrate_C3A_product_1kr', 1.0)
Parameter('catalysis_1_Apop_catalyzer_C3pro_substrate_C3A_product_1kc', 1.0)
Parameter('inhibition_0_Xiap_inhibitor_Apop_inh_target_2kf', 1.0)
Parameter('inhibition_0_Xiap_inhibitor_Apop_inh_target_1kr', 1.0)
Parameter('catalysis_0_Xiap_catalyzer_C3A_substrate_C3ub_product_2kf', 1.0)
Parameter('catalysis_0_Xiap_catalyzer_C3A_substrate_C3ub_product_1kr', 1.0)
Parameter('catalysis_1_Xiap_catalyzer_C3A_substrate_C3ub_product_1kc', 1.0)
Parameter('catalysis_0_C3A_catalyzer_ParpU_substrate_ParpC_product_2kf', 1.0)
Parameter('catalysis_0_C3A_catalyzer_ParpU_substrate_ParpC_product_1kr', 1.0)
Parameter('catalysis_1_C3A_catalyzer_ParpU_substrate_ParpC_product_1kc', 1.0)
Parameter('equilibration_0_BidT_equil_a_BidM_equil_b_1kf', 1.0)
Parameter('equilibration_0_BidT_equil_a_BidM_equil_b_1kr', 1.0)
Parameter('catalysis_0_BidM_catalyzer_BaxM_substrate_BaxA_product_2kf', 1.0)
Parameter('catalysis_0_BidM_catalyzer_BaxM_substrate_BaxA_product_1kr', 1.0)
Parameter('catalysis_1_BidM_catalyzer_BaxM_substrate_BaxA_product_1kc', 1.0)
Parameter('self_catalyze_0_BaxA_self_catalyzer_BaxM_self_substrate_2kf', 1.0)
Parameter('self_catalyze_0_BaxA_self_catalyzer_BaxM_self_substrate_1kr', 1.0)
Parameter('self_catalyze_1_BaxA_self_catalyzer_BaxM_self_substrate_1kc', 1.0)
Parameter('pore_formation_0_BaxA_pore_2kf', 1.0)
Parameter('pore_formation_0_BaxA_pore_1kr', 1.0)
Parameter('pore_formation_1_BaxA_pore_2kf', 1.0)
Parameter('pore_formation_1_BaxA_pore_1kr', 1.0)
Parameter('pore_formation_2_BaxA_pore_2kf', 1.0)
Parameter('pore_formation_2_BaxA_pore_1kr', 1.0)
Parameter('transport_0_BaxA_pore_CytoCM_cargo_M_CytoCC_cargo_C_2kf', 1.0)
Parameter('transport_0_BaxA_pore_CytoCM_cargo_M_CytoCC_cargo_C_1kr', 1.0)
Parameter('transport_1_BaxA_pore_CytoCM_cargo_M_CytoCC_cargo_C_1kc', 1.0)
Parameter('Ligand_0', 1000.0)
Parameter('ParpU_0', 1000000.0)
Parameter('C8A_0', 0.0)
Parameter('BaxM_0', 40000.0)
Parameter('Apop_0', 0.0)
Parameter('Fadd_0', 130000.0)
Parameter('ParpC_0', 0.0)
Parameter('Xiap_0', 76500.0)
Parameter('C9_0', 100000.0)
Parameter('C3ub_0', 0.0)
Parameter('C8pro_0', 130000.0)
Parameter('C3pro_0', 21000.0)
Parameter('CytoCM_0', 500000.0)
Parameter('CytoCC_0', 0.0)
Parameter('BaxA_0', 0.0)
Parameter('ApafI_0', 100000.0)
Parameter('BidU_0', 171000.0)
Parameter('BidT_0', 0.0)
Parameter('C3A_0', 0.0)
Parameter('ApafA_0', 0.0)
Parameter('BidM_0', 0.0)
Parameter('Receptor_0', 100.0)
Observable('Ligand_obs', Ligand())
Observable('ParpU_obs', ParpU())
Observable('C8A_obs', C8A())
Observable('BaxM_obs', BaxM())
Observable('Apop_obs', Apop())
Observable('Fadd_obs', Fadd())
Observable('ParpC_obs', ParpC())
Observable('Xiap_obs', Xiap())
Observable('C9_obs', C9())
Observable('C3ub_obs', C3ub())
Observable('C8pro_obs', C8pro())
Observable('C3pro_obs', C3pro())
Observable('CytoCM_obs', CytoCM())
Observable('CytoCC_obs', CytoCC())
Observable('BaxA_obs', BaxA())
Observable('ApafI_obs', ApafI())
Observable('BidU_obs', BidU())
Observable('BidT_obs', BidT())
Observable('C3A_obs', C3A())
Observable('ApafA_obs', ApafA())
Observable('BidM_obs', BidM())
Observable('Receptor_obs', Receptor())
Rule('bind_0_Ligand_binder_Receptor_binder_target', Ligand(Receptor=None) + Receptor(Ligand=None, Fadd=None) | Ligand(Receptor=1) % Receptor(Ligand=1, Fadd=None), bind_0_Ligand_binder_Receptor_binder_target_2kf, bind_0_Ligand_binder_Receptor_binder_target_1kr)
Rule('bind_0_Receptor_binder_Fadd_binder_target', Receptor(Ligand=ANY, Fadd=None) + Fadd(Receptor=None, C8pro=None) | Receptor(Ligand=ANY, Fadd=1) % Fadd(Receptor=1, C8pro=None), bind_0_Receptor_binder_Fadd_binder_target_2kf, bind_0_Receptor_binder_Fadd_binder_target_1kr)
Rule('substrate_binding_0_Fadd_catalyzer_C8pro_substrate', Fadd(Receptor=ANY, C8pro=None) + C8pro(Fadd=None) | Fadd(Receptor=ANY, C8pro=1) % C8pro(Fadd=1), substrate_binding_0_Fadd_catalyzer_C8pro_substrate_2kf, substrate_binding_0_Fadd_catalyzer_C8pro_substrate_1kr)
Rule('catalytic_step_0_Fadd_catalyzer_C8pro_substrate_C8A_product', Fadd(Receptor=ANY, C8pro=1) % C8pro(Fadd=1) >> Fadd(Receptor=ANY, C8pro=None) + C8A(BidU=None), catalytic_step_0_Fadd_catalyzer_C8pro_substrate_C8A_product_1kc)
Rule('catalysis_0_C8A_catalyzer_BidU_substrate_BidT_product', C8A(BidU=None) + BidU(C8A=None) | C8A(BidU=1) % BidU(C8A=1), catalysis_0_C8A_catalyzer_BidU_substrate_BidT_product_2kf, catalysis_0_C8A_catalyzer_BidU_substrate_BidT_product_1kr)
Rule('catalysis_1_C8A_catalyzer_BidU_substrate_BidT_product', C8A(BidU=1) % BidU(C8A=1) >> C8A(BidU=None) + BidT(), catalysis_1_C8A_catalyzer_BidU_substrate_BidT_product_1kc)
Rule('conversion_0_CytoCC_subunit_d_ApafI_subunit_c_ApafA_complex', ApafI() + CytoCC() | ApafA(), conversion_0_CytoCC_subunit_d_ApafI_subunit_c_ApafA_complex_2kf, conversion_0_CytoCC_subunit_d_ApafI_subunit_c_ApafA_complex_1kr)
Rule('conversion_0_C9_subunit_d_ApafA_subunit_c_Apop_complex', ApafA() + C9() | Apop(C3pro=None, Xiap=None), conversion_0_C9_subunit_d_ApafA_subunit_c_Apop_complex_2kf, conversion_0_C9_subunit_d_ApafA_subunit_c_Apop_complex_1kr)
Rule('catalysis_0_Apop_catalyzer_C3pro_substrate_C3A_product', Apop(C3pro=None, Xiap=None) + C3pro(Apop=None) | Apop(C3pro=1, Xiap=None) % C3pro(Apop=1), catalysis_0_Apop_catalyzer_C3pro_substrate_C3A_product_2kf, catalysis_0_Apop_catalyzer_C3pro_substrate_C3A_product_1kr)
Rule('catalysis_1_Apop_catalyzer_C3pro_substrate_C3A_product', Apop(C3pro=1, Xiap=None) % C3pro(Apop=1) >> Apop(C3pro=None, Xiap=None) + C3A(Xiap=None, ParpU=None), catalysis_1_Apop_catalyzer_C3pro_substrate_C3A_product_1kc)
Rule('inhibition_0_Xiap_inhibitor_Apop_inh_target', Xiap(Apop=None, C3A=None) + Apop(C3pro=None, Xiap=None) | Xiap(Apop=1, C3A=None) % Apop(C3pro=None, Xiap=1), inhibition_0_Xiap_inhibitor_Apop_inh_target_2kf, inhibition_0_Xiap_inhibitor_Apop_inh_target_1kr)
Rule('catalysis_0_Xiap_catalyzer_C3A_substrate_C3ub_product', Xiap(Apop=None, C3A=None) + C3A(Xiap=None, ParpU=None) | Xiap(Apop=None, C3A=1) % C3A(Xiap=1, ParpU=None), catalysis_0_Xiap_catalyzer_C3A_substrate_C3ub_product_2kf, catalysis_0_Xiap_catalyzer_C3A_substrate_C3ub_product_1kr)
Rule('catalysis_1_Xiap_catalyzer_C3A_substrate_C3ub_product', Xiap(Apop=None, C3A=1) % C3A(Xiap=1, ParpU=None) >> Xiap(Apop=None, C3A=None) + C3ub(), catalysis_1_Xiap_catalyzer_C3A_substrate_C3ub_product_1kc)
Rule('catalysis_0_C3A_catalyzer_ParpU_substrate_ParpC_product', C3A(Xiap=None, ParpU=None) + ParpU(C3A=None) | C3A(Xiap=None, ParpU=1) % ParpU(C3A=1), catalysis_0_C3A_catalyzer_ParpU_substrate_ParpC_product_2kf, catalysis_0_C3A_catalyzer_ParpU_substrate_ParpC_product_1kr)
Rule('catalysis_1_C3A_catalyzer_ParpU_substrate_ParpC_product', C3A(Xiap=None, ParpU=1) % ParpU(C3A=1) >> C3A(Xiap=None, ParpU=None) + ParpC(), catalysis_1_C3A_catalyzer_ParpU_substrate_ParpC_product_1kc)
Rule('equilibration_0_BidT_equil_a_BidM_equil_b', BidT() | BidM(BaxM=None), equilibration_0_BidT_equil_a_BidM_equil_b_1kf, equilibration_0_BidT_equil_a_BidM_equil_b_1kr)
Rule('catalysis_0_BidM_catalyzer_BaxM_substrate_BaxA_product', BidM(BaxM=None) + BaxM(BidM=None, BaxA=None) | BidM(BaxM=1) % BaxM(BidM=1, BaxA=None), catalysis_0_BidM_catalyzer_BaxM_substrate_BaxA_product_2kf, catalysis_0_BidM_catalyzer_BaxM_substrate_BaxA_product_1kr)
Rule('catalysis_1_BidM_catalyzer_BaxM_substrate_BaxA_product', BidM(BaxM=1) % BaxM(BidM=1, BaxA=None) >> BidM(BaxM=None) + BaxA(BaxM=None, BaxA_1=None, BaxA_2=None, CytoCM=None), catalysis_1_BidM_catalyzer_BaxM_substrate_BaxA_product_1kc)
Rule('self_catalyze_0_BaxA_self_catalyzer_BaxM_self_substrate', BaxA(BaxM=None, BaxA_1=None, BaxA_2=None, CytoCM=None) + BaxM(BidM=None, BaxA=None) | BaxA(BaxM=1, BaxA_1=None, BaxA_2=None, CytoCM=None) % BaxM(BidM=None, BaxA=1), self_catalyze_0_BaxA_self_catalyzer_BaxM_self_substrate_2kf, self_catalyze_0_BaxA_self_catalyzer_BaxM_self_substrate_1kr)
Rule('self_catalyze_1_BaxA_self_catalyzer_BaxM_self_substrate', BaxA(BaxM=1, BaxA_1=None, BaxA_2=None, CytoCM=None) % BaxM(BidM=None, BaxA=1) >> BaxA(BaxM=None, BaxA_1=None, BaxA_2=None, CytoCM=None) + BaxA(BaxM=None, BaxA_1=None, BaxA_2=None, CytoCM=None), self_catalyze_1_BaxA_self_catalyzer_BaxM_self_substrate_1kc)
Rule('pore_formation_0_BaxA_pore', BaxA(BaxM=None, BaxA_1=None, BaxA_2=None, CytoCM=None) + BaxA(BaxM=None, BaxA_1=None, BaxA_2=None, CytoCM=None) | BaxA(BaxM=None, BaxA_1=None, BaxA_2=1, CytoCM=None) % BaxA(BaxM=None, BaxA_1=1, BaxA_2=None, CytoCM=None), pore_formation_0_BaxA_pore_2kf, pore_formation_0_BaxA_pore_1kr)
Rule('pore_formation_1_BaxA_pore', BaxA(BaxM=None, BaxA_1=None, BaxA_2=None, CytoCM=None) + BaxA(BaxM=None, BaxA_1=None, BaxA_2=1, CytoCM=None) % BaxA(BaxM=None, BaxA_1=1, BaxA_2=None, CytoCM=None) | BaxA(BaxM=None, BaxA_1=3, BaxA_2=1, CytoCM=None) % BaxA(BaxM=None, BaxA_1=1, BaxA_2=2, CytoCM=None) % BaxA(BaxM=None, BaxA_1=2, BaxA_2=3, CytoCM=None), pore_formation_1_BaxA_pore_2kf, pore_formation_1_BaxA_pore_1kr)
Rule('pore_formation_2_BaxA_pore', BaxA(BaxM=None, BaxA_1=None, BaxA_2=None, CytoCM=None) + BaxA(BaxM=None, BaxA_1=3, BaxA_2=1, CytoCM=None) % BaxA(BaxM=None, BaxA_1=1, BaxA_2=2, CytoCM=None) % BaxA(BaxM=None, BaxA_1=2, BaxA_2=3, CytoCM=None) | BaxA(BaxM=None, BaxA_1=4, BaxA_2=1, CytoCM=None) % BaxA(BaxM=None, BaxA_1=1, BaxA_2=2, CytoCM=None) % BaxA(BaxM=None, BaxA_1=2, BaxA_2=3, CytoCM=None) % BaxA(BaxM=None, BaxA_1=3, BaxA_2=4, CytoCM=None), pore_formation_2_BaxA_pore_2kf, pore_formation_2_BaxA_pore_1kr)
Rule('transport_0_BaxA_pore_CytoCM_cargo_M_CytoCC_cargo_C', BaxA(BaxM=None, BaxA_1=4, BaxA_2=1, CytoCM=None) % BaxA(BaxM=None, BaxA_1=1, BaxA_2=2, CytoCM=None) % BaxA(BaxM=None, BaxA_1=2, BaxA_2=3, CytoCM=None) % BaxA(BaxM=None, BaxA_1=3, BaxA_2=4, CytoCM=None) + CytoCM(BaxA=None) | BaxA(BaxM=None, BaxA_1=4, BaxA_2=1, CytoCM=None) % BaxA(BaxM=None, BaxA_1=1, BaxA_2=2, CytoCM=None) % BaxA(BaxM=None, BaxA_1=2, BaxA_2=3, CytoCM=None) % BaxA(BaxM=None, BaxA_1=3, BaxA_2=4, CytoCM=5) % CytoCM(BaxA=5), transport_0_BaxA_pore_CytoCM_cargo_M_CytoCC_cargo_C_2kf, transport_0_BaxA_pore_CytoCM_cargo_M_CytoCC_cargo_C_1kr)
Rule('transport_1_BaxA_pore_CytoCM_cargo_M_CytoCC_cargo_C', BaxA(BaxM=None, BaxA_1=4, BaxA_2=1, CytoCM=None) % BaxA(BaxM=None, BaxA_1=1, BaxA_2=2, CytoCM=None) % BaxA(BaxM=None, BaxA_1=2, BaxA_2=3, CytoCM=None) % BaxA(BaxM=None, BaxA_1=3, BaxA_2=4, CytoCM=5) % CytoCM(BaxA=5) >> BaxA(BaxM=None, BaxA_1=4, BaxA_2=1, CytoCM=None) % BaxA(BaxM=None, BaxA_1=1, BaxA_2=2, CytoCM=None) % BaxA(BaxM=None, BaxA_1=2, BaxA_2=3, CytoCM=None) % BaxA(BaxM=None, BaxA_1=3, BaxA_2=4, CytoCM=None) + CytoCC(), transport_1_BaxA_pore_CytoCM_cargo_M_CytoCC_cargo_C_1kc)
Initial(Ligand(Receptor=None), Ligand_0)
Initial(ParpU(C3A=None), ParpU_0)
Initial(C8A(BidU=None), C8A_0)
Initial(BaxM(BidM=None, BaxA=None), BaxM_0)
Initial(Apop(C3pro=None, Xiap=None), Apop_0)
Initial(Fadd(Receptor=None, C8pro=None), Fadd_0)
Initial(ParpC(), ParpC_0)
Initial(Xiap(Apop=None, C3A=None), Xiap_0)
Initial(C9(), C9_0)
Initial(C3ub(), C3ub_0)
Initial(C8pro(Fadd=None), C8pro_0)
Initial(C3pro(Apop=None), C3pro_0)
Initial(CytoCM(BaxA=None), CytoCM_0)
Initial(CytoCC(), CytoCC_0)
Initial(BaxA(BaxM=None, BaxA_1=None, BaxA_2=None, CytoCM=None), BaxA_0)
Initial(ApafI(), ApafI_0)
Initial(BidU(C8A=None), BidU_0)
Initial(BidT(), BidT_0)
Initial(C3A(Xiap=None, ParpU=None), C3A_0)
Initial(ApafA(), ApafA_0)
Initial(BidM(BaxM=None), BidM_0)
Initial(Receptor(Ligand=None, Fadd=None), Receptor_0)
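# A minimal simulation sketch (not part of the exported model file). It assumes
# PySB's ScipyOdeSimulator; the time span below is arbitrary, and since every rate
# parameter above is a 1.0 placeholder the trajectory is not biologically
# meaningful, it only shows how the exported `model` object would be run.
# >>> import numpy as np
# >>> from pysb.simulator import ScipyOdeSimulator
# >>> tspan = np.linspace(0, 20000, 100)
# >>> traj = ScipyOdeSimulator(model, tspan=tspan).run()
# >>> traj.observables['ParpC_obs'][-1]  # cleaved PARP at the last time point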
| 79.467066
| 614
| 0.809736
|
72ef685abf2dd8231e03046ea126652e6323f5a1
| 1,013
|
py
|
Python
|
python/add_two_numbers.py
|
soumasish/leetcode
|
7c626800c391733ea32c1b7321c5c24c9b6731a6
|
[
"MIT"
] | 10
|
2019-04-29T17:02:31.000Z
|
2021-07-04T16:20:59.000Z
|
python/add_two_numbers.py
|
soumasish/leetcode
|
7c626800c391733ea32c1b7321c5c24c9b6731a6
|
[
"MIT"
] | null | null | null |
python/add_two_numbers.py
|
soumasish/leetcode
|
7c626800c391733ea32c1b7321c5c24c9b6731a6
|
[
"MIT"
] | 8
|
2019-04-24T18:26:02.000Z
|
2021-06-26T05:47:17.000Z
|
"""Created by sgoswami on 10/7/17."""
"""You are given two non-empty linked lists representing two non-negative integers. The digits are stored in
reverse order and each of their nodes contain a single digit. Add the two numbers and return it as a linked list.
You may assume the two numbers do not contain any leading zero, except the number 0 itself."""
class ListNode:
def __init__(self, x):
self.val = x
self.next = None
class Solution(object):
def addTwoNumbers(self, l1, l2):
"""
:type l1: ListNode
:type l2: ListNode
:rtype: ListNode
"""
p = root = ListNode(0)
carry = 0
while l1 or l2 or carry:
v1 = v2 = 0
if l1:
v1 = l1.val
l1 = l1.next
if l2:
v2 = l2.val
l2 = l2.next
carry, val = divmod(v1 + v2 + carry, 10)
p.next = ListNode(val)
p = p.next
return root.next
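# A small usage sketch (not part of the original solution): 342 + 465 = 807, so the
# reversed-digit lists 2->4->3 and 5->6->4 should yield 7->0->8. `build` is a
# hypothetical helper used only to construct the input lists.
# >>> def build(digits):
# ...     head = node = ListNode(digits[0])
# ...     for d in digits[1:]:
# ...         node.next = ListNode(d)
# ...         node = node.next
# ...     return head
# >>> r = Solution().addTwoNumbers(build([2, 4, 3]), build([5, 6, 4]))
# >>> [r.val, r.next.val, r.next.next.val]
# [7, 0, 8]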
| 25.974359
| 113
| 0.541955
|
cb7e4e8f8dc982057b8270c5328b23cdd50b7801
| 3,895
|
py
|
Python
|
c7n/resources/secretsmanager.py
|
chris-angeli-rft/cloud-custodian
|
5ff331b114a591dbaf6d672e30ceefb7ae64a5dd
|
[
"Apache-2.0"
] | 8
|
2021-05-18T02:22:03.000Z
|
2021-09-11T02:49:04.000Z
|
c7n/resources/secretsmanager.py
|
chris-angeli-rft/cloud-custodian
|
5ff331b114a591dbaf6d672e30ceefb7ae64a5dd
|
[
"Apache-2.0"
] | 1
|
2021-04-26T04:38:35.000Z
|
2021-04-26T04:38:35.000Z
|
c7n/resources/secretsmanager.py
|
chris-angeli-rft/cloud-custodian
|
5ff331b114a591dbaf6d672e30ceefb7ae64a5dd
|
[
"Apache-2.0"
] | 1
|
2020-12-28T23:21:30.000Z
|
2020-12-28T23:21:30.000Z
|
# Copyright 2018 Capital One Services, LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from c7n.manager import resources
from c7n.filters import iamaccess
from c7n.query import QueryResourceManager, TypeInfo
from c7n.tags import RemoveTag, Tag, TagActionFilter, TagDelayedAction
from c7n.utils import local_session
@resources.register('secrets-manager')
class SecretsManager(QueryResourceManager):
permissions = ('secretsmanager:ListSecretVersionIds',)
class resource_type(TypeInfo):
service = 'secretsmanager'
enum_spec = ('list_secrets', 'SecretList', None)
detail_spec = ('describe_secret', 'SecretId', 'ARN', None)
cfn_type = 'AWS::SecretsManager::Secret'
arn = id = 'ARN'
name = 'Name'
SecretsManager.filter_registry.register('marked-for-op', TagActionFilter)
@SecretsManager.filter_registry.register('cross-account')
class CrossAccountAccessFilter(iamaccess.CrossAccountAccessFilter):
policy_annotation = "c7n:AccessPolicy"
permissions = ("secretsmanager:GetResourcePolicy",)
def process(self, resources, event=None):
self.client = local_session(self.manager.session_factory).client('secretsmanager')
return super(CrossAccountAccessFilter, self).process(resources)
def get_resource_policy(self, r):
if self.policy_annotation in r:
return r[self.policy_annotation]
r[self.policy_annotation] = p = self.client.get_resource_policy(
SecretId=r['Name']).get('ResourcePolicy', None)
return p
@SecretsManager.action_registry.register('tag')
class TagSecretsManagerResource(Tag):
"""Action to create tag(s) on a Secret resource
:example:
.. code-block:: yaml
policies:
- name: tag-secret
resource: secrets-manager
actions:
- type: tag
key: tag-key
value: tag-value
"""
permissions = ('secretsmanager:TagResource',)
def process_resource_set(self, client, resources, new_tags):
for r in resources:
tags = {t['Key']: t['Value'] for t in r['Tags']}
for t in new_tags:
tags[t['Key']] = t['Value']
formatted_tags = [{'Key': k, 'Value': v} for k, v in tags.items()]
client.tag_resource(SecretId=r['ARN'], Tags=formatted_tags)
@SecretsManager.action_registry.register('remove-tag')
class RemoveTagSecretsManagerResource(RemoveTag):
"""Action to remove tag(s) on a Secret resource
:example:
.. code-block:: yaml
policies:
- name: untag-secret
resource: secrets-manager
actions:
- type: remove-tag
tags: ['tag-to-be-removed']
"""
permissions = ('secretsmanager:UntagResource',)
def process_resource_set(self, client, resources, keys):
for r in resources:
client.untag_resource(SecretId=r['ARN'], TagKeys=keys)
@SecretsManager.action_registry.register('mark-for-op')
class MarkSecretForOp(TagDelayedAction):
"""Action to mark a Secret resource for deferred action :example:
.. code-block:: yaml
policies:
- name: mark-secret-for-delete
resource: secrets-manager
actions:
- type: mark-for-op
op: tag
days: 1
"""
| 32.190083
| 90
| 0.656483
|
770d9912e735d979b4f031a1d4a127ee166e32e8
| 12,966
|
py
|
Python
|
python/dgl/nn/pytorch/conv/gatconv.py
|
joshcarty/dgl
|
4464b9734c1061bd84325a54883c5046031def37
|
[
"Apache-2.0"
] | 4
|
2018-12-25T14:59:08.000Z
|
2021-07-02T12:36:40.000Z
|
python/dgl/nn/pytorch/conv/gatconv.py
|
joshcarty/dgl
|
4464b9734c1061bd84325a54883c5046031def37
|
[
"Apache-2.0"
] | 1
|
2020-12-26T12:43:54.000Z
|
2020-12-26T12:43:54.000Z
|
python/dgl/nn/pytorch/conv/gatconv.py
|
joshcarty/dgl
|
4464b9734c1061bd84325a54883c5046031def37
|
[
"Apache-2.0"
] | 4
|
2020-12-26T10:39:36.000Z
|
2020-12-26T12:38:52.000Z
|
"""Torch modules for graph attention networks(GAT)."""
# pylint: disable= no-member, arguments-differ, invalid-name
import torch as th
from torch import nn
from .... import function as fn
from ....ops import edge_softmax
from ....base import DGLError
from ..utils import Identity
from ....utils import expand_as_pair
# pylint: enable=W0235
class GATConv(nn.Module):
r"""
Description
-----------
Apply `Graph Attention Network <https://arxiv.org/pdf/1710.10903.pdf>`__
over an input signal.
.. math::
h_i^{(l+1)} = \sum_{j\in \mathcal{N}(i)} \alpha_{i,j} W^{(l)} h_j^{(l)}
    where :math:`\alpha_{ij}` is the attention score between node :math:`i` and
node :math:`j`:
.. math::
\alpha_{ij}^{l} &= \mathrm{softmax_i} (e_{ij}^{l})
e_{ij}^{l} &= \mathrm{LeakyReLU}\left(\vec{a}^T [W h_{i} \| W h_{j}]\right)
Parameters
----------
in_feats : int, or pair of ints
Input feature size; i.e, the number of dimensions of :math:`h_i^{(l)}`.
GATConv can be applied on homogeneous graph and unidirectional
`bipartite graph <https://docs.dgl.ai/generated/dgl.bipartite.html?highlight=bipartite>`__.
If the layer is to be applied to a unidirectional bipartite graph, ``in_feats``
specifies the input feature size on both the source and destination nodes. If
a scalar is given, the source and destination node feature size would take the
same value.
out_feats : int
Output feature size; i.e, the number of dimensions of :math:`h_i^{(l+1)}`.
num_heads : int
Number of heads in Multi-Head Attention.
feat_drop : float, optional
Dropout rate on feature. Defaults: ``0``.
attn_drop : float, optional
Dropout rate on attention weight. Defaults: ``0``.
negative_slope : float, optional
LeakyReLU angle of negative slope. Defaults: ``0.2``.
residual : bool, optional
If True, use residual connection. Defaults: ``False``.
activation : callable activation function/layer or None, optional.
If not None, applies an activation function to the updated node features.
Default: ``None``.
allow_zero_in_degree : bool, optional
If there are 0-in-degree nodes in the graph, output for those nodes will be invalid
since no message will be passed to those nodes. This is harmful for some applications
causing silent performance regression. This module will raise a DGLError if it detects
0-in-degree nodes in input graph. By setting ``True``, it will suppress the check
and let the users handle it by themselves. Defaults: ``False``.
Note
----
    Zero in-degree nodes will lead to invalid output values. This is because no message
    will be passed to those nodes, so the aggregation function will be applied to an empty input.
A common practice to avoid this is to add a self-loop for each node in the graph if
it is homogeneous, which can be achieved by:
>>> g = ... # a DGLGraph
>>> g = dgl.add_self_loop(g)
    Calling ``add_self_loop`` will not work for some graphs, for example, heterogeneous graphs,
    since the edge type can not be decided for self-loop edges. Set ``allow_zero_in_degree``
    to ``True`` for those cases to unblock the code and handle zero-in-degree nodes manually.
    A common practice to handle this is to filter out the zero-in-degree nodes when using
    the output of the convolution.
Examples
--------
>>> import dgl
>>> import numpy as np
>>> import torch as th
>>> from dgl.nn import GATConv
>>> # Case 1: Homogeneous graph
>>> g = dgl.graph(([0,1,2,3,2,5], [1,2,3,4,0,3]))
>>> g = dgl.add_self_loop(g)
>>> feat = th.ones(6, 10)
>>> gatconv = GATConv(10, 2, num_heads=3)
>>> res = gatconv(g, feat)
>>> res
tensor([[[ 3.4570, 1.8634],
[ 1.3805, -0.0762],
[ 1.0390, -1.1479]],
[[ 3.4570, 1.8634],
[ 1.3805, -0.0762],
[ 1.0390, -1.1479]],
[[ 3.4570, 1.8634],
[ 1.3805, -0.0762],
[ 1.0390, -1.1479]],
[[ 3.4570, 1.8634],
[ 1.3805, -0.0762],
[ 1.0390, -1.1479]],
[[ 3.4570, 1.8634],
[ 1.3805, -0.0762],
[ 1.0390, -1.1479]],
[[ 3.4570, 1.8634],
[ 1.3805, -0.0762],
[ 1.0390, -1.1479]]], grad_fn=<BinaryReduceBackward>)
>>> # Case 2: Unidirectional bipartite graph
>>> u = [0, 1, 0, 0, 1]
>>> v = [0, 1, 2, 3, 2]
>>> g = dgl.bipartite((u, v))
>>> u_feat = th.tensor(np.random.rand(2, 5).astype(np.float32))
>>> v_feat = th.tensor(np.random.rand(4, 10).astype(np.float32))
>>> gatconv = GATConv((5,10), 2, 3)
>>> res = gatconv(g, (u_feat, v_feat))
>>> res
tensor([[[-0.6066, 1.0268],
[-0.5945, -0.4801],
[ 0.1594, 0.3825]],
[[ 0.0268, 1.0783],
[ 0.5041, -1.3025],
[ 0.6568, 0.7048]],
[[-0.2688, 1.0543],
[-0.0315, -0.9016],
[ 0.3943, 0.5347]],
[[-0.6066, 1.0268],
[-0.5945, -0.4801],
[ 0.1594, 0.3825]]], grad_fn=<BinaryReduceBackward>)
"""
def __init__(self,
in_feats,
out_feats,
num_heads,
feat_drop=0.,
attn_drop=0.,
negative_slope=0.2,
residual=False,
activation=None,
allow_zero_in_degree=False):
super(GATConv, self).__init__()
self._num_heads = num_heads
self._in_src_feats, self._in_dst_feats = expand_as_pair(in_feats)
self._out_feats = out_feats
self._allow_zero_in_degree = allow_zero_in_degree
if isinstance(in_feats, tuple):
self.fc_src = nn.Linear(
self._in_src_feats, out_feats * num_heads, bias=False)
self.fc_dst = nn.Linear(
self._in_dst_feats, out_feats * num_heads, bias=False)
else:
self.fc = nn.Linear(
self._in_src_feats, out_feats * num_heads, bias=False)
self.attn_l = nn.Parameter(th.FloatTensor(size=(1, num_heads, out_feats)))
self.attn_r = nn.Parameter(th.FloatTensor(size=(1, num_heads, out_feats)))
self.feat_drop = nn.Dropout(feat_drop)
self.attn_drop = nn.Dropout(attn_drop)
self.leaky_relu = nn.LeakyReLU(negative_slope)
if residual:
if self._in_dst_feats != out_feats:
self.res_fc = nn.Linear(
self._in_dst_feats, num_heads * out_feats, bias=False)
else:
self.res_fc = Identity()
else:
self.register_buffer('res_fc', None)
self.reset_parameters()
self.activation = activation
def reset_parameters(self):
"""
Description
-----------
Reinitialize learnable parameters.
Note
----
        The fc weights :math:`W^{(l)}` are initialized using Glorot (Xavier) normal initialization.
        The attention weights are initialized using the same Xavier normal method.
"""
gain = nn.init.calculate_gain('relu')
if hasattr(self, 'fc'):
nn.init.xavier_normal_(self.fc.weight, gain=gain)
else:
nn.init.xavier_normal_(self.fc_src.weight, gain=gain)
nn.init.xavier_normal_(self.fc_dst.weight, gain=gain)
nn.init.xavier_normal_(self.attn_l, gain=gain)
nn.init.xavier_normal_(self.attn_r, gain=gain)
if isinstance(self.res_fc, nn.Linear):
nn.init.xavier_normal_(self.res_fc.weight, gain=gain)
def set_allow_zero_in_degree(self, set_value):
r"""
Description
-----------
Set allow_zero_in_degree flag.
Parameters
----------
set_value : bool
The value to be set to the flag.
"""
self._allow_zero_in_degree = set_value
def forward(self, graph, feat, get_attention=False):
r"""
Description
-----------
Compute graph attention network layer.
Parameters
----------
graph : DGLGraph
The graph.
feat : torch.Tensor or pair of torch.Tensor
If a torch.Tensor is given, the input feature of shape :math:`(N, D_{in})` where
:math:`D_{in}` is size of input feature, :math:`N` is the number of nodes.
If a pair of torch.Tensor is given, the pair must contain two tensors of shape
:math:`(N_{in}, D_{in_{src}})` and :math:`(N_{out}, D_{in_{dst}})`.
get_attention : bool, optional
Whether to return the attention values. Default to False.
Returns
-------
torch.Tensor
The output feature of shape :math:`(N, H, D_{out})` where :math:`H`
is the number of heads, and :math:`D_{out}` is size of output feature.
torch.Tensor, optional
The attention values of shape :math:`(E, H, 1)`, where :math:`E` is the number of
edges. This is returned only when :attr:`get_attention` is ``True``.
Raises
------
DGLError
If there are 0-in-degree nodes in the input graph, it will raise DGLError
since no message will be passed to those nodes. This will cause invalid output.
The error can be ignored by setting ``allow_zero_in_degree`` parameter to ``True``.
"""
with graph.local_scope():
if not self._allow_zero_in_degree:
if (graph.in_degrees() == 0).any():
raise DGLError('There are 0-in-degree nodes in the graph, '
'output for those nodes will be invalid. '
'This is harmful for some applications, '
'causing silent performance regression. '
'Adding self-loop on the input graph by '
'calling `g = dgl.add_self_loop(g)` will resolve '
'the issue. Setting ``allow_zero_in_degree`` '
'to be `True` when constructing this module will '
'suppress the check and let the code run.')
if isinstance(feat, tuple):
h_src = self.feat_drop(feat[0])
h_dst = self.feat_drop(feat[1])
if not hasattr(self, 'fc_src'):
self.fc_src, self.fc_dst = self.fc, self.fc
feat_src = self.fc_src(h_src).view(-1, self._num_heads, self._out_feats)
feat_dst = self.fc_dst(h_dst).view(-1, self._num_heads, self._out_feats)
else:
h_src = h_dst = self.feat_drop(feat)
feat_src = feat_dst = self.fc(h_src).view(
-1, self._num_heads, self._out_feats)
if graph.is_block:
feat_dst = feat_src[:graph.number_of_dst_nodes()]
# NOTE: GAT paper uses "first concatenation then linear projection"
# to compute attention scores, while ours is "first projection then
# addition", the two approaches are mathematically equivalent:
# We decompose the weight vector a mentioned in the paper into
# [a_l || a_r], then
# a^T [Wh_i || Wh_j] = a_l Wh_i + a_r Wh_j
            # Our implementation is much more efficient because we do not need to
            # save [Wh_i || Wh_j] on edges, which is not memory-efficient. Plus,
            # addition could be optimized with DGL's built-in function u_add_v,
            # which further speeds up computation and reduces the memory footprint.
el = (feat_src * self.attn_l).sum(dim=-1).unsqueeze(-1)
er = (feat_dst * self.attn_r).sum(dim=-1).unsqueeze(-1)
graph.srcdata.update({'ft': feat_src, 'el': el})
graph.dstdata.update({'er': er})
# compute edge attention, el and er are a_l Wh_i and a_r Wh_j respectively.
graph.apply_edges(fn.u_add_v('el', 'er', 'e'))
e = self.leaky_relu(graph.edata.pop('e'))
# compute softmax
graph.edata['a'] = self.attn_drop(edge_softmax(graph, e))
# message passing
graph.update_all(fn.u_mul_e('ft', 'a', 'm'),
fn.sum('m', 'ft'))
rst = graph.dstdata['ft']
# residual
if self.res_fc is not None:
resval = self.res_fc(h_dst).view(h_dst.shape[0], -1, self._out_feats)
rst = rst + resval
# activation
if self.activation:
rst = self.activation(rst)
if get_attention:
return rst, graph.edata['a']
else:
return rst
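# A small numeric sketch (not part of DGL) of the decomposition described in the
# NOTE inside `forward`: splitting the attention vector a into [a_l || a_r] makes
# a^T [Wh_i || Wh_j] equal to a_l . Wh_i + a_r . Wh_j. The feature size 8 is arbitrary.
# >>> Whi, Whj = th.randn(8), th.randn(8)
# >>> a_l, a_r = th.randn(8), th.randn(8)
# >>> lhs = th.cat([a_l, a_r]) @ th.cat([Whi, Whj])
# >>> rhs = a_l @ Whi + a_r @ Whj
# >>> th.allclose(lhs, rhs)
# True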
| 42.234528
| 99
| 0.563474
|
ee7296ac021730c80ad674d90a17611ce535330e
| 2,142
|
py
|
Python
|
tests/fuzzy_tests.py
|
va-dudnikov/pygraph7
|
bf3469ca75f44283ee647dc10fa7d94bb7834cf3
|
[
"MIT"
] | 2
|
2018-08-02T10:15:39.000Z
|
2018-08-02T17:32:11.000Z
|
tests/fuzzy_tests.py
|
va-dudnikov/pygraph7
|
bf3469ca75f44283ee647dc10fa7d94bb7834cf3
|
[
"MIT"
] | null | null | null |
tests/fuzzy_tests.py
|
va-dudnikov/pygraph7
|
bf3469ca75f44283ee647dc10fa7d94bb7834cf3
|
[
"MIT"
] | null | null | null |
# Simple fuzzy test
import unittest
import graph7 as g7
import copy
import random
MIN_INT_VAL = 0
MAX_INT_VAL = 2**128
MIN_ORDER = 3
MAX_ORDER = 50
NITERS = 1
class TestGraph7(unittest.TestCase):
def _testing(self, min_order, max_order, wtype, niters, min_int_val = 0, max_int_val = 1):
def to_undirected(_mat):
mat = copy.deepcopy(_mat)
order = len(mat)
for i in range(order - 1):
for j in range(i + 1, order):
mat[j][i] = mat[i][j]
return mat
def to_unloops(_mat):
mat = copy.deepcopy(_mat)
for i in range(len(mat)):
mat[i][i] = 0
return mat
def random_mat(order, wtype, min_int_val, max_int_val):
mat = None
if wtype == 'int':
mat = [[random.randint(min_int_val, max_int_val) for _ in range(order)] for _ in range(order)]
else:
mat = [[random.random() for _ in range(order)] for _ in range(order)]
return mat
for _ in range(niters):
for order in range(min_order, max_order):
# Directed with loops
mat1 = random_mat(order, wtype, min_int_val, max_int_val)
# Directed without loops
mat2 = to_unloops(mat1)
# Undirected with loops
mat3 = to_undirected(mat1)
# Undirected without loops
mat4 = to_undirected(mat2)
self.assertEqual(g7.decode(g7.encode(mat1, wtype), wtype), mat1)
self.assertEqual(g7.decode(g7.encode(mat2, wtype), wtype), mat2)
self.assertEqual(g7.decode(g7.encode(mat3, wtype), wtype), mat3)
self.assertEqual(g7.decode(g7.encode(mat4, wtype), wtype), mat4)
def test_compact(self):
self._testing(MIN_ORDER, MAX_ORDER, 'int', NITERS)
def test_int(self):
self._testing(MIN_ORDER, MAX_ORDER, 'int', NITERS, MIN_INT_VAL, MAX_INT_VAL)
def test_float(self):
self._testing(MIN_ORDER, MAX_ORDER, 'double', NITERS)
unittest.main()
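# A minimal round-trip sketch of what the fuzzy test above exercises; the 3x3
# matrix is arbitrary, and only the encode/decode calls already used in the test
# are assumed:
# >>> mat = [[0, 1, 0], [1, 0, 2], [0, 2, 0]]
# >>> g7.decode(g7.encode(mat, 'int'), 'int') == mat
# True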
| 31.970149
| 110
| 0.569561
|
8e332a1177af9ec5220d6674f8ca4dd4b7990819
| 6,512
|
py
|
Python
|
objectflow-static.py
|
tanayrastogi/ObjectFlow-static
|
4f6f33966ca414685a111dbe76df83c47dd87f7e
|
[
"MIT"
] | null | null | null |
objectflow-static.py
|
tanayrastogi/ObjectFlow-static
|
4f6f33966ca414685a111dbe76df83c47dd87f7e
|
[
"MIT"
] | null | null | null |
objectflow-static.py
|
tanayrastogi/ObjectFlow-static
|
4f6f33966ca414685a111dbe76df83c47dd87f7e
|
[
"MIT"
] | null | null | null |
# Python Imports
import numpy as np
import cv2
import time
from shapely import geometry
import argparse
import imutils
# Local libraries
import ObjectDetection
from pyimagesearch.centroidtracker import CentroidTracker
import Utils.utils as ut
# Check if the points are in the polygon
def check_box_in_polygon(polygon, bbox):
"""
Function to check if the point is in a polygon or not
"""
# Point to check for the box
point = ((bbox[0] + bbox[2])/2, bbox[3])
# Polygon
poly = geometry.Polygon(polygon)
# Point
pt = geometry.Point(point)
    # Return whether the polygon contains the point
return poly.contains(pt)
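# Example with hypothetical values: for the square polygon
# [(0, 0), (10, 0), (10, 10), (0, 10)] and bbox (2, 2, 4, 6), the anchor point
# is ((2 + 4) / 2, 6) = (3.0, 6), which lies inside the square, so
# check_box_in_polygon(polygon, bbox) returns True.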
def detections_in_polygon(POLYGON, detections):
return [d for d in detections if check_box_in_polygon(POLYGON, d["bbox"])]
def update_trail_colors(objects):
global trail_color
for key in objects:
if key not in trail_color.keys():
trail_color[key] = np.random.uniform(0, 255, size=(1, 3))
return trail_color
def draw_object_on_image(frame, objects):
global trail_color
# loop over the tracked objects
for (objectID, centroid) in objects.items():
# draw both the ID of the object and the centroid of the
# object on the output frame
text = "ID {}".format(objectID)
cv2.putText(frame, text, (centroid[0] - 10, centroid[1] - 10),
cv2.FONT_HERSHEY_SIMPLEX, 0.5, trail_color[objectID].flatten(), 2)
cv2.circle(frame, (centroid[0], centroid[1]), 4, trail_color[objectID].flatten(), -1)
def update_footprints(objects):
global footprints
global trail_color
for (objectID, centroid) in objects.items():
# Add to the footprint dict
footprints.append(dict(location=centroid,
color=trail_color[objectID].flatten()))
def draw_footprint_on_image(frame):
global footprints
# Draw all the foot prints on the image
for foot in footprints:
cv2.circle(frame, (foot["location"][0], foot["location"][1]),
2, foot["color"], -1)
def draw_polygon(frame, pts):
bbox = np.array(pts, np.int32)
bbox = bbox.reshape((-1,1,2))
cv2.polylines(frame, [bbox], True, (0, 0, 255), 1)
if __name__ == "__main__":
# Create CL Arguments
ap = argparse.ArgumentParser()
ap.add_argument("-i", "--video", required=True,
help="path to the video.")
ap.add_argument("-p", "--polygon", nargs="+", default=None,
                    help="List of points of the polygon within which objects are tracked.")
ap.add_argument("-d", "--maxdisp", type=int, default=30,
                    help="Max number of frames before an object is considered disappeared.")
ap.add_argument("-s", "--skipframes", type=int, default=1,
help="number of frames to skip for Object Detection.")
ap.add_argument("-c", "--classes", nargs="+", default=["person"],
                    help="Classes to detect in object detection.")
ap.add_argument("--confidence", type=float, default=0.6,
help="Base confidence for object detection.")
args = vars(ap.parse_args())
# # ENVIRONMENT VARIABLES # #
VIDEO = args["video"]
FRAMES_TO_SKIP = args["skipframes"] # Frame
MAXDISP = args["maxdisp"]
CLASSES_TO_DETECT = args["classes"]
BASE_CONFIDENCE = args["confidence"]
    # Bounding points for the polygon (only when --polygon is given, otherwise None)
    POLYGON = None
    if args["polygon"] is not None:
        POLYGON = [tuple(int(p) for p in args["polygon"][i*2:i*2+2]) for i in range(4)]
    # # INITIALIZATION # #
# Variables
totalFrames = 0 # Variable to keep track of the frames
    trail_color = dict() # Variable to keep track of the trails for all detected objects
    footprints = list() # Variable to keep track of footprints for objects
# Object Detection Class
modelname = "faster_rcnn_inception_v2_coco_2018_01_28"
proto = modelname+".pbtxt"
classes = "object_detection_classes_coco.txt"
graph = "frozen_inference_graph.pb"
obd = ObjectDetection.TensorflowModel(modelname, proto, graph, classes,
BASE_CONFIDENCE, CLASSES_TO_DETECT)
# Video Sequence
vs = cv2.VideoCapture(VIDEO)
# Object Tracker
    # Number of consecutive frames before an object is considered disappeared
ct = CentroidTracker(maxDisappeared=MAXDISP)
# Start Reading the frames video sequence
while True:
# Read frames
ret, frame = vs.read()
        # check to see if we have reached the end of the stream
if not ret:
break
# Reshape the frame for working on it and take height, width
frame = imutils.resize(frame, width=1280, inter=cv2.INTER_AREA)
(height, width, channel) = frame.shape
# Get timestamp of the video
curr_timestamp = ut.get_videotimestamp(vs)
# Skip frames
if totalFrames % FRAMES_TO_SKIP == 0:
####################
# Object Detection #
####################
detections = obd.detect(frame, imgName=curr_timestamp)
# Keep only the points that are in the polygon
if POLYGON is not None:
detections = detections_in_polygon(POLYGON, detections)
# Print detected objects
ut.pp_detectionlist(detections)
# Label the image with the detected object
ut.labelImage(image=frame, dectList=detections)
###################
# Object Tracking #
###################
objects = ct.update([d["bbox"] for d in detections])
update_trail_colors(objects)
draw_object_on_image(frame, objects)
update_footprints(objects)
draw_footprint_on_image(frame)
if POLYGON is not None:
draw_polygon(frame, POLYGON)
# Show frame
cv2.imshow("Frame", frame)
key = cv2.waitKey(1) & 0xFF
time.sleep(0.01)
# if 'p' is pressed, then pause video
if key == ord('p'):
print("[INFO] Paused!! Press any key to release ...")
cv2.imshow("Frame", frame)
cv2.waitKey(-1)
if key == ord('q'):
# if 'q' is pressed, then quit the video
            print("[INFO] Quitting ...")
break
# Count frames
totalFrames += 1
# Close all the open windows
    print("[INFO] Closing all windows ...", end=" ")
cv2.destroyAllWindows()
print("Done!")
| 35.391304
| 95
| 0.607801
|
5af009674cb5d7c4d20cc2f41a0477984714d02b
| 1,520
|
py
|
Python
|
frappe_telegram/utils/formatting.py
|
rafatali686/frappe_telegram
|
724ead04a531eddfe935acf35282684fef41cb67
|
[
"MIT"
] | 16
|
2021-07-25T09:30:28.000Z
|
2022-03-24T04:56:57.000Z
|
frappe_telegram/utils/formatting.py
|
rafatali686/frappe_telegram
|
724ead04a531eddfe935acf35282684fef41cb67
|
[
"MIT"
] | 5
|
2021-08-24T18:07:13.000Z
|
2022-02-03T04:26:08.000Z
|
frappe_telegram/utils/formatting.py
|
rafatali686/frappe_telegram
|
724ead04a531eddfe935acf35282684fef41cb67
|
[
"MIT"
] | 10
|
2021-07-27T07:26:11.000Z
|
2022-03-24T11:16:38.000Z
|
import re
def strip_unsupported_html_tags(txt: str) -> str:
"""
Only a set of formatting options are supported
https://core.telegram.org/bots/api#formatting-options
"""
tags_supported = [
"b", "strong", # Bold
"i", "em", # Italics
"u", "ins", # Underline
"s", "strike", "del", # Strikethrough
"a", # Links
"pre", "code", # Code
]
# Replace Unsupported Tags
"""
< /? # Permit closing tags
(?!
(?: em | strong ) # List of tags to avoid matching
\b # Word boundary avoids partial word matches
)
[a-z] # Tag name initial character must be a-z
(?: [^>"'] # Any character except >, ", or '
| "[^"]*" # Double-quoted attribute value
| '[^']*' # Single-quoted attribute value
)*
>
https://www.oreilly.com/library/view/regular-expressions-cookbook/9781449327453/ch09s04.html
"""
r = """</?(?!(?:{})\\b)[a-z](?:[^>"']|"[^"]*"|'[^']*')*>""".format("|".join(tags_supported))
txt = re.sub(
pattern=r,
repl="",
string=txt)
# & &
txt = txt.replace("&", "&")
# < <
txt = re.sub(
pattern=r"<(?!(?:[a-z]+|\/[a-z]+)\b)",
repl="<",
string=txt,
)
# > $gt;
# Seems to go through well
return txt
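# A small usage sketch (not part of the original module): <div> is stripped
# because it is not in tags_supported, <b> is kept, and the bare & is escaped
# for Telegram's HTML parse mode.
# >>> strip_unsupported_html_tags('<div><b>bold & bright</b></div>')
# '<b>bold &amp; bright</b>'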
| 28.148148
| 96
| 0.431579
|
40bf70b371dad899d8fb122399a4f7fd7100535d
| 2,658
|
py
|
Python
|
openGaussBase/testcase/MOT/Opengauss_Function_MOT_Case0020.py
|
opengauss-mirror/Yat
|
aef107a8304b94e5d99b4f1f36eb46755eb8919e
|
[
"MulanPSL-1.0"
] | null | null | null |
openGaussBase/testcase/MOT/Opengauss_Function_MOT_Case0020.py
|
opengauss-mirror/Yat
|
aef107a8304b94e5d99b4f1f36eb46755eb8919e
|
[
"MulanPSL-1.0"
] | null | null | null |
openGaussBase/testcase/MOT/Opengauss_Function_MOT_Case0020.py
|
opengauss-mirror/Yat
|
aef107a8304b94e5d99b4f1f36eb46755eb8919e
|
[
"MulanPSL-1.0"
] | null | null | null |
"""
Copyright (c) 2022 Huawei Technologies Co.,Ltd.
openGauss is licensed under Mulan PSL v2.
You can use this software according to the terms and conditions of the Mulan PSL v2.
You may obtain a copy of Mulan PSL v2 at:
http://license.coscl.org.cn/MulanPSL2
THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
See the Mulan PSL v2 for more details.
"""
'''
Case Type: Data types not supported by MOT
Case Name: MOT does not support the NVARCHAR2(n) data type
'''
import unittest
import sys
sys.path.append(sys.path[0] + "/../")
from testcase.utils.Constant import Constant
from testcase.utils.CommonSH import CommonSH
from testcase.utils.Logger import Logger
logger = Logger()
class Mot_datatype_test(unittest.TestCase):
def setUp(self):
self.sh_primysh = CommonSH('PrimaryDbUser')
self.constant = Constant()
        # logger.info('------------Modify configuration and restart the database------------')
# self.configitem = "enable_incremental_checkpoint=off"
# mod_msg = self.sh_primysh.execute_gsguc('set', self.constant.GSGUC_SUCCESS_MSG, self.configitem)
# stopmsg = str(self.sh_primysh.stop_db_cluster())
# startmsg = str(self.sh_primysh.start_db_cluster())
# self.assertTrue(stopmsg)
# self.assertTrue(startmsg)
def test_mot_none_datatype_array(self):
        logger.info("------------------------Opengauss_Function_MOT_Case0020 execution started----------------------")
self.schema = 'schema_mot_test'
self.tablename = 'MOTTable'
self.datatype = 'NVARCHAR2(2)'
self.sql_cmd = f'''CREATE SCHEMA {self.schema};
CREATE FOREIGN TABLE {self.schema}.{self.tablename}(t1 {self.datatype});
DROP SCHEMA {self.schema} CASCADE;
'''
        logger.info("-------------------------Starting test case: MOT does not support data type NVARCHAR2(n)--------------------------")
msg = self.sh_primysh.execut_db_sql(self.sql_cmd)
logger.info(msg)
self.assertIn(self.constant.NOT_SUPPORTED_TYPE, msg)
def tearDown(self):
        # logger.info('-----------Restore configuration and restart the database-----------')
# self.configitem = "enable_incremental_checkpoint=on"
# mod_msg = self.sh_primysh.execute_gsguc('set', self.constant.GSGUC_SUCCESS_MSG, self.configitem)
# stopmsg = str(self.sh_primysh.stop_db_cluster())
# startmsg = str(self.sh_primysh.start_db_cluster())
# self.assertTrue(stopmsg)
# self.assertTrue(startmsg)
        logger.info('---------------Opengauss_Function_MOT_Case0020 execution finished---------------')
| 40.272727
| 106
| 0.643717
|
a05f373c908a9e3713883f4d200bcf34725b62fa
| 45,819
|
py
|
Python
|
www_local/controller/findmaster.py
|
2218084076/hotpoor_autoclick_xhs
|
a52446ba691ac19e43410a465dc63f940c0e444d
|
[
"Apache-2.0"
] | 1
|
2021-12-21T10:42:46.000Z
|
2021-12-21T10:42:46.000Z
|
www_local/controller/findmaster.py
|
2218084076/hotpoor_autoclick_xhs
|
a52446ba691ac19e43410a465dc63f940c0e444d
|
[
"Apache-2.0"
] | null | null | null |
www_local/controller/findmaster.py
|
2218084076/hotpoor_autoclick_xhs
|
a52446ba691ac19e43410a465dc63f940c0e444d
|
[
"Apache-2.0"
] | null | null | null |
#!/bin/env python
# -*- coding: utf-8 -*-
import sys
import os
import os.path
sys.path.append(os.path.dirname(os.path.abspath(__file__)))
sys.path.append(os.path.dirname(os.path.abspath(__file__)) + '/vendor/')
import re
import uuid
import time
import random
import string
import hashlib
import urllib
import copy
from functools import partial
import logging
import datetime
import markdown
import tornado
import tornado.web
import tornado.escape
import tornado.websocket
import tornado.httpclient
import tornado.gen
from tornado.escape import json_encode, json_decode
import nomagic
import nomagic.auth
import nomagic.block
from nomagic.cache import get_user, get_users, update_user, get_doc, get_docs, update_doc, get_aim, get_aims, update_aim, get_entity, get_entities, update_entity
from nomagic.cache import BIG_CACHE
from setting import settings
from setting import conn
# from user_agents import parse as uaparse  # formerly used by KJ to detect the device type
from .base import WebRequest
from .base import WebSocket
import pymail
from .data import DataWebSocket
class UriMappingHandler(WebRequest):
@tornado.gen.coroutine
def get(self, app):
if app in settings["JDROOTFILES"]:
self.render("../static/jd_root_files/%s"%app)
return
link_list = self.request.uri.split("?")
uri_mapping = settings["uri_mapping"]
block_id = uri_mapping.get(link_list[0],None)
if not self.current_user:
self.user_id = "no_login:%s"%str(time.time())
else:
self.user_id = self.current_user["id"]
self.time_now = int(time.time())
self.block_id = block_id
if not block_id:
self.render("../template/404.html")
return
block = get_aim(block_id)
if not block:
self.render("../template/404.html")
return
self.permission_btns = {
"private":"Private·私有",
"publish":"Publish·公开发表",
"public":"Public·自由编辑"
}
self.permission = block.get("permission","private")
self.editors = block.get("editors",[block.get("owner",None)])
self.readers = block.get("readers",[])
self.blackers = block.get("blackers",[])
if self.user_id in self.blackers:
self.render("../template/404.html")
return
if self.permission in ["private"]:
if self.user_id not in self.editors and self.user_id not in self.readers:
self.render("../template/404.html")
return
self.doms = block.get("doms",[])
websocket_protocol = "ws" if self.request.protocol == "http" else "wss"
aim_host = self.request.host
self.websocket_url = "%s://%s/api/data/ws?aim_id=%s" % (websocket_protocol, aim_host, block_id)
self.title = block.get("title","")
self.desc = block.get("desc","")
self.fork_allow = block.get("fork_allow",True)
self.fork_from = block.get("fork_from","")
self.fork_entity = block.get("fork_entity",None)
self.grid_graph = block.get("grid_graph",{})
self.main_area = block.get("main_area",{})
self.render("../template/page.html")
class MainHandler(WebRequest):
@tornado.gen.coroutine
def get(self, app):
self.time_now = int(time.time())
self._ = self.locale.translate
        self.render("../template/index.html")
        return  # return here so render() is not called a second time by the legacy page-rendering code below
block_id = "0b5ee08ed0ed498593306193601680e7"
if not self.current_user:
self.user_id = "no_login:%s"%str(time.time())
else:
self.user_id = self.current_user["id"]
self.time_now = int(time.time())
self.block_id = block_id
block = get_aim(block_id)
if not block:
self.render("../template/404.html")
return
self.permission_btns = {
"private":"Private·私有",
"publish":"Publish·公开发表",
"public":"Public·自由编辑"
}
self.permission = block.get("permission","private")
self.editors = block.get("editors",[block.get("owner",None)])
self.readers = block.get("readers",[])
self.blackers = block.get("blackers",[])
if self.user_id in self.blackers:
self.render("../template/404.html")
return
if self.permission in ["private"]:
if self.user_id not in self.editors and self.user_id not in self.readers:
self.render("../template/404.html")
return
self.doms = block.get("doms",[])
websocket_protocol = "ws" if self.request.protocol == "http" else "wss"
aim_host = self.request.host
self.websocket_url = "%s://%s/api/data/ws?aim_id=%s" % (websocket_protocol, aim_host, block_id)
self.title = block.get("title","")
self.desc = block.get("desc","")
self.fork_allow = block.get("fork_allow",True)
self.fork_from = block.get("fork_from","")
self.fork_entity = block.get("fork_entity",None)
self.grid_graph = block.get("grid_graph",{})
self.main_area = block.get("main_area",{})
self.render("../template/page.html")
class LoginHandler(WebRequest):
@tornado.gen.coroutine
def get(self):
self.time_now = int(time.time())
self._ = self.locale.translate
self.render("../template/login.html")
class RegisterHandler(WebRequest):
@tornado.gen.coroutine
def get(self):
block_id = "9b30f584a1cf4005996ec4d5e4170cbc"
if not self.current_user:
self.user_id = "no_login:%s"%str(time.time())
else:
self.user_id = self.current_user["id"]
self.time_now = int(time.time())
self.block_id = block_id
block = get_aim(block_id)
if not block:
self.render("../template/404.html")
return
self.permission_btns = {
"private":"Private·私有",
"publish":"Publish·公开发表",
"public":"Public·自由编辑"
}
self.permission = block.get("permission","private")
self.editors = block.get("editors",[block.get("owner",None)])
self.readers = block.get("readers",[])
self.blackers = block.get("blackers",[])
if self.user_id in self.blackers:
self.render("../template/404.html")
return
if self.permission in ["private"]:
if self.user_id not in self.editors and self.user_id not in self.readers:
self.render("../template/404.html")
return
self.doms = block.get("doms",[])
websocket_protocol = "ws" if self.request.protocol == "http" else "wss"
aim_host = self.request.host
self.websocket_url = "%s://%s/api/data/ws?aim_id=%s" % (websocket_protocol, aim_host, block_id)
self.title = block.get("title","")
self.desc = block.get("desc","")
self.fork_allow = block.get("fork_allow",True)
self.fork_from = block.get("fork_from","")
self.fork_entity = block.get("fork_entity",None)
self.grid_graph = block.get("grid_graph",{})
self.main_area = block.get("main_area",{})
self.render("../template/page.html")
class MainHomeHandler(WebRequest):
def get(self):
self.timer = int(time.time())
print(self.request)
self.render("../template/main.html")
class WelcomeHomeHandler(WebRequest):
def get(self):
self.timer = int(time.time())
self.render("../template/welcome.html")
class PagesHomeHandler(WebRequest):
def get(self):
self.time_now = int(time.time())
if not self.current_user:
self.finish({"info":"error","about":"no login"})
return
user_id = self.current_user["id"]
user = get_aim(user_id)
self.pages = user.get("pages",[])
self.render("../template/pages.html")
class PagesListAPIHandler(WebRequest):
def get(self):
if not self.current_user:
self.finish({"info":"error","about":"no login"})
return
user_id = self.current_user["id"]
user = get_aim(user_id)
pages_ids = user.get("pages",[])
pages = get_aims(pages_ids)
result = []
for page in pages:
result_item = {
"block_id":page[0],
"title":page[1].get("title","new page"),
"desc":page[1].get("desc","this is a new page"),
"fork_allow":page[1].get("fork_allow",True),
"fork_from":page[1].get("fork_from",None),
}
result.append(result_item)
pages_top_ids = user.get("pages_top",[])
self.finish({"info":"ok","result":result,"pages_top_ids":pages_top_ids})
class PageAddAPIHandler(WebRequest):
def post(self):
if not self.current_user:
self.finish({"info":"error","about":"no login"})
return
user_id = self.current_user["id"]
title = self.get_argument("title","new page")
desc = self.get_argument("desc","this is a new page")
user = get_aim(user_id)
pages = user.get("pages",[])
block = {
"owner":user_id,
"subtype":"page",
"title":title,
"desc":desc,
"doms":[],
"history":[],
"updatetime":int(time.time())
}
[block_id,block]=nomagic.block.create_block(block)
pages.insert(0,block_id)
user["pages"]=pages
update_aim(user_id,user)
self.finish({"info":"ok","about":"create new page success","block_id":block_id})
class PageHomeHandler(WebRequest):
def get(self,block_id):
# self.set_header("Access-Control-Allow-Origin", self.request.headers.get("Origin","*"))
# self.set_header("Access-Control-Allow-Credentials", "true")
if not self.current_user:
self.user_id = "no_login:%s"%str(time.time())
else:
self.user_id = self.current_user["id"]
self.time_now = int(time.time())
self.block_id = block_id
block = get_aim(block_id)
if not block:
self.render("../template/404.html")
return
self.permission_btns = {
"private":"Private·私有",
"publish":"Publish·公开发表",
"public":"Public·自由编辑"
}
self.permission = block.get("permission","private")
self.editors = block.get("editors",[block.get("owner",None)])
self.readers = block.get("readers",[])
self.blackers = block.get("blackers",[])
if self.user_id in self.blackers:
self.render("../template/404.html")
return
if self.permission in ["private"]:
if self.user_id not in self.editors and self.user_id not in self.readers:
self.render("../template/404.html")
return
self.doms = block.get("doms",[])
websocket_protocol = "ws" if self.request.protocol == "http" else "wss"
aim_host = self.request.host
self.websocket_url = "%s://%s/api/data/ws?aim_id=%s" % (websocket_protocol, aim_host, block_id)
self.title = block.get("title","")
self.desc = block.get("desc","")
self.fork_allow = block.get("fork_allow",True)
self.fork_from = block.get("fork_from","")
self.fork_entity = block.get("fork_entity",None)
self.grid_graph = block.get("grid_graph",{})
self.main_area = block.get("main_area",{})
self.render("../template/page.html")
class PageEditHomeHandler(WebRequest):
def get(self,block_id):
# self.set_header("Access-Control-Allow-Origin", self.request.headers.get("Origin","*"))
# self.set_header("Access-Control-Allow-Credentials", "true")
if not self.current_user:
self.user_id = "no_login:%s"%str(time.time())
else:
self.user_id = self.current_user["id"]
self.time_now = int(time.time())
self.block_id = block_id
block = get_aim(block_id)
if not block:
self.render("../template/404.html")
return
self.doms = block.get("doms",[])
websocket_protocol = "ws" if self.request.protocol == "http" else "wss"
aim_host = self.request.host
self.websocket_url = "%s://%s/api/data/ws?aim_id=%s" % (websocket_protocol, aim_host, block_id)
self.permission_btns = {
"private":"Private·私有",
"publish":"Publish·公开发表",
"public":"Public·自由编辑"
}
self.permission = block.get("permission","private")
self.editors = block.get("editors",[block.get("owner",None)])
self.readers = block.get("readers",[])
self.blackers = block.get("blackers",[])
self.members = block.get("members",[])
self.stars = block.get("stars",[])
if self.user_id in self.blackers:
self.render("../template/404.html")
return
if self.permission not in ["public"]:
if self.user_id not in self.editors:
self.redirect("/home/page/%s"%block_id)
return
self.title = block.get("title","")
self.desc = block.get("desc","")
self.fork_allow = block.get("fork_allow",True)
self.fork_from = block.get("fork_from","")
self.fork_entity = block.get("fork_entity",None)
self.grid_graph = block.get("grid_graph",{})
self.main_area = block.get("main_area",{})
self.comment_entities = block.get("comment_entities",[])
self.render("../template/page_edit.html")
class PageFixHomeHandler(WebRequest):
def get(self,block_id):
# self.set_header("Access-Control-Allow-Origin", self.request.headers.get("Origin","*"))
# self.set_header("Access-Control-Allow-Credentials", "true")
if not self.current_user:
self.user_id = "no_login:%s"%str(time.time())
else:
self.user_id = self.current_user["id"]
self.time_now = int(time.time())
self.block_id = block_id
block = get_aim(block_id)
if not block:
self.render("../template/404.html")
return
self.doms = block.get("doms",[])
websocket_protocol = "ws" if self.request.protocol == "http" else "wss"
aim_host = self.request.host
self.websocket_url = "%s://%s/api/data/ws?aim_id=%s" % (websocket_protocol, aim_host, block_id)
self.permission = block.get("permission","private")
self.editors = block.get("editors",[block.get("owner",None)])
self.readers = block.get("readers",[])
self.blackers = block.get("blackers",[])
self.members = block.get("members",[])
if self.user_id in self.blackers:
self.render("../template/404.html")
return
if self.permission not in ["public"]:
if self.user_id not in self.editors:
self.redirect("/home/page/%s"%block_id)
return
self.render("../template/page_fix.html")
class PageCopyDomAPIHandler(WebRequest):
def post(self):
if not self.current_user:
self.finish({"info":"error","about":"no login"})
return
user_id = self.current_user["id"]
block_id = self.get_argument("block_id",None)
aim_id = self.get_argument("aim_id",None)
aim_dom_id = self.get_argument("aim_dom_id",None)
dom_owner = self.get_argument("dom_owner",None)
dom_type = self.get_argument("dom_type","text")
dom_position_x = int(float(self.get_argument("dom_position_x","0")))
dom_position_y = int(float(self.get_argument("dom_position_y","0")))
# if dom_type not in ["text","img","video","canvas","input","button","textarea"]:
if dom_type not in ["domcopy"]:
self.finish({"info":"error","about":"not allow dom type"})
return
block = get_aim(block_id)
aim = get_aim(aim_id)
        # Check whether the current user is an editor
if user_id not in block.get("editors",[block.get("owner",None)]):
# if block.get("owner",None) != user_id:
self.finish({"info":"error","about":"no in editors"})
return
doms = block.get("doms",[])
dom_sequence = "".join(random.choice(string.ascii_lowercase+string.digits) for _ in range(6))
dom_sequence_same = False
while True:
for dom_item in doms:
if dom_item[0] == dom_sequence:
dom_sequence_same = True
break
if dom_sequence_same:
dom_sequence = "".join(random.choice(string.ascii_lowercase+string.digits) for _ in range(6))
dom_sequence_same = False
else:
break
copy_dom = None
for aim_dom in aim.get("doms",[]):
if aim_dom[0]==aim_dom_id:
copy_dom = copy.deepcopy(aim_dom)
if not copy_dom:
self.finish({"info":"error","about":"copy dom is removed or none"})
return
updatetime = int(time.time())
copy_dom[0]=dom_sequence
# copy_dom[2]["x"]=dom_position_x
# copy_dom[2]["y"]=dom_position_y
copy_dom[6]=updatetime
# dom = [dom_sequence,dom_type,dom_position,dom_content,dom_css,dom_children,updatetime]
dom = copy_dom
doms.append(dom)
block["doms"] = doms
block["updatetime"] = updatetime
update_aim(block_id,block)
self.finish({"info":"ok","dom_sequence":dom_sequence})
[dom_sequence,dom_type,dom_position,dom_content,dom_css,dom_children,updatetime] = dom
content_data = {
"dom_current":dom_sequence,
"dom_content":dom_content,
"dom_position_x":dom_position["x"],
"dom_position_y":dom_position["y"],
"dom_position_w":dom_position["w"],
"dom_position_h":dom_position["h"],
"dom_position_z":dom_position["z"],
"dom_scroll":dom_position.get("s",""),
"dom_type":dom_type,
}
msgtype = "COMMENTPAGECOPYDOM"
msg = [msgtype, {
"content": content_data,
"nickname": "",
"headimgurl": "/static/img/oflogo.png",
"tel": "",
"time": updatetime,
"user_id": user_id,
"sequence": "",
"comment_id": ""
}, block_id]
DataWebSocket.send_to_all(json_encode(msg))
class PageCopyDomsAPIHandler(WebRequest):
def post(self):
if not self.current_user:
self.finish({"info":"error","about":"no login"})
return
user_id = self.current_user["id"]
block_id = self.get_argument("block_id",None)
# aim_id = self.get_argument("aim_id",None)
# aim_dom_id = self.get_argument("aim_dom_id",None)
current_copy = self.get_argument("current_copy","[]")
dom_owner = self.get_argument("dom_owner",None)
dom_type = self.get_argument("dom_type","text")
dom_position_x = int(float(self.get_argument("dom_position_x","0")))
dom_position_y = int(float(self.get_argument("dom_position_y","0")))
# if dom_type not in ["text","img","video","canvas","input","button","textarea"]:
if dom_type not in ["domcopy"]:
self.finish({"info":"error","about":"not allow dom type"})
return
block = get_aim(block_id)
#判断是否为编辑者
if user_id not in block.get("editors",[block.get("owner",None)]):
# if block.get("owner",None) != user_id:
self.finish({"info":"error","about":"no in editors"})
return
aim_ids = []
aim_dom_ids = []
current_copy_items = json_decode(current_copy)
for current_copy_item in current_copy_items:
current_copy_item_list = current_copy_item.split("COPYDOM//")
[a_aim_id,a_aim_dom_id]=current_copy_item_list[1].split(",")
aim_ids.append(a_aim_id)
aim_dom_ids.append([a_aim_id,a_aim_dom_id])
aim_ids = list(set(aim_ids))
aims = get_aims(aim_ids)
aims_json = {}
for aim in aims:
aims_json[aim[0]]=aim[1]
dom_sequences=[]
doms = block.get("doms",[])
content_datas = []
for [aim_id,aim_dom_id] in aim_dom_ids:
aim = aims_json[aim_id]
dom_sequence = "".join(random.choice(string.ascii_lowercase+string.digits) for _ in range(6))
dom_sequence_same = False
while True:
for dom_item in doms:
if dom_item[0] == dom_sequence:
dom_sequence_same = True
break
if dom_sequence_same:
dom_sequence = "".join(random.choice(string.ascii_lowercase+string.digits) for _ in range(6))
dom_sequence_same = False
else:
break
copy_dom = None
for aim_dom in aim.get("doms",[]):
if aim_dom[0]==aim_dom_id:
copy_dom = copy.deepcopy(aim_dom)
if not copy_dom:
self.finish({"info":"error","about":"copy dom is removed or none"})
return
updatetime = int(time.time())
copy_dom[0]=dom_sequence
# copy_dom[2]["x"]=dom_position_x
# copy_dom[2]["y"]=dom_position_y
copy_dom[6]=updatetime
# dom = [dom_sequence,dom_type,dom_position,dom_content,dom_css,dom_children,updatetime]
dom = copy_dom
doms.append(dom)
dom_sequences.append(dom_sequence)
[dom_sequence,dom_type,dom_position,dom_content,dom_css,dom_children,updatetime] = dom
content_data = {
"dom_current":dom_sequence,
"dom_content":dom_content,
"dom_position_x":dom_position["x"],
"dom_position_y":dom_position["y"],
"dom_position_w":dom_position["w"],
"dom_position_h":dom_position["h"],
"dom_position_z":dom_position["z"],
"dom_scroll":dom_position.get("s",""),
"dom_type":dom_type,
}
content_datas.append(content_data)
block["doms"] = doms
block["updatetime"] = updatetime
update_aim(block_id,block)
self.finish({"info":"ok","dom_sequences":dom_sequences})
msgtype = "COMMENTPAGECOPYDOMS"
msg = [msgtype, {
"content": content_datas,
"nickname": "",
"headimgurl": "/static/img/oflogo.png",
"tel": "",
"time": updatetime,
"user_id": user_id,
"sequence": "",
"comment_id": ""
}, block_id]
DataWebSocket.send_to_all(json_encode(msg))
class PageAddDomAPIHandler(WebRequest):
def post(self):
if not self.current_user:
self.finish({"info":"error","about":"no login"})
return
user_id = self.current_user["id"]
block_id = self.get_argument("block_id",None)
dom_owner = self.get_argument("dom_owner",None)
dom_type = self.get_argument("dom_type","text")
dom_position_x = int(float(self.get_argument("dom_position_x","0")))
dom_position_y = int(float(self.get_argument("dom_position_y","0")))
dom_scroll = self.get_argument("dom_scroll","")
# if dom_type not in ["text","img","video","canvas","input","button","textarea"]:
if dom_type not in ["text","img","video","iframe"]:
self.finish({"info":"error","about":"not allow dom type"})
return
block = get_aim(block_id)
        # check whether the user is an editor
if user_id not in block.get("editors",[block.get("owner",None)]):
# if block.get("owner",None) != user_id:
self.finish({"info":"error","about":"no in editors"})
return
doms = block.get("doms",[])
dom_sequence = "".join(random.choice(string.ascii_lowercase+string.digits) for _ in range(6))
dom_sequence_same = False
while True:
for dom_item in doms:
if dom_item[0] == dom_sequence:
dom_sequence_same = True
break
if dom_sequence_same:
dom_sequence = "".join(random.choice(string.ascii_lowercase+string.digits) for _ in range(6))
dom_sequence_same = False
else:
break
dom_position = {
"x":dom_position_x,
"y":dom_position_y,
"w":100,
"h":40,
"z":0,
"s":dom_scroll,
}
dom_css = ""
dom_content = ""
dom_children = []
updatetime = int(time.time())
dom = [dom_sequence,dom_type,dom_position,dom_content,dom_css,dom_children,updatetime]
doms.append(dom)
block["doms"] = doms
block["updatetime"] = updatetime
update_aim(block_id,block)
self.finish({"info":"ok","dom_sequence":dom_sequence})
if dom_type in ["text"]:
dom_content = """
<div class="section">text</div>
"""
elif dom_type in ["img"]:
dom_content = """
<div class="section"><img src="/static/img/need_add_img.png"></div>
"""
elif dom_type in ["video"]:
dom_content = """
<div class="section" contenteditable="false"><span class="novideospan">视频未设置</span></div>
"""
elif dom_type in ["iframe"]:
dom_content = """
<div class="section" contenteditable="false">iframe暂未设置</div>
"""
content_data = {
"dom_current":dom_sequence,
"dom_content":dom_content,
"dom_position_x":dom_position["x"],
"dom_position_y":dom_position["y"],
"dom_position_w":dom_position["w"],
"dom_position_h":dom_position["h"],
"dom_position_z":dom_position["z"],
"dom_scroll":dom_position["s"],
"dom_type":dom_type,
}
msgtype = "COMMENTPAGEADDDOM"
msg = [msgtype, {
"content": content_data,
"nickname": "",
"headimgurl": "/static/img/oflogo.png",
"tel": "",
"time": updatetime,
"user_id": user_id,
"sequence": "",
"comment_id": ""
}, block_id]
DataWebSocket.send_to_all(json_encode(msg))
class PageDelDomAPIHandler(WebRequest):
def post(self):
if not self.current_user:
self.finish({"info":"error","about":"no login"})
return
user_id = self.current_user["id"]
block_id = self.get_argument("block_id",None)
dom_owner = self.get_argument("dom_owner",None)
dom_current = self.get_argument("dom_current",None)
block = get_aim(block_id)
        # check whether the user is an editor
if user_id not in block.get("editors",[block.get("owner",None)]):
# if block.get("owner",None) != user_id:
self.finish({"info":"error","about":"no in editors"})
return
doms = block.get("doms",[])
dom_tree = dom_current.split("_")
dom_aim = None
doms = block.get("doms",[])
_doms = doms
for dom in dom_tree:
_doms = _doms
for _dom in _doms:
if dom == _dom[0]:
if dom_owner == _dom[0]:
dom_aim = _dom
_doms.remove(_dom)
break
_doms = _dom[5]
break
if not dom_aim:
self.finish({"info":"error","about":"no dom"})
return
dom_content = ""
updatetime = int(time.time())
block["doms"] = doms
block["updatetime"] = updatetime
update_aim(block_id,block)
self.finish({"info":"ok",})
content_data = {
"dom_current":dom_current,
"dom_content":dom_content,
}
msgtype = "COMMENTPAGEDELDOM"
msg = [msgtype, {
"content": content_data,
"nickname": "",
"headimgurl": "/static/img/oflogo.png",
"tel": "",
"time": updatetime,
"user_id": user_id,
"sequence": "",
"comment_id": ""
}, block_id]
DataWebSocket.send_to_all(json_encode(msg))
class PageUpdateTitleAPIHandler(WebRequest):
# @tornado.gen.coroutine
def post(self):
if not self.current_user:
self.finish({"info":"error","about":"no login"})
return
user_id = self.current_user["id"]
block_id = self.get_argument("block_id",None)
dom_content = self.get_argument("dom_content",None)
title = json_decode(dom_content).get("title","")
updatetime = int(time.time())
block = get_aim(block_id)
        # check whether the user is an editor
if user_id not in block.get("editors",[block.get("owner",None)]):
# if block.get("owner",None) != user_id:
self.finish({"info":"error","about":"no in editors"})
return
old_title = block.get("title","")
if title == old_title:
self.finish({"info":"ok","about":"same title"})
return
block["title"]=title
block["updatetime"]=updatetime
update_aim(block_id,block)
self.finish({"info":"ok"})
content_data = {
"dom_content":dom_content,
}
msgtype = "COMMENTPAGEUPDATETITLE"
msg = [msgtype, {
"content": content_data,
"nickname": "",
"headimgurl": "/static/img/oflogo.png",
"tel": "",
"time": updatetime,
"user_id": user_id,
"sequence": "",
"comment_id": ""
}, block_id]
DataWebSocket.send_to_all(json_encode(msg))
class PageUpdateDescAPIHandler(WebRequest):
# @tornado.gen.coroutine
def post(self):
if not self.current_user:
self.finish({"info":"error","about":"no login"})
return
user_id = self.current_user["id"]
block_id = self.get_argument("block_id",None)
dom_content = self.get_argument("dom_content",None)
desc = json_decode(dom_content).get("desc","")
updatetime = int(time.time())
block = get_aim(block_id)
        # check whether the user is an editor
if user_id not in block.get("editors",[block.get("owner",None)]):
# if block.get("owner",None) != user_id:
self.finish({"info":"error","about":"no in editors"})
return
old_desc = block.get("desc","")
if desc == old_desc:
self.finish({"info":"ok","about":"same desc"})
return
block["desc"]=desc
block["updatetime"]=updatetime
update_aim(block_id,block)
self.finish({"info":"ok"})
content_data = {
"dom_content":dom_content,
}
msgtype = "COMMENTPAGEUPDATEDESC"
msg = [msgtype, {
"content": content_data,
"nickname": "",
"headimgurl": "/static/img/oflogo.png",
"tel": "",
"time": updatetime,
"user_id": user_id,
"sequence": "",
"comment_id": ""
}, block_id]
DataWebSocket.send_to_all(json_encode(msg))
class PageUpdateMainAreaAPIHandler(WebRequest):
# @tornado.gen.coroutine
def post(self):
if not self.current_user:
self.finish({"info":"error","about":"no login"})
return
user_id = self.current_user["id"]
block_id = self.get_argument("block_id",None)
dom_content = self.get_argument("dom_content",None)
w = int(json_decode(dom_content).get("text").get("w","1024"))
h = int(json_decode(dom_content).get("text").get("h","0"))
updatetime = int(time.time())
block = get_aim(block_id)
        # check whether the user is an editor
if user_id not in block.get("editors",[block.get("owner",None)]):
# if block.get("owner",None) != user_id:
self.finish({"info":"error","about":"no in editors"})
return
old_main_area = block.get("main_area",{})
if w == int(old_main_area.get("w","1024")) and h == int(old_main_area.get("h","0")):
self.finish({"info":"ok","about":"same main_area"})
return
block["updatetime"]=updatetime
block["main_area"]={
"w":w,
"h":h,
}
update_aim(block_id,block)
self.finish({"info":"ok"})
content_data = {
"dom_content":dom_content,
}
msgtype = "COMMENTPAGEMAINAREA"
msg = [msgtype, {
"content": content_data,
"nickname": "",
"headimgurl": "/static/img/oflogo.png",
"tel": "",
"time": updatetime,
"user_id": user_id,
"sequence": "",
"comment_id": ""
}, block_id]
DataWebSocket.send_to_all(json_encode(msg))
class PageUpdateGridGraphAPIHandler(WebRequest):
# @tornado.gen.coroutine
def post(self):
if not self.current_user:
self.finish({"info":"error","about":"no login"})
return
user_id = self.current_user["id"]
block_id = self.get_argument("block_id",None)
dom_content = self.get_argument("dom_content",None)
w = int(json_decode(dom_content).get("text").get("w","30"))
h = int(json_decode(dom_content).get("text").get("h","30"))
updatetime = int(time.time())
block = get_aim(block_id)
        # check whether the user is an editor
if user_id not in block.get("editors",[block.get("owner",None)]):
# if block.get("owner",None) != user_id:
self.finish({"info":"error","about":"no in editors"})
return
old_grid_graph = block.get("grid_graph",{})
if w == int(old_grid_graph.get("w","30")) and h == int(old_grid_graph.get("h","30")):
self.finish({"info":"ok","about":"same grid graph"})
return
block["updatetime"]=updatetime
block["grid_graph"]={
"w":w,
"h":h,
}
update_aim(block_id,block)
self.finish({"info":"ok"})
content_data = {
"dom_content":dom_content,
}
msgtype = "COMMENTPAGEGRIDGRAPH"
msg = [msgtype, {
"content": content_data,
"nickname": "",
"headimgurl": "/static/img/oflogo.png",
"tel": "",
"time": updatetime,
"user_id": user_id,
"sequence": "",
"comment_id": ""
}, block_id]
DataWebSocket.send_to_all(json_encode(msg))
class PageUpdateDomsAPIHandler(WebRequest):
@tornado.gen.coroutine
def post(self):
if not self.current_user:
self.finish({"info":"error","about":"no login"})
return
user_id = self.current_user["id"]
block_id = self.get_argument("block_id",None)
updates = self.get_argument("updates","[]")
if updates == "[]":
self.finish({"info":"error","about":"no update"})
return
dom_content = self.get_argument("dom_content",None)
updates = json_decode(updates)
updatetime = int(time.time())
block = get_aim(block_id)
        # check whether the user is an editor
if user_id not in block.get("editors",[block.get("owner",None)]):
# if block.get("owner",None) != user_id:
self.finish({"info":"error","about":"no in editors"})
return
doms = block.get("doms",[])
for dom in doms:
for update in updates:
if dom[0] == update["dom_id"]:
dom[2]["x"]=update["x"]
dom[2]["y"]=update["y"]
block["doms"]=doms
block["updatetime"]=updatetime
update_aim(block_id,block)
self.finish({"info":"ok"})
content_data = {
"updates":updates,
"dom_content":{
"uuid":dom_content
},
}
msgtype = "COMMENTPAGEUPDATEDOMS"
msg = [msgtype, {
"content": content_data,
"nickname": "",
"headimgurl": "/static/img/oflogo.png",
"tel": "",
"time": updatetime,
"user_id": user_id,
"sequence": "",
"comment_id": ""
}, block_id]
DataWebSocket.send_to_all(json_encode(msg))
class PageUpdateDomAPIHandler(WebRequest):
@tornado.gen.coroutine
def post(self):
if not self.current_user:
self.finish({"info":"error","about":"no login"})
return
user_id = self.current_user["id"]
block_id = self.get_argument("block_id",None)
dom_owner = self.get_argument("dom_owner",None)
dom_current = self.get_argument("dom_current",None)
dom_position_x = int(float(self.get_argument("dom_position_x","0")))
dom_position_y = int(float(self.get_argument("dom_position_y","0")))
dom_position_w = int(float(self.get_argument("dom_position_w","0")))
dom_position_h = int(float(self.get_argument("dom_position_h","0")))
dom_position_z = int(float(self.get_argument("dom_position_z","0")))
dom_scroll = self.get_argument("dom_scroll","")
updatetime = int(time.time())
block = get_aim(block_id)
        # check whether the user is an editor
if user_id not in block.get("editors",[block.get("owner",None)]):
# if block.get("owner",None) != user_id:
self.finish({"info":"error","about":"no in editors"})
return
dom_tree = dom_current.split("_")
dom_aim = None
doms = block.get("doms",[])
_doms = doms
for dom in dom_tree:
_doms = _doms
for _dom in _doms:
if dom == _dom[0]:
if dom_owner == _dom[0]:
dom_aim = _dom
dom_aim[2]={
"x":dom_position_x,
"y":dom_position_y,
"w":dom_position_w,
"h":dom_position_h,
"z":dom_position_z,
"s":dom_scroll,
}
dom_aim[6]=updatetime
break
_doms = _dom[5]
break
block["doms"]=doms
block["updatetime"]=updatetime
update_aim(block_id,block)
self.finish({"info":"ok"})
content_data = {
"dom_current":dom_current,
"dom_position_x":dom_position_x,
"dom_position_y":dom_position_y,
"dom_position_w":dom_position_w,
"dom_position_h":dom_position_h,
"dom_position_z":dom_position_z,
"dom_scroll":dom_scroll,
}
msgtype = "COMMENTPAGEUPDATEDOM"
msg = [msgtype, {
"content": content_data,
"nickname": "",
"headimgurl": "/static/img/oflogo.png",
"tel": "",
"time": updatetime,
"user_id": user_id,
"sequence": "",
"comment_id": ""
}, block_id]
DataWebSocket.send_to_all(json_encode(msg))
class PageUpdateDomVideoAPIHandler(WebRequest):
def post(self):
if not self.current_user:
self.finish({"info":"error","about":"no login"})
return
user_id = self.current_user["id"]
block_id = self.get_argument("block_id",None)
dom_owner = self.get_argument("dom_owner",None)
dom_current = self.get_argument("dom_current",None)
dom_content = self.get_argument("dom_content",None)
updatetime = int(time.time())
block = get_aim(block_id)
        # check whether the user is an editor
if user_id not in block.get("editors",[block.get("owner",None)]):
self.finish({"info":"error","about":"no in editors"})
return
dom_tree = dom_current.split("_")
dom_aim = None
doms = block.get("doms",[])
_doms = doms
for dom in dom_tree:
_doms = _doms
for _dom in _doms:
if dom == _dom[0]:
if dom_owner == _dom[0]:
dom_aim = _dom
dom_aim[3] = json_decode(dom_content).get("text",{})
dom_aim[6] =updatetime
break
_doms = _dom[5]
break
block["doms"]=doms
block["updatetime"]=updatetime
update_aim(block_id,block)
self.finish({"info":"ok"})
content_data = {
"dom_current":dom_current,
"dom_content":dom_content,
}
msgtype = "COMMENTPAGEUPDATEDOMVIDEO"
msg = [msgtype, {
"content": content_data,
"nickname": "",
"headimgurl": "/static/img/oflogo.png",
"tel": "",
"time": updatetime,
"user_id": user_id,
"sequence": "",
"comment_id": ""
}, block_id]
DataWebSocket.send_to_all(json_encode(msg))
class PageUpdateDomContentAPIHandler(WebRequest):
# @tornado.gen.coroutine
def post(self):
if not self.current_user:
self.finish({"info":"error","about":"no login"})
return
user_id = self.current_user["id"]
block_id = self.get_argument("block_id",None)
dom_owner = self.get_argument("dom_owner",None)
dom_current = self.get_argument("dom_current",None)
dom_content = self.get_argument("dom_content",None)
updatetime = int(time.time())
block = get_aim(block_id)
        # check whether the user is an editor
if user_id not in block.get("editors",[block.get("owner",None)]):
self.finish({"info":"error","about":"no in editors"})
return
dom_tree = dom_current.split("_")
dom_aim = None
doms = block.get("doms",[])
update_aim_flag = False
_doms = doms
for dom in dom_tree:
_doms = _doms
for _dom in _doms:
if dom == _dom[0]:
if dom_owner == _dom[0]:
dom_aim = _dom
dom_aim_3_text = json_decode(dom_content).get("text","")
if dom_aim[3] != dom_aim_3_text:
dom_aim[3] = dom_aim_3_text
update_aim_flag = True
dom_aim[6] =updatetime
break
_doms = _dom[5]
break
if not update_aim_flag:
self.finish({"info":"error","about":"same content"})
return
block["doms"]=doms
block["updatetime"]=updatetime
update_aim(block_id,block)
self.finish({"info":"ok"})
content_data = {
"dom_current":dom_current,
"dom_content":dom_content,
}
msgtype = "COMMENTPAGEUPDATEDOMCONTENT"
msg = [msgtype, {
"content": content_data,
"nickname": "",
"headimgurl": "/static/img/oflogo.png",
"tel": "",
"time": updatetime,
"user_id": user_id,
"sequence": "",
"comment_id": ""
}, block_id]
DataWebSocket.send_to_all(json_encode(msg))
class PageUpdateDomIframeAPIHandler(WebRequest):
def post(self):
if not self.current_user:
self.finish({"info":"error","about":"no login"})
return
user_id = self.current_user["id"]
block_id = self.get_argument("block_id",None)
dom_owner = self.get_argument("dom_owner",None)
dom_current = self.get_argument("dom_current",None)
dom_content = self.get_argument("dom_content",None)
updatetime = int(time.time())
block = get_aim(block_id)
        # check whether the user is an editor
if user_id not in block.get("editors",[block.get("owner",None)]):
self.finish({"info":"error","about":"no in editors"})
return
dom_tree = dom_current.split("_")
dom_aim = None
doms = block.get("doms",[])
_doms = doms
for dom in dom_tree:
_doms = _doms
for _dom in _doms:
if dom == _dom[0]:
if dom_owner == _dom[0]:
dom_aim = _dom
dom_aim[3] = json_decode(dom_content).get("text",{})
dom_aim[6] =updatetime
break
_doms = _dom[5]
break
block["doms"]=doms
block["updatetime"]=updatetime
update_aim(block_id,block)
self.finish({"info":"ok"})
content_data = {
"dom_current":dom_current,
"dom_content":dom_content,
}
msgtype = "COMMENTPAGEUPDATEDOMIFRAME"
msg = [msgtype, {
"content": content_data,
"nickname": "",
"headimgurl": "/static/img/oflogo.png",
"tel": "",
"time": updatetime,
"user_id": user_id,
"sequence": "",
"comment_id": ""
}, block_id]
DataWebSocket.send_to_all(json_encode(msg))
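# Editor's note: the page handlers above repeat the same nested-dom lookup
# (split dom_current on "_" and walk each level's children list). The helper
# below is an illustrative, unused sketch of how that traversal could be
# factored out; the dom record layout
# [sequence, type, position, content, css, children, updatetime] is taken
# from the comments in the handlers above.
def _find_dom_sketch(doms, dom_current, dom_owner):
    """Return the dom whose sequence equals dom_owner, following the path
    encoded in dom_current (e.g. "abc123_def456"), or None if not found."""
    current_level = doms
    for sequence in dom_current.split("_"):
        for dom in current_level:
            if dom[0] == sequence:
                if dom[0] == dom_owner:
                    return dom
                current_level = dom[5]  # descend into this dom's children
                break
    return None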
| 37.866942
| 161
| 0.548266
|
ea98cd08f9d16610dfb17ea20eb450103c5856a4
| 4,215
|
py
|
Python
|
gestaoemp/lib/python3.6/site-packages/pysignfe/nfe/manual_600/conssitnfe_310.py
|
jasonrosa92/gestaoempresa
|
c942c7cb5ff3bd299da94df11eab49b7dd0c7d71
|
[
"MIT"
] | null | null | null |
gestaoemp/lib/python3.6/site-packages/pysignfe/nfe/manual_600/conssitnfe_310.py
|
jasonrosa92/gestaoempresa
|
c942c7cb5ff3bd299da94df11eab49b7dd0c7d71
|
[
"MIT"
] | null | null | null |
gestaoemp/lib/python3.6/site-packages/pysignfe/nfe/manual_600/conssitnfe_310.py
|
jasonrosa92/gestaoempresa
|
c942c7cb5ff3bd299da94df11eab49b7dd0c7d71
|
[
"MIT"
] | 1
|
2020-11-04T08:45:57.000Z
|
2020-11-04T08:45:57.000Z
|
# -*- coding: utf-8 -*-
import os
from pysignfe.xml_sped.base import etree
from pysignfe.nfe.manual_500 import conssitnfe_310
from pysignfe.xml_sped import *
from pysignfe.nfe.manual_600 import ESQUEMA_ATUAL, ProtNFe_310, RetCancNFe_310
from .cancnfe_evento import ProcEventoNFeCancNFe
from .carta_correcao import ProcEventoNFeCCe
from .epec_evento import ProcEventoNFeEPEC
from pysignfe.nfe.manifestacao_destinatario import ProcEventoNFeConfRecebimento_100
DIRNAME = os.path.dirname(__file__)
class ConsSitNFe(conssitnfe_310.ConsSitNFe):
def __init__(self):
super(ConsSitNFe, self).__init__()
self.caminho_esquema = os.path.join(DIRNAME, u'schema', ESQUEMA_ATUAL + u'/')
self.arquivo_esquema = u'consSitNFe_v3.10.xsd'
class RetConsSitNFe(conssitnfe_310.RetConsSitNFe):
def __init__(self):
super(RetConsSitNFe, self).__init__()
self.versao = TagDecimal(nome=u'retConsSitNFe', codigo=u'ER01', propriedade=u'versao', namespace=NAMESPACE_NFE, valor=u'3.10', raiz=u'/')
self.dhRecbto = TagDataHoraUTC(nome=u'dhRecbto' , codigo=u'ER07a', raiz=u'//retConsSitNFe')
self.protNFe = None
self.retCancNFe = None
self.procEventoNFe = []
self.caminho_esquema = os.path.join(DIRNAME, u'schema', ESQUEMA_ATUAL + u'/')
self.arquivo_esquema = u'retConsSitNFe_v3.10.xsd'
def get_xml(self):
xml = XMLNFe.get_xml(self)
xml += ABERTURA
xml += self.versao.xml
xml += self.tpAmb.xml
xml += self.verAplic.xml
xml += self.cStat.xml
xml += self.xMotivo.xml
xml += self.cUF.xml
xml += self.dhRecbto.xml
xml += self.chNFe.xml
if self.protNFe is not None:
xml += self.protNFe.xml
if self.retCancNFe is not None:
xml += tira_abertura(self.retCancNFe.xml)
#if self.procEventoNFe is not None:
if len(self.procEventoNFe):
for ev in self.procEventoNFe:
xml += tira_abertura(ev.xml)
xml += u'</retConsSitNFe>'
return xml
def set_xml(self, arquivo):
if self._le_xml(arquivo):
self.versao.xml = arquivo
self.tpAmb.xml = arquivo
self.verAplic.xml = arquivo
self.cStat.xml = arquivo
self.xMotivo.xml = arquivo
self.cUF.xml = arquivo
self.dhRecbto.xml = arquivo
self.chNFe.xml = arquivo
if self._le_noh(u'//retConsSitNFe/protNFe') is not None:
self.protNFe = ProtNFe_310()
self.protNFe.xml = arquivo
if self._le_noh(u'//retConsSitNFe/retCancNFe') is not None:
self.retCancNFe = RetCancNFe_310()
self.retCancNFe.xml = arquivo
#if self._le_nohs('//retConsSitNFe/procEventoNFe') is not None:
# self.procEventoNFe = self.le_grupo('//retConsSitNFe/procEventoNFe', procEventoNFe)
if self._le_nohs('//retConsSitNFe/procEventoNFe') is not None:
for p in self._le_nohs('//retConsSitNFe/procEventoNFe'):
desc = p.xpath('//nfe:detEvento/nfe:descEvento/text()', namespaces={'nfe':NAMESPACE_NFE})
pev = None
if 'Cancelamento' in desc:
pev = ProcEventoNFeCancNFe()
#pev.xml = etree.tostring(p).decode('utf-8').strip('\n')
pev.xml = etree.tounicode(p)
elif 'Correcao' in desc or 'Correção' in desc:
pev = ProcEventoNFeCCe()
pev.xml = etree.tounicode(p)
elif 'Operacao' in desc:
pev = ProcEventoNFeConfRecebimento_100()
pev.xml = etree.tounicode(p)
elif 'EPEC' in desc:
pev = ProcEventoNFeEPEC()
pev.xml = etree.tounicode(p)
if pev:
self.procEventoNFe.append(pev)
xml = property(get_xml, set_xml)
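# Editor's note: minimal usage sketch, not part of the original module.
# It assumes 'retorno_consulta.xml' is a locally saved retConsSitNFe 3.10
# response; attribute names follow the class definition above.
#
#     ret = RetConsSitNFe()
#     ret.xml = open('retorno_consulta.xml', 'rb').read()
#     print(ret.cStat.valor, ret.xMotivo.valor)
#     if ret.protNFe is not None:
#         print(ret.protNFe.xml)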
| 40.528846
| 149
| 0.574614
|
f2097ef907b1b788a02dce9a375ad99f3351c779
| 4,981
|
py
|
Python
|
tensorflow_probability/python/internal/samplers_test.py
|
NeelGhoshal/probability
|
45ed841e3cff6cdc7cd1b2d96dd874d9070318f7
|
[
"Apache-2.0"
] | 2
|
2019-10-30T04:45:07.000Z
|
2019-10-30T04:45:08.000Z
|
tensorflow_probability/python/internal/samplers_test.py
|
gregorystrubel/probability
|
df96f3d56eff92c6b06fbac68dc58e095e28fed6
|
[
"Apache-2.0"
] | null | null | null |
tensorflow_probability/python/internal/samplers_test.py
|
gregorystrubel/probability
|
df96f3d56eff92c6b06fbac68dc58e095e28fed6
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2020 The TensorFlow Probability Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Tests for TFP-internal random samplers."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from absl.testing import parameterized
import tensorflow.compat.v2 as tf
from tensorflow_probability.python.internal import samplers
from tensorflow_probability.python.internal import test_util
@test_util.test_all_tf_execution_regimes
class RandomTest(test_util.TestCase):
@test_util.substrate_disable_stateful_random_test
def test_sanitize_int(self):
seed1 = samplers.sanitize_seed(seed=123)
seed2 = samplers.sanitize_seed(seed=123)
if tf.executing_eagerly():
self.assertNotAllEqual(seed1, seed2)
else:
self.assertAllEqual(seed1, seed2)
@test_util.substrate_disable_stateful_random_test
def test_sanitize_then_split_equivalent_split_int(self):
seed = test_util.test_seed()
sanitized = samplers.sanitize_seed(seed, salt='please pass the')
s1 = samplers.split_seed(sanitized, n=3)
if tf.executing_eagerly():
tf.random.set_seed(seed)
s2 = samplers.split_seed(seed, n=3, salt='please pass the')
self.assertAllAssertsNested(self.assertAllEqual, s1, s2)
@test_util.substrate_disable_stateful_random_test
def test_sanitize_none(self):
seed1 = samplers.sanitize_seed(seed=None)
seed2 = samplers.sanitize_seed(seed=None)
self.assertNotAllEqual(seed1, seed2)
def test_sanitize_tensor_or_tensorlike(self):
seed = test_util.test_seed(sampler_type='stateless')
seed1 = samplers.sanitize_seed(seed=self.evaluate(seed))
seed2 = samplers.sanitize_seed(seed)
self.assertAllEqual(seed1, seed2)
def test_split(self):
seed = test_util.test_seed(sampler_type='stateless')
seed1, seed2 = samplers.split_seed(seed)
seed3, seed4 = samplers.split_seed(seed)
self.assertNotAllEqual(seed, seed1)
self.assertNotAllEqual(seed, seed2)
self.assertNotAllEqual(seed1, seed2)
self.assertAllEqual(self.evaluate([seed1, seed2]),
self.evaluate([seed3, seed4]))
def test_salted_split(self):
seed = test_util.test_seed(sampler_type='stateless')
seed1, seed2 = samplers.split_seed(seed, salt='normal')
seed3, seed4 = samplers.split_seed(seed, salt='lognormal')
self.assertNotAllEqual(seed, seed1)
self.assertNotAllEqual(seed, seed2)
self.assertNotAllEqual(seed1, seed2)
self.assertNotAllEqual(seed1, seed3)
self.assertNotAllEqual(seed2, seed4)
self.assertNotAllEqual(seed3, seed4)
@parameterized.named_parameters(
dict(testcase_name='_categorical',
sampler=samplers.categorical,
kwargs=dict(logits=[[1, 1.05, 1]], num_samples=5)),
dict(testcase_name='_gamma',
sampler=samplers.gamma,
kwargs=dict(shape=[2, 3], alpha=[.5, 1, 2.2], beta=0.75)),
dict(testcase_name='_normal',
sampler=samplers.normal,
kwargs=dict(shape=[2])),
dict(testcase_name='_poisson',
sampler=samplers.poisson,
kwargs=dict(shape=[2, 3], lam=[1.5, 5.5, 8.5])),
dict(testcase_name='_poisson_scalar',
sampler=samplers.poisson,
kwargs=dict(shape=[], lam=[1.5, 5.5, 8.5])),
dict(testcase_name='_shuffle',
sampler=samplers.shuffle,
kwargs=dict(value=list(range(10)))),
dict(testcase_name='_uniform',
sampler=samplers.uniform,
kwargs=dict(shape=[2])))
def test_sampler(self, sampler, kwargs):
s1 = sampler(seed=(1, 2), **kwargs)
s2 = sampler(seed=(1, 2), **kwargs)
self.assertAllEqual(s1, s2)
self.verify_tf_behavior_match(sampler, kwargs)
@test_util.substrate_disable_stateful_random_test
def verify_tf_behavior_match(self, sampler, kwargs):
# We don't test these scenarios for numpy, jax, where we don't support
# stateful sampling.
s1 = sampler(seed=123, **kwargs)
s2 = sampler(seed=123, **kwargs)
tf_sampler = getattr(tf.random, sampler.__name__)
tf_s1 = tf_sampler(seed=123, **kwargs)
tf_s2 = tf_sampler(seed=123, **kwargs)
if tf.executing_eagerly():
self.assertNotAllEqual(s1, s2)
self.assertNotAllEqual(tf_s1, tf_s2)
else:
self.assertAllEqual(s1, s2)
self.assertAllEqual(tf_s1, tf_s2)
if __name__ == '__main__':
test_util.main()
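# Editor's note: sketch of the stateless-seed pattern exercised by the tests
# above, not part of the original module; shapes and the salt string are
# arbitrary examples.
#
#     from tensorflow_probability.python.internal import samplers
#     seed = samplers.sanitize_seed((1, 2), salt='my_fn')
#     normal_seed, uniform_seed = samplers.split_seed(seed)
#     x = samplers.normal(shape=[3], seed=normal_seed)
#     u = samplers.uniform(shape=[3], seed=uniform_seed)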
| 38.315385
| 78
| 0.702871
|
dc67bb4e131ae41317380993ac8a6bb6990edbca
| 3,669
|
py
|
Python
|
libqtile/widget/keyboardkbdd.py
|
JonBoyleCoding/qtile
|
e71fae802da55ebedb49ec04abb94b65b3ce0136
|
[
"MIT"
] | 2
|
2015-01-03T02:25:19.000Z
|
2015-02-07T19:16:06.000Z
|
libqtile/widget/keyboardkbdd.py
|
JonBoyleCoding/qtile
|
e71fae802da55ebedb49ec04abb94b65b3ce0136
|
[
"MIT"
] | 1
|
2020-04-15T20:20:42.000Z
|
2020-04-15T20:20:42.000Z
|
libqtile/widget/keyboardkbdd.py
|
JonBoyleCoding/qtile
|
e71fae802da55ebedb49ec04abb94b65b3ce0136
|
[
"MIT"
] | 1
|
2015-01-22T15:53:01.000Z
|
2015-01-22T15:53:01.000Z
|
# Copyright (c) 2015 Ali Mousavi
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import re
import dbus
from dbus.mainloop.glib import DBusGMainLoop
from libqtile.log_utils import logger
from libqtile.widget import base
class KeyboardKbdd(base.ThreadedPollText):
"""Widget for changing keyboard layouts per window, using kbdd
    kbdd must be installed and running; you can get it from:
https://github.com/qnikst/kbdd
"""
orientations = base.ORIENTATION_HORIZONTAL
defaults = [
("update_interval", 1, "Update interval in seconds."),
("configured_keyboards", ["us", "ir"],
"your predefined list of keyboard layouts."
"example: ['us', 'ir', 'es']"),
("colours", None,
"foreground colour for each layout"
"either 'None' or a list of colours."
"example: ['ffffff', 'E6F0AF']. ")
]
def __init__(self, **config):
base.ThreadedPollText.__init__(self, **config)
self.add_defaults(KeyboardKbdd.defaults)
self.keyboard = self.configured_keyboards[0]
self.is_kbdd_running = self._check_kbdd()
if not self.is_kbdd_running:
logger.error('Please check if kbdd is running')
self.keyboard = "N/A"
self._dbus_init()
def _check_kbdd(self):
running_list = self.call_process(["ps", "axw"])
if re.search("kbdd", running_list):
self.keyboard = self.configured_keyboards[0]
return True
return False
def _dbus_init(self):
dbus_loop = DBusGMainLoop()
bus = dbus.SessionBus(mainloop=dbus_loop)
bus.add_signal_receiver(self._layout_changed,
dbus_interface='ru.gentoo.kbdd',
signal_name='layoutChanged')
def _layout_changed(self, layout_changed):
"""
Handler for "layoutChanged" dbus signal.
"""
if self.colours:
self._set_colour(layout_changed)
self.keyboard = self.configured_keyboards[layout_changed]
def _set_colour(self, index):
if isinstance(self.colours, list):
try:
self.layout.colour = self.colours[index]
            except IndexError:
                # fewer colours than layouts; fall back to the previous colour
                self._set_colour(index - 1)
else:
logger.error('variable "colours" should be a list, to set a\
colour for all layouts, use "foreground".')
def poll(self):
if not self.is_kbdd_running:
if self._check_kbdd():
self.is_kbdd_running = True
return self.configured_keyboards[0]
return self.keyboard
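# Editor's note: illustrative bar configuration, not part of the original
# module. The widget parameters come from the `defaults` list above; the
# bar layout itself is a hypothetical example.
#
#     from libqtile import bar, widget
#     from libqtile.config import Screen
#
#     screens = [Screen(top=bar.Bar([
#         widget.KeyboardKbdd(configured_keyboards=["us", "ir"],
#                             colours=["ffffff", "E6F0AF"]),
#     ], 24))]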
| 37.824742
| 79
| 0.65631
|
67b565ba773a108ffd64e608171589df56d73877
| 2,913
|
py
|
Python
|
stripe/api_resources/customer.py
|
illia-v/stripe-python
|
6b6620a3b16063c92db272d728714659c0740f85
|
[
"MIT"
] | null | null | null |
stripe/api_resources/customer.py
|
illia-v/stripe-python
|
6b6620a3b16063c92db272d728714659c0740f85
|
[
"MIT"
] | null | null | null |
stripe/api_resources/customer.py
|
illia-v/stripe-python
|
6b6620a3b16063c92db272d728714659c0740f85
|
[
"MIT"
] | null | null | null |
# File generated from our OpenAPI spec
from __future__ import absolute_import, division, print_function
from stripe import api_requestor
from stripe import util
from stripe.api_resources.abstract import CreateableAPIResource
from stripe.api_resources.abstract import DeletableAPIResource
from stripe.api_resources.abstract import ListableAPIResource
from stripe.api_resources.abstract import SearchableAPIResource
from stripe.api_resources.abstract import UpdateableAPIResource
from stripe.api_resources.abstract import custom_method
from stripe.api_resources.abstract import nested_resource_class_methods
@custom_method("delete_discount", http_verb="delete", http_path="discount")
@custom_method(
"create_funding_instructions",
http_verb="post",
http_path="funding_instructions",
)
@custom_method(
"list_payment_methods",
http_verb="get",
http_path="payment_methods",
)
@nested_resource_class_methods(
"cash_balance",
operations=["retrieve", "update"],
resource_plural="cash_balance",
)
@nested_resource_class_methods(
"balance_transaction",
operations=["create", "retrieve", "update", "list"],
)
@nested_resource_class_methods(
"source",
operations=["create", "retrieve", "update", "delete", "list"],
)
@nested_resource_class_methods(
"tax_id",
operations=["create", "retrieve", "delete", "list"],
)
class Customer(
CreateableAPIResource,
DeletableAPIResource,
ListableAPIResource,
SearchableAPIResource,
UpdateableAPIResource,
):
OBJECT_NAME = "customer"
def create_funding_instructions(self, idempotency_key=None, **params):
url = self.instance_url() + "/funding_instructions"
headers = util.populate_headers(idempotency_key)
resp = self.request("post", url, params, headers)
stripe_object = util.convert_to_stripe_object(resp)
return stripe_object
def list_payment_methods(self, idempotency_key=None, **params):
url = self.instance_url() + "/payment_methods"
headers = util.populate_headers(idempotency_key)
resp = self.request("get", url, params, headers)
stripe_object = util.convert_to_stripe_object(resp)
stripe_object._retrieve_params = params
return stripe_object
@classmethod
def search(cls, *args, **kwargs):
return cls._search(search_url="/v1/customers/search", *args, **kwargs)
@classmethod
def search_auto_paging_iter(cls, *args, **kwargs):
return cls.search(*args, **kwargs).auto_paging_iter()
def delete_discount(self, **params):
requestor = api_requestor.APIRequestor(
self.api_key,
api_version=self.stripe_version,
account=self.stripe_account,
)
url = self.instance_url() + "/discount"
_, api_key = requestor.request("delete", url, params)
self.refresh_from({"discount": None}, api_key, True)
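# Editor's note: minimal usage sketch, not part of the original module.
# The API key and customer id are placeholders.
#
#     import stripe
#     stripe.api_key = "sk_test_..."
#     customer = stripe.Customer.retrieve("cus_123")
#     payment_methods = customer.list_payment_methods(type="card")
#     customer.delete_discount()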
| 34.678571
| 78
| 0.725026
|
a5d816e67c36d1fba669772b1c933d5bc007e0e6
| 15,808
|
py
|
Python
|
ross/core/countries.py
|
rossplt/ross-django-utils
|
27c8fa50b4cdd0324d5e87c46a05c4946523a923
|
[
"MIT"
] | null | null | null |
ross/core/countries.py
|
rossplt/ross-django-utils
|
27c8fa50b4cdd0324d5e87c46a05c4946523a923
|
[
"MIT"
] | null | null | null |
ross/core/countries.py
|
rossplt/ross-django-utils
|
27c8fa50b4cdd0324d5e87c46a05c4946523a923
|
[
"MIT"
] | null | null | null |
"""
This file holds constants for various country-related information
To load these constants, import them as usual, e.g.
from ross.core.countries import COUNTRIES
"""
COUNTRIES = (
('AF', u'Afghanistan'),
('AX', u'\xc5land Islands'),
('AL', u'Albania'),
('DZ', u'Algeria'),
('AS', u'American Samoa'),
('AD', u'Andorra'),
('AO', u'Angola'),
('AI', u'Anguilla'),
('AQ', u'Antarctica'),
('AG', u'Antigua and Barbuda'),
('AR', u'Argentina'),
('AM', u'Armenia'),
('AW', u'Aruba'),
('AU', u'Australia'),
('AT', u'Austria'),
('AZ', u'Azerbaijan'),
('BS', u'Bahamas'),
('BH', u'Bahrain'),
('BD', u'Bangladesh'),
('BB', u'Barbados'),
('BY', u'Belarus'),
('BE', u'Belgium'),
('BZ', u'Belize'),
('BJ', u'Benin'),
('BM', u'Bermuda'),
('BT', u'Bhutan'),
('BO', u'Bolivia'),
('BQ', u'Bonaire'),
('BA', u'Bosnia and Herzegovina'),
('BW', u'Botswana'),
('BV', u'Bouvet Island'),
('BR', u'Brazil'),
('IO', u'British Indian Ocean Territory'),
('BN', u'Brunei Darussalam'),
('BG', u'Bulgaria'),
('BF', u'Burkina Faso'),
('BI', u'Burundi'),
('KH', u'Cambodia'),
('CM', u'Cameroon'),
('CA', u'Canada'),
('CV', u'Cape Verde'),
('KY', u'Cayman Islands'),
('CF', u'Central African Republic'),
('TD', u'Chad'),
('CL', u'Chile'),
('CN', u'China'),
('CX', u'Christmas Island'),
('CC', u'Cocos (Keeling) Islands'),
('CO', u'Colombia'),
('KM', u'Comoros'),
('CG', u'Congo'),
('CD', u'Congo, The Democratic Republic of the'),
('CK', u'Cook Islands'),
('CR', u'Costa Rica'),
('CI', u"C\xf4te D'ivoire"),
('HR', u'Croatia'),
('CU', u'Cuba'),
('CW', u'Cura\xe7ao'),
('CY', u'Cyprus'),
('CZ', u'Czech Republic'),
('DK', u'Denmark'),
('DJ', u'Djibouti'),
('DM', u'Dominica'),
('DO', u'Dominican Republic'),
('EC', u'Ecuador'),
('EG', u'Egypt'),
('SV', u'El Salvador'),
('GQ', u'Equatorial Guinea'),
('ER', u'Eritrea'),
('EE', u'Estonia'),
('ET', u'Ethiopia'),
('FK', u'Falkland Islands (Malvinas)'),
('FO', u'Faroe Islands'),
('FJ', u'Fiji'),
('FI', u'Finland'),
('FR', u'France'),
('GF', u'French Guiana'),
('PF', u'French Polynesia'),
('TF', u'French Southern Territories'),
('GA', u'Gabon'),
('GM', u'Gambia'),
('GE', u'Georgia'),
('DE', u'Germany'),
('GH', u'Ghana'),
('GI', u'Gibraltar'),
('GR', u'Greece'),
('GL', u'Greenland'),
('GD', u'Grenada'),
('GP', u'Guadeloupe'),
('GU', u'Guam'),
('GT', u'Guatemala'),
('GG', u'Guernsey'),
('GN', u'Guinea'),
('GW', u'Guinea-bissau'),
('GY', u'Guyana'),
('HT', u'Haiti'),
('HM', u'Heard Island and McDonald Islands'),
('VA', u'Holy See (Vatican City State)'),
('HN', u'Honduras'),
('HK', u'Hong Kong'),
('HU', u'Hungary'),
('IS', u'Iceland'),
('IN', u'India'),
('ID', u'Indonesia'),
('IR', u'Iran'),
('IQ', u'Iraq'),
('IE', u'Ireland'),
('IM', u'Isle of Man'),
('IL', u'Israel'),
('IT', u'Italy'),
('JM', u'Jamaica'),
('JP', u'Japan'),
('JE', u'Jersey'),
('JO', u'Jordan'),
('KZ', u'Kazakhstan'),
('KE', u'Kenya'),
('KI', u'Kiribati'),
('KP', u"Korea, Democratic People's Republic of"),
('KR', u'Korea, Republic of'),
('KW', u'Kuwait'),
('KG', u'Kyrgyzstan'),
('LA', u"Lao People's Democratic Republic"),
('LV', u'Latvia'),
('LB', u'Lebanon'),
('LS', u'Lesotho'),
('LR', u'Liberia'),
('LY', u'Libya'),
('LI', u'Liechtenstein'),
('LT', u'Lithuania'),
('LU', u'Luxembourg'),
('MO', u'Macao'),
('MK', u'Macedonia'),
('MG', u'Madagascar'),
('MW', u'Malawi'),
('MY', u'Malaysia'),
('MV', u'Maldives'),
('ML', u'Mali'),
('MT', u'Malta'),
('MH', u'Marshall Islands'),
('MQ', u'Martinique'),
('MR', u'Mauritania'),
('MU', u'Mauritius'),
('YT', u'Mayotte'),
('MX', u'Mexico'),
('FM', u'Micronesia'),
('MD', u'Moldova'),
('MC', u'Monaco'),
('MN', u'Mongolia'),
('ME', u'Montenegro'),
('MS', u'Montserrat'),
('MA', u'Morocco'),
('MZ', u'Mozambique'),
('MM', u'Myanmar'),
('NA', u'Namibia'),
('NR', u'Nauru'),
('NP', u'Nepal'),
('NL', u'Netherlands'),
('NC', u'New Caledonia'),
('NZ', u'New Zealand'),
('NI', u'Nicaragua'),
('NE', u'Niger'),
('NG', u'Nigeria'),
('NU', u'Niue'),
('NF', u'Norfolk Island'),
('MP', u'Northern Mariana Islands'),
('NO', u'Norway'),
('OM', u'Oman'),
('PK', u'Pakistan'),
('PW', u'Palau'),
('PS', u'Palestinian Territory, Occupied'),
('PA', u'Panama'),
('PG', u'Papua New Guinea'),
('PY', u'Paraguay'),
('PE', u'Peru'),
('PH', u'Philippines'),
('PN', u'Pitcairn'),
('PL', u'Poland'),
('PT', u'Portugal'),
('PR', u'Puerto Rico'),
('QA', u'Qatar'),
('RE', u'R\xe9union'),
('RO', u'Romania'),
('RU', u'Russian Federation'),
('RW', u'Rwanda'),
('BL', u'Saint Barth\xe9lemy'),
('SH', u'Saint Helena, Ascension and Tristan Da Cunha'),
('KN', u'Saint Kitts and Nevis'),
('LC', u'Saint Lucia'),
('MF', u'Saint Martin (French Part)'),
('PM', u'Saint Pierre and Miquelon'),
('VC', u'Saint Vincent and the Grenadines'),
('WS', u'Samoa'),
('SM', u'San Marino'),
('ST', u'Sao Tome and Principe'),
('SA', u'Saudi Arabia'),
('SN', u'Senegal'),
('RS', u'Serbia'),
('SC', u'Seychelles'),
('SL', u'Sierra Leone'),
('SG', u'Singapore'),
('SX', u'Sint Maarten (Dutch Part)'),
('SK', u'Slovakia'),
('SI', u'Slovenia'),
('SB', u'Solomon Islands'),
('SO', u'Somalia'),
('ZA', u'South Africa'),
('GS', u'South Georgia and the South Sandwich Islands'),
('SS', u'South Sudan'),
('ES', u'Spain'),
('LK', u'Sri Lanka'),
('SD', u'Sudan'),
('SR', u'Suriname'),
('SJ', u'Svalbard and Jan Mayen'),
('SZ', u'Swaziland'),
('SE', u'Sweden'),
('CH', u'Switzerland'),
('SY', u'Syrian Arab Republic'),
('TW', u'Taiwan'),
('TJ', u'Tajikistan'),
('TZ', u'Tanzania'),
('TH', u'Thailand'),
('TL', u'Timor-leste'),
('TG', u'Togo'),
('TK', u'Tokelau'),
('TO', u'Tonga'),
('TT', u'Trinidad and Tobago'),
('TN', u'Tunisia'),
('TR', u'Turkey'),
('TM', u'Turkmenistan'),
('TC', u'Turks and Caicos Islands'),
('TV', u'Tuvalu'),
('UG', u'Uganda'),
('UA', u'Ukraine'),
('AE', u'United Arab Emirates'),
('GB', u'United Kingdom'),
('US', u'United States'),
('UM', u'United States Minor Outlying Islands'),
('UY', u'Uruguay'),
('UZ', u'Uzbekistan'),
('VU', u'Vanuatu'),
('VE', u'Venezuela'),
('VN', u'Viet Nam'),
('VG', u'Virgin Islands, British'),
('VI', u'Virgin Islands, U.S.'),
('WF', u'Wallis and Futuna'),
('EH', u'Western Sahara'),
('YE', u'Yemen'),
('ZM', u'Zambia'),
('ZW', u'Zimbabwe'),
)
NATIONALITY = (
('AD', u'Andorian'),
('AE', u'Emirian'),
('AF', u'Afghani'),
('AI', u'Anguillan'),
('AM', u'Armenian'),
('AO', u'Angolian'),
('AQ', u'Antarctic'),
('AR', u'Argentine'),
('AS', u'Austrian'),
('AU', u'Australian'),
('AW', u'Arubian'),
('BA', u'Bangladeshi'),
('BB', u'Barbadian'),
('BE', u'Belgian'),
('BH', u'Bahrainian'),
('BM', u'Bermuda'),
('BO', u'Bolivian'),
('BR', u'Brazilian'),
('BS', u'Bahameese'),
('BT', u'Bhutanese'),
('BU', u'Bulgarian'),
('BY', u'Belarusian'),
('BZ', u'Belizean'),
('CA', u'Canadian'),
('CG', u'Congolese'),
('CH', u'Chinese'),
('CH', u'Swiss'),
('CL', u'Chilean'),
('CM', u'Cambodian'),
('CM', u'Cameroonian'),
('CO', u'Columbian'),
('CR', u'Czech'),
('CR', u'Rican'),
('CU', u'Cuban'),
('CY', u'Cypriot'),
('DE', u'German'),
('DK', u'Danish'),
('DM', u'Dominican'),
('EC', u'Ecuadorean'),
('EE', u'Estonian'),
('EG', u'Egyptian'),
('ET', u'Ethiopian'),
('FI', u'Finnish'),
('FJ', u'Fijian'),
('FR', u'French'),
('GB', u'British'),
('GE', u'Georgian'),
('GH', u'Ghanaian'),
('GN', u'Guinean'),
('GR', u'Greek'),
('GY', u'Guyanese'),
('HK', u'Chinese'),
('HR', u'Croatian'),
('HU', u'Hungarian'),
('ID', u'Indonesian'),
('IE', u'Irish'),
('IN', u'Indian'),
('IQ', u'Iraqi'),
('IR', u'Iranian'),
('IS', u'Israeli'),
('IS', u'Icelander'),
('IT', u'Italian'),
('JM', u'Jamaican'),
('JO', u'Jordanian'),
('JP', u'Japanese'),
('KE', u'Kenyan'),
('KO', u'Korean'),
('KW', u'Kuwaiti'),
('KZ', u'Kazakhstani'),
('KZ', u'Kazakhstani'),
('LB', u'Lebanese'),
('LK', u'Lankan'),
('LT', u'Lithunian'),
('LU', u'Luxembourger'),
('MA', u'Moroccan'),
('MC', u'Monacan'),
('ME', u'Mexican'),
('MM', u'Mayanmarese'),
('MN', u'Mongolian'),
('MO', u'Macau'),
('MU', u'Mauritian'),
('MV', u'Maldivan'),
('MY', u'Malaysian'),
('NA', u'Namibian'),
('NG', u'Nigerian'),
('NL', u'Dutch'),
('NO', u'Norwegian'),
('NP', u'Nepalese'),
('NZ', u'Zealander'),
('OM', u'Omani'),
('PA', u'Panamanian'),
('PE', u'Peruvian'),
('PH', u'Filipino'),
('PK', u'Pakistani'),
('PO', u'Polish'),
('PT', u'Portugees'),
('PY', u'Paraguayan'),
('QA', u'Qatari'),
('RO', u'Romanian'),
('RU', u'Russian'),
('SA', u'Arabian'),
('SC', u'Seychellois'),
('SE', u'Swedish'),
('SG', u'Singaporean'),
('SK', u'Slovakian'),
('SN', u'Senegalese'),
('SO', u'Somali'),
('SP', u'Spanish'),
('TH', u'Thai'),
('TN', u'Tunisian'),
('TR', u'Turkish'),
('TW', u'Taiwanese'),
('TZ', u'Tanzanian'),
('UA', u'Ukrainian'),
('UG', u'Ugandan'),
('US', u'American'),
('UY', u'Uruguayan'),
('UZ', u'Uzbekistani'),
('VE', u'Venezuelan'),
('VN', u'Vietnamese'),
('YE', u'Yemeni'),
('ZA', u'African'),
('ZM', u'Zambian'),
('ZW', u'Zimbabwean'),
)
CURRENCY_ABBREVIATIONS = (
('AED', u'United Arab Emirates Dirham'),
('AFN', u'Afghanistan Afghani'),
('ALL', u'Albania Lek'),
('AMD', u'Armenia Dram'),
('ANG', u'Netherlands Antilles Guilder'),
('AOA', u'Angola Kwanza'),
('ARS', u'Argentina Peso'),
('AUD', u'Australia Dollar'),
('AWG', u'Aruba Guilder'),
('AZN', u'Azerbaijan New Manat'),
('BAM', u'Bosnia and Herzegovina Convertible Marka'),
('BBD', u'Barbados Dollar'),
('BDT', u'Bangladesh Taka'),
('BGN', u'Bulgaria Lev'),
('BHD', u'Bahrain Dinar'),
('BIF', u'Burundi Franc'),
('BMD', u'Bermuda Dollar'),
('BND', u'Brunei Darussalam Dollar'),
('BOB', u'Bolivia Boliviano'),
('BRL', u'Brazil Real'),
('BSD', u'Bahamas Dollar'),
('BTN', u'Bhutan Ngultrum'),
('BWP', u'Botswana Pula'),
('BYR', u'Belarus Ruble'),
('BZD', u'Belize Dollar'),
('CAD', u'Canada Dollar'),
('CDF', u'Congo/Kinshasa Franc'),
('CHF', u'Switzerland Franc'),
('CLP', u'Chile Peso'),
('CNY', u'China Yuan Renminbi'),
('COP', u'Colombia Peso'),
('CRC', u'Costa Rica Colon'),
('CUC', u'Cuba Convertible Peso'),
('CUP', u'Cuba Peso'),
('CVE', u'Cape Verde Escudo'),
('CZK', u'Czech Republic Koruna'),
('DJF', u'Djibouti Franc'),
('DKK', u'Denmark Krone'),
('DOP', u'Dominican Republic Peso'),
('DZD', u'Algeria Dinar'),
('EGP', u'Egypt Pound'),
('ERN', u'Eritrea Nakfa'),
('ETB', u'Ethiopia Birr'),
('EUR', u'Euro Member Countries'),
('FJD', u'Fiji Dollar'),
('FKP', u'Falkland Islands (Malvinas) Pound'),
('GBP', u'United Kingdom Pound'),
('GEL', u'Georgia Lari'),
('GGP', u'Guernsey Pound'),
('GHS', u'Ghana Cedi'),
('GIP', u'Gibraltar Pound'),
('GMD', u'Gambia Dalasi'),
('GNF', u'Guinea Franc'),
('GTQ', u'Guatemala Quetzal'),
('GYD', u'Guyana Dollar'),
('HKD', u'Hong Kong Dollar'),
('HNL', u'Honduras Lempira'),
('HRK', u'Croatia Kuna'),
('HTG', u'Haiti Gourde'),
('HUF', u'Hungary Forint'),
('IDR', u'Indonesia Rupiah'),
('ILS', u'Israel Shekel'),
('IMP', u'Isle of Man Pound'),
('INR', u'India Rupee'),
('IQD', u'Iraq Dinar'),
('IRR', u'Iran Rial'),
('ISK', u'Iceland Krona'),
('JEP', u'Jersey Pound'),
('JMD', u'Jamaica Dollar'),
('JOD', u'Jordan Dinar'),
('JPY', u'Japan Yen'),
('KES', u'Kenya Shilling'),
('KGS', u'Kyrgyzstan Som'),
('KHR', u'Cambodia Riel'),
('KMF', u'Comoros Franc'),
('KPW', u'Korea (North) Won'),
('KRW', u'Korea (South) Won'),
('KWD', u'Kuwait Dinar'),
('KYD', u'Cayman Islands Dollar'),
('KZT', u'Kazakhstan Tenge'),
('LAK', u'Laos Kip'),
('LBP', u'Lebanon Pound'),
('LKR', u'Sri Lanka Rupee'),
('LRD', u'Liberia Dollar'),
('LSL', u'Lesotho Loti'),
('LTL', u'Lithuania Litas'),
('LVL', u'Latvia Lat'),
('LYD', u'Libya Dinar'),
('MAD', u'Morocco Dirham'),
('MDL', u'Moldova Leu'),
('MGA', u'Madagascar Ariary'),
('MKD', u'Macedonia Denar'),
('MMK', u'Myanmar (Burma) Kyat'),
('MNT', u'Mongolia Tughrik'),
('MOP', u'Macau Pataca'),
('MRO', u'Mauritania Ouguiya'),
('MUR', u'Mauritius Rupee'),
('MVR', u'Maldives (Maldive Islands) Rufiyaa'),
('MWK', u'Malawi Kwacha'),
('MXN', u'Mexico Peso'),
('MYR', u'Malaysia Ringgit'),
('MZN', u'Mozambique Metical'),
('NAD', u'Namibia Dollar'),
('NGN', u'Nigeria Naira'),
('NIO', u'Nicaragua Cordoba'),
('NOK', u'Norway Krone'),
('NPR', u'Nepal Rupee'),
('NZD', u'New Zealand Dollar'),
('OMR', u'Oman Rial'),
('PAB', u'Panama Balboa'),
('PEN', u'Peru Nuevo Sol'),
('PGK', u'Papua New Guinea Kina'),
('PHP', u'Philippines Peso'),
('PKR', u'Pakistan Rupee'),
('PLN', u'Poland Zloty'),
('PYG', u'Paraguay Guarani'),
('QAR', u'Qatar Riyal'),
('RON', u'Romania New Leu'),
('RSD', u'Serbia Dinar'),
('RUB', u'Russia Ruble'),
('RWF', u'Rwanda Franc'),
('SAR', u'Saudi Arabia Riyal'),
('SBD', u'Solomon Islands Dollar'),
('SCR', u'Seychelles Rupee'),
('SDG', u'Sudan Pound'),
('SEK', u'Sweden Krona'),
('SGD', u'Singapore Dollar'),
('SHP', u'Saint Helena Pound'),
('SLL', u'Sierra Leone Leone'),
('SOS', u'Somalia Shilling'),
('SPL', u'Seborga Luigino'),
('SRD', u'Suriname Dollar'),
('STD', u'Sao Tome and Principe Dobra'),
('SVC', u'El Salvador Colon'),
('SYP', u'Syria Pound'),
('SZL', u'Swaziland Lilangeni'),
('THB', u'Thailand Baht'),
('TJS', u'Tajikistan Somoni'),
('TMT', u'Turkmenistan Manat'),
('TND', u'Tunisia Dinar'),
('TOP', u'Tonga Paanga'),
('TRY', u'Turkey Lira'),
('TTD', u'Trinidad and Tobago Dollar'),
('TVD', u'Tuvalu Dollar'),
('TWD', u'Taiwan New Dollar'),
('TZS', u'Tanzania Shilling'),
('UAH', u'Ukraine Hryvna'),
('UGX', u'Uganda Shilling'),
('USD', u'United States Dollar'),
('UYU', u'Uruguay Peso'),
('UZS', u'Uzbekistan Som'),
('VEF', u'Venezuela Bolivar'),
('VND', u'Viet Nam Dong'),
('VUV', u'Vanuatu Vatu'),
('WST', u'Samoa Tala'),
('XAF', u'Communaute Financiere Africaine (BEAC) CFA Franc BEAC'),
('XCD', u'East Caribbean Dollar'),
('XDR', u'International Monetary Fund (IMF) Special Drawing Rights'),
('XOF', u'Communaute Financiere Africaine (BCEAO) Franc'),
('XPF', u'Comptoirs Francais du Pacifique (CFP) Franc'),
('YER', u'Yemen Rial'),
('ZAR', u'South Africa Rand'),
('ZMW', u'Zambia Kwacha'),
('ZWD', u'Zimbabwe Dollar')
)
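# Editor's note: illustrative use in a Django model, not part of the original
# module; the model and field names are placeholders.
#
#     from django.db import models
#     from ross.core.countries import COUNTRIES, CURRENCY_ABBREVIATIONS
#
#     class Profile(models.Model):
#         country = models.CharField(max_length=2, choices=COUNTRIES)
#         currency = models.CharField(max_length=3, choices=CURRENCY_ABBREVIATIONS)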
| 28.534296
| 73
| 0.493421
|
15b0d68dec03e8a0f995ee573897908b724054e5
| 3,672
|
py
|
Python
|
test.py
|
gallodev/devopslab
|
6eff188dad0785f4969e41df7354cc27485f839f
|
[
"MIT"
] | 2
|
2021-09-23T23:55:49.000Z
|
2021-09-23T23:59:00.000Z
|
test.py
|
gallodev/devopslab
|
6eff188dad0785f4969e41df7354cc27485f839f
|
[
"MIT"
] | null | null | null |
test.py
|
gallodev/devopslab
|
6eff188dad0785f4969e41df7354cc27485f839f
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
from app import app
import unittest
class Test(unittest.TestCase):
def setUp(self):
        # creates a test client instance; unittest requires this method to be named "setUp"
self.app = app.test_client()
        # sends a GET request to the URL
self.result = self.app.get('/')
def test_requisicao(self):
        # compares the request status (must equal 200)
self.assertEqual(self.result.status_code, 200)
def test_conteudo(self):
        # checks the page content that is returned
self.assertEqual(self.result.data.decode('utf-8'), "Hello World")
if __name__ == "__main__":
print ('INICIANDO OS TESTES')
print('----------------------------------------------------------------------')
unittest.main(verbosity=2)
| 146.88
| 201
| 0.127179
|
84167e03e533b7f5e11fd91ae09712c4559b0c00
| 432
|
py
|
Python
|
handlers/test.py
|
kevinxin90/gene_analysis_api
|
603c9b574c04fd72e3690d6b6609bb274b689481
|
[
"MIT"
] | null | null | null |
handlers/test.py
|
kevinxin90/gene_analysis_api
|
603c9b574c04fd72e3690d6b6609bb274b689481
|
[
"MIT"
] | null | null | null |
handlers/test.py
|
kevinxin90/gene_analysis_api
|
603c9b574c04fd72e3690d6b6609bb274b689481
|
[
"MIT"
] | null | null | null |
import json
from .base import BaseHandler
class TestHandler(BaseHandler):
def get(self):
_input = self.get_query_argument('q', None)
print('input is {}'.format(_input))
if _input:
self.set_status(200)
self.write(json.dumps({'your input': _input}))
self.finish()
else:
self.set_status(400)
            self.write(json.dumps({'error': 'No input is found'}))
            self.finish()
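# Editor's note: illustrative routing sketch, not part of the original module;
# the URL pattern and port are placeholders.
#
#     import tornado.ioloop
#     import tornado.web
#     from handlers.test import TestHandler
#
#     app = tornado.web.Application([(r"/test", TestHandler)])
#     app.listen(8888)
#     tornado.ioloop.IOLoop.current().start()
#     # then: curl "http://localhost:8888/test?q=hello"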
| 28.8
| 58
| 0.578704
|
919f88d68663318130eeb160aec324258fdd81bb
| 2,148
|
py
|
Python
|
pyecvl/support_openslide.py
|
simleo/pyecvl
|
c044dc2ddf9bb69e93ffe06113de9365dc84e168
|
[
"MIT"
] | 2
|
2020-04-29T13:17:15.000Z
|
2021-01-07T19:13:14.000Z
|
pyecvl/support_openslide.py
|
simleo/pyecvl
|
c044dc2ddf9bb69e93ffe06113de9365dc84e168
|
[
"MIT"
] | 19
|
2020-01-16T11:55:07.000Z
|
2022-02-28T11:27:40.000Z
|
pyecvl/support_openslide.py
|
deephealthproject/pyecvl
|
3fb256a77ab6d7ff62219044d54b51d84471db6e
|
[
"MIT"
] | 2
|
2020-01-20T13:47:05.000Z
|
2020-02-27T11:13:32.000Z
|
# Copyright (c) 2019-2021 CRS4
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from . import _core
_ecvl = _core.ecvl
if not _ecvl.ECVL_WITH_OPENSLIDE:
raise ImportError("extension module not compiled with OpenSlide support")
__all__ = [
"OpenSlideGetLevels",
"OpenSlideRead",
]
def OpenSlideGetLevels(filename):
"""\
Get the width and height for each level of a whole-slide image.
:param filename: image file name
:return: a list of pairs representing the width and height of each level
"""
return _ecvl.OpenSlideGetLevels(filename)
def OpenSlideRead(filename, level, dims):
"""\
Load a region of a whole-slide image.
Supported formats are those supported by the OpenSlide library.
:param filename: image file name
:param level: image level to extract
:param dims: ``[x, y, w, h]`` list representing the region to extract.
``x`` and ``y`` are the top-left x and y coordinates in the level 0
reference frame. ``w`` and ``h`` are the width and height of the region
:return: an Image object
"""
return _ecvl.OpenSlideRead(filename, level, dims)
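# Editor's note: minimal usage sketch, not part of the original module.
# 'slide.svs' is a placeholder for a local whole-slide image.
#
#     levels = OpenSlideGetLevels("slide.svs")   # e.g. [[w0, h0], [w1, h1], ...]
#     level = len(levels) - 1                    # lowest-resolution level
#     w, h = levels[level]
#     img = OpenSlideRead("slide.svs", level, [0, 0, w, h])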
| 36.40678
| 79
| 0.735102
|
1fe529fe57e0e61b335716d374d3022d772d66c0
| 7,451
|
py
|
Python
|
src/openprocurement/tender/cfaua/views/tender.py
|
pontostroy/openprocurement.api
|
6651ef29413d155c83f893ee64a611cf75f4daaf
|
[
"Apache-2.0"
] | null | null | null |
src/openprocurement/tender/cfaua/views/tender.py
|
pontostroy/openprocurement.api
|
6651ef29413d155c83f893ee64a611cf75f4daaf
|
[
"Apache-2.0"
] | null | null | null |
src/openprocurement/tender/cfaua/views/tender.py
|
pontostroy/openprocurement.api
|
6651ef29413d155c83f893ee64a611cf75f4daaf
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
from zope.component import getAdapter
from openprocurement.api.interfaces import IContentConfigurator
from openprocurement.api.utils import json_view, context_unpack, get_now, raise_operation_error
from openprocurement.tender.core.utils import (
optendersresource,
apply_patch,
save_tender,
calculate_complaint_business_date,
)
from openprocurement.tender.core.validation import (
validate_tender_period_extension,
validate_tender_not_in_terminated_status,
validate_tender_change_status_permission,
)
from openprocurement.tender.belowthreshold.views.tender import TenderResource
from openprocurement.tender.cfaua.utils import check_status, all_bids_are_reviewed, all_awards_are_reviewed
from openprocurement.tender.openua.validation import validate_patch_tender_ua_data
from openprocurement.tender.core.events import TenderInitializeEvent
from openprocurement.tender.cfaua.validation import validate_tender_status_update
@optendersresource(
name="closeFrameworkAgreementUA:Tender",
path="/tenders/{tender_id}",
procurementMethodType="closeFrameworkAgreementUA",
description="Open Contracting compatible data exchange format. "
"See http://ocds.open-contracting.org/standard/r/master/#tender for more info",
)
class TenderEUResource(TenderResource):
""" Resource handler for TenderEU """
@json_view(
content_type="application/json",
validators=(
validate_patch_tender_ua_data,
validate_tender_not_in_terminated_status,
validate_tender_status_update,
validate_tender_change_status_permission,
),
permission="edit_tender",
)
def patch(self):
"""Tender Edit (partial)
        For example, here is how a procuring entity can change the number of items to be procured and the total value of a tender:
.. sourcecode:: http
PATCH /tenders/4879d3f8ee2443169b5fbbc9f89fa607 HTTP/1.1
Host: example.com
Accept: application/json
{
"data": {
"value": {
"amount": 600
},
"itemsToBeProcured": [
{
"quantity": 6
}
]
}
}
And here is the response to be expected:
.. sourcecode:: http
HTTP/1.0 200 OK
Content-Type: application/json
{
"data": {
"id": "4879d3f8ee2443169b5fbbc9f89fa607",
"tenderID": "UA-64e93250be76435397e8c992ed4214d1",
"dateModified": "2014-10-27T08:12:34.956Z",
"value": {
"amount": 600
},
"itemsToBeProcured": [
{
"quantity": 6
}
]
}
}
"""
tender = self.context
config = getAdapter(tender, IContentConfigurator)
data = self.request.validated["data"]
now = get_now()
if (
self.request.authenticated_role == "tender_owner"
and self.request.validated["tender_status"] == "active.tendering"
):
if "tenderPeriod" in data and "endDate" in data["tenderPeriod"]:
self.request.validated["tender"].tenderPeriod.import_data(data["tenderPeriod"])
validate_tender_period_extension(self.request)
self.request.registry.notify(TenderInitializeEvent(self.request.validated["tender"]))
self.request.validated["data"]["enquiryPeriod"] = self.request.validated[
"tender"
].enquiryPeriod.serialize()
apply_patch(self.request, save=False, src=self.request.validated["tender_src"])
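        # The checks below run after the patch has been applied in memory but before
        # it is saved, and depend on the caller's role and the requested status change.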
if self.request.authenticated_role == "chronograph":
check_status(self.request)
elif self.request.authenticated_role == "tender_owner" and tender.status == "active.tendering":
tender.invalidate_bids_data()
elif (
self.request.authenticated_role == "tender_owner"
and self.request.validated["tender_status"] == "active.pre-qualification"
and tender.status == "active.pre-qualification.stand-still"
):
active_lots = [lot.id for lot in tender.lots if lot.status == "active"] if tender.lots else [None]
if any(
[
i["status"] in self.request.validated["tender"].block_complaint_status
for q in self.request.validated["tender"]["qualifications"]
for i in q["complaints"]
if q["lotID"] in active_lots
]
):
raise_operation_error(
self.request, "Can't switch to 'active.pre-qualification.stand-still' before resolve all complaints"
)
if all_bids_are_reviewed(self.request):
tender.qualificationPeriod.endDate = calculate_complaint_business_date(
now, config.prequalification_complaint_stand_still, self.request.validated["tender"]
)
tender.check_auction_time()
else:
raise_operation_error(
self.request,
"Can't switch to 'active.pre-qualification.stand-still' while not all bids are qualified",
)
elif (
self.request.authenticated_role == "tender_owner"
and self.request.validated["tender_status"] == "active.qualification"
and tender.status == "active.qualification.stand-still"
):
active_lots = [lot.id for lot in tender.lots if lot.status == "active"] if tender.lots else [None]
if any(
[
i["status"] in self.request.validated["tender"].block_complaint_status
for a in self.request.validated["tender"]["awards"]
for i in a["complaints"]
if a["lotID"] in active_lots
]
):
raise_operation_error(
self.request, "Can't switch to 'active.qualification.stand-still' before resolve all complaints"
)
if all_awards_are_reviewed(self.request):
tender.awardPeriod.endDate = calculate_complaint_business_date(
now, config.qualification_complaint_stand_still, self.request.validated["tender"]
)
for award in [a for a in tender.awards if a.status != "cancelled"]:
award["complaintPeriod"] = {
"startDate": now.isoformat(),
"endDate": tender.awardPeriod.endDate.isoformat(),
}
else:
raise_operation_error(
self.request,
"Can't switch to 'active.qualification.stand-still' while not all awards are qualified",
)
save_tender(self.request)
self.LOGGER.info(
"Updated tender {}".format(tender.id), extra=context_unpack(self.request, {"MESSAGE_ID": "tender_patch"})
)
return {"data": tender.serialize(tender.status)}
| 42.096045
| 120
| 0.581935
|
53f7d317aa7a31c4d148cccedc3f137f4211505c
| 2,206
|
py
|
Python
|
pennylane/transforms/optimization/remove_barrier.py
|
AkashNarayanan/pennylane
|
b855176ebd5c84a49f51babce0a6d5af1ef29142
|
[
"Apache-2.0"
] | 1
|
2021-12-07T17:18:26.000Z
|
2021-12-07T17:18:26.000Z
|
pennylane/transforms/optimization/remove_barrier.py
|
AkashNarayanan/pennylane
|
b855176ebd5c84a49f51babce0a6d5af1ef29142
|
[
"Apache-2.0"
] | 1
|
2021-01-18T13:54:35.000Z
|
2021-01-18T13:54:35.000Z
|
pennylane/transforms/optimization/remove_barrier.py
|
AkashNarayanan/pennylane
|
b855176ebd5c84a49f51babce0a6d5af1ef29142
|
[
"Apache-2.0"
] | 1
|
2021-12-07T17:18:36.000Z
|
2021-12-07T17:18:36.000Z
|
# Copyright 2018-2021 Xanadu Quantum Technologies Inc.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Transform for removing the Barrier gate from quantum circuits."""
# pylint: disable=too-many-branches
from pennylane import apply
from pennylane.transforms import qfunc_transform
@qfunc_transform
def remove_barrier(tape):
"""Quantum function transform to remove Barrier gates.
Args:
qfunc (function): A quantum function.
Returns:
function: the transformed quantum function
**Example**
Consider the following quantum function:
.. code-block:: python
def qfunc(x, y, z):
qml.Hadamard(wires=0)
qml.Hadamard(wires=1)
qml.Barrier(wires=[0,1])
qml.PauliX(wires=0)
return qml.expval(qml.PauliZ(0))
The circuit before optimization:
>>> dev = qml.device('default.qubit', wires=2)
>>> qnode = qml.QNode(qfunc, dev)
>>> print(qml.draw(qnode)(1, 2))
0: ──H──╭||──X──┤ ⟨Z⟩
1: ──H──╰||─────┤
We can remove the Barrier by running the ``remove_barrier`` transform:
>>> optimized_qfunc = remove_barrier(qfunc)
>>> optimized_qnode = qml.QNode(optimized_qfunc, dev)
>>> print(qml.draw(optimized_qnode)(1, 2))
0: ──H──X──┤ ⟨Z⟩
1: ──H─────┤
"""
# Make a working copy of the list to traverse
list_copy = tape.operations.copy()
while len(list_copy) > 0:
current_gate = list_copy[0]
        # Re-queue every gate except Barrier, effectively removing Barrier gates
if current_gate.name != "Barrier":
apply(current_gate)
list_copy.pop(0)
continue
# Queue the measurements normally
for m in tape.measurements:
apply(m)
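# Illustrative, hypothetical helper (not part of the original module): rebuilds
# the docstring's example circuit, strips its Barrier via ``remove_barrier``, and
# returns a runnable QNode. It assumes PennyLane and its default.qubit device are
# available, as in the docstring example above.
def _example_remove_barrier_usage():
    import pennylane as qml
    def qfunc(x, y, z):
        qml.Hadamard(wires=0)
        qml.Hadamard(wires=1)
        qml.Barrier(wires=[0, 1])
        qml.PauliX(wires=0)
        return qml.expval(qml.PauliZ(0))
    dev = qml.device("default.qubit", wires=2)
    optimized_qfunc = remove_barrier(qfunc)  # transform returned by @qfunc_transform
    return qml.QNode(optimized_qfunc, dev)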
| 28.649351
| 74
| 0.650045
|
9a346960ee11345f4219ef9ecf671cf0eef5cb93
| 6,572
|
py
|
Python
|
test/process_test.py
|
elemel/underbar
|
b439c511f324405643ac515f854b38c9fc568482
|
[
"MIT"
] | null | null | null |
test/process_test.py
|
elemel/underbar
|
b439c511f324405643ac515f854b38c9fc568482
|
[
"MIT"
] | null | null | null |
test/process_test.py
|
elemel/underbar
|
b439c511f324405643ac515f854b38c9fc568482
|
[
"MIT"
] | null | null | null |
from fractions import Fraction as Q
import unittest
from quest.assembler import assemble
from quest.process import Process
from quest.register import Register
from quest.stdio import StandardStream
STDIN = StandardStream.STDIN.value
STDOUT = StandardStream.STDOUT.value
STDERR = StandardStream.STDERR.value
ECHO_SOURCE = open('examples/echo.qs').read()
HELLO_WORLD_SOURCE = open('examples/hello_world.qs').read()
class ProcessTest(unittest.TestCase):
def test_halt(self):
process = Process(assemble('''
13, hcf
'''))
process.run()
self.assertEqual(process.pop_data(), Q(13))
def test_call(self):
process = Process(assemble('''
cls + function
hcf
function:
13, ret
'''))
process.run()
self.assertEqual(process.pop_data(), Q(13))
def test_hello_world(self):
process = Process(assemble(HELLO_WORLD_SOURCE))
process.run()
self.assertEqual(process.read(), 'Hello, World!\n')
def test_print(self):
process = Process(assemble('''
message, lds + stdout, cls + print
13, hcf
; [stream, string] -> []
print: .stream = 0
ent + 1
stl + .stream
.loop:
dup, ldd; Load character
dup, beq + .break; Break on null character
ldl + .stream, put; Write character to stream
adi + 1, bal + .loop; Next character
.break:
dis, dis
ret + 1
message:
"Hello, World!\n", 0
'''))
process.run()
self.assertEqual(process.pop_data(), Q(13))
self.assertEqual(process.read(), 'Hello, World!\n')
def test_echo(self):
process = Process(assemble(ECHO_SOURCE), argv=['hello', 'world'])
process.run()
self.assertEqual(process.read(), 'hello world\n')
def test_get_integer_line(self):
process = Process(assemble('''
lds + stdin, cls + get_integer_line
hcf
; [stream] -> [result]
get_integer_line: .stream = 0, .result = 1
ent + 2, stl + .stream
0, stl + .result; Initialize result
1; Positive sign
ldl + .stream, get; First character
dup, adi - '-', bne + .loop; If sign character
dis; Discard sign character
neg; Negative sign
ldl + .stream, get; First character after sign
.loop:
dup, adi - '\n', beq + .break; Break on newline
adi - '0'; Character to digit
ldl + .result, mli + 10; Multiply result by base
add, stl + .result; Add digit to result
ldl + .stream, get; Next character
bal + .loop
.break:
dis; Discard newline
ldl + .result, mul, stl + .result; Apply sign
ldl + .result, ret + 2
'''))
process.write('285793423\n')
process.run()
self.assertEqual(process.pop_data(), 285793423)
def test_get_integer_line_negative(self):
process = Process(assemble('''
lds + stdin, cls + get_integer_line
hcf
; [stream] -> [result]
get_integer_line: .stream = 0, .result = 1
ent + 2, stl + .stream
0, stl + .result; Initialize result
1; Positive sign
ldl + .stream, get; First character
dup, adi - '-', bne + .loop; If sign character
dis; Discard sign character
neg; Negative sign
ldl + .stream, get; First character after sign
.loop:
dup, adi - '\n', beq + .break; Break on newline
adi - '0'; Character to digit
ldl + .result, mli + 10; Multiply result by base
add, stl + .result; Add digit to result
ldl + .stream, get; Next character
bal + .loop
.break:
dis; Discard newline
ldl + .result, mul, stl + .result; Apply sign
ldl + .result, ret + 2
'''))
process.write('-618584259\n')
process.run()
self.assertEqual(process.pop_data(), -618584259)
def test_put_integer_line(self):
process = Process(assemble('''
285793423, lds + stdout, cls + put_integer_line
hcf
; [stream, value] -> []
put_integer_line: .stream = 0, .value = 1
ent + 2, stl + .stream, stl + .value
1
ldl + .value, bge + .loop_1
'-', ldl + .stream, put
ldl + .value, neg, stl + .value
.loop_1:
mli + 10
dup, ldl + .value, sub, ble + .loop_1
.loop_2:
fdi + 10
dup, beq + .break
dup, ldl + .value, swp, div, fdi + 1
adi + '0', ldl + .stream, put
dup, ldl + .value, swp, mod, stl + .value
bal + .loop_2
.break:
'\n', ldl + .stream, put
ret + 2
'''))
process.run()
self.assertEqual(process.read(), '285793423\n')
def test_put_integer_line_negative(self):
process = Process(assemble('''
-618584259, lds + stdout, cls + put_integer_line
hcf
; [stream, value] -> []
put_integer_line: .stream = 0, .value = 1
ent + 2, stl + .stream, stl + .value
1
ldl + .value, bge + .loop_1
'-', ldl + .stream, put
ldl + .value, neg, stl + .value
.loop_1:
mli + 10
dup, ldl + .value, sub, ble + .loop_1
.loop_2:
fdi + 10
dup, beq + .break
dup, ldl + .value, swp, div, fdi + 1
adi + '0', ldl + .stream, put
dup, ldl + .value, swp, mod, stl + .value
bal + .loop_2
.break:
'\n', ldl + .stream, put
ret + 2
'''))
process.run()
self.assertEqual(process.read(), '-618584259\n')
if __name__ == '__main__':
unittest.main()
| 30.567442
| 73
| 0.47535
|
04108c498c2d17b9e336377e9bc997e4b3e758f1
| 6,587
|
py
|
Python
|
generators/app/templates/_biz/site.py
|
stillst/generator-htmlinit
|
a758c4039b8bd3bad79a9198dae3b2d32aac970d
|
[
"MIT"
] | null | null | null |
generators/app/templates/_biz/site.py
|
stillst/generator-htmlinit
|
a758c4039b8bd3bad79a9198dae3b2d32aac970d
|
[
"MIT"
] | null | null | null |
generators/app/templates/_biz/site.py
|
stillst/generator-htmlinit
|
a758c4039b8bd3bad79a9198dae3b2d32aac970d
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
__author__ = 'stgavrilov'
import os
import codecs
from shutil import copyfile
columns = {
u'index': {u'index': 0, u'name': u'№'},
u'page_title': {u'index': 0, u'name': u'Страница'},
u'template': {u'index': 0, u'name': u'Шаблон'},
u'title': {u'index': 0, u'name': u'Title'},
u'description': {u'index': 0, u'name': u'Description'},
u'extra_keys': {u'index': 0, u'name': u'Доп ключи'},
}
def open_site_map(sitemap_file):
input_thread = open(sitemap_file)
return input_thread
def read_table_data(line, page_info):
line_cells = line.split(',')
index = line_cells[columns[u'index'][u'index']]
page_title = line_cells[columns[u'page_title'][u'index']]
template = line_cells[columns[u'template'][u'index']]
extra_keys = line_cells[columns[u'extra_keys'][u'index']]
title = line_cells[columns[u'title'][u'index']]
description = line_cells[columns[u'description'][u'index']]
page_info[page_title] = {
columns[u'index'][u'name']:index,
columns[u'page_title'][u'name']:line_cells[columns[u'page_title'][u'index']].decode("utf-8"),
columns[u'template'][u'name']:template,
columns[u'extra_keys'][u'name']:extra_keys,
columns[u'title'][u'name']:title,
columns[u'description'][u'name']:description,
}
def read_table(input_thread):
page_info = {}
lines = input_thread.readlines()
for i, line in enumerate(lines):
if i == 0:
read_table_head(line)
continue
read_table_data(line, page_info)
return page_info
def read_table_head(line):
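    # Map each known column name to its index in the CSV header row.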
line_cells = line.strip().split(',')
for col_index, cell in enumerate(line_cells):
for key, value in columns.iteritems():
if cell == value[u'name'].encode('utf8'):
columns[key][u'index'] = col_index
def create_article_template_file_copy(template, template_dir_name, article_template_name):
file_copy_name = template_dir_name + '\\' + article_template_name + '_.txt'
copyfile(template, file_copy_name)
return file_copy_name
def insert_data_in_template(file_copy_name, page_info):
keyword = u'<keyword>'
keyword_from_table = page_info[u'Title'].decode('utf-8')
extra_keywords = u'<extra-keywords>'
extra_keywords_from_table = page_info[u'Доп ключи'].decode('utf-8')
brand = u'<brand>'
brand_from_table = page_info[u'Страница']
input_thread = codecs.open(file_copy_name, "r", "utf-8")
file = input_thread.read()
input_thread.close()
file = file.replace(keyword, keyword_from_table)
file = file.replace(extra_keywords, extra_keywords_from_table)
file = file.replace(brand, brand_from_table)
output_thread = codecs.open(file_copy_name, 'w', "utf-8")
output_thread.write(file)
output_thread.close()
def copy_template_to_dir_page(dir_name, page_info, templates):
for template in templates:
file_name = template.split('\\')[1]
template_dir_name = template.split('\\')[0]
article_template_name = file_name.replace('.txt', '')
article_template_name_from_table = page_info[u'Шаблон'].replace('.php','')
if article_template_name == article_template_name_from_table:
file_copy_name = create_article_template_file_copy(template, template_dir_name, article_template_name)
insert_data_in_template(file_copy_name, page_info)
copyfile(file_copy_name, dir_name + '\\' + file_name)
def create_article_templates(dir_name, page_info):
templates = []
article_templates_dir = 'article_templates'
create_template_from_one_template(article_templates_dir, page_info)
find_templates(article_templates_dir, templates)
copy_template_to_dir_page(dir_name, page_info, templates)
def create_template_from_one_template(article_templates_dir, page_info):
template_file_name = u'{}/{}'.format(article_templates_dir, u'_template.txt')
template_file_thread = open(template_file_name)
curent_template_file_name = u'{}/{}'.format(article_templates_dir, page_info[u'Шаблон'].replace('.php', '.txt'))
if not os.path.exists(curent_template_file_name):
with open(curent_template_file_name, 'w') as f:
f.write(template_file_thread.read())
def find_templates(dir_name, templates):
for name in os.listdir(dir_name):
path = os.path.join(dir_name, name)
if os.path.isfile(path):
templates.append(path)
else:
find_templates(path, templates)
def do_magick(pages_info):
results_dir = u'results'
if not os.path.exists(results_dir):
os.makedirs(results_dir)
for key, page_info in pages_info.iteritems():
index = page_info[u'№']
key = key.decode('utf-8').replace("/", "_").strip()
dir_name = u'{}/{}_{}'.format(results_dir, index, key)
dir_name = dir_name.replace('"', "").strip()
if not os.path.exists(dir_name):
os.makedirs(dir_name)
create_article_templates(dir_name, page_info)
def create_extra_keys(pages_info):
extra_keys_dir = u'extra_keys'
    extra_keys_templates = []
for name in os.listdir(extra_keys_dir):
path = os.path.join(extra_keys_dir, name)
if os.path.isfile(path):
extra_keys_templates.append(path)
for key, page_info in pages_info.iteritems():
for extra_keys_template in extra_keys_templates:
input_thread = codecs.open(extra_keys_template, "r", "utf-8")
file = input_thread.read()
file = file.lower()
input_thread.close()
extra_keys_template_name_from_table = page_info[u'Шаблон'].replace('.php', '')
extra_keys_template_name = extra_keys_template.split('\\')[1].replace('_extra_keys.txt', '')
if extra_keys_template_name_from_table == extra_keys_template_name:
file_copy = file
file_copy_name = extra_keys_template + '_result.txt'
brand = u'<brand>'
utf_key = key.decode('utf-8').strip()
file_copy = file_copy.replace(brand, utf_key)
output_thread = codecs.open(file_copy_name, 'a', "utf-8")
output_thread.write(file_copy)
output_thread.close()
if __name__ == '__main__':
input_thread = open_site_map('sitemap.csv')
pages_info = read_table(input_thread)
create_extra_keys(pages_info)
do_magick(pages_info)
input_thread.close()
| 36.392265
| 116
| 0.658418
|
dde8304b5b40b02918edcb83777a69ffa18cb44a
| 7,837
|
py
|
Python
|
typecode/typecode.py
|
Moby-C/TypingCodingResearch
|
b62ad116c52f1f89018d356f024320ce892f4fa5
|
[
"MIT"
] | 1
|
2022-03-08T02:09:39.000Z
|
2022-03-08T02:09:39.000Z
|
typecode/typecode.py
|
Moby-C/TypingCodingResearch
|
b62ad116c52f1f89018d356f024320ce892f4fa5
|
[
"MIT"
] | 1
|
2022-03-09T03:51:17.000Z
|
2022-03-16T02:35:41.000Z
|
typecode/typecode.py
|
Moby-C/TypingCodingResearch
|
b62ad116c52f1f89018d356f024320ce892f4fa5
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# -*- coding:utf-8 -*-
import os
import re
import argparse
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.image as mpimg
from io import StringIO
from io import open
from xpinyin import Pinyin
from pdfminer.converter import TextConverter
from pdfminer.layout import LAParams
from pdfminer.pdfinterp import PDFResourceManager, process_pdf
parser = argparse.ArgumentParser(description='Generate a keyboard heatmap from a PDF file.')
parser.add_argument('input_filename', metavar='input_file', type=str,
help='the name of the file to process')
parser.add_argument('output_filename', metavar='output_file', type=str,
help='the name of the image file to output')
args = parser.parse_args()
initials = ['b', 'p', 'm', 'f', 'd', 't', 'n', 'l',
'g', 'k', 'h', 'j', 'q', 'x',
'zz', 'cc', 'ss', 'r', 'z', 'c', 's',
'y', 'w']
initials_single = [
[' ba ', ' bei ', ' bu '],
[' pa ', ' pi ', ' po '],
[' me ', ' mei ', ' men ', ' mian ', ' ming '],
[' fa ', ' fang ', ' fen '],
[' da ', ' dan ', ' dang ', ' dao ', ' de ', ' di ', ' dong ', ' du ', ' dui ', ' duo '],
[' ta ', ' tian ', ' tong ', ' tou '],
[' na ', ' neng ', ' ni ', ' nian ', ' nv '],
[' lai ', ' lao ', ' le ', ' li ', ' liang '],
[' gan ', ' gao ', ' ge ', ' gei ', ' guo '],
[' kai ', ' kan ', ' ke '],
[' hao ', ' he ', ' hen ', ' hou ', ' hua ', ' huan ', ' hui ', ' huo '],
[' ji ', ' jia ', ' jian ', ' jin ', ' jing ', ' jiu '],
[' qi ', ' qian ', ' qin ', ' qing ', ' qu '],
[' xia ', ' xian ', ' xiang ', ' xiao ', ' xie ', ' xin ', ' xing ', ' xue '],
[' zhao ', ' zhe ', ' zheng ', ' zhi ', ' zhong '],
[' chang ', ' cheng ', ' chu '],
[' shang ', ' shen ', ' sheng ', ' shi ', ' shou ', ' shuo '],
[' ran ', ' ren ', ' ri ', ' ru '],
[' zai ', ' zi ', ' zong ', ' zui ', ' zuo '],
[' ci ', ' cong '],
[' si ', ' suo '],
[' yang ', ' yao ', ' ye ', ' yi ', ' yin ', ' yong ', ' you ', ' yu '],
[' wei ', ' wo ', ' wu ']
]
initials_values = [
[0, 0, 0],
[0, 0, 0],
[0, 0, 0, 0, 0],
[0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0],
[0, 0, 0, 0, 0],
[0, 0, 0, 0, 0],
[0, 0, 0, 0, 0],
[0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0],
[0, 0, 0],
[0, 0, 0, 0, 0, 0],
[0, 0, 0, 0],
[0, 0, 0, 0, 0],
[0, 0],
[0, 0],
[0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0]
]
initials_dict = {' zh': ' zz', ' ch': ' cc', ' sh': ' ss'}
finals_dict = {'ng ': 'm ', 'uan ': 'on ', 'iao ': 'eo '}
sel_array = ['vp', 'vk', 'vr', 'vf', 'vq', 'vw', 'vt', 'vb', 'vh', 'vx']
# Temporary storage locations for intermediate text and image files
file_text_separated = './data/text_separated.txt'
file_text_encode = './data/text_encode.txt'
file_text = './data/text.txt'
file_img_org = './data/img_org.png'
def read_pdf(pdf):
# resource manager
rsrcmgr = PDFResourceManager()
retstr = StringIO()
laparams = LAParams()
# device
device = TextConverter(rsrcmgr, retstr, laparams=laparams)
process_pdf(rsrcmgr, device, pdf)
device.close()
content = retstr.getvalue()
retstr.close()
    # Get the text and extract the Chinese characters
lines = str(content)
chinese = ''.join(re.findall('[\u4e00-\u9fef]', lines))
return chinese
def cal_efficiency(len_ch, len_ty):
eta = len_ch / len_ty
    print('Length of Chinese characters: %d' % len_ch)
    print('Length of type coding: %d' % len_ty)
print('Input efficiency: %.4f' % eta)
def cal_balance(stat):
stat_sum = 0
stat_word = []
stat_list = []
for i in range(0, 26):
stat_word.append(stat[i][0])
stat_list.append(stat[i][1])
stat_sum += stat[i][1]
for i in range(0, 26):
stat_list[i] /= stat_sum
stat_std = np.std(stat_list, ddof=1)
print('Balance: %.4f' % stat_std)
    # Calculate the balance of the rearranged layout
stat_H = stat_list[:7]
stat_L = stat_list[7:]
stat_H_std = np.std(stat_H, ddof=1)
stat_L_std = np.std(stat_L, ddof=1)
stat_R_std = np.sqrt(stat_H_std * stat_H_std + stat_L_std * stat_L_std)
print('Balance_r: %.4f' % stat_R_std)
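# A tiny illustrative check of the balance metric above (hypothetical counts,
# not data produced by this script): an even 10/10/10 split over three keys
# should give a smaller sample standard deviation than a skewed 25/4/1 split.
def _balance_sanity_check():
    even = np.std([10 / 30, 10 / 30, 10 / 30], ddof=1)
    skewed = np.std([25 / 30, 4 / 30, 1 / 30], ddof=1)
    return even < skewed  # expected to be True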
def draw_heatmap(file_output):
cmd = 'tapmap ' + file_text + ' ' + file_output + ' -c Blues'
res = os.popen(cmd).readlines()
# print(res)
def re_encode(org):
res = org
for i in range(len(initials_single)):
for j in range(len(initials_single[i])):
initials_values[i][j] = res.count(initials_single[i][j])
res = res.replace(
initials_single[i][initials_values[i].index(max(initials_values[i]))],
' ' + initials[i] + ' ')
print('Replace with ' + initials[i] + ' :' +
initials_single[i][initials_values[i].index(max(initials_values[i]))])
for key, value in initials_dict.items():
res = res.replace(key, value)
        print('Replace with ' + value + ' : ' + key)
for key, value in finals_dict.items():
res = res.replace(key, value)
print('Replace with ' + value + ': ' + key)
return res
def re_replace(org):
words = org.split()
counts = {}
for word in words:
if len(word) >= 3:
counts[word] = counts.get(word, 0) + 1
items = list(counts.items())
items.sort(key=lambda x: x[1], reverse=True)
res = org
for i in range(10):
word, count = items[i]
# print("{0:<10}{1:>5}".format(word, count))
res = res.replace(' ' + word + ' ', ' ' + sel_array[i] + ' ')
print('Replace with ' + sel_array[i] + ' : ' + word)
return res
def main():
    # Letter usage statistics
stat = {}
stat_re = {}
    # Parse arguments
file_input = args.input_filename
if file_input is None:
parser.error('Please specify the filename of the PDF file to process.')
file_output = args.output_filename
if file_output is None:
parser.error('Please specify the name of the image to generate.')
    # Read the PDF
with open(file_input, 'rb') as my_pdf:
text = read_pdf(my_pdf)
    # Parse and store the pinyin
result_separated = ' ' + Pinyin().get_pinyin(text, ' ') + ' '
with open(file_text_separated, 'w') as f:
f.write(result_separated)
print('\n\n====== Original ======\n')
result_org = result_separated
with open(file_text_encode, 'w') as f:
f.write(result_org)
result = ''.join(re.findall('[a-z]', result_org))
with open(file_text, 'w') as f:
f.write(result)
    # Count letter frequencies
for i in range(ord('a'), ord('z') + 1):
stat[chr(i)] = result.count(chr(i))
stat = sorted(stat.items(), key=lambda x: x[1], reverse=True)
print(stat)
    # Calculate input efficiency
cal_efficiency(len(text), len(result))
    # Calculate balance
cal_balance(stat)
    # Draw the heatmap
draw_heatmap(file_img_org)
print('\n\n====== Re-encode ======\n')
    # Re-encode
result_re = re_encode(result_separated)
result_re = re_replace(result_re)
with open(file_text_encode, 'w') as f:
f.write(result_re)
result = ''.join(re.findall('[a-z]', result_re))
with open(file_text, 'w') as f:
f.write(result)
    # Count letter frequencies
for i in range(ord('a'), ord('z') + 1):
stat_re[chr(i)] = result.count(chr(i))
stat_re = sorted(stat_re.items(), key=lambda x: x[1], reverse=True)
print(stat_re)
    # Calculate input efficiency
cal_efficiency(len(text), len(result))
    # Calculate balance
cal_balance(stat_re)
    # Draw the heatmap
draw_heatmap(file_output)
    # Display the images
img1 = mpimg.imread(file_img_org)
img2 = mpimg.imread(file_output)
plt.subplot(2, 1, 1)
plt.imshow(img1)
plt.title('Original')
plt.axis('off')
plt.subplot(2, 1, 2)
plt.imshow(img2)
plt.title('Improved')
plt.axis('off')
plt.show()
if __name__ == '__main__':
main()
| 27.69258
| 93
| 0.532602
|
d56081bd2dc686f5d9e83b7cc0aaeaa7fc0c7bb9
| 17,716
|
py
|
Python
|
plugins/modules/oci_data_flow_private_endpoint.py
|
A7rMtWE57x/oci-ansible-collection
|
80548243a085cd53fd5dddaa8135b5cb43612c66
|
[
"Apache-2.0"
] | null | null | null |
plugins/modules/oci_data_flow_private_endpoint.py
|
A7rMtWE57x/oci-ansible-collection
|
80548243a085cd53fd5dddaa8135b5cb43612c66
|
[
"Apache-2.0"
] | null | null | null |
plugins/modules/oci_data_flow_private_endpoint.py
|
A7rMtWE57x/oci-ansible-collection
|
80548243a085cd53fd5dddaa8135b5cb43612c66
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/python
# Copyright (c) 2017, 2020 Oracle and/or its affiliates.
# This software is made available to you under the terms of the GPL 3.0 license or the Apache 2.0 license.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
# Apache License v2.0
# See LICENSE.TXT for details.
# GENERATED FILE - DO NOT EDIT - MANUAL CHANGES WILL BE OVERWRITTEN
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {
"metadata_version": "1.1",
"status": ["preview"],
"supported_by": "community",
}
DOCUMENTATION = """
---
module: oci_data_flow_private_endpoint
short_description: Manage a PrivateEndpoint resource in Oracle Cloud Infrastructure
description:
- This module allows the user to create, update and delete a PrivateEndpoint resource in Oracle Cloud Infrastructure
- For I(state=present), creates a private endpoint to be used by an application.
version_added: "2.9"
author: Oracle (@oracle)
options:
compartment_id:
description:
- The OCID of a compartment.
- Required for create using I(state=present).
- Required for update when environment variable C(OCI_USE_NAME_AS_IDENTIFIER) is set.
- Required for delete when environment variable C(OCI_USE_NAME_AS_IDENTIFIER) is set.
type: str
defined_tags:
description:
- "Defined tags for this resource. Each key is predefined and scoped to a namespace. For more information, see L(Resource
Tags,https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm).
Example: `{\\"Operations\\": {\\"CostCenter\\": \\"42\\"}}`"
- This parameter is updatable.
type: dict
description:
description:
- A user-friendly description. Avoid entering confidential information.
- This parameter is updatable.
type: str
display_name:
description:
- A user-friendly name. It does not have to be unique. Avoid entering confidential information.
- Required for create, update, delete when environment variable C(OCI_USE_NAME_AS_IDENTIFIER) is set.
- This parameter is updatable when C(OCI_USE_NAME_AS_IDENTIFIER) is not set.
type: str
aliases: ["name"]
dns_zones:
description:
- "An array of DNS zone names.
Example: `[ \\"app.examplecorp.com\\", \\"app.examplecorp2.com\\" ]`"
- Required for create using I(state=present).
- This parameter is updatable.
type: list
freeform_tags:
description:
- "Free-form tags for this resource. Each tag is a simple key-value pair with no predefined name, type, or namespace.
For more information, see L(Resource Tags,https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm).
Example: `{\\"Department\\": \\"Finance\\"}`"
- This parameter is updatable.
type: dict
max_host_count:
description:
- The maximum number of hosts to be accessed through the private endpoint. This value is used
to calculate the relevant CIDR block and should be a multiple of 256. If the value is not a
multiple of 256, it is rounded up to the next multiple of 256. For example, 300 is rounded up
to 512.
- This parameter is updatable.
type: int
nsg_ids:
description:
- An array of network security group OCIDs.
- This parameter is updatable.
type: list
subnet_id:
description:
- The OCID of a subnet.
- Required for create using I(state=present).
type: str
private_endpoint_id:
description:
- The unique ID for a private endpoint.
- Required for update using I(state=present) when environment variable C(OCI_USE_NAME_AS_IDENTIFIER) is not set.
- Required for delete using I(state=absent) when environment variable C(OCI_USE_NAME_AS_IDENTIFIER) is not set.
type: str
aliases: ["id"]
state:
description:
- The state of the PrivateEndpoint.
- Use I(state=present) to create or update a PrivateEndpoint.
- Use I(state=absent) to delete a PrivateEndpoint.
type: str
required: false
default: 'present'
choices: ["present", "absent"]
extends_documentation_fragment: [ oracle.oci.oracle, oracle.oci.oracle_creatable_resource, oracle.oci.oracle_wait_options ]
"""
EXAMPLES = """
- name: Create private_endpoint
oci_data_flow_private_endpoint:
compartment_id: compartmentId
display_name: pe_1234
dns_zones:
- app.examplecorp.com
- oracle.com
max_host_count: 256
nsg_ids:
- nsgId
subnet_id: subnetId
- name: Update private_endpoint using name (when environment variable OCI_USE_NAME_AS_IDENTIFIER is set)
oci_data_flow_private_endpoint:
display_name: PE to DB32002
dns_zones:
- app.examplecorp.com
- oracle.com
nsg_ids:
- nsgId
subnet_id: subnetId
- name: Update private_endpoint
oci_data_flow_private_endpoint:
private_endpoint_id: ocid1.privateendpoint.oc1..xxxxxxEXAMPLExxxxxx
- name: Delete private_endpoint
oci_data_flow_private_endpoint:
private_endpoint_id: ocid1.privateendpoint.oc1..xxxxxxEXAMPLExxxxxx
state: absent
- name: Delete private_endpoint using name (when environment variable OCI_USE_NAME_AS_IDENTIFIER is set)
oci_data_flow_private_endpoint:
compartment_id: compartmentId
display_name: pe_1234
state: absent
"""
RETURN = """
private_endpoint:
description:
- Details of the PrivateEndpoint resource acted upon by the current operation
returned: on success
type: complex
contains:
compartment_id:
description:
- The OCID of a compartment.
returned: on success
type: string
sample: ocid1.compartment.oc1..xxxxxxEXAMPLExxxxxx
defined_tags:
description:
- "Defined tags for this resource. Each key is predefined and scoped to a namespace. For more information, see L(Resource
Tags,https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm).
Example: `{\\"Operations\\": {\\"CostCenter\\": \\"42\\"}}`"
returned: on success
type: dict
sample: {'Operations': {'CostCenter': 'US'}}
description:
description:
- A user-friendly description. Avoid entering confidential information.
returned: on success
type: string
sample: description_example
display_name:
description:
- A user-friendly name. It does not have to be unique. Avoid entering confidential information.
returned: on success
type: string
sample: display_name_example
dns_zones:
description:
- "An array of DNS zone names.
Example: `[ \\"app.examplecorp.com\\", \\"app.examplecorp2.com\\" ]`"
returned: on success
type: list
sample: []
freeform_tags:
description:
- "Free-form tags for this resource. Each tag is a simple key-value pair with no predefined name, type, or namespace.
For more information, see L(Resource Tags,https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm).
Example: `{\\"Department\\": \\"Finance\\"}`"
returned: on success
type: dict
sample: {'Department': 'Finance'}
id:
description:
- The OCID of a private endpoint.
returned: on success
type: string
sample: ocid1.resource.oc1..xxxxxxEXAMPLExxxxxx
lifecycle_details:
description:
- The detailed messages about the lifecycle state.
returned: on success
type: string
sample: lifecycle_details_example
lifecycle_state:
description:
- The current state of this private endpoint.
returned: on success
type: string
sample: CREATING
max_host_count:
description:
- The maximum number of hosts to be accessed through the private endpoint. This value is used
to calculate the relevant CIDR block and should be a multiple of 256. If the value is not a
multiple of 256, it is rounded up to the next multiple of 256. For example, 300 is rounded up
to 512.
returned: on success
type: int
sample: 56
nsg_ids:
description:
- An array of network security group OCIDs.
returned: on success
type: list
sample: []
owner_principal_id:
description:
- The OCID of the user who created the resource.
returned: on success
type: string
sample: ocid1.ownerprincipal.oc1..xxxxxxEXAMPLExxxxxx
owner_user_name:
description:
- The username of the user who created the resource. If the username of the owner does not exist,
`null` will be returned and the caller should refer to the ownerPrincipalId value instead.
returned: on success
type: string
sample: owner_user_name_example
subnet_id:
description:
- The OCID of a subnet.
returned: on success
type: string
sample: ocid1.subnet.oc1..xxxxxxEXAMPLExxxxxx
time_created:
description:
- "The date and time a application was created, expressed in L(RFC 3339,https://tools.ietf.org/html/rfc3339) timestamp format.
Example: `2018-04-03T21:10:29.600Z`"
returned: on success
type: string
sample: 2018-04-03T21:10:29.600Z
time_updated:
description:
- "The date and time a application was updated, expressed in L(RFC 3339,https://tools.ietf.org/html/rfc3339) timestamp format.
Example: `2018-04-03T21:10:29.600Z`"
returned: on success
type: string
sample: 2018-04-03T21:10:29.600Z
sample: {
"compartment_id": "ocid1.compartment.oc1..xxxxxxEXAMPLExxxxxx",
"defined_tags": {'Operations': {'CostCenter': 'US'}},
"description": "description_example",
"display_name": "display_name_example",
"dns_zones": [],
"freeform_tags": {'Department': 'Finance'},
"id": "ocid1.resource.oc1..xxxxxxEXAMPLExxxxxx",
"lifecycle_details": "lifecycle_details_example",
"lifecycle_state": "CREATING",
"max_host_count": 56,
"nsg_ids": [],
"owner_principal_id": "ocid1.ownerprincipal.oc1..xxxxxxEXAMPLExxxxxx",
"owner_user_name": "owner_user_name_example",
"subnet_id": "ocid1.subnet.oc1..xxxxxxEXAMPLExxxxxx",
"time_created": "2018-04-03T21:10:29.600Z",
"time_updated": "2018-04-03T21:10:29.600Z"
}
"""
from ansible.module_utils.basic import AnsibleModule
from ansible_collections.oracle.oci.plugins.module_utils import (
oci_common_utils,
oci_wait_utils,
)
from ansible_collections.oracle.oci.plugins.module_utils.oci_resource_utils import (
OCIResourceHelperBase,
get_custom_class,
)
try:
from oci.data_flow import DataFlowClient
from oci.data_flow.models import CreatePrivateEndpointDetails
from oci.data_flow.models import UpdatePrivateEndpointDetails
HAS_OCI_PY_SDK = True
except ImportError:
HAS_OCI_PY_SDK = False
class DataFlowPrivateEndpointHelperGen(OCIResourceHelperBase):
"""Supported operations: create, update, get, list and delete"""
def get_module_resource_id_param(self):
return "private_endpoint_id"
def get_module_resource_id(self):
return self.module.params.get("private_endpoint_id")
def get_get_fn(self):
return self.client.get_private_endpoint
def get_resource(self):
return oci_common_utils.call_with_backoff(
self.client.get_private_endpoint,
private_endpoint_id=self.module.params.get("private_endpoint_id"),
)
def get_required_kwargs_for_list(self):
required_list_method_params = [
"compartment_id",
]
return dict(
(param, self.module.params[param]) for param in required_list_method_params
)
def get_optional_kwargs_for_list(self):
optional_list_method_params = ["display_name"]
return dict(
(param, self.module.params[param])
for param in optional_list_method_params
if self.module.params.get(param) is not None
and (
self._use_name_as_identifier()
or (
not self.module.params.get("key_by")
or param in self.module.params.get("key_by")
)
)
)
def list_resources(self):
required_kwargs = self.get_required_kwargs_for_list()
optional_kwargs = self.get_optional_kwargs_for_list()
kwargs = oci_common_utils.merge_dicts(required_kwargs, optional_kwargs)
return oci_common_utils.list_all_resources(
self.client.list_private_endpoints, **kwargs
)
def get_create_model_class(self):
return CreatePrivateEndpointDetails
def create_resource(self):
create_details = self.get_create_model()
return oci_wait_utils.call_and_wait(
call_fn=self.client.create_private_endpoint,
call_fn_args=(),
call_fn_kwargs=dict(create_private_endpoint_details=create_details,),
waiter_type=oci_wait_utils.WORK_REQUEST_WAITER_KEY,
operation=oci_common_utils.CREATE_OPERATION_KEY,
waiter_client=self.get_waiter_client(),
resource_helper=self,
wait_for_states=oci_common_utils.get_work_request_completed_states(),
)
def get_update_model_class(self):
return UpdatePrivateEndpointDetails
def update_resource(self):
update_details = self.get_update_model()
return oci_wait_utils.call_and_wait(
call_fn=self.client.update_private_endpoint,
call_fn_args=(),
call_fn_kwargs=dict(
update_private_endpoint_details=update_details,
private_endpoint_id=self.module.params.get("private_endpoint_id"),
),
waiter_type=oci_wait_utils.WORK_REQUEST_WAITER_KEY,
operation=oci_common_utils.UPDATE_OPERATION_KEY,
waiter_client=self.get_waiter_client(),
resource_helper=self,
wait_for_states=oci_common_utils.get_work_request_completed_states(),
)
def delete_resource(self):
return oci_wait_utils.call_and_wait(
call_fn=self.client.delete_private_endpoint,
call_fn_args=(),
call_fn_kwargs=dict(
private_endpoint_id=self.module.params.get("private_endpoint_id"),
),
waiter_type=oci_wait_utils.WORK_REQUEST_WAITER_KEY,
operation=oci_common_utils.DELETE_OPERATION_KEY,
waiter_client=self.get_waiter_client(),
resource_helper=self,
wait_for_states=oci_common_utils.get_work_request_completed_states(),
)
DataFlowPrivateEndpointHelperCustom = get_custom_class(
"DataFlowPrivateEndpointHelperCustom"
)
class ResourceHelper(
DataFlowPrivateEndpointHelperCustom, DataFlowPrivateEndpointHelperGen
):
pass
def main():
module_args = oci_common_utils.get_common_arg_spec(
supports_create=True, supports_wait=True
)
module_args.update(
dict(
compartment_id=dict(type="str"),
defined_tags=dict(type="dict"),
description=dict(type="str"),
display_name=dict(aliases=["name"], type="str"),
dns_zones=dict(type="list"),
freeform_tags=dict(type="dict"),
max_host_count=dict(type="int"),
nsg_ids=dict(type="list"),
subnet_id=dict(type="str"),
private_endpoint_id=dict(aliases=["id"], type="str"),
state=dict(type="str", default="present", choices=["present", "absent"]),
)
)
module = AnsibleModule(argument_spec=module_args, supports_check_mode=True)
if not HAS_OCI_PY_SDK:
module.fail_json(msg="oci python sdk required for this module.")
resource_helper = ResourceHelper(
module=module,
resource_type="private_endpoint",
service_client_class=DataFlowClient,
namespace="data_flow",
)
result = dict(changed=False)
if resource_helper.is_delete_using_name():
result = resource_helper.delete_using_name()
elif resource_helper.is_delete():
result = resource_helper.delete()
elif resource_helper.is_update_using_name():
result = resource_helper.update_using_name()
elif resource_helper.is_update():
result = resource_helper.update()
elif resource_helper.is_create():
result = resource_helper.create()
module.exit_json(**result)
if __name__ == "__main__":
main()
| 38.181034
| 142
| 0.64089
|
6d6b4c6b2501e57e1b75e54b91a73f24b9416d76
| 25,937
|
py
|
Python
|
svtyper/classic.py
|
ottov/svtyper-py3
|
e6dd991798479953cc0b2cc2a2a46555c7bb2df3
|
[
"MIT"
] | null | null | null |
svtyper/classic.py
|
ottov/svtyper-py3
|
e6dd991798479953cc0b2cc2a2a46555c7bb2df3
|
[
"MIT"
] | null | null | null |
svtyper/classic.py
|
ottov/svtyper-py3
|
e6dd991798479953cc0b2cc2a2a46555c7bb2df3
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
import logging
import pysam
import json
import argparse, sys, os.path
import math, time
from argparse import RawTextHelpFormatter
import svtyper.version
from svtyper.parsers import Vcf, Variant, Sample, confidence_interval
from svtyper.utils import *
from svtyper.statistics import bayes_gt
# --------------------------------------
# define functions
def get_args():
parser = argparse.ArgumentParser(formatter_class=RawTextHelpFormatter, description="\
svtyper\n\
author: " + svtyper.version.__author__ + "\n\
version: " + svtyper.version.__version__ + "\n\
description: Compute genotype of structural variants based on breakpoint depth")
parser.add_argument('-i', '--input_vcf', metavar='FILE', type=argparse.FileType('r'), default=None, help='VCF input (default: stdin)')
parser.add_argument('-o', '--output_vcf', metavar='FILE', type=argparse.FileType('w'), default=sys.stdout, help='output VCF to write (default: stdout)')
parser.add_argument('-B', '--bam', metavar='FILE', type=str, required=True, help='BAM or CRAM file(s), comma-separated if genotyping multiple samples')
parser.add_argument('-T', '--ref_fasta', metavar='FILE', type=str, required=False, default=None, help='Indexed reference FASTA file (recommended for reading CRAM files)')
parser.add_argument('-S', '--split_bam', type=str, required=False, help=argparse.SUPPRESS)
parser.add_argument('-l', '--lib_info', metavar='FILE', dest='lib_info_path', type=str, required=False, default=None, help='create/read JSON file of library information')
parser.add_argument('-m', '--min_aligned', metavar='INT', type=int, required=False, default=20, help='minimum number of aligned bases to consider read as evidence [20]')
parser.add_argument('-n', dest='num_samp', metavar='INT', type=int, required=False, default=1000000, help='number of reads to sample from BAM file for building insert size distribution [1000000]')
parser.add_argument('-q', '--sum_quals', action='store_true', required=False, help='add genotyping quality to existing QUAL (default: overwrite QUAL field)')
parser.add_argument('--max_reads', metavar='INT', type=int, default=None, required=False, help='maximum number of reads to assess at any variant (reduces processing time in high-depth regions, default: unlimited)')
    parser.add_argument('--max_ci_dist', metavar='INT', type=int, default=1e10, required=False, help='maximum size of a confidence interval before 95%% CI is used instead (default: 1e10)')
parser.add_argument('--split_weight', metavar='FLOAT', type=float, required=False, default=1, help='weight for split reads [1]')
parser.add_argument('--disc_weight', metavar='FLOAT', type=float, required=False, default=1, help='weight for discordant paired-end reads [1]')
parser.add_argument('-w', '--write_alignment', metavar='FILE', dest='alignment_outpath', type=str, required=False, default=None, help='write relevant reads to BAM file')
parser.add_argument('--debug', action='store_true', help=argparse.SUPPRESS)
parser.add_argument('--verbose', action='store_true', default=False, help='Report status updates')
# parse the arguments
args = parser.parse_args()
# if no input, check if part of pipe and if so, read stdin.
if args.input_vcf == None:
if not sys.stdin.isatty():
args.input_vcf = sys.stdin
# send back the user input
return args
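# Example invocation (illustrative only; the file names are hypothetical and it
# assumes the package's console entry point is installed as `svtyper`):
#
#   svtyper -i candidates.vcf -B NA12878.bam -l NA12878.json -o genotyped.vcf
#
# This genotypes the SVs in candidates.vcf against NA12878.bam, caching library
# metrics in NA12878.json, and writes the annotated VCF to genotyped.vcf.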
# methods to grab reads from region of interest in BAM file
def gather_all_reads(sample, chromA, posA, ciA, chromB, posB, ciB, z, max_reads):
# grab batch of reads from both sides of breakpoint
read_batch = {}
read_batch, many = gather_reads(sample, chromA, posA, ciA, z, read_batch, max_reads)
if many:
return {}, True
read_batch, many = gather_reads(sample, chromB, posB, ciB, z, read_batch, max_reads)
if many:
return {}, True
return read_batch, many
def gather_reads(sample,
chrom, pos, ci,
z,
fragment_dict,
max_reads):
# the distance to the left and right of the breakpoint to scan
# (max of mean + z standard devs over all of a sample's libraries)
fetch_flank = sample.get_fetch_flank(z)
chrom_length = sample.bam.lengths[sample.bam.gettid(chrom)]
many = False
for i, read in enumerate(sample.bam.fetch(chrom,
max(pos + ci[0] - fetch_flank, 0),
min(pos + ci[1] + fetch_flank, chrom_length))):
if read.is_unmapped or read.is_duplicate:
continue
lib = sample.get_lib(read.get_tag('RG'))
if lib.name not in sample.active_libs:
continue
# read.query_sequence = "*"
# read.query_qualities = "*"
if max_reads is not None and i > max_reads:
many = True
break
if read.query_name in fragment_dict:
fragment_dict[read.query_name].add_read(read)
else:
fragment_dict[read.query_name] = SamFragment(read, lib)
return fragment_dict, many
# ==================================================
# Genotyping function
# ==================================================
def sv_genotype(bam_string,
vcf_in,
vcf_out,
min_aligned,
split_weight,
disc_weight,
num_samp,
lib_info_path,
debug,
alignment_outpath,
ref_fasta,
sum_quals,
max_reads,
max_ci_dist):
# parse the comma separated inputs
bam_list = []
for b in bam_string.split(','):
if b.encode('UTF-8').endswith('.bam'.encode('ascii')):
bam_list.append(pysam.AlignmentFile(b, mode='rb'))
elif b.encode('UTF-8').endswith('.cram'.encode('ascii')):
bam_list.append(pysam.AlignmentFile(b,
mode='rc',reference_filename=ref_fasta,format_options=[b"required_fields=7167"]))
else:
sys.stderr.write('Error: %s is not a valid alignment file (*.bam or *.cram)\n' % b)
exit(1)
min_lib_prevalence = 1e-3 # only consider libraries that constitute at least this fraction of the BAM
# parse lib_info_path JSON
lib_info = None
if lib_info_path is not None and os.path.isfile(lib_info_path):
lib_info_file = open(lib_info_path, 'r')
lib_info = json.load(lib_info_file)
if vcf_in is None:
sys.stderr.write('Warning: VCF not found.\n')
# build the sample libraries, either from the lib_info JSON or empirically from the BAMs
sample_list = list()
for i in range(len(bam_list)):
if lib_info is None:
logging.info('Calculating library metrics from %s...' % bam_list[i].filename)
sample = Sample.from_bam(bam_list[i], num_samp, min_lib_prevalence)
else:
logging.info('Reading library metrics from %s...' % lib_info_path)
sample = Sample.from_lib_info(bam_list[i], lib_info, min_lib_prevalence)
sample.set_exp_seq_depth(min_aligned)
sample.set_exp_spanning_depth(min_aligned)
sample_list.append(sample)
logging.info('done')
# diagnostic dump of relevant BAM reads
if alignment_outpath is not None:
# create a BAM file of the reads used for genotyping
out_bam_written_reads = set()
template_bam = pysam.AlignmentFile(bam_string.split(',')[0], 'rb')
out_bam = pysam.AlignmentFile(alignment_outpath, 'wb', template_bam)
template_bam.close()
# write the JSON for each sample's libraries
if lib_info_path is not None and not os.path.isfile(lib_info_path):
logging.info('Writing library metrics to %s...' % lib_info_path)
lib_info_file = open(lib_info_path, 'w')
write_sample_json(sample_list, lib_info_file)
lib_info_file.close()
logging.info('done')
# quit early if VCF absent
if vcf_in is None:
if alignment_outpath is not None:
out_bam.close()
return
# set variables for genotyping
z = 3
split_slop = 3 # amount of slop around breakpoint to count splitters
in_header = True
header = []
breakend_dict = {} # cache to hold unmatched generic breakends for genotyping
vcf = Vcf()
# read input VCF
for line in vcf_in:
if in_header:
if line[0] == '#':
header.append(line)
if line[1] != '#':
vcf_samples = line.rstrip().split('\t')[9:]
continue
else:
in_header = False
vcf.add_header(header)
# if detailed:
vcf.add_custom_svtyper_headers()
# add the samples in the BAM files to the VCF output
for sample in sample_list:
if sample.name not in vcf.sample_list:
vcf.add_sample(sample.name)
# write the output header
vcf_out.write(vcf.get_header() + '\n')
v = line.rstrip().split('\t')
var = Variant(v, vcf)
var_length = None # var_length should be None except for deletions
if not sum_quals:
var.qual = 0
# genotype generic breakends
try:
svtype = var.get_info('SVTYPE')
except KeyError:
sys.stderr.write('Warning: SVTYPE missing at variant %s. Skipping.\n' % (var.var_id))
vcf_out.write(var.get_var_string() + '\n')
continue
# print original line if unsupported svtype
if svtype not in ('BND', 'DEL', 'DUP', 'INV'):
sys.stderr.write('Warning: Unsupported SVTYPE at variant %s (%s). Skipping.\n' % (var.var_id, svtype))
vcf_out.write(var.get_var_string() + '\n')
continue
if svtype == 'BND':
if var.info['MATEID'] in breakend_dict:
var2 = var
var = breakend_dict[var.info['MATEID']]
chromA = var.chrom
chromB = var2.chrom
posA = var.pos
posB = var2.pos
# confidence intervals
ciA = confidence_interval(var, 'CIPOS', 'CIPOS95', max_ci_dist)
ciB = confidence_interval(var2, 'CIPOS', 'CIPOS95', max_ci_dist)
# infer the strands from the alt allele
if var.alt[-1] == '[' or var.alt[-1] == ']':
o1_is_reverse = False
else: o1_is_reverse = True
if var2.alt[-1] == '[' or var2.alt[-1] == ']':
o2_is_reverse = False
else: o2_is_reverse = True
# remove the BND from the breakend_dict
# to free up memory
del breakend_dict[var.var_id]
else:
breakend_dict[var.var_id] = var
continue
else:
chromA = var.chrom
chromB = var.chrom
posA = var.pos
posB = int(var.get_info('END'))
# confidence intervals
ciA = confidence_interval(var, 'CIPOS', 'CIPOS95', max_ci_dist)
ciB = confidence_interval(var, 'CIEND', 'CIEND95', max_ci_dist)
if svtype == 'DEL':
var_length = posB - posA
o1_is_reverse, o2_is_reverse = False, True
elif svtype == 'DUP':
o1_is_reverse, o2_is_reverse = True, False
elif svtype == 'INV':
o1_is_reverse, o2_is_reverse = False, False
# increment the negative strand values (note position in VCF should be the base immediately left of the breakpoint junction)
if o1_is_reverse: posA += 1
if o2_is_reverse: posB += 1
for sample in sample_list:
# grab reads from both sides of breakpoint
read_batch, many = gather_all_reads(sample, chromA, posA, ciA, chromB, posB, ciB, z, max_reads)
if many:
var.genotype(sample.name).set_format('GT', './.')
continue
# initialize counts to zero
ref_span, alt_span = 0, 0
ref_seq, alt_seq = 0, 0
alt_clip = 0
# ref_ciA = ciA
# ref_ciB = ciB
ref_ciA = [0,0]
ref_ciB = [0,0]
for query_name in sorted(read_batch.keys()):
fragment = read_batch[query_name]
# boolean on whether to write the fragment
write_fragment = False
# -------------------------------------
# Check for split-read evidence
# -------------------------------------
# get reference sequences
for read in fragment.primary_reads:
is_ref_seq_A = fragment.is_ref_seq(read, var, chromA, posA, ciA, min_aligned)
is_ref_seq_B = fragment.is_ref_seq(read, var, chromB, posB, ciB, min_aligned)
if (is_ref_seq_A or is_ref_seq_B):
p_reference = prob_mapq(read)
ref_seq += p_reference
read.set_tag('XV', 'R')
write_fragment = True
# get non-reference split-read support
for split in fragment.split_reads:
split_lr = split.is_split_straddle(chromA, posA, ciA,
chromB, posB, ciB,
o1_is_reverse, o2_is_reverse,
svtype, split_slop)
# p_alt = prob_mapq(split.query_left) * prob_mapq(split.query_right)
p_alt = (prob_mapq(split.query_left) * split_lr[0] + prob_mapq(split.query_right) * split_lr[1]) / 2.0
if split.is_soft_clip:
alt_clip += p_alt
else:
alt_seq += p_alt
if p_alt > 0:
split.tag_split(p_alt)
write_fragment = True
# -------------------------------------
# Check for paired-end evidence
# -------------------------------------
# tally spanning alternate pairs
if svtype == 'DEL' and posB - posA < 2 * fragment.lib.sd:
alt_straddle = False
else:
alt_straddle = fragment.is_pair_straddle(chromA, posA, ciA,
chromB, posB, ciB,
o1_is_reverse, o2_is_reverse,
min_aligned,
fragment.lib)
# check both sides if inversion (perhaps should do this for BND as well?)
if svtype in ('INV'):
alt_straddle_reciprocal = fragment.is_pair_straddle(chromA, posA, ciA,
chromB, posB, ciB,
not o1_is_reverse,
not o2_is_reverse,
min_aligned,
fragment.lib)
else:
alt_straddle_reciprocal = False
if alt_straddle or alt_straddle_reciprocal:
if svtype == 'DEL':
p_conc = fragment.p_concordant(var_length)
if p_conc is not None:
p_alt = (1 - p_conc) * prob_mapq(fragment.readA) * prob_mapq(fragment.readB)
alt_span += p_alt
# # since an alt straddler is by definition also a reference straddler,
# # we can bail out early here to save some time
# p_reference = p_conc * prob_mapq(fragment.readA) * prob_mapq(fragment.readB)
# ref_span += p_reference
# continue
fragment.tag_span(p_alt)
write_fragment = True
else:
p_alt = prob_mapq(fragment.readA) * prob_mapq(fragment.readB)
alt_span += p_alt
fragment.tag_span(p_alt)
write_fragment = True
# # tally spanning reference pairs
if svtype == 'DEL' and posB - posA < 2 * fragment.lib.sd:
ref_straddle_A = False
ref_straddle_B = False
else:
ref_straddle_A = fragment.is_pair_straddle(chromA, posA, ref_ciA,
chromA, posA, ref_ciA,
False, True,
min_aligned,
fragment.lib)
ref_straddle_B = fragment.is_pair_straddle(chromB, posB, ref_ciB,
chromB, posB, ref_ciB,
False, True,
min_aligned,
fragment.lib)
if ref_straddle_A or ref_straddle_B:
# don't allow the pair to jump the entire variant, except for
# length-changing SVs like deletions
if not (ref_straddle_A and ref_straddle_B) or svtype == 'DEL':
p_conc = fragment.p_concordant(var_length)
if p_conc is not None:
p_reference = p_conc * prob_mapq(fragment.readA) * prob_mapq(fragment.readB)
ref_span += (ref_straddle_A + ref_straddle_B) * p_reference / 2
fragment.tag_span(1 - p_conc)
write_fragment = True
# write to BAM if requested
if alignment_outpath is not None and write_fragment:
for read in fragment.primary_reads + [split.read for split in fragment.split_reads]:
out_bam_written_reads = write_alignment(read, out_bam, out_bam_written_reads)
if debug:
print('--------------------------')
print('ref_span:', ref_span)
print('alt_span:', alt_span)
print('ref_seq:', ref_seq)
print('alt_seq:', alt_seq)
print('alt_clip:', alt_clip)
# in the absence of evidence for a particular type, ignore the reference
# support for that type as well
if (alt_seq + alt_clip) < 0.5 and alt_span >= 1:
alt_seq = 0
alt_clip = 0
ref_seq = 0
if alt_span < 0.5 and (alt_seq + alt_clip) >= 1:
alt_span = 0
ref_span = 0
if alt_span + alt_seq == 0 and alt_clip > 0:
# discount any SV that's only supported by clips.
alt_clip = 0
if ref_seq + alt_seq + ref_span + alt_span + alt_clip > 0:
# get bayesian classifier
if var.info['SVTYPE'] == "DUP": is_dup = True
else: is_dup = False
alt_splitters = alt_seq + alt_clip
QR = int(split_weight * ref_seq) + int(disc_weight * ref_span)
QA = int(split_weight * alt_splitters) + int(disc_weight * alt_span)
gt_lplist = bayes_gt(QR, QA, is_dup)
best, second_best = sorted([ (i, e) for i, e in enumerate(gt_lplist) ], key=lambda x: x[1], reverse=True)[0:2]
gt_idx = best[0]
# print log probabilities of homref, het, homalt
if debug:
print(gt_lplist)
# set the overall variant QUAL score and sample specific fields
var.genotype(sample.name).set_format('GL', ','.join(['%.0f' % x for x in gt_lplist]))
var.genotype(sample.name).set_format('DP', int(ref_seq + alt_seq + alt_clip + ref_span + alt_span))
var.genotype(sample.name).set_format('RO', int(ref_seq + ref_span))
var.genotype(sample.name).set_format('AO', int(alt_seq + alt_clip + alt_span))
var.genotype(sample.name).set_format('QR', QR)
var.genotype(sample.name).set_format('QA', QA)
# if detailed:
var.genotype(sample.name).set_format('RS', int(ref_seq))
var.genotype(sample.name).set_format('AS', int(alt_seq))
var.genotype(sample.name).set_format('ASC', int(alt_clip))
var.genotype(sample.name).set_format('RP', int(ref_span))
var.genotype(sample.name).set_format('AP', int(alt_span))
try:
var.genotype(sample.name).set_format('AB', '%.2g' % (QA / float(QR + QA)))
except ZeroDivisionError:
var.genotype(sample.name).set_format('AB', '.')
# assign genotypes
gt_sum = 0
for gt in gt_lplist:
try:
gt_sum += 10**gt
except OverflowError:
gt_sum += 0
if gt_sum > 0:
gt_sum_log = math.log(gt_sum, 10)
sample_qual = abs(-10 * (gt_lplist[0] - gt_sum_log)) # phred-scaled probability site is non-reference in this sample
phred_gq = min(-10 * (second_best[1] - best[1]), 200)
var.genotype(sample.name).set_format('GQ', int(phred_gq))
var.genotype(sample.name).set_format('SQ', sample_qual)
var.qual += sample_qual
if gt_idx == 1:
var.genotype(sample.name).set_format('GT', '0/1')
elif gt_idx == 2:
var.genotype(sample.name).set_format('GT', '1/1')
elif gt_idx == 0:
var.genotype(sample.name).set_format('GT', '0/0')
else:
var.genotype(sample.name).set_format('GQ', '.')
var.genotype(sample.name).set_format('SQ', '.')
var.genotype(sample.name).set_format('GT', './.')
else:
var.genotype(sample.name).set_format('GT', './.')
var.qual = 0
var.genotype(sample.name).set_format('GQ', '.')
var.genotype(sample.name).set_format('SQ', '.')
var.genotype(sample.name).set_format('GL', '.')
var.genotype(sample.name).set_format('DP', 0)
var.genotype(sample.name).set_format('AO', 0)
var.genotype(sample.name).set_format('RO', 0)
# if detailed:
var.genotype(sample.name).set_format('AS', 0)
var.genotype(sample.name).set_format('ASC', 0)
var.genotype(sample.name).set_format('RS', 0)
var.genotype(sample.name).set_format('AP', 0)
var.genotype(sample.name).set_format('RP', 0)
var.genotype(sample.name).set_format('QR', 0)
var.genotype(sample.name).set_format('QA', 0)
var.genotype(sample.name).set_format('AB', '.')
# after all samples have been processed, write
vcf_out.write(var.get_var_string() + '\n')
if var.info['SVTYPE'] == 'BND':
var2.qual = var.qual
var2.active_formats = var.active_formats
var2.genotype = var.genotype
vcf_out.write(var2.get_var_string() + '\n')
# throw warning if we've lost unpaired breakends
if breakend_dict:
logging.warning('Unpaired breakends found in file. These will not be present in output.')
# close the files
vcf_in.close()
vcf_out.close()
if alignment_outpath is not None:
out_bam.close()
return
def set_up_logging(verbose):
level = logging.WARNING
if verbose:
level = logging.INFO
logging.basicConfig(format='%(message)s', level=level)
# --------------------------------------
# main function
def main():
# parse the command line args
args = get_args()
set_up_logging(args.verbose)
if args.split_bam is not None:
sys.stderr.write('Warning: --split_bam (-S) is deprecated. Ignoring %s.\n' % args.split_bam)
# call primary function
sv_genotype(args.bam,
args.input_vcf,
args.output_vcf,
args.min_aligned,
args.split_weight,
args.disc_weight,
args.num_samp,
args.lib_info_path,
args.debug,
args.alignment_outpath,
args.ref_fasta,
args.sum_quals,
args.max_reads,
args.max_ci_dist)
# --------------------------------------
# command-line/console entrypoint
def cli():
try:
sys.exit(main())
except IOError as e:
if e.errno != 32: # ignore SIGPIPE
raise
# initialize the script
if __name__ == '__main__':
cli()
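# --------------------------------------
# Editorial sketch (not part of the original script): the quality arithmetic
# used in the genotyping loop above, isolated for clarity. The input is a
# hypothetical three-entry list of log10 genotype likelihoods
# [hom-ref, het, hom-alt]; the numbers in the example are made up.
def _phred_qualities_sketch(gt_lplist):
    import math
    # total likelihood mass, back in linear space
    gt_sum = sum(10 ** gt for gt in gt_lplist)
    gt_sum_log = math.log(gt_sum, 10)
    # SQ: phred-scaled probability that the site is non-reference in this sample
    sample_qual = abs(-10 * (gt_lplist[0] - gt_sum_log))
    # GQ: phred-scaled gap between the best and second-best genotype, capped at 200
    best, second_best = sorted(gt_lplist, reverse=True)[:2]
    phred_gq = min(-10 * (second_best - best), 200)
    return sample_qual, phred_gq
# example: _phred_qualities_sketch([-12.4, -0.2, -6.9])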
| 44.488851
| 218
| 0.529282
|
2bcfd780bcac2670e347cbd311ddec5ff66be7c7
| 3,462
|
py
|
Python
|
ghs_armcc_error_analyzer/ghs_armcc_error_analyzer.py
|
rarewin/ghs-armcc-error-analyzer
|
5dfd1246aee4b1c506237e58985d45dc4083018d
|
[
"BSD-2-Clause"
] | null | null | null |
ghs_armcc_error_analyzer/ghs_armcc_error_analyzer.py
|
rarewin/ghs-armcc-error-analyzer
|
5dfd1246aee4b1c506237e58985d45dc4083018d
|
[
"BSD-2-Clause"
] | null | null | null |
ghs_armcc_error_analyzer/ghs_armcc_error_analyzer.py
|
rarewin/ghs-armcc-error-analyzer
|
5dfd1246aee4b1c506237e58985d45dc4083018d
|
[
"BSD-2-Clause"
] | null | null | null |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import re
import sys
import os
from optparse import OptionParser, OptionGroup
from jinja2 import Environment, FileSystemLoader
err_reg = re.compile(r'^\s*\"(?P<file>.*)\",\s*line\s*(?P<line>\d+).*:\s*[fatal]*\s*(?P<type>warning|error|remark)\s*#(?P<number>\d+)[-D]*\s*:*\s*(?P<message>.*)$')
def main():
dirname = os.path.normpath(os.path.dirname(__file__))
env = Environment(loader = FileSystemLoader(os.path.join(dirname, 'templates/'), encoding = 'utf8'))
tpl = env.get_template('error.tpl.html')
# parse options
parser = OptionParser()
parser.add_option('-t', '--text', action = 'store_true', dest = 'output_text',
help = "output plain text")
# control outputs
g_control_outputs = OptionGroup(parser, "Control Outputs", "The following options are only valid in plain text mode at this point")
g_control_outputs.add_option('-e', '--error', action = 'store_true', dest = 'print_error_only',
help = "display only errors")
g_control_outputs.add_option('-E', '--no-error', action = 'store_false', dest = 'print_error',
help = "don't display errors")
g_control_outputs.add_option('-w', '--warning', action = 'store_true', dest = 'print_warning_only',
help = "display warnings only")
g_control_outputs.add_option('-W', '--no-warning', action = 'store_false', dest = 'print_warning',
help = "don't display warnings")
g_control_outputs.add_option('-r', '--remark', action = 'store_true', dest = 'print_remark_only',
help = "display remarks only (default)")
g_control_outputs.add_option('-R', '--no-remark', action = 'store_false', dest = 'print_remark',
help = "don't display remarks")
parser.add_option_group(g_control_outputs)
(options, args) = parser.parse_args()
# no input
if len(args) < 1:
sys.exit(0)
input_file_name = args[0]
result = {'error' : [], 'warning': [], 'remark': []}
# analyze
with open(input_file_name, 'rb') as input_file:
line = ' '
while line:
m = err_reg.match(line.strip())
# append only unique messages
if m and not (m.groupdict() in result[m.group('type')]):
d = m.groupdict()
d.update({'plain': line})
result[m.group('type')].append(d)
try:
line = input_file.readline().decode('cp932')
except:
line = ' '
# print
if options.output_text:
p = []
if (options.print_error and not (options.print_warning_only or options.print_remark_only)) or options.print_error_only:
p = result['error']
if (options.print_warning and not (options.print_error_only or options.print_remark_only)) or options.print_warning_only:
p += result['warning']
if (options.print_remark and not (options.print_error_only or options.print_warning_only)) or options.print_remark_only:
p += result['remark']
for l in p:
print(l['plain'])
else:
# output html by means of jinja2
html = tpl.render({'errors': result['error'], 'warnings': result['warning'], 'remarks': result['remark']})
sys.stdout.buffer.write(html.encode('utf-8'))
if __name__ == '__main__':
main()
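# Editorial sketch (not part of the original script): what err_reg extracts
# from a typical compiler diagnostic line. The sample line below is invented
# for illustration only; the field names match the named groups above.
def _demo_err_reg():
    sample = '"src/main.c", line 42: error #167: argument of type "int" is incompatible'
    m = err_reg.match(sample)
    # -> {'file': 'src/main.c', 'line': '42', 'type': 'error',
    #     'number': '167', 'message': 'argument of type "int" is incompatible'}
    return m.groupdict() if m else None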
| 34.969697
| 163
| 0.597631
|
b0f2b30f73d02ea550ad9f0077190688854c2700
| 4,131
|
py
|
Python
|
examples/cifar10_cnn.py
|
wavelets/keras
|
c57d5cce7903511edd4048f8bfed2ad0dc6f6b6b
|
[
"MIT"
] | 1
|
2015-10-06T22:08:00.000Z
|
2015-10-06T22:08:00.000Z
|
examples/cifar10_cnn.py
|
wavelets/keras
|
c57d5cce7903511edd4048f8bfed2ad0dc6f6b6b
|
[
"MIT"
] | null | null | null |
examples/cifar10_cnn.py
|
wavelets/keras
|
c57d5cce7903511edd4048f8bfed2ad0dc6f6b6b
|
[
"MIT"
] | null | null | null |
from keras.datasets import cifar10
from keras.preprocessing.image import ImageDataGenerator
from keras.models import Sequential
from keras.layers.core import Dense, Dropout, Activation, Flatten
from keras.layers.convolutional import Convolution2D, MaxPooling2D
from keras.optimizers import SGD, Adadelta, Adagrad
from keras.utils import np_utils, generic_utils
'''
Train a (fairly simple) deep CNN on the CIFAR10 small images dataset.
GPU run command:
THEANO_FLAGS=mode=FAST_RUN,device=gpu,floatX=float32 python cifar10_cnn.py
It gets down to 0.65 test logloss in 25 epochs, and down to 0.55 after 50 epochs.
(it's still underfitting at that point, though).
'''
batch_size = 32
nb_classes = 10
nb_epoch = 200
data_augmentation = True
# the data, shuffled and split between train and test sets
(X_train, y_train), (X_test, y_test) = cifar10.load_data(test_split=0.1)
print X_train.shape[0], 'train samples'
print X_test.shape[0], 'test samples'
# convert class vectors to binary class matrices
Y_train = np_utils.to_categorical(y_train, nb_classes)
Y_test = np_utils.to_categorical(y_test, nb_classes)
model = Sequential()
model.add(Convolution2D(32, 3, 3, 3, border_mode='full'))
model.add(Activation('relu'))
model.add(Convolution2D(32, 32, 3, 3))
model.add(Activation('relu'))
model.add(MaxPooling2D(poolsize=(2, 2)))
model.add(Dropout(0.25))
model.add(Convolution2D(64, 32, 3, 3, border_mode='full'))
model.add(Activation('relu'))
model.add(Convolution2D(64, 64, 3, 3))
model.add(Activation('relu'))
model.add(MaxPooling2D(poolsize=(2, 2)))
model.add(Dropout(0.25))
model.add(Flatten(64*8*8))
model.add(Dense(64*8*8, 512, init='normal'))
model.add(Activation('relu'))
model.add(Dropout(0.5))
model.add(Dense(512, nb_classes, init='normal'))
model.add(Activation('softmax'))
# let's train the model using SGD + momentum (how original).
sgd = SGD(lr=0.01, decay=1e-6, momentum=0.9, nesterov=True)
model.compile(loss='categorical_crossentropy', optimizer=sgd)
if not data_augmentation:
print "Not using data augmentation or normalization"
X_train = X_train.astype("float32")
X_test = X_test.astype("float32")
X_train /= 255
X_test /= 255
model.fit(X_train, Y_train, batch_size=batch_size, nb_epoch=10)
score = model.evaluate(X_test, Y_test, batch_size=batch_size)
print 'Test score:', score
else:
print "Using real time data augmentation"
# this will do preprocessing and realtime data augmentation
datagen = ImageDataGenerator(
featurewise_center=True, # set input mean to 0 over the dataset
samplewise_center=False, # set each sample mean to 0
featurewise_std_normalization=True, # divide inputs by std of the dataset
samplewise_std_normalization=False, # divide each input by its std
zca_whitening=False, # apply ZCA whitening
rotation_range=20, # randomly rotate images in the range (degrees, 0 to 180)
width_shift_range=0.2, # randomly shift images horizontally (fraction of total width)
height_shift_range=0.2, # randomly shift images vertically (fraction of total height)
horizontal_flip=True, # randomly flip images
vertical_flip=False) # randomly flip images
# compute quantities required for featurewise normalization
# (std, mean, and principal components if ZCA whitening is applied)
datagen.fit(X_train)
for e in range(nb_epoch):
print '-'*40
print 'Epoch', e
print '-'*40
print "Training..."
# batch train with realtime data augmentation
progbar = generic_utils.Progbar(X_train.shape[0])
for X_batch, Y_batch in datagen.flow(X_train, Y_train):
loss = model.train(X_batch, Y_batch)
progbar.add(X_batch.shape[0], values=[("train loss", loss)])
print "Testing..."
# test time!
progbar = generic_utils.Progbar(X_test.shape[0])
for X_batch, Y_batch in datagen.flow(X_test, Y_test):
score = model.test(X_batch, Y_batch)
progbar.add(X_batch.shape[0], values=[("test loss", score)])
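# Editorial sketch (not part of the original example): roughly what the
# featurewise centering / std normalization requested above amounts to on a
# hypothetical batch. The exact statistics ImageDataGenerator computes
# (axis handling, ZCA whitening) may differ.
def _featurewise_normalize_sketch(X):
    mean = X.mean(axis=0)        # per-position mean over the dataset
    std = X.std(axis=0) + 1e-7   # per-position std; epsilon avoids divide-by-zero
    return (X - mean) / std
# example: _featurewise_normalize_sketch(X_train[:8].astype("float32") / 255)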
| 35.307692
| 93
| 0.710482
|
dbcb3b0a8a8282ac263bf5393ea70cfe78b70565
| 113
|
py
|
Python
|
test/normal_failure.py
|
praekeltfoundation/vaultkeeper-agent
|
7b62b0e77b4ced618bb28ad466fa15b87d709ea9
|
[
"BSD-3-Clause"
] | 7
|
2017-11-20T15:13:56.000Z
|
2021-11-22T23:38:03.000Z
|
test/normal_failure.py
|
praekeltfoundation/vaultkeeper-agent
|
7b62b0e77b4ced618bb28ad466fa15b87d709ea9
|
[
"BSD-3-Clause"
] | 11
|
2017-08-31T09:00:51.000Z
|
2018-02-09T10:17:48.000Z
|
test/normal_failure.py
|
praekeltfoundation/vaultkeeper-agent
|
7b62b0e77b4ced618bb28ad466fa15b87d709ea9
|
[
"BSD-3-Clause"
] | 2
|
2018-04-11T11:00:25.000Z
|
2020-05-05T12:06:45.000Z
|
#!/usr/bin/python
import sys
import time
print('Subprocess started...')
time.sleep(1)
print('Oops!')
sys.exit(3)
| 14.125
| 30
| 0.707965
|
3d687e9a2f0a2238e0aaf3771614ad66e7bf7404
| 24,968
|
py
|
Python
|
places/migrations/0042_auto__add_field_place_summary.py
|
evrenesat/ganihomes
|
eece2d8d957989b176cc5a36d723f676862f8d17
|
[
"BSD-2-Clause"
] | 24
|
2016-08-06T18:10:54.000Z
|
2022-03-04T11:47:39.000Z
|
places/migrations/0042_auto__add_field_place_summary.py
|
evrenesat/ganihomes
|
eece2d8d957989b176cc5a36d723f676862f8d17
|
[
"BSD-2-Clause"
] | 1
|
2017-03-28T02:36:50.000Z
|
2017-03-28T07:18:57.000Z
|
places/migrations/0042_auto__add_field_place_summary.py
|
evrenesat/ganihomes
|
eece2d8d957989b176cc5a36d723f676862f8d17
|
[
"BSD-2-Clause"
] | 13
|
2017-03-28T02:35:32.000Z
|
2022-02-21T23:36:15.000Z
|
# encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'Place.summary'
db.add_column('places_place', 'summary', self.gf('django.db.models.fields.TextField')(default=''), keep_default=False)
def backwards(self, orm):
# Deleting field 'Place.summary'
db.delete_column('places_place', 'summary')
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'places.booking': {
'Meta': {'ordering': "['timestamp']", 'object_name': 'Booking'},
'end': ('django.db.models.fields.DateField', [], {}),
'guest': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'guest'", 'to': "orm['auth.User']"}),
'guest_payment': ('django.db.models.fields.DecimalField', [], {'max_digits': '8', 'decimal_places': '2'}),
'host': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'host'", 'to': "orm['auth.User']"}),
'host_earning': ('django.db.models.fields.DecimalField', [], {'max_digits': '8', 'decimal_places': '2'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'payment_type': ('django.db.models.fields.SmallIntegerField', [], {'null': 'True', 'blank': 'True'}),
'place': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['places.Place']"}),
'reservation': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['places.ReservedDates']"}),
'start': ('django.db.models.fields.DateField', [], {}),
'status': ('django.db.models.fields.SmallIntegerField', [], {'default': '1'}),
'summary': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'timestamp': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'valid': ('django.db.models.fields.BooleanField', [], {'default': 'False'})
},
'places.currency': {
'Meta': {'ordering': "['timestamp']", 'object_name': 'Currency'},
'active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'code': ('django.db.models.fields.CharField', [], {'max_length': '3'}),
'code_position': ('django.db.models.fields.SmallIntegerField', [], {'default': '1'}),
'factor': ('django.db.models.fields.DecimalField', [], {'default': "'0'", 'max_digits': '12', 'decimal_places': '4'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'main': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '20'}),
'timestamp': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'})
},
'places.description': {
'Meta': {'ordering': "['timestamp']", 'object_name': 'Description'},
'auto': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'lang': ('django.db.models.fields.CharField', [], {'max_length': '5'}),
'place': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'descriptions'", 'to': "orm['places.Place']"}),
'text': ('django.db.models.fields.TextField', [], {}),
'timestamp': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'})
},
'places.friendship': {
'Meta': {'object_name': 'Friendship'},
'confirmed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'profile': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['places.Profile']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'places.message': {
'Meta': {'ordering': "['timestamp']", 'object_name': 'Message'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'read': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'receiver': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'receiver'", 'to': "orm['auth.User']"}),
'sender': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'sender'", 'to': "orm['auth.User']"}),
'status': ('django.db.models.fields.SmallIntegerField', [], {'default': '1'}),
'text': ('django.db.models.fields.TextField', [], {}),
'timestamp': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'})
},
'places.photo': {
'Meta': {'ordering': "['timestamp']", 'object_name': 'Photo'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('django.db.models.fields.files.ImageField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '60', 'null': 'True', 'blank': 'True'}),
'order': ('django.db.models.fields.SmallIntegerField', [], {'default': '60'}),
'place': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['places.Place']", 'null': 'True', 'blank': 'True'}),
'timestamp': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'type': ('django.db.models.fields.SmallIntegerField', [], {'null': 'True', 'blank': 'True'})
},
'places.place': {
'Meta': {'ordering': "['timestamp']", 'object_name': 'Place'},
'active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'address': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'bathrooms': ('django.db.models.fields.SmallIntegerField', [], {'default': '1'}),
'bed_type': ('django.db.models.fields.SmallIntegerField', [], {'default': '1'}),
'bedroom': ('django.db.models.fields.SmallIntegerField', [], {'default': '1'}),
'cancellation': ('django.db.models.fields.SmallIntegerField', [], {'default': '1'}),
'capacity': ('django.db.models.fields.SmallIntegerField', [], {'default': '2'}),
'city': ('django.db.models.fields.CharField', [], {'max_length': '40'}),
'clean_rating': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'cleaning_fee': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '8', 'decimal_places': '2', 'blank': 'True'}),
'comfort_rating': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'country': ('django.db.models.fields.CharField', [], {'max_length': '2'}),
'currency': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['places.Currency']"}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'district': ('django.db.models.fields.CharField', [], {'max_length': '40'}),
'emergency_phone': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True', 'blank': 'True'}),
'extra_limit': ('django.db.models.fields.SmallIntegerField', [], {'null': 'True', 'blank': 'True'}),
'extra_price': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '6', 'decimal_places': '2', 'blank': 'True'}),
'favorite_counter': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'lang': ('django.db.models.fields.CharField', [], {'max_length': '5'}),
'last_modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'lat': ('django.db.models.fields.FloatField', [], {'default': '0.0'}),
'location_rating': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'lon': ('django.db.models.fields.FloatField', [], {'default': '0.0'}),
'manual': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'max_stay': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'min_stay': ('django.db.models.fields.SmallIntegerField', [], {'default': '1'}),
'monthly_discount': ('django.db.models.fields.SmallIntegerField', [], {'default': '0', 'null': 'True', 'blank': 'True'}),
'overall_rating': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'owner': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}),
'postcode': ('django.db.models.fields.CharField', [], {'max_length': '15'}),
'price': ('django.db.models.fields.DecimalField', [], {'max_digits': '6', 'decimal_places': '2'}),
'prices': ('django.db.models.fields.TextField', [], {'default': "''"}),
'primary_photo': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'reserved_dates': ('django.db.models.fields.TextField', [], {'default': "''"}),
'rules': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'size': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'size_type': ('django.db.models.fields.IntegerField', [], {'default': '1'}),
'slug': ('django.db.models.fields.SlugField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
'space': ('django.db.models.fields.SmallIntegerField', [], {'default': '1'}),
'state': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}),
'street': ('django.db.models.fields.CharField', [], {'max_length': '60'}),
'street_view': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'summary': ('django.db.models.fields.TextField', [], {'default': "''"}),
'tags': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['places.Tag']", 'null': 'True', 'blank': 'True'}),
'timestamp': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'type': ('django.db.models.fields.SmallIntegerField', [], {'default': '1'}),
'value_money_rating': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'weekend_price': ('django.db.models.fields.DecimalField', [], {'default': '0.0', 'max_digits': '6', 'decimal_places': '2'}),
'weekly_discount': ('django.db.models.fields.SmallIntegerField', [], {'default': '0', 'null': 'True', 'blank': 'True'})
},
'places.placereview': {
'Meta': {'ordering': "['timestamp']", 'object_name': 'PlaceReview'},
'active': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'clean_rating': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'comfort_rating': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'location_rating': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'overall_rating': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'place': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['places.Place']"}),
'status': ('django.db.models.fields.SmallIntegerField', [], {'default': '1'}),
'text': ('django.db.models.fields.TextField', [], {}),
'timestamp': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'value_money_rating': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'writer': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'places.profile': {
'Meta': {'ordering': "['timestamp']", 'object_name': 'Profile'},
'brithdate': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'cell': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True', 'blank': 'True'}),
'city': ('django.db.models.fields.CharField', [], {'max_length': '30', 'null': 'True', 'blank': 'True'}),
'currency': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['places.Currency']"}),
'facebook': ('django.db.models.fields.CharField', [], {'max_length': '60', 'null': 'True', 'blank': 'True'}),
'favorites': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['places.Place']", 'null': 'True', 'blank': 'True'}),
'friends': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'friend_profiles'", 'symmetrical': 'False', 'through': "orm['places.Friendship']", 'to': "orm['auth.User']"}),
'full_name': ('django.db.models.fields.CharField', [], {'max_length': '60', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'lang': ('django.db.models.fields.CharField', [], {'default': "'tr_TR'", 'max_length': '5'}),
'lastlogin': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'occupation': ('django.db.models.fields.CharField', [], {'max_length': '30', 'null': 'True', 'blank': 'True'}),
'phone': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True', 'blank': 'True'}),
'photo': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'private_name': ('django.db.models.fields.CharField', [], {'max_length': '60', 'null': 'True', 'blank': 'True'}),
'timestamp': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'twitter': ('django.db.models.fields.CharField', [], {'max_length': '60', 'null': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['auth.User']", 'unique': 'True', 'null': 'True', 'blank': 'True'})
},
'places.promotioncode': {
'Meta': {'ordering': "['timestamp']", 'object_name': 'PromotionCode'},
'active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'code': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '10'}),
'expiry_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'percentage': ('django.db.models.fields.DecimalField', [], {'max_digits': '8', 'decimal_places': '2'}),
'price': ('django.db.models.fields.DecimalField', [], {'max_digits': '6', 'decimal_places': '2'}),
'puser': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'used_promotions'", 'to': "orm['auth.User']"}),
'sender': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'related_promotions'", 'to': "orm['auth.User']"}),
'timestamp': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'type': ('django.db.models.fields.SmallIntegerField', [], {}),
'used': ('django.db.models.fields.BooleanField', [], {'default': 'False'})
},
'places.reserveddates': {
'Meta': {'ordering': "['timestamp']", 'object_name': 'ReservedDates'},
'end': ('django.db.models.fields.DateField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'place': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['places.Place']"}),
'start': ('django.db.models.fields.DateField', [], {}),
'timestamp': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'})
},
'places.sessionalprice': {
'Meta': {'ordering': "['timestamp']", 'object_name': 'SessionalPrice'},
'active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'end': ('django.db.models.fields.DateField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'null': 'True', 'blank': 'True'}),
'place': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['places.Place']"}),
'price': ('django.db.models.fields.DecimalField', [], {'max_digits': '8', 'decimal_places': '2'}),
'start': ('django.db.models.fields.DateField', [], {}),
'timestamp': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'weekend_price': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '8', 'decimal_places': '2', 'blank': 'True'})
},
'places.tag': {
'Meta': {'ordering': "['timestamp']", 'object_name': 'Tag'},
'active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'category': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['places.TagCategory']"}),
'help': ('django.db.models.fields.TextField', [], {'default': "''", 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '30'}),
'timestamp': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'})
},
'places.tagcategory': {
'Meta': {'ordering': "['timestamp']", 'object_name': 'TagCategory'},
'active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '30'}),
'timestamp': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'})
},
'places.tagtranslation': {
'Meta': {'ordering': "['timestamp']", 'object_name': 'TagTranslation'},
'help': ('django.db.models.fields.TextField', [], {'default': "''", 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'lang': ('django.db.models.fields.CharField', [], {'max_length': '5'}),
'tag': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'tags'", 'to': "orm['places.Tag']"}),
'timestamp': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'translation': ('django.db.models.fields.CharField', [], {'max_length': '30'})
},
'places.transaction': {
'Meta': {'ordering': "['timestamp']", 'object_name': 'Transaction'},
'active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'amount': ('django.db.models.fields.DecimalField', [], {'max_digits': '8', 'decimal_places': '2'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'details': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'reciver_type': ('django.db.models.fields.SmallIntegerField', [], {}),
'sender_type': ('django.db.models.fields.SmallIntegerField', [], {}),
'timestamp': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'type': ('django.db.models.fields.SmallIntegerField', [], {}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'places.userreview': {
'Meta': {'ordering': "['timestamp']", 'object_name': 'UserReview'},
'active': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'person': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'person'", 'to': "orm['auth.User']"}),
'status': ('django.db.models.fields.SmallIntegerField', [], {'default': '1'}),
'text': ('django.db.models.fields.TextField', [], {}),
'timestamp': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'writer': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'writer'", 'to': "orm['auth.User']"})
}
}
complete_apps = ['places']
| 83.505017
| 207
| 0.553869
|
795c1af440bd7b6ea259fa01ff799e7878e66728
| 2,673
|
py
|
Python
|
var/spack/repos/builtin/packages/py-petsc4py/package.py
|
vreshniak/spack-xsdk
|
b2da85f9309e38082db6b35a79028734ae5e0e96
|
[
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | null | null | null |
var/spack/repos/builtin/packages/py-petsc4py/package.py
|
vreshniak/spack-xsdk
|
b2da85f9309e38082db6b35a79028734ae5e0e96
|
[
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | null | null | null |
var/spack/repos/builtin/packages/py-petsc4py/package.py
|
vreshniak/spack-xsdk
|
b2da85f9309e38082db6b35a79028734ae5e0e96
|
[
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | null | null | null |
# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class PyPetsc4py(PythonPackage):
"""This package provides Python bindings for the PETSc package.
"""
homepage = "https://gitlab.com/petsc/petsc4py"
url = "https://gitlab.com/petsc/petsc4py/-/archive/3.13.0/petsc4py-3.13.0.tar.gz"
git = "https://gitlab.com/petsc/petsc4py.git"
maintainers = ['dalcinl', 'balay']
version('develop', branch='master')
version('3.14.0', branch='master')
version('3.13.0', sha256='0e11679353c0c2938336a3c8d1a439b853e20d3bccd7d614ad1dbea3ec5cb31f')
version('3.12.0', sha256='4c94a1dbbf244b249436b266ac5fa4e67080d205420805deab5ec162b979df8d')
version('3.11.0', sha256='ec114b303aadaee032c248a02021e940e43c6437647af0322d95354e6f2c06ad')
version('3.10.1', sha256='11b59693af0e2067f029924dd6b5220f7a7ec00089f6e2c2361332d6123ea6f7')
version('3.10.0', sha256='4e58b9e7d4343adcf905751261b789c8c3489496f8de5c3fc3844664ef5ec5a3')
version('3.9.1', sha256='8b7f56e0904c57cca08d1c24a1d8151d1554f06c9c5a31b16fb6db3bc928bbd8')
version('3.9.0', sha256='ae077dffd455014de16b6ed4ba014ac9e10227dc6b93f919a4229e8e1c870aec')
version('3.8.1', sha256='f6260a52dab02247f5b8d686a0587441b1a2048dff52263f1db42e75d2e3f330')
version('3.8.0', sha256='3445da12becf23ade4d40cdd04c746581982ab6a27f55fbb5cd29bc5560df4b1')
version('3.7.0', sha256='c04931a5ba3fd7c8c8d165aa7908688921ce3cf4cf8725d0cba73380c2107386')
variant('mpi', default=True, description='Activates MPI support')
depends_on('py-cython', type='build', when='@develop')
depends_on('python@2.6:2.8,3.3:', type=('build', 'run'))
depends_on('py-setuptools', type='build')
depends_on('py-numpy', type=('build', 'run'))
depends_on('py-mpi4py', when='+mpi', type=('build', 'run'))
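    # the mpi variant is forwarded to the petsc dependency; petsc itself is
    # pinned below to the minor series matching each petsc4py release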
depends_on('petsc+mpi', when='+mpi')
depends_on('petsc~mpi', when='~mpi')
depends_on('petsc@develop', when='@develop')
depends_on('petsc@3.14:3.14.99', when='@3.14:3.14.99')
depends_on('petsc@3.13:3.13.99', when='@3.13:3.13.99')
depends_on('petsc@3.12:3.12.99', when='@3.12:3.12.99')
depends_on('petsc@3.11:3.11.99', when='@3.11:3.11.99')
depends_on('petsc@3.10.3:3.10.99', when='@3.10.1:3.10.99')
depends_on('petsc@3.10:3.10.2', when='@3.10.0')
depends_on('petsc@3.9:3.9.99', when='@3.9:3.9.99')
depends_on('petsc@3.8:3.8.99', when='@3.8:3.8.99')
depends_on('petsc@3.7:3.7.99', when='@3.7:3.7.99')
depends_on('petsc@3.6:3.6.99', when='@3.6:3.6.99')
| 50.433962
| 96
| 0.7052
|
734f2893617c322bcd8cd7ef6884a1fef7533ff3
| 7,202
|
py
|
Python
|
conda/cli/main_remove.py
|
westurner/conda
|
24d3af9f0a4334b76afaef91a6e74a15e86a8ea1
|
[
"BSD-3-Clause"
] | null | null | null |
conda/cli/main_remove.py
|
westurner/conda
|
24d3af9f0a4334b76afaef91a6e74a15e86a8ea1
|
[
"BSD-3-Clause"
] | null | null | null |
conda/cli/main_remove.py
|
westurner/conda
|
24d3af9f0a4334b76afaef91a6e74a15e86a8ea1
|
[
"BSD-3-Clause"
] | null | null | null |
# (c) 2012-2013 Continuum Analytics, Inc. / http://continuum.io
# All Rights Reserved
#
# conda is distributed under the terms of the BSD 3-clause license.
# Consult LICENSE.txt or http://opensource.org/licenses/BSD-3-Clause.
from __future__ import print_function, division, absolute_import
from os.path import join
import argparse
from argparse import RawDescriptionHelpFormatter
from conda import config
from conda.cli import common
from conda.console import json_progress_bars
help = "%s a list of packages from a specified conda environment."
descr = help + """
Normally, only the specified package is removed, and not the packages
which may depend on the package. Hence this command should be used
with caution. Note that conda uninstall is an alias for conda remove
"""
example = """
examples:
conda %s -n myenv scipy
"""
def configure_parser(sub_parsers, name='remove'):
p = sub_parsers.add_parser(
name,
formatter_class=RawDescriptionHelpFormatter,
description=descr % name,
help=help % name,
epilog=example % name,
)
common.add_parser_yes(p)
common.add_parser_json(p)
p.add_argument(
"--all",
action="store_true",
help="%s all packages, i.e. the entire environment" % name,
)
p.add_argument(
"--features",
action="store_true",
help="%s features (instead of packages)" % name,
)
common.add_parser_no_pin(p)
common.add_parser_channels(p)
common.add_parser_prefix(p)
common.add_parser_quiet(p)
common.add_parser_use_index_cache(p)
common.add_parser_use_local(p)
common.add_parser_offline(p)
p.add_argument(
"--force-pscheck",
action="store_true",
help=("force removal (when package process is running)"
if config.platform == 'win' else argparse.SUPPRESS)
)
p.add_argument(
'package_names',
metavar='package_name',
action="store",
nargs='*',
help="package names to %s from environment" % name,
)
p.set_defaults(func=execute)
def execute(args, parser):
import sys
import conda.plan as plan
import conda.instructions as inst
from conda.cli import pscheck
from conda.install import rm_rf, linked
from conda import config
if not (args.all or args.package_names):
common.error_and_exit('no package names supplied,\n'
' try "conda remove -h" for more details',
json=args.json,
error_type="ValueError")
prefix = common.get_prefix(args)
if args.all and prefix == config.default_prefix:
common.error_and_exit("cannot remove current environment. deactivate and run conda remove again")
common.check_write('remove', prefix, json=args.json)
common.ensure_override_channels_requires_channel(args, json=args.json)
channel_urls = args.channel or ()
if args.use_local:
from conda.fetch import fetch_index
from conda.utils import url_path
try:
from conda_build.config import croot
except ImportError:
common.error_and_exit("you need to have 'conda-build >= 1.7.1' installed"
" to use the --use-local option",
json=args.json,
error_type="RuntimeError")
# remove the cache such that a refetch is made,
# this is necessary because we add the local build repo URL
fetch_index.cache = {}
index = common.get_index_trap(channel_urls=[url_path(croot)] + list(channel_urls),
prepend=not args.override_channels,
use_cache=args.use_index_cache,
json=args.json,
offline=args.offline)
else:
index = common.get_index_trap(channel_urls=channel_urls,
prepend=not args.override_channels,
use_cache=args.use_index_cache,
json=args.json,
offline=args.offline)
specs = None
if args.features:
features = set(args.package_names)
actions = plan.remove_features_actions(prefix, index, features)
elif args.all:
if plan.is_root_prefix(prefix):
common.error_and_exit('cannot remove root environment,\n'
' add -n NAME or -p PREFIX option',
json=args.json,
error_type="CantRemoveRoot")
actions = {inst.PREFIX: prefix,
inst.UNLINK: sorted(linked(prefix))}
else:
specs = common.specs_from_args(args.package_names)
if (plan.is_root_prefix(prefix) and
common.names_in_specs(common.root_no_rm, specs)):
common.error_and_exit('cannot remove %s from root environment' %
', '.join(common.root_no_rm),
json=args.json,
error_type="CantRemoveFromRoot")
actions = plan.remove_actions(prefix, specs, index=index, pinned=args.pinned)
if plan.nothing_to_do(actions):
if args.all:
rm_rf(prefix)
if args.json:
common.stdout_json({
'success': True,
'actions': actions
})
return
common.error_and_exit('no packages found to remove from '
'environment: %s' % prefix,
json=args.json,
error_type="PackageNotInstalled")
if not args.json:
print()
print("Package plan for package removal in environment %s:" % prefix)
plan.display_actions(actions, index)
if args.json and args.dry_run:
common.stdout_json({
'success': True,
'dry_run': True,
'actions': actions
})
return
if not args.json:
if not pscheck.main(args):
common.confirm_yn(args)
elif (sys.platform == 'win32' and not args.force_pscheck and
not pscheck.check_processes(verbose=False)):
common.error_and_exit("Cannot continue removal while processes "
"from packages are running without --force-pscheck.",
json=True,
error_type="ProcessesStillRunning")
if args.json and not args.quiet:
with json_progress_bars():
plan.execute_actions(actions, index, verbose=not args.quiet)
else:
plan.execute_actions(actions, index, verbose=not args.quiet)
if specs:
with open(join(prefix, 'conda-meta', 'history'), 'a') as f:
f.write('# remove specs: %s\n' % specs)
if args.all:
rm_rf(prefix)
if args.json:
common.stdout_json({
'success': True,
'actions': actions
})
| 36.190955
| 105
| 0.579006
|
c091c2b52f38c6816d2d0b655dad45257c6e5143
| 617
|
py
|
Python
|
vpn/vpnapi/models.py
|
futurice/vpn-management-server
|
5418b22356b58cd9a7f3043ec21e1e728abb6b27
|
[
"BSD-3-Clause"
] | 13
|
2015-11-23T05:05:16.000Z
|
2021-05-30T13:00:46.000Z
|
vpn/vpnapi/models.py
|
futurice/vpn-management-server
|
5418b22356b58cd9a7f3043ec21e1e728abb6b27
|
[
"BSD-3-Clause"
] | null | null | null |
vpn/vpnapi/models.py
|
futurice/vpn-management-server
|
5418b22356b58cd9a7f3043ec21e1e728abb6b27
|
[
"BSD-3-Clause"
] | 12
|
2015-01-09T08:07:48.000Z
|
2022-02-28T05:00:10.000Z
|
from django.db import models
import datetime
class State(models.Model):
username = models.CharField(max_length=60, primary_key=True)
valid_csr = models.BooleanField(default=False)
password = models.CharField(max_length=20, blank=True, null=True)
csr_filename = models.CharField(max_length=255, blank=True, null=True)
timestamp = models.DateTimeField(auto_now_add=True)
cn = models.CharField(max_length=255, blank=True, null=True)
    def expired(self):
        # timedelta.seconds only covers the sub-day remainder; use total_seconds()
        age = (datetime.datetime.now() - self.timestamp).total_seconds()
        if age > 900:  # 15 minutes
            return True
        return False
| 32.473684
| 74
| 0.701783
|
e1a4db05ede666962f001417fc7b171258050ac6
| 6,589
|
py
|
Python
|
examples/gauge_app.py
|
smurfix/remi
|
879f48048c3d753a07667a9b239358d104692788
|
[
"Apache-2.0"
] | 3,224
|
2015-10-30T15:35:05.000Z
|
2022-03-08T19:31:46.000Z
|
examples/gauge_app.py
|
smurfix/remi
|
879f48048c3d753a07667a9b239358d104692788
|
[
"Apache-2.0"
] | 453
|
2015-10-26T17:39:01.000Z
|
2022-03-07T13:57:18.000Z
|
examples/gauge_app.py
|
smurfix/remi
|
879f48048c3d753a07667a9b239358d104692788
|
[
"Apache-2.0"
] | 458
|
2015-11-03T12:08:01.000Z
|
2022-03-09T00:17:19.000Z
|
"""
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
""" This example shows an application using Svg graphics
to display an interactive gauge.
"""
import remi.gui as gui
from remi import start, App
import math
from threading import Timer
import random
class InputGauge(gui.VBox, gui.EventSource):
def __init__(self, width, height, _min, _max, **kwargs):
super(InputGauge, self).__init__(**kwargs)
gui.EventSource.__init__(self)
self.set_size(width, height)
self.gauge = Gauge(width, height, _min, _max)
self.gauge.set_value(_min)
self.append(self.gauge)
self.onmousedown.do(self.confirm_value)
self.onmousemove.do(self.gauge.onmousemove)
@gui.decorate_event
def confirm_value(self, widget, x, y):
"""event called clicking on the gauge and so changing its value.
propagates the new value
"""
self.gauge.onmousedown(self.gauge, x, y)
params = (self.gauge.value)
return params
class Gauge(gui.Svg):
def __init__(self, width, height, _min, _max):
super(Gauge, self).__init__(width=width, height=height)
self.width = width
self.height = height
self.min = _min
self.max = _max
self.scale_angle_range = math.pi*2-1.0
self.scale_value_range = _max - _min
self.base_angle = 0 #-self.scale_angle_range/2.0
self.radius = min(width, height)/2.0
circle = gui.SvgCircle(width/2.0, height/2.0, self.radius)
self.append(circle)
circle.set_fill('gray')
circle.set_stroke(1,'lightgray')
circle = gui.SvgCircle(width/2.0, height/2.0, self.radius*92.0/100.0)
self.append(circle)
circle.set_fill('lightgray')
circle.set_stroke(1,'lightgray')
font_size = self.radius*10.0/100.0
xy = self.value_to_xy_tuple(_min, self.radius*90.0/100.0)
textMin = gui.SvgText(xy[0], xy[1], str(_min))
xy = self.value_to_xy_tuple(_max, self.radius*90.0/100.0)
textMax = gui.SvgText(xy[0], xy[1], str(_max))
textMin.style['font-size'] = gui.to_pix(font_size)
textMax.style['font-size'] = gui.to_pix(font_size)
textMin.style['text-anchor'] = "end"
textMax.style['text-anchor'] = "end"
textMin.set_fill('red')
textMax.set_fill('green')
for i in range( 0, 11 ):
xy1 = self.value_to_xy_tuple(self.min + self.scale_value_range/10*i, self.radius*92.0/100.0)
xy2 = self.value_to_xy_tuple(self.min + self.scale_value_range/10*i, self.radius)
tick = gui.SvgLine(xy1[0], xy1[1], xy2[0], xy2[1])
tick.set_stroke(2, 'white')
self.append(tick)
self.append(textMin)
self.append(textMax)
self.arrow = gui.SvgPolyline()
self.arrow.add_coord(-self.radius*20.0/100.0,0)
self.arrow.add_coord(-self.radius*23.0/100.0,self.radius*10.0/100.0)
self.arrow.add_coord(0,0)
self.arrow.add_coord(-self.radius*23.0/100.0,-self.radius*10.0/100.0)
self.arrow.add_coord(-self.radius*20.0/100.0,0)
self.arrow.style['fill'] = 'white'
self.arrow.set_stroke(1.0, 'white')
self.append(self.arrow)
self.arrow_preview = gui.SvgPolyline()
self.arrow_preview.add_coord(-self.radius*10.0/100.0,0)
self.arrow_preview.add_coord(-self.radius*13.0/100.0,self.radius*5.0/100.0)
self.arrow_preview.add_coord(0,0)
self.arrow_preview.add_coord(-self.radius*13.0/100.0,-self.radius*5.0/100.0)
self.arrow_preview.add_coord(-self.radius*10.0/100.0,0)
self.arrow_preview.style['fill'] = 'beige'
self.arrow_preview.set_stroke(1.0, 'beige')
self.append(self.arrow_preview)
self.set_value(_min)
def value_to_angle(self, value):
return self.base_angle + (value-self.min) * self.scale_angle_range / self.scale_value_range #subtraction in order to go clockwise
def angle_to_value(self, angle):
print("angolo:" + str(math.degrees(angle)))
print("valore:" + str((angle-self.base_angle) * self.scale_value_range / self.scale_angle_range + self.min))
return (angle-self.base_angle) * self.scale_value_range / self.scale_angle_range + self.min
def value_to_xy_tuple(self, value, radius):
return [math.cos(self.value_to_angle(value))*radius + self.radius, self.radius - math.sin(self.value_to_angle(value))*radius]
def xy_tuple_to_value(self, xy):
return self.angle_to_value(math.atan2(xy[1], xy[0])%(math.pi*2))
def set_value(self, value):
if value<self.min:
value = self.min
if value>self.max:
value = self.max
self.value = value
angle = self.value_to_angle(value)
xy = self.value_to_xy_tuple(value, self.radius-10)
self.arrow.attributes['transform'] = "translate(%s,%s) rotate(%s)" % (xy[0], xy[1], math.degrees(-angle))
self.set_value_preview(value)
def set_value_preview(self, value):
if value<self.min:
value = self.min
if value>self.max:
value = self.max
angle = self.value_to_angle(value)
xy = self.value_to_xy_tuple(value, self.radius-10)
self.arrow_preview.attributes['transform'] = "translate(%s,%s) rotate(%s)" % (xy[0], xy[1], math.degrees(-angle))
def onmousedown(self, widget, x, y):
value = self.xy_tuple_to_value([float(x)-self.radius, -(float(y)-self.radius)])
self.set_value(value)
def onmousemove(self, widget, x, y):
value = self.xy_tuple_to_value([float(x)-self.radius, -(float(y)-self.radius)])
self.set_value_preview(value)
class MyApp(App):
def main(self, name='world'):
self.wid = gui.VBox(margin='0px auto') #margin 0px auto to center the screen
self.gauge = InputGauge(200, 200, 1000, 10000)
self.wid.append(self.gauge)
# returning the root widget
return self.wid
if __name__ == "__main__":
start(MyApp)
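# Editorial sketch (not part of the original example): the value<->angle
# mapping used by Gauge, isolated with the same constants the InputGauge in
# MyApp uses (_min=1000, _max=10000); the probe value is arbitrary.
def _gauge_mapping_sketch(value, _min=1000, _max=10000):
    import math
    scale_angle_range = math.pi * 2 - 1.0
    scale_value_range = _max - _min
    base_angle = 0
    angle = base_angle + (value - _min) * scale_angle_range / scale_value_range
    back = (angle - base_angle) * scale_value_range / scale_angle_range + _min
    return angle, back  # 'back' round-trips to the original value
# example: _gauge_mapping_sketch(4000)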
| 38.758824
| 137
| 0.642586
|
dd094cc9bc14b7d0f2231591ff9f922575e9d1c0
| 1,465
|
py
|
Python
|
anthill/admin/options.py
|
anthill-platform/anthill-admin
|
0715e538a2bd4728fa6b0199d794149f2c92d852
|
[
"MIT"
] | null | null | null |
anthill/admin/options.py
|
anthill-platform/anthill-admin
|
0715e538a2bd4728fa6b0199d794149f2c92d852
|
[
"MIT"
] | null | null | null |
anthill/admin/options.py
|
anthill-platform/anthill-admin
|
0715e538a2bd4728fa6b0199d794149f2c92d852
|
[
"MIT"
] | null | null | null |
from anthill.common.options import define
# Main
define("host",
default="http://localhost:9500",
help="Public hostname of this service",
type=str)
define("listen",
default="port:9500",
help="Socket to listen. Could be a port number (port:N), or a unix domain socket (unix:PATH)",
type=str)
define("name",
default="admin",
help="Service short name. User to discover by discovery service.",
type=str)
# Regular cache
define("cache_host",
default="127.0.0.1",
help="Location of a regular cache (redis).",
group="cache",
type=str)
define("cache_port",
default=6379,
help="Port of regular cache (redis).",
group="cache",
type=int)
define("cache_db",
default=4,
help="Database of regular cache (redis).",
group="cache",
type=int)
define("cache_max_connections",
default=500,
help="Maximum connections to the regular cache (connection pool).",
group="cache",
type=int)
# MySQL database
define("db_host",
default="127.0.0.1",
type=str,
help="MySQL database location")
define("db_username",
default="root",
type=str,
help="MySQL account username")
define("db_password",
default="",
type=str,
help="MySQL account password")
define("db_name",
default="dev_admin",
type=str,
help="MySQL database name")
| 21.544118
| 101
| 0.59727
|
9e5c2511a9b537220149fabc7105e5dd16ee77c5
| 2,967
|
py
|
Python
|
mediabrowser.py
|
nickw444/MediaBrowser
|
38c47d42d2e59faab8ced5c86565d0beb1a30615
|
[
"MIT"
] | null | null | null |
mediabrowser.py
|
nickw444/MediaBrowser
|
38c47d42d2e59faab8ced5c86565d0beb1a30615
|
[
"MIT"
] | null | null | null |
mediabrowser.py
|
nickw444/MediaBrowser
|
38c47d42d2e59faab8ced5c86565d0beb1a30615
|
[
"MIT"
] | null | null | null |
import os
from flask import Flask, render_template, send_file, request, after_this_request, redirect, url_for, safe_join
import re
from config import MAX_FOLDER_DL_SIZE_BYTES, IGNORE_FILES, ROOT_PATHS
app = Flask(__name__)
def get_size(start_path):
total_size = 0
for dirpath, dirnames, filenames in os.walk(start_path):
for f in filenames:
fp = os.path.join(dirpath, f)
total_size += os.path.getsize(fp)
return total_size
import zipfile
def zipdir(path, ziph):
# ziph is zipfile handle
for root, dirs, files in os.walk(path):
for file in files:
ziph.write(
os.path.join(root, file),
arcname=os.path.join(root.replace(path, ''), file)
)
@app.route('/')
def index():
return render_template('index.html', items=ROOT_PATHS)
@app.route('/<int:id>/<path:path>')
@app.route('/<int:id>/')
def browse(id, path=''):
path = path.replace('../', '')
real_path = safe_join(ROOT_PATHS[id].path, path)
items = {
'dirs': [],
'files': [],
}
if os.path.isfile(real_path):
# If it's a file, send it.
return send_file(real_path,
as_attachment=request.args.get('download'))
else:
if request.args.get('download'):
folder_size = get_size(real_path)
if folder_size > MAX_FOLDER_DL_SIZE_BYTES:
print("TOO LARGE YO")
return "Folder too large. Exceeds maximum dl of {} '\
'bytes".format(MAX_FOLDER_DL_SIZE_BYTES)
print("Request for DL")
zipfilename = 'static/zips/{}.zip'.format(
os.path.basename(os.path.dirname(real_path))
)
zipf = zipfile.ZipFile(zipfilename, 'w')
zipdir(real_path, zipf)
zipf.close()
@after_this_request
def after(r):
os.unlink(zipfilename)
print("Done!")
return r
return send_file(zipfilename,
attachment_filename=os.path.basename(os.path.dirname(real_path)))
return "DL"
else:
for f in os.listdir(real_path):
if not re.match(IGNORE_FILES, f):
if os.path.isdir(os.path.join(real_path, f)):
item = (f, os.path.join(path, f) + '/')
items['dirs'].append(item)
else:
item = (f, os.path.join(path, f))
items['files'].append(item)
return render_template('browse.html', id=id, items=items)
return "lel"
if __name__ == '__main__':
import sys
if len(sys.argv) > 1 and sys.argv[1] == 'meinheld':
from meinheld import server
server.listen(("0.0.0.0", 8080))
server.run(app)
else:
app.debug = True
app.run(host="0.0.0.0", port=8080)
| 30.90625
| 110
| 0.545332
|
3b0a715eb8f6b74a99c034019153fa8f732ba1db
| 714
|
py
|
Python
|
willie/modules/urbandictionary.py
|
Eltiech/meanwillie
|
85e97fa8f4577b4c8517a474a1e45639aa4e9b51
|
[
"EFL-2.0"
] | 1
|
2016-01-31T12:12:34.000Z
|
2016-01-31T12:12:34.000Z
|
willie/modules/urbandictionary.py
|
Eltiech/meanwillie
|
85e97fa8f4577b4c8517a474a1e45639aa4e9b51
|
[
"EFL-2.0"
] | null | null | null |
willie/modules/urbandictionary.py
|
Eltiech/meanwillie
|
85e97fa8f4577b4c8517a474a1e45639aa4e9b51
|
[
"EFL-2.0"
] | null | null | null |
"""
urbandictionary.py - look up a definition on UrbanDictionary
"""
from willie.module import commands, rate, NOLIMIT
from willie import web
import json
@commands('urbandictionary', 'urban', 'ud')
@rate(10)
def urbandictionary(bot, trigger):
"""Looks a word up on UrbanDictionary"""
word = trigger.group(2)
if not word:
bot.reply("Whats the matter, cat got your tongue?")
return NOLIMIT
data = json.loads(web.get('http://api.urbandictionary.com/v0/define?term=%s' % web.quote(word)))
defs = data.get('list', [])
if len(defs) > 0:
bot.reply('%s - %s' % (defs[0]['word'], defs[0]['definition']))
else:
bot.reply('Does that look like a word to you?')
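# Editorial sketch (not part of the original module): the same response
# handling as above, exercised offline with a canned payload. The field names
# ('list', 'word', 'definition') are taken from the code; the live API may
# return more fields.
def _format_first_definition(payload):
    data = json.loads(payload)
    defs = data.get('list', [])
    if len(defs) > 0:
        return '%s - %s' % (defs[0]['word'], defs[0]['definition'])
    return None
# example: _format_first_definition('{"list": [{"word": "yak", "definition": "a large ox"}]}')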
| 27.461538
| 100
| 0.640056
|
e15a520a083fd589b92e1b52fd55c6a6ae61c92b
| 379
|
py
|
Python
|
movie/migrations/0006_auto_20200924_1602.py
|
justnclrk/MCURanker
|
97f946e75bb911a2073955c1ea66402da839d2ef
|
[
"MIT"
] | null | null | null |
movie/migrations/0006_auto_20200924_1602.py
|
justnclrk/MCURanker
|
97f946e75bb911a2073955c1ea66402da839d2ef
|
[
"MIT"
] | 6
|
2020-09-24T04:35:08.000Z
|
2022-03-12T00:27:25.000Z
|
movie/migrations/0006_auto_20200924_1602.py
|
justnclrk/MCURanker
|
97f946e75bb911a2073955c1ea66402da839d2ef
|
[
"MIT"
] | null | null | null |
# Generated by Django 3.1.1 on 2020-09-24 21:02
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('movie', '0005_auto_20200924_1557'),
]
operations = [
migrations.AlterField(
model_name='movie',
name='release_date',
field=models.IntegerField(),
),
]
| 19.947368
| 47
| 0.593668
|
827f398b096c61bdcd6547b818fb6ddb487827f7
| 11,937
|
py
|
Python
|
hoomd/md/compute.py
|
NathanDavisBarrett/hoomd-blue
|
017923771396aaffc48578c56a71125aeaa8febc
|
[
"BSD-3-Clause"
] | null | null | null |
hoomd/md/compute.py
|
NathanDavisBarrett/hoomd-blue
|
017923771396aaffc48578c56a71125aeaa8febc
|
[
"BSD-3-Clause"
] | null | null | null |
hoomd/md/compute.py
|
NathanDavisBarrett/hoomd-blue
|
017923771396aaffc48578c56a71125aeaa8febc
|
[
"BSD-3-Clause"
] | null | null | null |
# Copyright (c) 2009-2021 The Regents of the University of Michigan
# This file is part of the HOOMD-blue project, released under the BSD 3-Clause
# License.
# Maintainer: joaander / All Developers are free to add commands for new
# features
"""Compute system properties."""
from hoomd.md import _md
from hoomd.operation import Compute
from hoomd.data.parameterdicts import ParameterDict
from hoomd.logging import log
import hoomd
class _Thermo(Compute):
def __init__(self, filter):
self._filter = filter
class ThermodynamicQuantities(_Thermo):
"""Compute thermodynamic properties of a group of particles.
Args:
filter (``hoomd.filter``): Particle filter to compute thermodynamic
properties for.
:py:class:`ThermodynamicQuantities` acts on a given group of particles and
calculates thermodynamic properties of those particles when requested. All
specified :py:class:`ThermodynamicQuantities` objects can be added to a
logger for logging during a simulation, see :py:class:`hoomd.logging.Logger`
for more details.
Examples::
f = filter.Type('A')
compute.ThermodynamicQuantities(filter=f)
"""
def __init__(self, filter):
super().__init__(filter)
def _attach(self):
if isinstance(self._simulation.device, hoomd.device.CPU):
thermo_cls = _md.ComputeThermo
else:
thermo_cls = _md.ComputeThermoGPU
group = self._simulation.state._get_group(self._filter)
self._cpp_obj = thermo_cls(self._simulation.state._cpp_sys_def, group)
super()._attach()
@log(requires_run=True)
def kinetic_temperature(self):
r""":math:`kT_k`, instantaneous thermal energy of the group [energy].
Calculated as:
.. math::
kT_k = 2 \cdot \frac{K}{N_{\mathrm{dof}}}
"""
self._cpp_obj.compute(self._simulation.timestep)
return self._cpp_obj.kinetic_temperature
@log(requires_run=True)
def pressure(self):
r""":math:`P`, instantaneous pressure of the group (in pressure units).
Calculated as:
.. math::
P = \frac{ 2 \cdot K_{\mathrm{trans}} + W }{D \cdot V},
where :math:`D` is the dimensionality of the system, :math:`V` is the
total volume of the simulation box (or area in 2D), and :math:`W` is
calculated as:
.. math::
W = \frac{1}{2} \sum_{i \in \mathrm{filter}} \sum_{j}
\vec{F}_{ij} \cdot \vec{r_{ij}} + \sum_{k} \vec{F}_{k} \cdot
\vec{r_{k}},
where :math:`i` and :math:`j` are particle tags, :math:`\vec{F}_{ij}`
are pairwise forces between particles and :math:`\vec{F}_k` are forces
due to explicit constraints, implicit rigid body constraints, external
walls, and fields.
"""
self._cpp_obj.compute(self._simulation.timestep)
return self._cpp_obj.pressure
@log(category='sequence', requires_run=True)
def pressure_tensor(self):
r"""Instantaneous pressure tensor of the group [pressure].
(:math:`P_{xx}`, :math:`P_{xy}`, :math:`P_{xz}`, :math:`P_{yy}`,
        :math:`P_{yz}`, :math:`P_{zz}`). Calculated as:
.. math::
P_{ij} = \left[ \sum_{k \in \mathrm{filter}} m_k v_{k,i}
v_{k,j} + \sum_{k \in \mathrm{filter}} \sum_{l} \frac{1}{2}
\left(\vec{r}_{kl,i} \vec{F}_{kl,j} + \vec{r}_{kl,j}
\vec{F}_{kl, i} \right) \right]/V
where :math:`V` is the total simulation box volume (or area in 2D).
"""
self._cpp_obj.compute(self._simulation.timestep)
return self._cpp_obj.pressure_tensor
@log(requires_run=True)
def kinetic_energy(self):
r""":math:`K`, total kinetic energy of particles in the group [energy].
.. math::
K = K_{\mathrm{rot}} + K_{\mathrm{trans}}
"""
self._cpp_obj.compute(self._simulation.timestep)
return self._cpp_obj.kinetic_energy
@log(requires_run=True)
def translational_kinetic_energy(self):
r""":math:`K_{\mathrm{trans}}`.
Translational kinetic energy of all particles in the group [energy].
.. math::
K_{\mathrm{trans}} = \frac{1}{2}\sum_{i \in \mathrm{filter}}
m_i|\vec{v}_i|^2
"""
self._cpp_obj.compute(self._simulation.timestep)
return self._cpp_obj.translational_kinetic_energy
@log(requires_run=True)
def rotational_kinetic_energy(self):
r""":math:`K_{\mathrm{rot}}`.
Rotational kinetic energy of all particles in the group [energy].
Calculated as:
.. math::
K_{\mathrm{rot}} = \frac{1}{2} \sum_{i \in \mathrm{filter}}
\frac{L_{x,i}^2}{I_{x,i}} + \frac{L_{y,i}^2}{I_{y,i}} +
\frac{L_{z,i}^2}{I_{z,i}},
where :math:`I` is the moment of inertia and :math:`L` is the angular
momentum in the (diagonal) reference frame of the particle.
"""
self._cpp_obj.compute(self._simulation.timestep)
return self._cpp_obj.rotational_kinetic_energy
@log(requires_run=True)
def potential_energy(self):
r""":math:`U`.
Potential energy that the group contributes to the entire system
[energy].
The potential energy is calculated as a sum of per-particle energy
contributions:
.. math::
U = \sum_{i \in \mathrm{filter}} U_i,
where :math:`U_i` is defined as:
.. math::
U_i = U_{\mathrm{pair}, i} + U_{\mathrm{bond}, i} +
U_{\mathrm{angle}, i} + U_{\mathrm{dihedral}, i} +
U_{\mathrm{improper}, i} + U_{\mathrm{external}, i} +
U_{\mathrm{other}, i}
and each term on the RHS is calculated as:
.. math::
U_{\mathrm{pair}, i} &= \frac{1}{2} \sum_j V_{\mathrm{pair}, ij}
U_{\mathrm{bond}, i} &= \frac{1}{2} \sum_{(j, k) \in
\mathrm{bonds}} V_{\mathrm{bond}, jk}
U_{\mathrm{angle}, i} &= \frac{1}{3} \sum_{(j, k, l) \in
\mathrm{angles}} V_{\mathrm{angle}, jkl}
U_{\mathrm{dihedral}, i} &= \frac{1}{4} \sum_{(j, k, l, m) \in
\mathrm{dihedrals}} V_{\mathrm{dihedral}, jklm}
U_{\mathrm{improper}, i} &= \frac{1}{4} \sum_{(j, k, l, m) \in
\mathrm{impropers}} V_{\mathrm{improper}, jklm}
In each summation above, the indices go over all particles and we only
use terms where one of the summation indices (:math:`j`, :math:`k`,
:math:`l`, or :math:`m`) is equal to :math:`i`. External and other
potentials are summed similar to the other terms using per-particle
contributions.
"""
self._cpp_obj.compute(self._simulation.timestep)
return self._cpp_obj.potential_energy
@log(requires_run=True)
def degrees_of_freedom(self):
r""":math:`N_{\mathrm{dof}}`.
Number of degrees of freedom given to the group by its integration
method.
Calculated as:
.. math::
N_{\mathrm{dof}} = N_{\mathrm{dof, trans}}
+ N_{\mathrm{dof, rot}}
"""
return self._cpp_obj.degrees_of_freedom
@log(requires_run=True)
def translational_degrees_of_freedom(self):
r""":math:`N_{\mathrm{dof, trans}}`.
Number of translational degrees of freedom given to the group by its
integration method.
When using a single integration method that is momentum conserving and
operates on all particles,
:math:`N_{\mathrm{dof, trans}} = DN - D - N_{constraints}`, where
:math:`D` is the dimensionality of the system.
Note:
The removal of :math:`D` degrees of freedom accounts for the fixed
center of mass in using periodic boundary conditions. When the
*filter* in :py:class:`ThermodynamicQuantities` selects a subset
of all particles, the removed degrees of freedom are spread
proportionately.
"""
return self._cpp_obj.translational_degrees_of_freedom
@log(requires_run=True)
def rotational_degrees_of_freedom(self):
r""":math:`N_{\mathrm{dof, rot}}`.
Number of rotational degrees of freedom given to the group by its
integration method.
"""
return self._cpp_obj.rotational_degrees_of_freedom
@log(requires_run=True)
def num_particles(self):
""":math:`N`, number of particles in the group."""
return self._cpp_obj.num_particles
@log
def volume(self):
""":math:`V`, volume of the simulation box (area in 2D)."""
if self._attached:
return self._cpp_obj.volume
else:
return None
class HarmonicAveragedThermodynamicQuantities(Compute):
"""Compute harmonic averaged thermodynamic properties of particles.
Args:
filter (``hoomd.filter``): Particle filter to compute thermodynamic
properties for.
kT (float): Temperature of the system.
harmonic_pressure (float): Harmonic contribution to the pressure.
            If omitted, the HMA pressure can still be computed, but will be
similar in precision to the conventional pressure.
:py:class:`HarmonicAveragedThermodynamicQuantities` acts on a given group
of particles and calculates harmonically mapped average (HMA) properties
of those particles when requested. HMA computes properties more precisely
(with less variance) for atomic crystals in NVT simulations. The presence
    of diffusion (vacancy hopping, etc.) will prevent HMA from providing
improvement. HMA tracks displacements from the lattice positions, which
are saved either during first call to `Simulation.run` or when the compute
is first added to the simulation, whichever occurs last.
Note:
`HarmonicAveragedThermodynamicQuantities` is an implementation of the
methods section of Sabry G. Moustafa, Andrew J. Schultz, and David A.
Kofke. (2015). "Very fast averaging of thermal properties of crystals
by molecular simulation". Phys. Rev. E 92, 043303
doi:10.1103/PhysRevE.92.043303
Examples::
hma = hoomd.compute.HarmonicAveragedThermodynamicQuantities(
filter=hoomd.filter.Type('A'), kT=1.0)
Attributes:
filter (hoomd.filter.ParticleFilter): Subset of particles compute
thermodynamic properties for.
kT (hoomd.variant.Variant): Temperature of the system.
harmonic_pressure (float): Harmonic contribution to the pressure.
"""
def __init__(self, filter, kT, harmonic_pressure=0):
# store metadata
param_dict = ParameterDict(kT=float(kT),
harmonic_pressure=float(harmonic_pressure))
# set defaults
self._param_dict.update(param_dict)
self._filter = filter
# initialize base class
super().__init__()
def _attach(self):
if isinstance(self._simulation.device, hoomd.device.CPU):
thermoHMA_cls = _md.ComputeThermoHMA
else:
thermoHMA_cls = _md.ComputeThermoHMAGPU
group = self._simulation.state._get_group(self._filter)
self._cpp_obj = thermoHMA_cls(self._simulation.state._cpp_sys_def,
group, self.kT, self.harmonic_pressure)
super()._attach()
@log(requires_run=True)
def potential_energy(self):
"""Average potential energy [energy]."""
self._cpp_obj.compute(self._simulation.timestep)
return self._cpp_obj.potential_energy
@log(requires_run=True)
def pressure(self):
"""Average pressure [pressure]."""
self._cpp_obj.compute(self._simulation.timestep)
return self._cpp_obj.pressure
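# A minimal logging sketch (assumes an already-configured `hoomd.Simulation`
# named `sim`; the names below are illustrative and not defined in this file):
#
#     thermo = ThermodynamicQuantities(filter=hoomd.filter.All())
#     sim.operations.computes.append(thermo)
#     logger = hoomd.logging.Logger()
#     logger.add(thermo, quantities=['kinetic_temperature', 'pressure'])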
| 34.5
| 80
| 0.623021
|
8ed750a5d194c95804f4c047198f7986107f4988
| 1,424
|
py
|
Python
|
quadruped/quadruped/interpolation.py
|
Sdelpeuch/IntroductionRobotique
|
64b088b9c164ab78a0154ed995557201c9d88c9d
|
[
"MIT"
] | null | null | null |
quadruped/quadruped/interpolation.py
|
Sdelpeuch/IntroductionRobotique
|
64b088b9c164ab78a0154ed995557201c9d88c9d
|
[
"MIT"
] | null | null | null |
quadruped/quadruped/interpolation.py
|
Sdelpeuch/IntroductionRobotique
|
64b088b9c164ab78a0154ed995557201c9d88c9d
|
[
"MIT"
] | null | null | null |
import numpy as np
import matplotlib.pyplot as plt
class LinearSpline:
def __init__(self, entry=None):
self.entry = []
        if entry is not None:
self.entry = entry
def add_entry(self, t, x):
self.entry.append((t, x))
def interpolate(self, t):
x = 0
for i in range( len(self.entry) -1 ):
(ti1, xi1) = self.entry[i]
(ti2, xi2) = self.entry[i+1]
if t <= ti2 and t >= ti1:
x = xi1 + (t-ti1)*(xi2-xi1)/(ti2-ti1)
return x
return x
class LinearSpline3D:
def __init__(self):
self.xentry = []
self.yentry = []
self.zentry = []
def add_entry(self, t, x, y ,z):
self.xentry.append((t, x))
self.yentry.append((t, y))
self.zentry.append((t, z))
def interpolate(self, t):
xLinear = LinearSpline(self.xentry)
yLinear = LinearSpline(self.yentry)
zLinear = LinearSpline(self.zentry)
return xLinear.interpolate(t), yLinear.interpolate(t), zLinear.interpolate(t)
if __name__ == "__main__":
spline = LinearSpline()
spline.add_entry(0., 0.)
spline.add_entry(0.5, 0.2)
spline.add_entry(1.5, -0.4)
spline.add_entry(2.3, 0.6)
spline.add_entry(3, 0)
xs = np.arange(-0.1, 4, 0.1)
ys = []
for x in xs:
ys.append(spline.interpolate(x))
plt.plot(xs, ys)
plt.show()
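    # A similar sketch for LinearSpline3D (the sample points are illustrative):
    #
    #   spline3d = LinearSpline3D()
    #   spline3d.add_entry(0.0, 0.0, 0.0, 0.0)
    #   spline3d.add_entry(1.0, 0.1, -0.2, 0.3)
    #   x, y, z = spline3d.interpolate(0.5)  # each axis interpolated independently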
| 24.551724
| 85
| 0.547753
|
bffe6a65771bacba80c10945aa26333375e55e9f
| 2,052
|
py
|
Python
|
main.py
|
Nyathi-Code/Pong-game
|
164e879ade02c3c255b1baced042c7cf2794265a
|
[
"MIT"
] | 1
|
2020-07-01T10:01:33.000Z
|
2020-07-01T10:01:33.000Z
|
main.py
|
Nyathi-Code/Pong-game
|
164e879ade02c3c255b1baced042c7cf2794265a
|
[
"MIT"
] | null | null | null |
main.py
|
Nyathi-Code/Pong-game
|
164e879ade02c3c255b1baced042c7cf2794265a
|
[
"MIT"
] | null | null | null |
import kivy
from kivy.app import App
from kivy.uix.widget import Widget
from kivy.properties import NumericProperty, ReferenceListProperty,ObjectProperty
from kivy.vector import Vector
from kivy.clock import Clock
from kivy.core.window import Window
class Game_Screen(Widget):
Window.size = (300, 500)
ball = ObjectProperty(None)
player_one = ObjectProperty(None)
player_two = ObjectProperty(None)
def start_ball(self, vel=(4, 0)):
self.ball.center = self.center
self.ball.velocity = vel
def update(self, dt):
self.ball.move()
self.player_one.ball_bounces(self.ball)
self.player_two.ball_bounces(self.ball)
if (self.ball.y < self.y) or (self.ball.top > self.top):
self.ball.velocity_y *= -1
if self.ball.x < self.x:
self.player_two.score += 1
self.start_ball(vel=(4, 0))
if self.ball.x > self.width:
self.player_one.score += 1
self.start_ball(vel=(-4, 0))
def on_touch_move(self, touch):
if touch.x < self.width / 3:
self.player_one.center_y = touch.y
if touch.x > self.width - self.width / 3:
self.player_two.center_y = touch.y
class Game_Paddles(Widget):
score = NumericProperty(0)
def ball_bounces(self, ball):
if self.collide_widget(ball):
vx, vy = ball.velocity
offset = (ball.center_y - self.center_y) / (self.height/2)
bounced = Vector(-1 * vx, vy)
vel = bounced * 1
ball.velocity = vel.x, vel.y + offset
class PongBall(Widget):
velocity_x = NumericProperty(0)
velocity_y = NumericProperty(0)
velocity = ReferenceListProperty(velocity_x, velocity_y)
def move(self):
self.pos = Vector(*self.velocity) + self.pos
class My_App(App):
def build(self):
game = Game_Screen()
game.start_ball()
Clock.schedule_interval(game.update, 1.0 / 57.0)
return game
if __name__ == "__main__":
My_App().run()
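# Note: the ball/player_one/player_two ObjectProperty fields above are expected
# to be wired up by a matching Kivy .kv layout file (not included in this
# record) that lays out the PongBall and Game_Paddles widgets.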
| 25.65
| 81
| 0.623294
|
da794bc14d641582630d4d6797f2ddb035ad71c8
| 1,764
|
py
|
Python
|
unweaver/graphs/digraphgpkg/outer_adjlists/outer_adjlist_view.py
|
jsbeckwith/unweaver
|
a4ba9e4e288c75e93bf7f9d67bc11680f09c3da0
|
[
"Apache-2.0"
] | 4
|
2019-04-24T16:38:57.000Z
|
2021-12-28T20:38:08.000Z
|
unweaver/graphs/digraphgpkg/outer_adjlists/outer_adjlist_view.py
|
jsbeckwith/unweaver
|
a4ba9e4e288c75e93bf7f9d67bc11680f09c3da0
|
[
"Apache-2.0"
] | 3
|
2021-06-02T04:06:33.000Z
|
2021-11-02T01:47:20.000Z
|
unweaver/graphs/digraphgpkg/outer_adjlists/outer_adjlist_view.py
|
jsbeckwith/unweaver
|
a4ba9e4e288c75e93bf7f9d67bc11680f09c3da0
|
[
"Apache-2.0"
] | 1
|
2020-08-13T04:42:05.000Z
|
2020-08-13T04:42:05.000Z
|
"""GeoPackage adapter for immutable networkx outer adjascency list mapping."""
from __future__ import annotations
from collections.abc import Mapping
from typing import AbstractSet, Iterator, Tuple, Type, TYPE_CHECKING
from unweaver.network_adapters import GeoPackageNetwork
from ..inner_adjlists import InnerSuccessorsView
if TYPE_CHECKING:
from ..inner_adjlists import InnerAdjlistView
class OuterAdjlistView(Mapping):
inner_adjlist_factory = InnerSuccessorsView # type: Type[InnerAdjlistView]
iterator_str = "predecessor_nodes"
size_str = "unique_predecessors"
def __init__(self, _network: GeoPackageNetwork):
self.network = _network
self.inner_adjlist_factory = self.inner_adjlist_factory
self.iterator = getattr(self.network.edges, self.iterator_str)
self.size = getattr(self.network.edges, self.size_str)
def __getitem__(self, key: str) -> InnerAdjlistView:
return self.inner_adjlist_factory(self.network, key)
def __iter__(self) -> Iterator[str]:
# This method is overridden to avoid two round trips to the database.
return self.iterator()
def __len__(self) -> int:
return self.size()
def items(self) -> AbstractSet[Tuple[str, InnerAdjlistView]]:
# This method is overridden to avoid two round trips to the database.
return {
(n, self.inner_adjlist_factory(_network=self.network, _n=n))
for n in self.iterator()
}
def __contains__(self, key: object) -> bool:
# This method is overridden because __getitem__ doesn't initially
# check for a key's presence.
# FIXME: should __getitem__ initially check for a key's presence?
return self.network.has_node(str(key))
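# A usage sketch (assumes an existing GeoPackageNetwork instance `network`;
# the node id below is illustrative):
#
#     adj = OuterAdjlistView(network)
#     len(adj)               # one query via `unique_predecessors`
#     inner = adj['node-1']  # InnerSuccessorsView over that node
#     'node-1' in adj        # delegates to network.has_node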
| 37.531915
| 79
| 0.71542
|
bc9ee594db282891d2ac16747ec01f63642fa79d
| 1,767
|
py
|
Python
|
pkg/codegen/internal/test/testdata/nested-module-thirdparty/python/setup.py
|
suryatmodulus/pulumi
|
a318b4bd39f8c832abf992b9aeb4416bd3c02fb4
|
[
"Apache-2.0"
] | null | null | null |
pkg/codegen/internal/test/testdata/nested-module-thirdparty/python/setup.py
|
suryatmodulus/pulumi
|
a318b4bd39f8c832abf992b9aeb4416bd3c02fb4
|
[
"Apache-2.0"
] | 15
|
2021-07-24T02:43:00.000Z
|
2022-03-31T11:21:33.000Z
|
pkg/codegen/internal/test/testdata/nested-module-thirdparty/python/setup.py
|
suryatmodulus/pulumi
|
a318b4bd39f8c832abf992b9aeb4416bd3c02fb4
|
[
"Apache-2.0"
] | null | null | null |
# coding=utf-8
# *** WARNING: this file was generated by test. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import errno
from setuptools import setup, find_packages
from setuptools.command.install import install
from subprocess import check_call
VERSION = "0.0.0"
PLUGIN_VERSION = "0.0.0"
class InstallPluginCommand(install):
def run(self):
install.run(self)
try:
check_call(['pulumi', 'plugin', 'install', 'resource', 'foo-bar', PLUGIN_VERSION])
except OSError as error:
if error.errno == errno.ENOENT:
print(f"""
There was an error installing the foo-bar resource provider plugin.
It looks like `pulumi` is not installed on your system.
Please visit https://pulumi.com/ to install the Pulumi CLI.
You may try manually installing the plugin by running
`pulumi plugin install resource foo-bar {PLUGIN_VERSION}`
""")
else:
raise
def readme():
try:
with open('README.md', encoding='utf-8') as f:
return f.read()
except FileNotFoundError:
return "foo-bar Pulumi Package - Development Version"
setup(name='foo_bar',
version=VERSION,
long_description=readme(),
long_description_content_type='text/markdown',
cmdclass={
'install': InstallPluginCommand,
},
packages=find_packages(),
package_data={
'foo_bar': [
'py.typed',
'pulumiplugin.json',
]
},
install_requires=[
'parver>=0.2.1',
'pulumi>=3.0.0,<4.0.0',
'semver>=2.8.1'
],
zip_safe=False)
| 29.45
| 94
| 0.580645
|
d8a8e9011de98234082a4511975d27cde8f0c43f
| 1,957
|
py
|
Python
|
testCases/calendar/full_calendar/desktop/TC_101_007_calendar_monthly_view_test.py
|
harry-100/qa-automation-framework
|
5fbe03e930820537e53f2d26b1c2b2bd2b222bf5
|
[
"MIT"
] | null | null | null |
testCases/calendar/full_calendar/desktop/TC_101_007_calendar_monthly_view_test.py
|
harry-100/qa-automation-framework
|
5fbe03e930820537e53f2d26b1c2b2bd2b222bf5
|
[
"MIT"
] | null | null | null |
testCases/calendar/full_calendar/desktop/TC_101_007_calendar_monthly_view_test.py
|
harry-100/qa-automation-framework
|
5fbe03e930820537e53f2d26b1c2b2bd2b222bf5
|
[
"MIT"
] | null | null | null |
import pytest
from datetime import (
timedelta,
date
)
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.support.ui import WebDriverWait
from time import sleep
# this test checks the functionality of monthly view of Calendar
@pytest.mark.usefixtures("one_time_setup")
class Test_TC101_007_CalendarMonthlyView:
@pytest.fixture(autouse=True)
def class_setup(self, one_time_setup):
self.driver.maximize_window()
self.logIn()
self.wait = WebDriverWait(self.driver, 10)
def test_calender_month_view(self):
self.log.info("starting test {}...".format(__name__))
self.wait.until(EC.title_is("Calendar"))
sleep(0.5)
self.calendar_page_obj.clk_calendar_month_view()
sleep(1)
month_view_info = self.calendar_page_obj.desktop_calendar_month_info()
today_date = date.today()
today_day = today_date.day
first_date = today_date - timedelta(days=today_day - 1)
first_date_weekday = first_date.weekday()
if first_date_weekday == 6:
N = 0
else:
N = first_date_weekday + 1
start_date = first_date - timedelta(days=N)
end_date = start_date + timedelta(days=41)
start_date_format = start_date.strftime("%b %-d")
start_year = start_date.strftime("%Y")
end_date_format = end_date.strftime("%b %-d")
end_year = end_date.strftime("%Y")
exp_date_format = (
start_date_format + ", " + start_year + " - " + end_date_format + ", " + end_year
)
if start_year == end_year:
exp_date_format = start_date_format + " - " + end_date_format + ", " + end_year
else:
exp_date_format = (
start_date_format + ", " + start_year + " - " + end_date_format + ", " + end_year
)
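        # e.g. for 2020-09-24: first_date is Tue 2020-09-01 (weekday 1), so
        # N = 2 and the six-week grid runs Sun 2020-08-30 through Sat 2020-10-10.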
if exp_date_format == month_view_info:
self.log.info("{} passed!".format(__name__))
assert True
else:
self.driver.save_screenshot(
self.pathScreenShot + "Test_TC101_007_CalendarMonthlyView " + self.dateFormat + ".png"
)
self.log.info("{} failed!".format(__name__))
assert False
| 32.081967
| 90
| 0.723556
|
b22c2013ec8fb6da69897b2400f4229ebbf38fa5
| 2,147
|
py
|
Python
|
classwork/11_18_2020.py
|
Katsute/Baruch-CIS-2300-Assignments
|
ea374ed1cb229f5e598863ba1777be5f47eaab9d
|
[
"CC0-1.0"
] | null | null | null |
classwork/11_18_2020.py
|
Katsute/Baruch-CIS-2300-Assignments
|
ea374ed1cb229f5e598863ba1777be5f47eaab9d
|
[
"CC0-1.0"
] | null | null | null |
classwork/11_18_2020.py
|
Katsute/Baruch-CIS-2300-Assignments
|
ea374ed1cb229f5e598863ba1777be5f47eaab9d
|
[
"CC0-1.0"
] | 1
|
2022-01-12T18:17:52.000Z
|
2022-01-12T18:17:52.000Z
|
# substring
print("substring"[0:3]) # same as Java, last index is not included
print("substring"[-6:]) # substring start relative to end, unset is len
print("substring"[:-6]) # substring end relative to end, unset is first (0)
print("substring"[0]) # string index
print("0123456789"[0:10:2]) # third value is step value
# contains (in)
text ="What is CFA, CPA, and ACCA?"
print("Not found" if "what" in text else "It is not found") # contains = in
print("Not found" if "CPC" not in text else ("It is not found"))
# logical
print(text.isalnum()) # is alphanumeric (no whitespace)
print(text.isalpha()) # is all alphabetic (no whitespace)
print(text.isdigit()) # is all number (no whitespace)
print(text.islower()) # is all lowercase
print(text.isupper()) # is all uppercase
print(text.isspace())           # is all whitespace (\n, \t)
# char loop
text = "\tabcdefghijklmnopqrstuvwxyz\n"
for c in text: # char loop
char = c # if any can be achieved by checking each char
# string modification
# (similar to Java, strings are immutable, method creates new string)
print(text.lower()) # to lower
print(text.upper()) # to upper
print(text.strip().capitalize())# capitalize first char only
# strip to remove leading/trailing, this does not remove all
# the string is essentially an array of characters to be considered for removal
print(text.lstrip()) # strip leading whitespace
print(text.lstrip("abc\t")) # strip leading chars
print(text.rstrip()) # strip trailing whitespace
print(text.rstrip("\nxyz")) # strip trailing chars
print(text.strip()) # strip leading and trailing whitespace
print(text.strip("\n\tabcxyz")) # strip leading and trailing chars
# full string modification
print(text.endswith("abc")) # ends with
print(text.startswith("abc")) # starts with
print(text.find("abc")) # index of (first), -1 means not found
print(text.count("a")) # count
print(text.replace("\t", "")) # replace
# string repetition
print("c" * 10)
for c in range(1, 10):
print("*" * c)
# string split
print("11/18/2020".split('/'))
| 39.036364
| 80
| 0.669772
|
4992127e1665af86b00f9492086e024235dba926
| 1,835
|
py
|
Python
|
ch1/code.py
|
zachlindsey/adafruit_lib_guide
|
ff4a57c90ff4ea92d8a9b916cf15b67c0bbe4cbf
|
[
"MIT"
] | null | null | null |
ch1/code.py
|
zachlindsey/adafruit_lib_guide
|
ff4a57c90ff4ea92d8a9b916cf15b67c0bbe4cbf
|
[
"MIT"
] | null | null | null |
ch1/code.py
|
zachlindsey/adafruit_lib_guide
|
ff4a57c90ff4ea92d8a9b916cf15b67c0bbe4cbf
|
[
"MIT"
] | null | null | null |
from adafruit_clue import clue
import displayio
import vectorio
import time
display = clue.display
def show_display_info(display):
display.show(None)
print("DISPLAY INFO")
print('height:', display.height)
print('width:', display.width)
print('rotation (deg)', display.rotation)
print('auto_refresh:', display.auto_refresh)
print('brightness:', display.brightness)
print('auto_brightness:', display.auto_brightness)
print('bus_object:', display.bus)
# show_display_info(display)
circle_palette = displayio.Palette(color_count = 2)
circle_palette[0] = 0xffffff
circle_palette[1] = 0xff0000
circle_palette.make_transparent(0)
RADIUS = 30
circle = vectorio.VectorShape(
shape = vectorio.Circle(RADIUS),
pixel_shader = circle_palette,
x = 120,
y = 120
)
rect_palette = displayio.Palette(color_count = 2)
rect_palette[0] = 0xffffff
rect_palette[1] = 0x00ff00
rect_palette.make_transparent(0)
HEIGHT = 120
WIDTH = 60
rectangle = vectorio.VectorShape(
shape = vectorio.Rectangle(WIDTH, HEIGHT),
pixel_shader = rect_palette,
x = 120,
y = 120
)
poly_palette = displayio.Palette(color_count = 2)
poly_palette[0] = 0xffffff
poly_palette[1] = 0x0000ff
poly_palette.make_transparent(0)
points = [
(30,30),
(120, 120),
(120, 30),
(30, 120)
]
polygon = vectorio.VectorShape(
shape = vectorio.Polygon(points),
pixel_shader = poly_palette,
x = 0,
y = 0
)
group = displayio.Group()
group.append(rectangle)
group.append(circle)
group.append(polygon)
dx = 7
dy = 3
display.show(group)
while True:
circle.x += dx
if circle.x > 240+RADIUS:
circle.x -= 240+2*RADIUS
circle.y += dy
if circle.y > 240+RADIUS:
circle.y -= 240+2*RADIUS
rectangle.x -= dy
if rectangle.x < -WIDTH:
rectangle.x += 240+WIDTH
rectangle.y += dx
if rectangle.y > 240:
rectangle.y -= 240 + HEIGHT
time.sleep(0.1)
| 16.834862
| 51
| 0.720981
|
91cb0872934af0e9dfb92bd916445791563b9d4d
| 11,830
|
py
|
Python
|
nes-testsuite/roms/holy_diver_batman/source/tools/make_roms.py
|
lpil/pinky
|
1a25cf2cb1568aef5444a84c48cea76489045f04
|
[
"Apache-2.0"
] | 719
|
2016-10-23T18:26:27.000Z
|
2022-03-20T00:26:35.000Z
|
nes-testsuite/roms/holy_diver_batman/source/tools/make_roms.py
|
lpil/pinky
|
1a25cf2cb1568aef5444a84c48cea76489045f04
|
[
"Apache-2.0"
] | 17
|
2016-10-30T12:46:35.000Z
|
2020-01-31T11:34:38.000Z
|
nes-testsuite/roms/holy_diver_batman/source/tools/make_roms.py
|
lpil/pinky
|
1a25cf2cb1568aef5444a84c48cea76489045f04
|
[
"Apache-2.0"
] | 47
|
2016-10-24T00:30:56.000Z
|
2022-02-10T15:19:41.000Z
|
#!/usr/bin/env python
from __future__ import with_statement, division
import array
INES_MIRRH = 0
INES_MIRRV = 1
INES_MIRR4 = 8
INES_NTSC = 0
INES_PAL = 1
INES_DUAL_TVSYSTEM = 2
MAPPER_NROM = 0
MAPPER_MMC1 = 1
MAPPER_UNROM = 2
MAPPER_CNROM = 3
MAPPER_MMC3 = 4
MAPPER_MMC5 = 5
MAPPER_AOROM = 7
MAPPER_MMC2 = 9
MAPPER_MMC4 = 10
MAPPER_CPROM = 13
MAPPER_A53 = 28
MAPPER_BNROM = 34
MAPPER_GNROM = 66
MAPPER_FME7 = 69
MAPPER_HOLYDIVER = (78, 3)
MAPPER_MMC3_TLSROM = 118
MAPPER_MMC3_TQROM = 119
MAPPER_UNROM_CRAZY = 180
MAPPER_ACTIVE = 228
def log2(value):
if value < 1:
return -1
logvalue = 0
while value >= 0x1000000:
value >>= 24
logvalue += 24
if value >= 0x10000:
value >>= 16
logvalue += 16
if value >= 0x100:
value >>= 8
logvalue += 8
if value >= 0x10:
value >>= 4
logvalue += 4
if value >= 4:
value >>= 2
logvalue += 2
return logvalue + (value >> 1)
def make_nes2_ramsize(size):
if not 0 <= size <= 1048576:
        raise ValueError("RAM size must be 0 to 1048576 bytes, not %d" % size)
if size == 0:
return 0
return max(1, log2(size - 1) - 5)
def make_nes2_ramsizes(unbacked, backed):
unbacked = make_nes2_ramsize(unbacked)
backed = make_nes2_ramsize(backed)
return unbacked | (backed << 4)
valid_mirrorings = {
0: 0, 1: 1, 8: 8, 9: 9,
'H': 0, 'V': 1, 'h': 0, 'v': 1, '4': 8
}
def format_memsize(size):
if size < 1024:
return "%d" % size
if size < 1048576:
return "%dK" % (size // 1024)
return "%dM" % (size // 1048576)
def make_nes2_header(prgsize, chrsize=0, mapper=0, mirroring=INES_MIRRV,
prgramsize=0, chrramsize=0, tvsystem=0):
"""Make a byte string representing a 16-byte NES 2.0 header.
prgsize -- Size of PRG ROM in bytes (multiple of 16384)
chrsize -- Size of CHR ROM in bytes (multiple of 8192)
mapper -- iNES mapper number (0-4095) or a tuple (mapper, submapper)
mirroring -- iNES mirroring code (0, 1, 8, 9) or letter ('H', 'V', '4')
prgramsize -- Sizes of PRG RAM as tuple (not battery-backed, battery-backed)
chrramsize -- Sizes of CHR RAM as tuple (not battery-backed, battery-backed)
"""
if isinstance(mapper, tuple):
mapper, submapper = mapper
else:
submapper = 0
if not isinstance(prgramsize, tuple):
prgramsize = (prgramsize, 0)
if not isinstance(chrramsize, tuple):
chrramsize = (chrramsize, 0)
if not 16384 <= prgsize < 4096 * 16384:
raise ValueError("PRG ROM size must be 16384 to 67092480 bytes, not %d" % prgsize)
if prgsize % 16384:
raise ValueError("PRG ROM size must be a multiple of 16384 bytes, not %d" % prgsize)
prgsize = prgsize // 16384
if not 0 <= chrsize < 4096 * 8192:
raise ValueError("CHR ROM size must be 0 to 33546240 bytes, not %d" % chrsize)
if chrsize % 8192:
raise ValueError("CHR ROM size must be a multiple of 8192 bytes, not %d" % chrsize)
chrsize = chrsize // 8192
if not 0 <= mapper < 4096:
raise ValueError("mapper must be 0 to 4095, not %d" % mapper)
if not 0 <= submapper < 16:
raise ValueError("submapper must be 0 to 15, not %d" % submapper)
try:
mirroring = valid_mirrorings[mirroring]
except KeyError:
raise ValueError("mirroring must be 0, 1, 8, 9, 'H', or 'V', not %s" % mirroring)
if tvsystem >= 4:
raise ValueError("mirroring must be 0-3, not %d" % tvsystem)
prgramsize = make_nes2_ramsizes(*prgramsize)
chrramsize = make_nes2_ramsizes(*chrramsize)
battery = 2 if ((chrramsize | prgramsize) & 0xF0) else 0
header = array.array('B', "NES\x1a")
header.append(prgsize & 0x0FF)
header.append(chrsize & 0x0FF)
header.append(mirroring | battery | ((mapper & 0x00F) << 4))
header.append((mapper & 0x0F0) | 0x08)
header.append(((mapper & 0xF00) >> 8) | (submapper << 4))
header.append(((prgsize & 0xF00) >> 8) | ((chrsize & 0xF00) >> 4))
header.append(prgramsize)
header.append(chrramsize)
header.append(tvsystem)
header.extend([0] * (16 - len(header)))
return header.tostring()
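# Example (values follow the parameter docs above): a 16-byte header for an
# NROM board with 32 KiB PRG, 8 KiB CHR and vertical mirroring.
#
#     header = make_nes2_header(32768, 8192, mapper=MAPPER_NROM,
#                               mirroring=INES_MIRRV)
#     assert len(header) == 16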
# PRG ROM size or range, CHR ROM size or range,
# mapper, mirroring, PRG RAM size(s)
romspecs_all = [
# Discretes
(32768, 8192,
MAPPER_NROM, (INES_MIRRV, INES_MIRRH), (0, 2048, 4096)),
(32768, (16384, 32768),
MAPPER_CNROM, (INES_MIRRV, INES_MIRRH), 0),
((32768, 524288), 0,
MAPPER_UNROM, (INES_MIRRV, INES_MIRRH), 0),
((32768, 524288), 0,
MAPPER_UNROM_CRAZY, (INES_MIRRV, INES_MIRRH), 0),
(32768, 0,
MAPPER_CPROM, (INES_MIRRV, INES_MIRRH), 0),
((32768, 262144), 0,
MAPPER_AOROM, 0, 0),
((32768, 524288), 0,
MAPPER_BNROM, (INES_MIRRV, INES_MIRRH), 0),
((65536, 131072), (16384, 32768),
MAPPER_GNROM, (INES_MIRRV, INES_MIRRH), 0),
# SGROM SNROM SUROM SOROM SXROM
((32768, 524288), 0,
MAPPER_MMC1, 0, (0, 8192, (8192, 8192), 32768)),
# MMC1 with CHR ROM
((32768, 262144), (16384, 131072),
MAPPER_MMC1, 0, (0, 8192)),
# TKSROM TLSROM
((32768, 524288), (16384, 262144),
MAPPER_MMC3_TLSROM, 0, (0, 8192)),
# Mega Man 4/6 and TNROM
((32768, 524288), 0,
MAPPER_MMC3, 0, (0, 8192)),
# Rest of MMC3
((32768, 524288), (16384, 262144),
MAPPER_MMC3, 0, (0, 8192)),
# BTR and JxROM
((32768, 262144), (16384, 262144),
MAPPER_FME7, 0, (0, 8192)),
# Holy Diver
((32768, 131072), (16384, 131072),
MAPPER_HOLYDIVER, 0, 0),
# PNROM
((32768, 131072), (16384, 131072),
MAPPER_MMC2, 0, 0),
# FxROM
((32768, 131072), (16384, 131072),
MAPPER_MMC4, 0, (0, 8192)),
]
romspecs_oneofeach = [
# Discretes
(32768, 8192, MAPPER_NROM, INES_MIRRV, 0),
(32768, 32768, MAPPER_CNROM, INES_MIRRH, 0),
(131072, 0, MAPPER_UNROM, INES_MIRRV, 0),
(131072, 0, MAPPER_UNROM_CRAZY, INES_MIRRH, 0),
(131072, 0, MAPPER_AOROM, 0, 0),
(131072, 0, MAPPER_BNROM, INES_MIRRH, 0),
(65536, 16384, MAPPER_GNROM, INES_MIRRV, 0),
(131072, 32768, MAPPER_MMC1, 0, (0, 8192)),
(131072, 0, MAPPER_MMC1, 0, 0),
(524288, 0, MAPPER_MMC1, 0, ((0, 8192), (0, 32768))),
(131072, 131072, MAPPER_MMC1, 0, (0, 8192)),
(131072, 65536, MAPPER_MMC2, INES_MIRRV, 0),
(262144, 262144, MAPPER_MMC3, 0, 0),
(131072, 0, MAPPER_MMC3, 0, 0),
(131072, 65536, MAPPER_MMC3_TLSROM, 0, 0),
(131072, 65536, MAPPER_MMC4, INES_MIRRV, 8192),
(131072, 65536, MAPPER_FME7, 0, 8192),
(131072, 65536, MAPPER_HOLYDIVER, 0, 0),
(524288, 0, MAPPER_A53, 0, 0),
]
romspecs = romspecs_oneofeach
def log_xrange(start=1, end=None, step=2):
if end is None:
start, end = 1, start
while start <= end:
yield start
start = start * step
filename_mirroring = {0: 'H', 1: 'V', 8: '4', 9: '4V'}
switchable_mirror_mappers = {
MAPPER_MMC1, MAPPER_MMC2, MAPPER_MMC3, MAPPER_MMC3_TQROM,
MAPPER_MMC3_TLSROM, MAPPER_MMC4, MAPPER_AOROM, MAPPER_HOLYDIVER,
MAPPER_FME7, MAPPER_A53
}
def handle_single_rom(prgsize, chrsize, mapper, mirror, prgramsize):
filename = ['M%d' % (mapper[0] if isinstance(mapper, tuple) else mapper),
'.%d' % mapper[1] if isinstance(mapper, tuple) else '',
'_P', format_memsize(prgsize),
'_C%s' % format_memsize(chrsize) if chrsize else '',
'_%s' % filename_mirroring[mirror & 0x09]
if mapper not in switchable_mirror_mappers
else '',
'_W%s' % format_memsize(prgramsize[0]) if prgramsize[0] else '',
'_S%s' % format_memsize(prgramsize[1]) if prgramsize[1] else '',
'.nes']
filename = ''.join(filename)
chrramsize = (8192 if mapper == MAPPER_MMC3_TQROM
else 0 if chrsize > 0
else 16384 if mapper == MAPPER_CPROM
else 8192)
header = make_nes2_header(prgsize, chrsize, mapper, mirror,
prgramsize, chrramsize)
# SUROM/SXROM can't guarantee PRG A18 until CHR is set up
# so duplicate the test in all 256K outer banks
dupli_prgsize = min(262144, prgsize)
# Place right bank in the last bank
prgrom = array.array('B', [0xFF]) * (dupli_prgsize - 32768)
prgrom.fromstring(master_prgrom)
# Place wrong bank code into all 4K banks
wrong_bank = array.array('B', master_prgrom[0x7F80:0x8000])
wrong_bank[-4] = 0x80 # Set reset vector
wrong_bank[-3] = 0xFF
for i in xrange(4096, dupli_prgsize, 4096):
prgrom[i - 128:i] = wrong_bank
del wrong_bank
# Emulators commonly boot AOROM, BNROM, GNROM, and
# UNROM (Crazy Climber) to the first bank. There's a stub
# in $BF6C that tries switching to the last bank.
# Put it in all 16K banks.
gnromstub = array.array('B', master_prgrom[0x3F6C:0x3F80])
for i in xrange(0, dupli_prgsize, 16384):
prgrom[i + 0x3F6C:i + 0x3F80] = gnromstub
for i in xrange(0, dupli_prgsize - 16384, 16384):
prgrom[i + 0x3FFC] = 0x6C
prgrom[i + 0x3FFD] = 0xBF
# SUROM/SXROM duplication
prgrom = prgrom * (prgsize // len(prgrom))
# Place right bank in first 16K (for #180 UNROM Crazy)
prgrom[:0x3F6C] = array.array('B', master_prgrom[:0x3F6C])
# Finally, add bank numbers to PRG ROM
for i in xrange(4096 - 8, prgsize, 4096):
prgrom[i] = i // 4096
prgrom[i + 1] = 0
prgrom[-7] = 1
# Add bank numbers to CHR ROM
chrrom = array.array('B', master_chrrom) * (chrsize // len(master_chrrom))
for i in xrange(508, chrsize, 1024):
chrrom[i] = i // 1024
rom = ''.join([header, prgrom.tostring(), chrrom.tostring()])
return (filename, rom)
def expand_romspec(prgsizes, chrsizes, mapper, mirrors, ramsizes):
from collections import Sequence
from itertools import product
if not isinstance(prgsizes, Sequence):
prgsizes = (prgsizes, prgsizes)
if not isinstance(chrsizes, Sequence):
chrsizes = (chrsizes, chrsizes)
if not isinstance(mirrors, Sequence):
mirrors = (mirrors,)
if not isinstance(ramsizes, Sequence):
ramsizes = (ramsizes,)
ramsizes = (spec
for sz in ramsizes
for spec in (set([(sz, 0), (0, sz)])
if not isinstance(sz, Sequence)
else (sz,)))
return product(log_xrange(*prgsizes),
log_xrange(*chrsizes) if chrsizes[0] else (0,),
(mapper,), mirrors, ramsizes)
test_rom_folder = '../testroms'
master_file = '../hdbm-master.nes'
def main():
import os
from itertools import starmap # starmap(f, rows = f(*row) for row in rows)
global master_prgrom, master_chrrom
with open(master_file, 'rb') as infp:
infp.read(16)
master_prgrom = infp.read(32768)
master_chrrom = infp.read(8192)
## h = make_nes2_header(262144, 131072, MAPPER_MMC3, INES_MIRRV,
## (0, 8192), 0, INES_NTSC)
## print " ".join("%02x" % ord(c) for c in h)
specs = [single_rom
for romspec in romspecs
for single_rom in expand_romspec(*romspec)]
print len(specs), "specs"
roms = starmap(handle_single_rom, specs)
total_sz = 0
if not os.path.isdir(test_rom_folder):
os.mkdir(test_rom_folder)
seen_names = set()
for (filename, rom) in roms:
if filename in seen_names:
print "duplicate filename", filename
seen_names.add(filename)
total_sz += len(rom)
with open(os.path.join(test_rom_folder, filename), 'wb') as outfp:
outfp.write(rom)
print "total: %s" % format_memsize(total_sz)
if __name__=='__main__':
main()
| 33.994253
| 92
| 0.61344
|
982f917cb806a26229e3a44403f7af9e851896cb
| 903
|
py
|
Python
|
glue/core/data_exporters/qt/dialog.py
|
sergiopasra/glue
|
c25a217a122a11818382672c99cb21f57a30636f
|
[
"BSD-3-Clause"
] | 1
|
2019-12-17T07:58:35.000Z
|
2019-12-17T07:58:35.000Z
|
glue/core/data_exporters/qt/dialog.py
|
sergiopasra/glue
|
c25a217a122a11818382672c99cb21f57a30636f
|
[
"BSD-3-Clause"
] | null | null | null |
glue/core/data_exporters/qt/dialog.py
|
sergiopasra/glue
|
c25a217a122a11818382672c99cb21f57a30636f
|
[
"BSD-3-Clause"
] | 1
|
2019-08-04T14:10:12.000Z
|
2019-08-04T14:10:12.000Z
|
from __future__ import absolute_import, division, print_function
from qtpy import compat
from glue import config
def export_data(data, components=None, exporter=None):
if exporter is None:
exporters = {}
for e in config.data_exporter:
if e.extension == '':
fltr = "{0} (*)".format(e.label)
else:
fltr = "{0} ({1})".format(e.label, ' '.join('*.' + ext for ext in e.extension))
exporters[fltr] = e.function
filters = ';;'.join(sorted(exporters))
else:
filters = None
filename, fltr = compat.getsavefilename(caption="Choose an output filename",
filters=filters)
filename = str(filename)
if not filename:
return
if filters is not None:
exporter = exporters[fltr]
exporter(filename, data, components=components)
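# A usage sketch (assumes a glue Data object named `data`; `my_exporter` is a
# hypothetical exporter function):
#
#     export_data(data)                        # prompt for format and filename
#     export_data(data, exporter=my_exporter)  # skip the format chooser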
| 28.21875
| 95
| 0.579181
|
fc6ac20d315bc92ca09e8c5c192df8c39e3b9d05
| 922
|
py
|
Python
|
ot/tests/ibry_test.py
|
LawrenceMMStewart/Optimal_Transport_MIT
|
a71a0110fa15110692fd383c1e77a6c347ef9ca3
|
[
"MIT"
] | 3
|
2020-05-04T09:08:55.000Z
|
2020-05-27T05:10:26.000Z
|
ot/tests/ibry_test.py
|
LawrenceMMStewart/Imputatations-with-Optimal-Transport
|
a71a0110fa15110692fd383c1e77a6c347ef9ca3
|
[
"MIT"
] | null | null | null |
ot/tests/ibry_test.py
|
LawrenceMMStewart/Imputatations-with-Optimal-Transport
|
a71a0110fa15110692fd383c1e77a6c347ef9ca3
|
[
"MIT"
] | null | null | null |
#tests for ibpr.py
from ot.ibpr import *
import numpy as np
from ot.ot1d import cost_mat
def precision_eq(a,b):
return (np.abs(a-b)<1e-14).all()
#test iterated bregman projections between two diracs
def test_IBP_ex1():
# X = {0,4}
# hists = {dirac(0),dirac(4)}
hists = np.array([[1.,0],[0,1.]])
cost = 4*(1-np.eye(2))
cost = cost/np.median(cost)
#one would expect the barycentre to be at 2
out = IBP(hists,cost,eps=2/hists.shape[0])
assert precision_eq(out,np.ones((2,1))*0.5)
assert abs(out.sum()-1)<1e-14
def test_IBP_isproba():
#X is randomly sampled
d= 100
N = 10
#create a random ground space
X = np.random.normal(size=(d,1))
C = cost_mat(X,X,100,100,p=2).numpy()
    #create random histograms
hists = np.random.uniform(size=(d,N))
hists = hists/hists.sum(axis=0)
out = IBP(hists,C,eps=2/hists.shape[0],niter=1000)
print(out)
assert np.shape(out)==(d,1)
assert abs(out.sum()-1)<1e-6
| 21.44186
| 53
| 0.668113
|
47aaa23e89435d6b27273d4908580de510a2e741
| 17,804
|
py
|
Python
|
snake_classes.py
|
odanielb/Snake-to-10
|
a071b13d8bf7ae3f7dcc623c7aa20f70b97b9950
|
[
"MIT"
] | null | null | null |
snake_classes.py
|
odanielb/Snake-to-10
|
a071b13d8bf7ae3f7dcc623c7aa20f70b97b9950
|
[
"MIT"
] | null | null | null |
snake_classes.py
|
odanielb/Snake-to-10
|
a071b13d8bf7ae3f7dcc623c7aa20f70b97b9950
|
[
"MIT"
] | null | null | null |
#-------------------------------------------------------------------------------
# File: snake_classes.py
# Name: Bridget O'Daniel
# Username: odanielb
#
# Assignment: FP Final Project
# Purpose:          To work with and edit something I've created this year and think about object-oriented programming and make improvements.
#
# Acknowledgements: The exit method here: http://docs.python.org/2/library/sys.html#sys.exit
# Also, I did not invent the idea of the game Snake: http://conversations.nokia.com/2010/11/25/the-evolution-of-snake/
#
# Errors: Displays an error when you quit by pressing q. Only does so because the program moves by timer, rather than waiting for key presses,
# meaning that once the user presses quit, it will sometimes have already done something that is impossible once the window closes. I could find
# no way to correct this problem, but all other ways of quitting (by running into something or winning) quit cleanly. Also, speed could not increase
# steadily because processing time slowed down the snake with each added tail, meaning the later tails are closer to the beginning tail in speed.
#
#-------------------------------------------------------------------------------
import turtle
import random
import Tkinter
import sys
import time
##### CLASSES ##################################################################
class Game():
"""A class to create a game of snake using the class Snake. This class can be used to give a Snake an environment to be in
(500x500 window) and prompt user interactivity. Includes Game controls, instructions for a Game of snake and methods to
monitor score, food, speed, and when the Game is won or lost."""
def __init__(self):
turtle.setup(500,500) #Change screensize for easy reference
self.wn = turtle.Screen() #Sets up Game's screen
self.wn.bgcolor("black")
self.on = False #Game has not yet started
self.score = 0 #Game score
self.snake = Snake() #Creates the Snake that Game will use
self.food = turtle.Turtle() #Game creates its food turtle
self.initialize_food() #And initializes its values
self.display = turtle.Turtle() #Game creates a turtle for written instructions, scores, etc.
self.display.hideturtle()
self.display.up()
#Links keys to their designated functions
self.wn.onkey(self.up, "Up") #Game sets controls
self.wn.onkey(self.down, "Down")
self.wn.onkey(self.left, "Left")
self.wn.onkey(self.right, "Right")
self.wn.onkey(self.lose, "q")
self.wn.onkey(self.start, " ")
self.wn.listen() #Game's window listens for key presses
def start(self):
"""Game changes its state to on from the initial intro screen (off)."""
self.on = True
def play(self):
"""The Game plays. This method controls what should happen each turn: checks if the Snake has hit anything, if it's eaten the food,
moves the Snake forward, and checks if the Game is over or not. If Game isn't over, the Game calls this method again for the next turn."""
is_game_over = self.snake.is_hit() #Checks if Snake has hit walls/itself, stores in is_game_over
self.check_food() #Checks if Snake has eaten food; makes necessary changes if so, including new food and new tails
self.snake.forward() #Moves the Snake forward
if is_game_over == True: #If the Snake did hit something...
self.lose() #Game over
elif self.score >= 10: #If the Snake has eaten 10 food items...
self.win() #Game won!
else: #Otherwise...
            self.wn.ontimer(self.play, 1)               #On the timer, Game schedules play again (pass the method itself, not its result)
##### FOOD AND SPEED MONITORING METHODS ####################################
def initialize_food(self):
"""The Game resets its food turtle to a new random location, thereby creating a new food item for the Snake to get."""
self.food.ht() #Hides food
self.food.up()
self.food.shape("square") #Food is for squares
self.food.color("light green")
self.food.setpos(random.randrange(-230,230), random.randrange(-230,230))#Puts food in a random location on the screen
self.food.st()
def check_food(self):
"""If the Game's Snake has eaten food, adds it to total food eaten, adds a tail to the Snake, and resets the food turtle to a new location."""
if self.snake.has_eaten_food(self.food) == True: #If the game's Snake has eaten food,
self.score += 1 #Add it to the amount eaten
self.display.clear() #Clears display
self.display_score() #Displays new total
self.snake.add_tail() #Adds new tail to Snake
self.update_Snake_speed() #Makes the Snake faster
self.initialize_food() #Puts food in new location
def update_Snake_speed(self):
"""Sets the Game's Snake to the appropriate speed based on the current score."""
foodvsSpeed = {0:1, 1:2, 2:3, 3:4, 4:5, 5:6, 6:7, 7:8, 8:9, 9:10, 10:0} #Each # of food paired with appropriate speed, so that they can be matched below.
self.snake.speed = foodvsSpeed[self.score]
##### GAME CONTROLS / KEY PRESS METHODS ####################################
def up(self):
"""Changes Snake direction to up."""
self.snake.head.setheading(90)
def down(self):
"""Changes Snake direction to down."""
self.snake.head.setheading(270)
def left(self):
"""Changes Snake direction to left."""
self.snake.head.setheading(180)
def right(self):
"""Changes Snake direction to right."""
self.snake.head.setheading(0)
def Quit(self):
"""Quits the program by closing window and then exiting using sys."""
self.wn.bye()
sys.exit()
########### GAME DISPLAY METHODS ###########################################
def display_instructions(self):
"""The Game displays the instructions on screen."""
self.display.color("white")
self.display.setpos(0,170)
self.display.write("Play snake!",move=False,align='center',font=("Arial",30,("bold","normal")))
self.display.setpos(0,140)
self.display.write("Collect food and don't hit walls or your tail.",move=False,align='center',font=("Arial",12,("bold","normal")))
self.display.setpos(0,100)
self.display.write("Get 10 food items to win!",move=False,align='center',font=("Arial",12,("bold","normal")))
self.display.setpos(0,-100)
self.display.write('Press the space bar to begin!',move=False,align='center',font=("Arial",12,("bold","normal")))
self.display.setpos(0,-140)
self.display.write('Use arrow keys to change direction, press "q" to quit.',move=False,align='center',font=("Arial",10,("bold","normal")))
def display_score(self):
"""The Game displays the amount of food eaten in the window."""
self.display.setpos(-230,-230)
self.display.write('Food eaten: '+str(self.score),move=False,align='left',font=("Arial",12,("bold","normal")))
def display_lose(self):
"""The Game displays GAME OVER."""
self.display.clear()
self.display.setpos(0,0)
self.display.write('GAME OVER',move=False,align='center',font=("Arial",50,("bold","normal")))
def display_win(self):
"""The Game displays YOU WIN!"""
self.display.clear()
self.display.setpos(0,0)
self.display.write('YOU WIN!',move=False,align='center',font=("Arial",50,("bold","normal")))
######### WIN OR LOSE METHODS ##############################################
def win(self):
"""Game won! Displays game won and quits."""
self.display_win()
time.sleep(2)
self.Quit()
def lose(self):
"""Game over! Displays game over and quits."""
self.display_lose()
time.sleep(2)
self.Quit()
################################################################################
class Snake():
"""The class Snake is designed for use with the class Game and creates a Snake that is a collection of Turtles
that follow each other in a line. Each Snake starts with only a "head" Turtle and may have "tail" Turtles
added to its attribute tails (a list). Has functionality to test whether Snake has run into its tails, eaten
food, or escaped the bounds of the window. The Snake is designed to move forward 20 pixels
at a time in a 500x500 Turtle window."""
def __init__(self):
self.head = turtle.Turtle()
self.head.shapesize(2,1,1) #Makes it a bit wider than a normal arrow shape
self.head.color("hot pink") #Snake head is hot pink, also leaves a fun hot pink trail
self.speed = 1
self.head.speed(self.speed)
self.tails = []
##### COLLISION METHODS ####################################################
def is_hit(self):
"""Checks if Snake has hit anything that would result in game over. Returns True or False."""
hit = self.has_hit_edges() #Calls to see if Snake has hit edges
if hit == False: #If not...
hit = self.has_hit_tail() #Has it hit its tails?
return hit #Returns whether it has hit anything or not
def has_hit_edges(self):
"""Checks if Snake has hit the edges of the window/gone offscreen. Returns True or False."""
if self.head.xcor() >= 250 or self.head.xcor() <=-250: #If Snake's x coordinates show it's offscreen
return True #Return True
elif self.head.ycor() >= 250 or self.head.ycor() <=-250: #If Snake's y coordinates show it's offscreen
return True #Return True
else:
return False #Otherwise, return False
def has_hit_tail(self):
"""Checks if snake has hit any of its tails. Returns True or False."""
xdistance = 0
ydistance = 0
for t in self.tails: #For each of Snake's tails...
xdistance = self.head.xcor() - t.xcor() #Find the distance between the head and the tail
ydistance = self.head.ycor() - t.ycor()
if abs(xdistance) <= 15 and abs(ydistance) <= 15: #If the distance is too close (touching)
return True #Return True
return False #Else return False
def has_eaten_food(self, food):
"""Checks if the game's Snake and the food turtle have overlapped--meaning that the snake ate the food. Returns True or False."""
xdistance = self.head.xcor() - food.xcor() #Finds distance between the X values of the snake and the food
ydistance = self.head.ycor() - food.ycor() #Finds the distance between the Y values of the snake and the food
if abs(xdistance) <= 20 and abs(ydistance) <= 20: #If those distances are less than 20 pixels in either direction...
return True #It has been eaten, return True
else:
return False #Otherwise, it has not been eaten, return False
##### UPDATING/CHANGING SNAKE: POSITION OR SIZE METHODS ####################
def add_tail(self):
"""Adds a tail (a Turtle object) to the Snake."""
tail = turtle.Turtle()
tail.up()
tail.color("plum")
tail.shape("circle")
tX = 0 #Stores x of new tail
tY = 0 #Stores y of new tail
#Set of if statements tells where to place the new tail relative to the position of the last tail (so that they are lined up)
if len(self.tails) == 0: #If the Snake has previously not had any tails
if self.head.heading() == 90: #If the Snake's head was facing up...
tX = self.head.xcor() #Give the new tail the same x coordinate
tY = self.head.ycor() - 20 #But put it 20 spaces below it
if self.head.heading() == 270: #(And so on)
tX = self.head.xcor()
tY = self.head.ycor() + 20
if self.head.heading() == 180:
tX = self.head.xcor() + 20
tY = self.head.ycor()
if self.head.heading() == 0:
tX = self.head.xcor() - 20
tY = self.head.ycor()
else: #If the snake has at least one tail already
if self.tails[len(self.tails)-1].heading() == 90: #If the tail before the new one was facing up...
tX = self.tails[len(self.tails)-1].xcor() #Give the new tail the same x coordinate
tY = self.tails[len(self.tails)-1].ycor() - 20 #But put it 20 spaces below it
if self.tails[len(self.tails)-1].heading() == 270: #(And so on)
tX = self.tails[len(self.tails)-1].xcor()
tY = self.tails[len(self.tails)-1].ycor() + 20
if self.tails[len(self.tails)-1].heading() == 180:
tX = self.tails[len(self.tails)-1].xcor() + 20
tY = self.tails[len(self.tails)-1].ycor()
if self.tails[len(self.tails)-1].heading() == 0:
tX = self.tails[len(self.tails)-1].xcor() - 20
tY = self.tails[len(self.tails)-1].ycor()
tail.setpos(tX,tY) #Set this as the position of the tail
self.tails.append(tail) #Add it to the list
def forward(self):
"""Moves the entire Snake forward by 20."""
headX = self.head.xcor() #Saves Snake's x coordinate before it moves
headY = self.head.ycor() #Saves Snake's y coordinate before it moves
self.head.speed = self.speed
self.head.forward(20) #Moves Snake forward
self.update_tails(headX, headY) #Moves Snake's tails forward
def update_tails(self, headX, headY):
"""Moves each tail forward to the position the tail in front of it last had. Provided coordinates are used to move the first tail."""
new_posList = [] #Stores updated set of positions for each tail
tX = 0 #Stores current tail's new x coordinate
tY = 0 #Stores current tail's new y coordinate
if len(self.tails) != 0: #If the snake has a tail...
for t in range(len(self.tails)): #For each of the Snake's tails...
if t == 0: #If it's the first tail,
(tX, tY) = (headX, headY) #Make its coordinates the same as the snake head's old ones
else: #Otherwise,
tX = self.tails[t-1].xcor() #Make its coordinates the same as the tail in front of its old ones
tY = self.tails[t-1].ycor()
new_posList.append( (tX, tY) ) #Adds the new set of coordinates to a list
for t in range(len(self.tails)): #For each of the Snake's tails...
self.tails[t].setpos(new_posList[t]) #Set its position to the corresponding updated position in new_posList
self.tails[t].speed(self.speed) #Set its speed to the current speed of the Snake
#End of classes
| 57.247588
| 176
| 0.5082
|
00a2f57708eb6350c51b82ca2773871ea4eed471
| 167
|
py
|
Python
|
web/load.py
|
vikian050194/py
|
95b8a1d4f084c0046eb53248cbace4525b637fd8
|
[
"MIT"
] | null | null | null |
web/load.py
|
vikian050194/py
|
95b8a1d4f084c0046eb53248cbace4525b637fd8
|
[
"MIT"
] | null | null | null |
web/load.py
|
vikian050194/py
|
95b8a1d4f084c0046eb53248cbace4525b637fd8
|
[
"MIT"
] | null | null | null |
import requests
def load(url):
try:
return requests.get(url, allow_redirects=True).content
except Exception as e:
print(e)
return ""
| 16.7
| 62
| 0.616766
|
adca69385085efe4bb3e29c1cc35e74b23b2f741
| 1,729
|
py
|
Python
|
neural_network/mlp_network.py
|
VITA-Group/SFW-Once-for-All-Pruning
|
84ec0780e94d15820140c3d365e02417a707a3fd
|
[
"MIT"
] | 4
|
2022-02-12T01:09:54.000Z
|
2022-03-15T09:48:33.000Z
|
neural_network/mlp_network.py
|
VITA-Group/SFW-Once-for-All-Pruning
|
84ec0780e94d15820140c3d365e02417a707a3fd
|
[
"MIT"
] | null | null | null |
neural_network/mlp_network.py
|
VITA-Group/SFW-Once-for-All-Pruning
|
84ec0780e94d15820140c3d365e02417a707a3fd
|
[
"MIT"
] | null | null | null |
from torch import nn
from neural_network.base_network import BaseNetwork, get_nonlinearity
class MlpNetwork(BaseNetwork):
def __init__(
self,
input_size,
output_size,
hidden_layer=[128, 128],
activation="relu",
output_activation=None,
network_name="mlp"):
super().__init__()
self.input_size = input_size
self.output_size = output_size
self.hidden_layer_list = hidden_layer
self.activation_name = activation
self.output_activation_name = "identity" if output_activation is None else output_activation
self.network_name = network_name
self._get_activation()
self._get_linear_layer()
self.layer = [(linear, activation) for linear, activation in zip(self.linear_layer, self.activation)]
def _get_activation(self):
hidden_length = len(self.hidden_layer_list)
self.activation = [None] * (hidden_length + 1)
for i in range(hidden_length):
self.activation[i] = get_nonlinearity(self.activation_name)
self.activation[-1] = get_nonlinearity(self.output_activation_name)
def _get_linear_layer(self):
node_list = [self.input_size] + self.hidden_layer_list + [self.output_size]
self.linear_layer = [None] * (len(node_list) - 1)
for i in range(len(self.linear_layer)):
fc = nn.Linear(node_list[i], node_list[i+1])
setattr(self, f"linear_layer_{i}", fc)
self.linear_layer[i] = fc
def forward(self, inputs):
x = inputs
for linear, activation in self.layer:
x = activation(linear(x))
return x
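# A usage sketch (assumes this package's BaseNetwork/get_nonlinearity are
# importable; the sizes are illustrative):
#
#     import torch
#     net = MlpNetwork(input_size=8, output_size=2, hidden_layer=[32, 32])
#     out = net(torch.randn(4, 8))  # -> shape (4, 2)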
| 38.422222
| 109
| 0.630422
|
9b6096fc171cbf4c6f8526b26f45ead0e514d51e
| 1,174
|
py
|
Python
|
snappy/urls.py
|
roman-kutlak/snappy
|
1d29bd9193962bf8b95b345f42a8b52c80ac3839
|
[
"MIT"
] | null | null | null |
snappy/urls.py
|
roman-kutlak/snappy
|
1d29bd9193962bf8b95b345f42a8b52c80ac3839
|
[
"MIT"
] | null | null | null |
snappy/urls.py
|
roman-kutlak/snappy
|
1d29bd9193962bf8b95b345f42a8b52c80ac3839
|
[
"MIT"
] | null | null | null |
"""snappy URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.8/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Add an import: from blog import urls as blog_urls
2. Add a URL to urlpatterns: url(r'^blog/', include(blog_urls))
"""
from django.conf.urls import include, url
from django.contrib import admin
from logic.views import homepage, demo, translate
from logic.views import TemplateListView, TemplateDetailView
urlpatterns = [
url(r'^admin/', include(admin.site.urls)),
url(r'^$', homepage, name='homepage'),
url(r'^demo/$', demo, name='demo'),
url(r'^translate/$', translate, name='translate'),
url(r'^templates/$', TemplateListView.as_view(), name='templates-list'),
url(r'^template/(?P<pk>\d+)$', TemplateDetailView.as_view(), name='templates-detail'),
]
| 39.133333
| 90
| 0.698467
|