repo_name stringlengths 5 100 | path stringlengths 4 231 | language stringclasses 1 value | license stringclasses 15 values | size int64 6 947k | score float64 0 0.34 | prefix stringlengths 0 8.16k | middle stringlengths 3 512 | suffix stringlengths 0 8.17k |
|---|---|---|---|---|---|---|---|---|
XavierBerger/pcd8544 | examples/dimmer.py | Python | gpl-3.0 | 568 | 0.038732 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import pcd8544.lcd as lcd
import time, os, sys
if not os.geteuid() == 0:
sys.exit('Script must be run as root')
ON, OFF = [1 | , 0]
try:
lcd.init()
lcd.cls()
if ( lcd.LED != 1 ):
sys.exit('LED pin should be GPIO1 (12)')
# Backlight PWM testing -- off -> 25% -> off
for i in range(0,1023,16):
lcd.set_brightness(i)
time.sleep(0.025)
for i in range(1023,0,-16):
| lcd.set_brightness(i)
time.sleep(0.025)
except KeyboardInterrupt:
pass
finally:
lcd.cls()
lcd.backlight(OFF)
|
enthought/glfwpy | examples/03_instanced_drawing.py | Python | bsd-3-clause | 4,686 | 0.002134 | from glfwpy.glfw import *
import sys
import numpy as np
from OpenGL.GL import *
from OpenGL.arrays import ArrayDatatype
import ctypes
vertex = """
#version 330
in vec3 vin_position;
in vec3 vin_color;
uniform vec3 vu_displacement[2];
out vec3 vout_color;
void main(void)
{
vout_color | = vin_color;
gl_Position = vec4(vin_position + vu_displacement[gl_InstanceID], 1.0);
}
"""
fragment = """
#version 330
in vec3 vout_color;
out vec4 fout_color;
void main(void)
{
fout_color = vec4(vout_color, 1.0);
}
"""
vertex_data = np.array([0.75, 0.75, 0.0,
0.75, -0.75, 0.0,
-0.75, -0.75, 0.0], dtype=np.float32)
color_data = np. | array([1, 0, 0,
0, 1, 0,
0, 0, 1], dtype=np.float32)
displacement_data = np.array([-0.1, 0, 0,
0.2, 0, 0.0], dtype=np.float32)
class ShaderProgram(object):
def __init__(self, vertex, fragment, geometry=None):
self.program_id = glCreateProgram()
vs_id = self.add_shader(vertex, GL_VERTEX_SHADER)
frag_id = self.add_shader(fragment, GL_FRAGMENT_SHADER)
glAttachShader(self.program_id, vs_id)
glAttachShader(self.program_id, frag_id)
glLinkProgram(self.program_id)
if glGetProgramiv(self.program_id, GL_LINK_STATUS) != GL_TRUE:
info = glGetProgramInfoLog(self.program_id)
glDeleteProgram(self.program_id)
glDeleteShader(vs_id)
glDeleteShader(frag_id)
raise RuntimeError('Error linking program: %s' % (info))
glDeleteShader(vs_id)
glDeleteShader(frag_id)
def add_shader(self, source, shader_type):
try:
shader_id = glCreateShader(shader_type)
glShaderSource(shader_id, source)
glCompileShader(shader_id)
if glGetShaderiv(shader_id, GL_COMPILE_STATUS) != GL_TRUE:
info = glGetShaderInfoLog(shader_id)
raise RuntimeError('Shader compilation failed: %s' % (info))
return shader_id
except:
glDeleteShader(shader_id)
raise
def uniform_location(self, name):
return glGetUniformLocation(self.program_id, name)
def attribute_location(self, name):
return glGetAttribLocation(self.program_id, name)
def key_callback(x, y):
print 'Key: %s Action: %s pressed' % (x, y)
if __name__ == "__main__":
if not Init():
print 'GLFW initialization failed'
sys.exit(-1)
OpenWindowHint(OPENGL_VERSION_MAJOR, 3)
OpenWindowHint(OPENGL_VERSION_MINOR, 2)
OpenWindowHint(OPENGL_PROFILE, OPENGL_CORE_PROFILE)
OpenWindowHint(OPENGL_FORWARD_COMPAT, GL_TRUE)
if not OpenWindow(1400, 800, 0, 0, 0, 0, 32, 0, WINDOW):
print "OpenWindow failed"
Terminate()
sys.exit(-1)
SetKeyCallback(key_callback)
SetWindowTitle("Modern opengl example")
Enable(AUTO_POLL_EVENTS)
print 'Vendor: %s' % (glGetString(GL_VENDOR))
print 'Opengl version: %s' % (glGetString(GL_VERSION))
print 'GLSL Version: %s' % (glGetString(GL_SHADING_LANGUAGE_VERSION))
print 'Renderer: %s' % (glGetString(GL_RENDERER))
glClearColor(0.95, 1.0, 0.95, 0)
glEnable(GL_DEPTH_TEST)
glEnable(GL_BLEND)
glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA)
program = ShaderProgram(fragment=fragment, vertex=vertex)
vao_id = glGenVertexArrays(1)
glBindVertexArray(vao_id)
vbo_id = glGenBuffers(2)
glBindBuffer(GL_ARRAY_BUFFER, vbo_id[0])
glBufferData(GL_ARRAY_BUFFER, ArrayDatatype.arrayByteCount(vertex_data), vertex_data, GL_STATIC_DRAW)
glVertexAttribPointer(program.attribute_location('vin_position'), 3, GL_FLOAT, GL_FALSE, 0, ctypes.c_voidp(0))
glEnableVertexAttribArray(0)
glBindBuffer(GL_ARRAY_BUFFER, vbo_id[1])
glBufferData(GL_ARRAY_BUFFER, ArrayDatatype.arrayByteCount(color_data), color_data, GL_STATIC_DRAW)
glVertexAttribPointer(program.attribute_location('vin_color'), 3, GL_FLOAT, GL_FALSE, 0, ctypes.c_voidp(0))
glEnableVertexAttribArray(1)
displacement_loc = program.uniform_location('vu_displacement')
glProgramUniform3fv(program.program_id, displacement_loc, 2, displacement_data)
glBindBuffer(GL_ARRAY_BUFFER, 0)
glBindVertexArray(0)
running = True
while running:
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT)
glUseProgram(program.program_id)
glBindVertexArray(vao_id)
glDrawArraysInstanced(GL_TRIANGLES, 0, 3, 2)
glUseProgram(0)
glBindVertexArray(0)
SwapBuffers()
running = running and GetWindowParam(OPENED)
|
sergak01/Vk_bot | interface/search.py | Python | gpl-3.0 | 4,870 | 0.010625 | # -*- coding: utf-8 -*-
import settings
import random
import vk_api
from BotException import BotException
class Interface:
vk = None
vk_service = None
dynamic_settings = dict()
def __init__(self, vk, vk_service, dynamic_settings):
self.vk = vk
self.dynamic_settings = dynamic_settings
self.vk_service = vk_service
def get_keys(self):
keys = [u'найди', u'поиск', u'найти', u'кино', u'фильм']
ret = {}
for key in keys:
ret[key] = self
return ret
def get_rexp(self):
r_keys = [["seach_hesh", r"(#[^,\n]*)"]]
ret = {}
for key in r_keys:
ret[key[0]] = [self, key[1]]
return ret
def call(self, event):
query = ""
words = str(event.text).split()
for word in words:
if word.lower() not in self.get_keys():
query += word + " "
if query != "":
self.vk.method('messages.send', {
'user_id':int(event.user_id),
'message':"Поисковый запрос: " + str(query)})
#print(self.vk.method("groups.getById", {'v': "5.65"})[0].get("id"))
random.seed()
answer_count = self.vk_service.method(
'wall.search',
{
'owner_id':"-" + str(self.vk.method(
"groups.getById",
{'v': "5.65"})[0].get("id")
),
'query':str(query),
'owners_only':1,
'count':0,
'offset':0
}
).get("count | ")
if answer_count > 0:
answer = self.vk_service.method(
'wall.search',
{
'owner_id':"-" + str(self.vk.method(
"groups.getById",
{'v': "5.65"})[0].get("id") |
),
'query':str(query),
'owners_only':1,
'count':1,
'offset':random.randint(1, answer_count)
}
)
else:
answer = self.vk_service.method(
'wall.search',
{
'owner_id':"-" + str(self.vk.method(
"groups.getById",
{'v': "5.65"})[0].get("id")
),
'query':str(query),
'owners_only':1,
'count':1,
'offset':0
}
)
query = ""
#print(answer)
msg_search = ""
if answer.get("items") != [] and answer.get("count") > 0:
msg_search = "Ссылка на пост: https://vk.com/wall" + \
str(answer.get("items")[0].get("owner_id")) + "_" + \
str(answer.get("items")[0].get("id"))
msg_search = msg_search + "\n\n" + \
answer.get("items")[0].get("text")[0:1000] + "..."
attach_array = ""
for attach in answer.get("items")[0].get("attachments"):
type_attach = attach.get("type")
attach_array += attach.get("type")
attach_array += str(
attach.get(type_attach).get("owner_id")
) + "_"
attach_array += str(
attach.get(type_attach).get("id")
) + "_"
attach_array += str(
attach.get(type_attach).get("access_key")
) + ","
#self.vk.method('messages.send', {'user_id':int(event.user_id),'message':str(attach_array)})
self.vk.method('messages.send', {
'user_id':int(event.user_id),
'message':msg_search,
'attachment':attach_array
})
self.vk.method('messages.send', {
'user_id':int(event.user_id),
'message':"Хочешь другой? Отправь запрос еще раз!"
})
else:
self.vk.method('messages.send', {
'user_id':int(event.user_id),
'message':"По вашему запросу ничего не найдено!" + \
" Попробуйте еще раз ;-)"
})
else:
self.vk.method('messages.send', {
'user_id':int(event.user_id),
'message':"Упс!😲 Вы забыли добавить название фильма!"
}) |
francois-vincent/navitia | source/jormungandr/jormungandr/test_settings.py | Python | agpl-3.0 | 964 | 0 | # encoding: utf-8
import logging
# emplacement ou charger les fichier de configuration par instances
INSTANCES_DIR = '/etc/jormungandr.d'
# Start the thread at startup, True | in production, False for test environments
START_MONITORING_THREAD = False
| # chaine de connnection à postgresql pour la base jormungandr
SQLALCHEMY_DATABASE_URI = 'postgresql://navitia:navitia@localhost/jormun_test'
# désactivation de l'authentification
PUBLIC = True
REDIS_HOST = 'localhost'
REDIS_PORT = 6379
# indice de la base de données redis utilisé, entier de 0 à 15 par défaut
REDIS_DB = 0
REDIS_PASSWORD = None
# Desactive l'utilisation du cache, et donc de redis
CACHE_DISABLED = False
# durée de vie des info d'authentification dans le cache en secondes
AUTH_CACHE_TTL = 300
ERROR_HANDLER_FILE = 'jormungandr.log'
ERROR_HANDLER_TYPE = 'rotating' # can be timedrotating
ERROR_HANDLER_PARAMS = {'maxBytes': 20000000, 'backupCount': 5}
LOG_LEVEL = logging.DEBUG
|
nicko96/Chrome-Infra | run.py | Python | bsd-3-clause | 872 | 0.00344 | #!/usr/bin/env python
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of thi | s source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Wrapper for `python -m` to make running tools simpler.
A tool is defined as a python module with a __main__.py file. This latter file
is run by the present script.
In particular, allows gclient to change directories when running hooks for
infra.
"""
assert __name__ == '__main__'
import imp |
import os
import sys
RUNPY_PATH = os.path.abspath(__file__)
ROOT_PATH = os.path.dirname(RUNPY_PATH)
ENV_PATH = os.path.join(ROOT_PATH, 'ENV')
# Do not want to mess with sys.path, load the module directly.
run_helper = imp.load_source(
'run_helper', os.path.join(ROOT_PATH, 'bootstrap', 'run_helper.py'))
sys.exit(run_helper.run_py_main(sys.argv[1:], RUNPY_PATH, ENV_PATH, 'infra'))
|
Avira/pootle | pootle/apps/pootle_language/models.py | Python | gpl-3.0 | 6,196 | 0.003228 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (C) Pootle contributors.
#
# This file is a part of the Pootle project. It is distributed under the GPL3
# or later license. See the LICENSE file for a copy of the license and the
# AUTHORS file for copyright and authorship information.
import locale
from collections import OrderedDict
from django.conf import settings
from django.core.cache import cache
from django.core.urlresolvers import reverse
from django.db import models
from django.db.models.signals import post_delete, post_save
from django.dispatch import receiver
from django.utils.translation import ugettext_lazy as _
from pootle.core.cache import make_method_key
from pootle.core.mixins import TreeItem
from pootle.core.url_helpers import get_editor_filter
from pootle.i18n.gettext import tr_lang, language_dir
class LanguageManager(models.Manager):
def get_queryset(self):
"""Mimics `select_related(depth=1)` behavior. Pending review."""
return (
super(LanguageManager, self).get_queryset().select_related(
'directory',
)
)
class LiveLanguageManager(models.Manager):
"""Manager that only considers `live` languages.
A live language is any language containing at least a project with
translatable files.
"""
def get_queryset(self):
return super(LiveLanguageManager, self).get_queryset().filter(
translationproject__isnull=False,
project__isnull=True,
).distinct()
def cached_dict(self, locale_code='en-us'):
"""Retrieves a sorted list of live language codes and names.
:param locale_code: the UI locale for which language full names need to
be localized.
:return: an `OrderedDict`
"""
key = make_method_key(self, 'cached_dict', locale_code)
languages = cache.get(key, None)
if languages is None:
languages = OrderedDict(
sorted([(lang[0], tr_lang(lang[1]))
for lang in self.values_list('code', 'fullname')],
cmp=locale.strcoll,
key=lambda x: x[1])
)
cache.set(key, languages, settings.POOTLE_CACHE_TIMEOUT)
return languages
class Language(models.Model, TreeItem):
code_help_text = _('ISO 639 language code for the language, possibly '
'followed by an underscore (_) and an ISO 3166 country code. '
'<a href="http://www.w3.org/International/articles/language-tags/">'
'More information</a>')
code = models.CharField(max_length=50, null=False, unique=True,
db_index=True, verbose_name=_("Code"), help_text=code_help_text)
fullname = models.CharField(max_length=255, null=False,
verbose_name=_("Full Name"))
specialchars_help_text = _('Enter any special characters that users '
'might find difficult to type')
specialchars = models.CharField(max_length=255, blank=True,
verbose_name=_("Special Characters"),
help_text=specialchars_help_text)
plurals_help_text = _('For more information, visit '
'<a href="http://docs.translatehouse.org/projects/'
'localization-guide/en/latest/l10n/pluralforms.html">'
'our page</a> on plural forms.')
nplural_choices = (
(0, _('Unknown')), (1, 1), (2, 2), (3, 3), (4, 4), (5, 5), (6, 6)
)
nplurals = models.SmallIntegerField(default=0, choices=nplural_choices,
verbose_name=_("Number of Plurals"), help_text=plurals_help_text)
pluralequation = models.CharField(max_length=255, blank=True,
verbose_name=_("Plural Equation"), help_text=plurals_help_text)
directory = models.OneToOneField('pootle_app.Directory', db_index=True,
editable=False)
objects = LanguageManager()
live = LiveLanguageManager()
class Meta:
ordering = ['code']
db_table = 'pootle_app_language'
############################ Properties ###################################
@property
def pootle_path(self):
return '/%s/' % self.code
@property
def name(self):
"""Localized fullname for the language."""
return tr_lang(self.fullnam | e)
################## | ########## Methods ######################################
@property
def direction(self):
"""Return the language direction."""
return language_dir(self.code)
def __unicode__(self):
return u"%s - %s" % (self.name, self.code)
def __init__(self, *args, **kwargs):
super(Language, self).__init__(*args, **kwargs)
def __repr__(self):
return u'<%s: %s>' % (self.__class__.__name__, self.fullname)
def save(self, *args, **kwargs):
# create corresponding directory object
from pootle_app.models.directory import Directory
self.directory = Directory.objects.root.get_or_make_subdir(self.code)
super(Language, self).save(*args, **kwargs)
def delete(self, *args, **kwargs):
directory = self.directory
super(Language, self).delete(*args, **kwargs)
directory.delete()
def get_absolute_url(self):
return reverse('pootle-language-browse', args=[self.code])
def get_translate_url(self, **kwargs):
return u''.join([
reverse('pootle-language-translate', args=[self.code]),
get_editor_filter(**kwargs),
])
def clean(self):
super(Language, self).clean()
if self.fullname:
self.fullname = self.fullname.strip()
### TreeItem
def get_children(self):
return self.translationproject_set.live()
def get_cachekey(self):
return self.directory.pootle_path
### /TreeItem
@receiver([post_delete, post_save])
def invalidate_language_list_cache(sender, instance, **kwargs):
# XXX: maybe use custom signals or simple function calls?
if instance.__class__.__name__ not in ['Language', 'TranslationProject']:
return
key = make_method_key('LiveLanguageManager', 'cached_dict', '*')
cache.delete_pattern(key)
|
sixtyfive/pcsc-ctapi-wrapper | PCSC/UnitaryTests/FEATURE_GET_TLV_PROPERTIES.py | Python | lgpl-2.1 | 1,604 | 0.000623 | #! /usr/bin/env python
"""
# FEATURE_GET_TLV_PROPERTIES.py: Unitary test for
# FEATURE_GET_TLV_PROPERTIES
# Copyright (C) 2012,2016 Ludovic Rousseau
"""
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY | WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, see <http://www.gnu.org/licenses/>.
# You have to enable the use of Escape commands with the
# DRIVER_OPT | ION_CCID_EXCHANGE_AUTHORIZED bit in the ifdDriverOptions
# option of the CCID driver Info.plist file
from smartcard.System import readers
from smartcard.pcsc.PCSCPart10 import getTlvProperties, SCARD_SHARE_DIRECT
# for each reader
for reader in readers():
print
print "Reader:", reader
card_connection = reader.createConnection()
card_connection.connect(mode=SCARD_SHARE_DIRECT)
# get the TLV PROPERTIES
tlv = getTlvProperties(card_connection)
for key in sorted(tlv):
if key in ["PCSCv2_PART10_PROPERTY_wIdProduct",
"PCSCv2_PART10_PROPERTY_wIdVendor"]:
print "%s: 0x%04X" % (key, tlv[key])
else:
print "%s: %s" % (key, tlv[key])
|
pcmoritz/ray-1 | python/ray/tune/examples/pbt_memnn_example.py | Python | apache-2.0 | 10,874 | 0 | """Example training a memory neural net on the bAbI dataset.
References Keras and is based off of https://keras.io/examples/babi_memnn/.
"""
from __future__ import print_function
from tensorflow.keras.models import Sequential, Model, load_model
from tensorflow.keras.layers import Embedding
from tensorflow.keras.layers import (Input, Activation, Dense, Permute,
Dropout)
from tensorflow.keras.layers import add, dot, concatenate
from tensorflow.keras.layers import LSTM
from tensorflow.keras.optimizers import RMSprop
from tensorflow.keras.utils import get_file
from tensorflow.keras.preprocessing.sequence import pad_sequences
from filelock import FileLock
import os
import argparse
import tarfile
import numpy as np
import re
from ray import tune
def tokenize(sent):
"""Return the tokens of a sentence including punctuation.
>>> tokenize("Bob dropped the apple. Where is the apple?")
["Bob", "dropped", "the", "apple", ".", "Where", "is", "the", "apple", "?"]
"""
return [x.strip() for x in re.split(r"(\W+)?", sent) if x and x.strip()]
def parse_stories(lines, only_supporting=False):
"""Parse stories provided in the bAbi tasks format
If only_supporting is true, only the sentences
that support the answer are kept.
"""
data = []
story = []
for line in lines:
line = line.decode("utf-8").strip()
nid, line = line.split(" ", 1)
nid = int(nid)
if nid == 1:
story = []
if "\t" in line:
q, a, supporting = line.split("\t")
q = tokenize(q)
if only_supporting:
# Only select the related substory
supporting = map(int, supporting.split())
substory = [story[i - 1] for i in supporting]
else:
# Provide all the substories
substory = [x for x in story if x]
data.append((substory, q, a))
story.append("")
else:
sent = tokenize(line)
story.append(sent)
return data
def get_stories(f, only_supporting=False, max_length=None):
"""Given a file name, read the file,
retrieve the stories,
and then convert the sentences into a single story.
If max_length is supplied,
any stories longer than max_length tokens will be discarded.
"""
def flatten(data):
return sum(data, [])
data = parse_stories(f.readlines(), only_supporting=only_supporting)
data = [(flatten(story), q, answer) for story, q, answer in data
if not max_length or len(flatten(story)) < max_length]
return data
def vectorize_stories(word_idx, story_maxlen, query_maxlen, data):
inputs, queries, answers = [], [], []
for story, query, answer in data:
inputs.append([word_idx[w] for w in story])
queries.append([word_idx[w] for w in query])
answers.append(word_idx[answer])
return (pad_sequences(inputs, maxlen=story_maxlen),
pad_sequences(queries, maxlen=query_maxlen), np.array(answers))
def read_data(finish_fast=False):
# Get the file
try:
path = get_file(
"babi-tasks-v1-2.tar.gz",
origin="https://s3.amazonaws.com/text-datasets/"
"babi_tasks_1-20_v1-2.tar.gz")
except Exception:
print(
"Error downloading dataset, please download it manually:\n"
"$ wget http://www.thespermwhale.com/jaseweston/babi/tasks_1-20_v1-2" # noqa: E501
".tar.gz\n"
"$ mv tasks_1-20_v1-2.tar.gz ~/.keras/datasets/babi-tasks-v1-2.tar.gz" # noqa: E501
)
raise
# Choose challenge
challenges = {
# QA1 with 10,000 samples
"single_supporting_fact_10k": "tasks_1-20_v1-2/en-10k/qa1_"
"single-supporting-fact_{}.txt",
# QA2 with 10,000 samples
"two_supporting_facts_10k": "tasks_1-20_v1-2/en-10k/qa2_"
"two-supporting-facts_{}.txt",
}
challenge_type = "single_supporting_fact_10k"
challenge = challenges[challenge_type]
with tarfile.open(path) as tar:
train_stories = get_stories(tar.extractfile(challenge.format("train")))
test_stories = get_stories(tar.extractfile(challenge.format("test")))
if finish_fast:
train_stories = train_stories[:64]
test_stories = test_stories[:64]
return train_stories, test_stories
class MemNNModel(tune.Trainable):
def build_model(self):
" | ""Helper method for creating the model"""
| vocab = set()
for story, q, answer in self.train_stories + self.test_stories:
vocab |= set(story + q + [answer])
vocab = sorted(vocab)
# Reserve 0 for masking via pad_sequences
vocab_size = len(vocab) + 1
story_maxlen = max(
len(x) for x, _, _ in self.train_stories + self.test_stories)
query_maxlen = max(
len(x) for _, x, _ in self.train_stories + self.test_stories)
word_idx = {c: i + 1 for i, c in enumerate(vocab)}
self.inputs_train, self.queries_train, self.answers_train = (
vectorize_stories(word_idx, story_maxlen, query_maxlen,
self.train_stories))
self.inputs_test, self.queries_test, self.answers_test = (
vectorize_stories(word_idx, story_maxlen, query_maxlen,
self.test_stories))
# placeholders
input_sequence = Input((story_maxlen, ))
question = Input((query_maxlen, ))
# encoders
# embed the input sequence into a sequence of vectors
input_encoder_m = Sequential()
input_encoder_m.add(Embedding(input_dim=vocab_size, output_dim=64))
input_encoder_m.add(Dropout(self.config.get("dropout", 0.3)))
# output: (samples, story_maxlen, embedding_dim)
# embed the input into a sequence of vectors of size query_maxlen
input_encoder_c = Sequential()
input_encoder_c.add(
Embedding(input_dim=vocab_size, output_dim=query_maxlen))
input_encoder_c.add(Dropout(self.config.get("dropout", 0.3)))
# output: (samples, story_maxlen, query_maxlen)
# embed the question into a sequence of vectors
question_encoder = Sequential()
question_encoder.add(
Embedding(
input_dim=vocab_size, output_dim=64,
input_length=query_maxlen))
question_encoder.add(Dropout(self.config.get("dropout", 0.3)))
# output: (samples, query_maxlen, embedding_dim)
# encode input sequence and questions (which are indices)
# to sequences of dense vectors
input_encoded_m = input_encoder_m(input_sequence)
input_encoded_c = input_encoder_c(input_sequence)
question_encoded = question_encoder(question)
# compute a "match" between the first input vector sequence
# and the question vector sequence
# shape: `(samples, story_maxlen, query_maxlen)`
match = dot([input_encoded_m, question_encoded], axes=(2, 2))
match = Activation("softmax")(match)
# add the match matrix with the second input vector sequence
response = add(
[match, input_encoded_c]) # (samples, story_maxlen, query_maxlen)
response = Permute(
(2, 1))(response) # (samples, query_maxlen, story_maxlen)
# concatenate the match matrix with the question vector sequence
answer = concatenate([response, question_encoded])
# the original paper uses a matrix multiplication.
# we choose to use a RNN instead.
answer = LSTM(32)(answer) # (samples, 32)
# one regularization layer -- more would probably be needed.
answer = Dropout(self.config.get("dropout", 0.3))(answer)
answer = Dense(vocab_size)(answer) # (samples, vocab_size)
# we output a probability distribution over the vocabulary
answer = Activation("softmax")(answer)
# build the final model
model = Model([input_sequence, question], answer)
return model
def setup(self, config):
|
MiniSEC/GRR_clone | lib/aff4_objects/client_stats.py | Python | apache-2.0 | 482 | 0.004149 | #!/usr/bin/env python
# Copyright 20 | 12 Google Inc. All Rights Reserved.
"""AFF4 object representing client stats."""
from grr.lib import aff4
from grr.lib import rdfvalue
from grr.lib.aff4_objects import standard
class ClientStats(standard.VFSDirectory):
"""A container for all client statistics."""
class SchemaCls(standard.VFSDire | ctory.SchemaCls):
STATS = aff4.Attribute("aff4:stats", rdfvalue.ClientStats,
"Client Stats.", "Client stats")
|
hrantzsch/signature-verification | tools/plot_clf.py | Python | gpl-3.0 | 5,362 | 0.000746 | import argparse
import matplotlib.pyplot as plt
import seaborn as sns
sns.set_palette("colorblind")
sns.set_color_codes("colorblind")
def parse(logfile):
loss_train = []
loss_test = []
section_loss = []
acc_train = []
acc_test = []
section_acc = []
mean_diff_train = []
mean_diff_test = []
section_mean_diff = []
max_diff_train = []
max_diff_test = []
section_max_diff = []
with open(logfile, 'r') as lf:
for line in lf:
if line.startswith('#') or line == '\n': # skip comments and blank
continue
if 'train' in line:
if len(section_loss) > 0:
loss_test.append(section_loss)
section_loss = []
if len(section_acc) > 0:
acc_test.append(section_acc)
section_acc = []
if len(section_mean_diff) > 0:
mean_diff_test.append(section_mean_diff)
section_mean_diff = []
if len(section_max_diff) > 0:
max_diff_test.append(section_max_diff)
section_max_diff = []
elif 'test' in line:
if len(section_loss) > 0:
loss_train.append(section_loss)
section_loss = []
if len(section_acc) > 0:
acc_train.append(section_acc)
section_acc = []
if len(section_mean_diff) > 0:
mean_diff_train.append(section_mean_diff)
section_mean_diff = []
if len(section_max_diff) > 0:
max_diff_train.append(section_max_diff)
section_max_diff = []
else:
# it, loss, acc, mean_diff = line.split(','); max_diff = None
it, loss, acc, mean_diff, max_diff = line.split(',')
section_loss.append(float(loss))
if acc is not None:
section_acc.append(float(acc))
if mean_diff is not None:
section_mean_diff.append(float(mean_diff))
if max_diff is not None:
section_max_diff.append(float(max_diff))
if len(section_loss) > 0:
loss_test.append(section_loss)
if len(section_acc) > 0:
acc_test.append(section_acc)
if len(section_mean_diff) > 0:
mean_diff_test.append(section_mean_diff)
if len(section_max_diff) > 0:
max_diff_test.append(section_max_diff)
return loss_train, loss_test, acc_train, acc_test,\
mean_diff_train, mean_diff_test, max_diff_train, max_diff_test
def avg(l):
return sum(l) / len(l)
def plot_avg(logfile):
loss_train, loss_test, acc_train, acc_test, mean_diff_train,\
mean_diff_test, max_diff_train, max_diff_test = parse(logfile)
f, axarr = plt.subplots(2, sharex=True)
x = list(range(1, len(loss_train)+1))
# x = list(range(len(loss_train))) # old style starting at epoch 0
axarr[0].plot(x, list(map(avg, loss_train)), '.-', label='train')
axarr[0].plot(x, list(map(avg, loss_test)), '.-', label='test')
# axarr[0].set_ylim([-0.05, 5.0])
axarr[0].set_title("loss")
axarr[0].legend(loc='upper right')
axarr[1].plot(x, list(map(avg, acc_train)), '.-')
if len(acc_train) > 0:
axarr[1].plot(x, list(map(avg, acc_test)), '.-')
# axarr[1].set_ylim([0.0, 1.1])
axarr[1].set_title("accuracy")
# if len(mean_diff_train) > 0:
# axarr[2].plot(x, list(map(avg, mean_diff_train)), '.-')
# if len(mean_diff_test) > 0:
# axarr[2].plot(x, list(map(avg, mean_diff_test)), 'g.-')
# axarr[2].set_title("mean_diff")
#
# if len(max_diff_train) > 0:
# axarr[3].plot(x, list(map(avg, max_diff_train)), '.-')
# if len(max_diff_test) > 0:
# axarr[3].plot( | x, list(map(avg, max_diff_test)), 'g.-')
# axarr[3].set_title("max_diff")
# axarr[1].set_y | lim([0.7, 1.0])
plt.show()
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument('logfile')
# parser.add_argument('--style', default='avg',
# help="'avg' for average, or 'all'")
args = parser.parse_args()
plot_avg(args.logfile)
# def plot_test(logfile):
# _, loss, _, acc = parse(logfile)
#
# plt.plot(list(map(avg, loss)), label='loss')
# plt.plot(list(map(avg, acc)), label='acc')
# plt.legend(loc='upper left')
# plt.show()
#
#
# def plot_mixed(logfile):
# loss_train, loss_test, acc_train, acc_test = parse(logfile)
#
# f, axarr = plt.subplots(2, sharex=True)
# x = list(range(1, len(loss_train)+1))
# # x = list(range(len(loss_train))) # old style starting at epoch 0
#
# loss_chain = list(chain.from_iterable(loss_train))
# axarr[0].plot(loss_chain, '-')
# axarr[0].plot(list(range(1, len(loss_chain)+1, len(loss_chain)//len(loss_train))),
# list(map(avg, loss_test)), '.-')
# acc_chain = list(chain.from_iterable(acc_train))
# axarr[1].plot(acc_chain, '-')
# axarr[1].plot(list(range(1, len(acc_chain)+1, len(acc_chain)//len(acc_train))),
# list(map(avg, acc_test)), '.-')
# plt.legend(loc='lower right')
# plt.show()
|
mavarick/spider-python | webspider/utils/var2str.py | Python | gpl-2.0 | 6,752 | 0.024629 | # -*- coding: utf-8 -*-
"""
translate variance and its formated character which have regularities
for example:
raw input:
v={'aa': 12345, 'bbbb': [1, 2, 3, 4, {'flag': 'vvvv||||xxxxx'}, set(['y', 'x', 'z'])]}
after `var2str.var2str(v)`
v_str=<aa::12345##bbbb::<1||2||3||4||<flag::vvvv|xxxxx>||<y|||x|||z>>>
then reverse back: `var2str.str2var(v_str)`
v_var={'aa': '12345', 'bbbb': ['1', '2', '3', '4', {'flag': 'vvvv|xxxxx'}, set(['y', 'x', 'z'])]}
NOTATION:
1, KEY of DICT should be string.
2, SET amd TUPLE automatically are transformed to LIST
3, INT/FLOAT/LONG etc. are automatically transformed to STRING
4, SEPERATORS would be replace to '' in character.
"""
import types
# TAKE notation of sequence, which has one order
sep_dict = {
"dict_sep": "##", # seperator of elements of dict
"dict_k_v_sep": "::", # k::v
"list_sep": "||", # list seperator
"set_sep": "|||", # set seper | ator
"tuple_sep": "||" # tuple seperator
}
sep_nest = ("<", ">") # better not repeated char, e.x. ("<-", "->")
# internal operations
sep_values = sep_dict.values()
def erase_sep(s):
for v in sep_values:
s = s.replace(v, "")
for v in sep_nest:
s=s.replace(v, "")
return s
_s=sep_nest[0]
_e=sep_nest[1]
class var2str(obj | ect):
@staticmethod
def var2str(var):
if not var: return ""
if type(var) == types.DictType:
result = []
for key,value in var.items():
v_str = var2str.var2str(value)
k_str = erase_sep("{0}".format(key))
result.append("{key}{sep}{value}".format(
key=k_str,
sep=sep_dict["dict_k_v_sep"],
value=v_str))
return _s+sep_dict["dict_sep"].join(result)+_e
#return sep_dict["dict_sep"].join(result)
elif type(var) == types.ListType:
result = [var2str.var2str(v) for v in var]
return _s+sep_dict["list_sep"].join(result)+_e
#return sep_dict["list_sep"].join(result)
elif type(var) == type(set([])):
result = [var2str.var2str(v) for v in var]
return _s+sep_dict["set_sep"].join(result)+_e
#return sep_dict["set_sep"].join(result)
elif type(var) == types.TupleType:
result = [var2str.var2str(v) for v in var]
return _s+sep_dict["tuple_sep"].join(result)+_e
#return sep_dict["tuple_sep"].join(result)
elif type(var) in [types.StringType,
types.IntType,
types.LongType,
types.FloatType]:
return erase_sep("{0}".format(var))
else:
raise TypeError("Type is not supported. var: {0}, type: {1}".format(
var, type(var)))
@staticmethod
def str2var(value):
# certain the outer nested elements' type
if NestType.is_nest_type(value, _s, _e):
_var = NestType(value)
_var.replace_nest_vars()
var = _var.parse_var()
if type(var) == types.DictType:
for k, v in var.items():
if type(v)==NestType:
var[k] = var2str.str2var(str(v))
if type(var) == types.ListType:
for n, v in enumerate(var):
if type(v) == NestType:
var[n] = var2str.str2var(str(v))
if type(var) == type(set()):
# because element in set must be hashable, so there is no meaning for
# for parsing set
pass
return var
else:
return value
class NestType(object):
def __init__(self, s, s_tag=_s, e_tag=_e):
self.value = str(s)
self.s_tag = s_tag
self.e_tag = e_tag
self.replace_s = None
@staticmethod
def is_nest_type(value, s_tag, e_tag):
if (not value.startswith(s_tag) or
not value.endswith(e_tag)):
return 0
return 1
def _get_obj_str(self, var):
return "[NestType]"+str(hash(var))
def has_nest_element(self):
if self.replace_s is None:
self.replace_nest_vars()
return self.repalce_s == self.value
def _replace_nest_var(self, s, nest_dic={}):
s_len = len(s)
tag_index = 0
s_tag_len, e_tag_len = len(self.s_tag), len(self.e_tag)
nest_index =[]
for i in range(s_len):
if s[i:i+s_tag_len] == self.s_tag:
tag_index +=1
if tag_index == 1: nest_index.append(i)
if s[i:i+e_tag_len] == self.e_tag:
tag_index -=1
if tag_index == 0: nest_index.append(i)
if len(nest_index) == 2: break
if len(nest_index) <2: return s
nest_index_s = nest_index[0]
nest_index_e = nest_index[1] + e_tag_len
nest_str = s[nest_index_s:nest_index_e]
nest_var = NestType(nest_str, s_tag=self.s_tag, e_tag = self.e_tag)
nest_var_str = self._get_obj_str(nest_var)
nest_dic[nest_var_str] = nest_var
return s[0:nest_index_s] + nest_var_str + s[nest_index_e:]
def replace_nest_vars(self):
# trim sign in start and end
nest_dic = {}
if not NestType.is_nest_type(self.value, self.s_tag, self.e_tag):
raise Exception(
"[ERROR] `{0}` does not match NestType format".format(self.value))
s = _trim_tag(self.value, self.s_tag, self.e_tag)
while 1:
replace_s = self._replace_nest_var(s,nest_dic)
if replace_s == s: break
s = replace_s
self.replace_s = replace_s
self.nest_dic = nest_dic
def parse_var(self):
"""string `replace_s` has no nestType at all"""
s = self.replace_s
var = None
dict_sep = sep_dict["dict_sep"]
dict_k_v_sep = sep_dict["dict_k_v_sep"]
list_sep = sep_dict["list_sep"]
set_sep = sep_dict["set_sep"]
if dict_k_v_sep in s: # dict
var = {}
items = s.split(dict_sep)
for item in items:
if not item: continue
k,v=item.split(dict_k_v_sep)
var[k] = self.nest_dic.get(v, v)
elif set_sep in s:
var = set([self.nest_dic.get(t, t) for t in s.split(set_sep)])
elif list_sep in s:
var = [self.nest_dic.get(t, t) for t in s.split(list_sep)]
else:
# just one string
var = s
return var
def __str__(self):
return self.value
def __unicode__(self):
return self.value
def _trim_tag(str, s, e):
"""trim the `str` off start `s` and end `e`"""
return str[len(s):(len(str)-len(e))]
def test():
a = {"aa": 12345, "bbbb":[1,2,3,4,{'flag':"vvvv||||世界是我的"},set(['x', 'y','z'])]}
#a = {}
print a
a_str = var2str.var2str(a)
print ">>", a_str
a_var = var2str.str2var(a_str)
print ">>", a_var
if __name__ == "__main__":
test()
|
liyazhe/AML-Project2 | tests/test_predictor.py | Python | bsd-3-clause | 2,395 | 0.000418 | import os
from unittest import TestCase
from samr import corpus
from samr.predictor import PhraseSentimentPredictor
from samr.data import Datapoint
TESTDATA_PATH = os.path.join(os.path.dirname(__file__), "data")
class TestPhraseSentimentPredictor(TestCase):
def setUp(self):
self.__original_path = corpus.DATA_PATH
corpus.DATA_PATH = TESTDATA_PATH
def tearDown(self):
corpus.DATA_PATH = self.__original_path
def test_fit_returns_self(self):
train, _ = corpus.make_train_test_split("defiant order")
predictor = PhraseSentimentPredictor()
s = predictor.fit(train)
self.assertEqual(predictor, s)
def test_simple_predict(self):
train, test = corpus.make_train_test_split("inhaler")
predictor = PhraseSentimentPredictor()
predictor.fit(train)
predictions = predictor.predict(test)
# Same amount of predictions than input values
self.assertEqual(len(predictions), len(test))
# Predicted labels where seen during training
train_labels = set(x.sentiment for x in train)
predicted_labels = set(predictions)
self.assertEqual(predicted_labels - train_labels, set())
def test_simple_error_matrix(self):
train, test = corpus.make_train_test_split("reflektor", proportion=0.4)
predictor = PhraseSentimentPredictor()
predictor.fit(train)
error = predictor.error_matrix(test)
for real, predicted in error.keys():
self.assertNotEqual(real, predicted)
score = predictor.score(test)
assert score > 0, "Test is valid only if score is more than 0"
N = float(len(test))
wrong = sum(len(xs) for xs in error.values())
self.assertE | qual((N - wrong) / N, score)
def test_simple_duplicates(self):
dupe = Datapoint(phraseid="a", sentenceid="b", phrase="b a", sentiment="1")
# Train has a lot of "2" sentiments
train = [Datapoint(phraseid=str(i),
sentenceid=str(i),
phrase="a b",
sentiment=" | 2") for i in range(10)]
train.append(dupe)
test = [Datapoint(*dupe)]
predictor = PhraseSentimentPredictor(duplicates=True)
predictor.fit(train)
predicted = predictor.predict(test)[0]
self.assertEqual(predicted, "1")
|
mlvfx/vfxAssetBox | assetbox/base/plugins/actions.py | Python | cc0-1.0 | 2,772 | 0.001082 | """
Base actions to be use as a template, and to store generic actions.
"""
import os
from assetbox.base.constants import ActionType
from PySide import QtGui
def filename_input(title='Name Input', outputtext='Text: '):
"""
Decorator to add a file dialog input.
Args:
title (str): title of the file dialog.
outputtext (str): hint text about what we are doing.
Returns:
str: the path found in the dialog.
"""
def name_decorate(func):
def func_wrapper(self, path):
text, ok = QtGui.QInputDialog.getText(None, title, outputtext+path)
if ok and text:
output_path = '{0}/{1}'.format(path, text)
return func(self, output_path)
else:
return func(self, False)
return func_wrapper
return name_decorate
def confirm_dialog(title='Confirm', message='Text:'):
"""
Decorator to add a confirmation dialog.
Args:
title (str): title of the confirm dialog.
message (str): hint text about what we are doing.
Returns:
str: the path confirmed in the dialog.
"""
def dialog_decorate(func):
def func_wrapper(self, path):
confirm = QtGui.QMessageBox.question(None,
title,
message,
QtGui.QMessageBox.Yes | QtGui.QMessageBox.No,
QtGui.QMessageBox.No)
if confirm == QtGui.QMessageBox.Yes:
return func(self, path)
else:
return func(self, '')
| return func_wrapper
return dialog_decorate
class BaseAction(object):
"""Base Action template, a skeleton for an action."""
name = 'BaseAction'
filetype = 'abc'
actiontype = ActionType.Menu
def __init__(self):
pass
def valid_filetype(self, path, *args):
"""Check the asset selected is valid."""
name, ext = os.path.splitext(path | )
return ext.replace('.', '') == self.filetype
def execute(self):
"""Run the command."""
raise NotImplementedError
class Delete(BaseAction):
"""Delete command that mimics the operating systems delete."""
name = 'Delete'
filetype = 'abc'
@confirm_dialog('Confirm Deletion', 'Are you sure you want to delete this?')
def execute(self, path, **kwargs):
"""Run the command."""
if self.valid_filetype(path):
#TODO: remove from list widget and support multiple file types
os.remove(path)
def register_actions(*args):
"""Register all the base actions into the host app."""
return [Delete()]
|
mcaleavya/bcc | examples/tracing/bitehist.py | Python | apache-2.0 | 1,187 | 0.005055 | #!/usr/bin/python
#
# bitehist.py Block I/O size histogram.
# For Linux, uses BCC, eBPF. Embedded C.
#
# Written as a basic example of using histograms to show a distribution.
#
# A Ctrl-C will print the gathered histogram then exit.
#
# Copyright (c) 2015 Brendan Gregg.
# Licensed under the Apache License, Version 2.0 (the "License")
#
# 15-Aug-2015 Brendan Gregg Created this.
# 03-Feb-2019 Xiaozhou Liu added linear histogram.
from __future__ import print_function
from bcc import BPF
from time import sleep
# load BPF program
b = BPF(text="""
#include <uapi/linux/ptrace.h>
#include <linux/blkdev.h>
BPF_HISTOGRAM(dist);
BPF_HISTOGRAM(dist_linear);
int kprobe__blk_account_io_completion(struct pt_regs *ctx, struct request *req)
{
dist.increment(bpf_log2l(req->__data_len / 1024));
dist_linear.increment(req->__data_len / 1024);
return 0;
}
""")
# header
print("Tracing... Hit Ctrl-C to end.")
# trace until Ctrl-C
try:
sleep(99999999)
except KeyboardInterrupt:
print()
# output
print("log2 histogram")
print | ("~~~~~~~~~~~~~~")
b["dist"].print_log2_hist("kbytes")
print("\nlinear histogram")
print("~~~~~ | ~~~~~~~~~~~")
b["dist_linear"].print_linear_hist("kbytes")
|
ChristosChristofidis/h2o-3 | h2o-py/tests/testdir_algos/glm/pyunit_link_functions_tweedie_basicGLM.py | Python | apache-2.0 | 1,103 | 0.017226 | import sys
sys.path.insert(1, "../../../")
import h2o
def link_functions_tweedie_basic(ip,port):
# Connect to h2o
h2o.init(ip,port)
print "Read in prostate data."
hdf = h2o.upload_file(h2o.locate("smalldata/prostate/prostate_complete.csv.zip"))
print "Testing for family: TWEEDIE"
print "Set variables for h2o."
y = "CAPSULE"
x = ["AGE","RACE","DCAPS","PSA","VOL","DPROS","GLEASON"]
print "Create models with canonical link: TWEEDIE"
model_h2o_tweedie = h2o.glm(x=hdf[x], y=hdf[y], family="tweedie", link="tweedie", alpha=[0.5], Lambda = [0])
print "Compare model deviances for | link function tweedie (using precomputed values from R)"
deviance_h2o_tweedie = model_h2o_tweedie.residual_deviance() / model_h2o_tweedie.null_deviance()
assert 0.721452 - deviance_h2o_tweedie <= 0.01, "h2o's residual/null deviance is more than 0.01 lower than R's. h2o: " \
| "{0}, r: {1}".format(deviance_h2o_tweedie, 0.721452)
if __name__ == "__main__":
h2o.run_test(sys.argv, link_functions_tweedie_basic)
|
Naeka/vosae-app | www/organizer/models/embedded/reminder.py | Python | agpl-3.0 | 830 | 0.001205 | # -*- coding:Utf-8 -*-
from mongoengine import EmbeddedDocument, fields
__all__ = (
'ReminderEntry',
'ReminderSettings',
)
class ReminderEntry(EmbeddedDocument):
"""
Per-event reminders settings
"""
METHODS = (
'EMAIL',
'POPUP'
)
method = fields.StringField(choices=METHODS, required=True)
minutes = fields.IntField(min_value=0, max_value=21600, required=True) # Max 15 days before
class ReminderSettings(EmbeddedDocument):
"""
Per-event reminders settings
"""
use_d | efault = fields.BooleanField(default=True)
overrides = fields.ListField(fields.EmbeddedDocumentField("ReminderEntry"))
class NextReminder(EmbeddedDocument):
at = fields.DateTimeField(required=Tru | e)
threshold = fields.IntField(min_value=0, max_value=21600, required=True)
|
lmregus/Portfolio | python/design_patterns/env/lib/python3.7/site-packages/sphinx/domains/c.py | Python | mit | 12,525 | 0.00008 | """
sphinx.domains.c
~~~~~~~~~~~~~~~~
The C language domain.
:copyright: Copyright 2007-2019 by the Sphinx team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
import string
from docutils import nodes
from sphinx import addnodes
from sphinx.directives import ObjectDescription
from sphinx.domains import Domain, ObjType
from sphinx.locale import _
from sphinx.roles import XRefRole
from sphinx.util.docfields import Field, TypedField
from sphinx.util.nodes import make_refnode
if False:
# For type annotation
from typing import Any, Dict, Iterator, List, Tuple # NOQA
from sphinx.application import Sphinx # NOQA
from sphinx.builders import Builder # NOQA
from sphinx.environment import BuildEnvironment # NOQA
# RE to split at word boundaries
wsplit_re = re.compile(r'(\W+)')
# REs for C signatures
c_sig_re = re.compile(
r'''^([^(]*?) # return type
([\w:.]+) \s* # thing name (colon allowed for C++)
(?: \((.*)\) )? # optionally arguments
(\s+const)? $ # const specifier
''', re.VERBOSE)
c_funcptr_sig_re = re.compile(
r'''^([^(]+?) # return type
(\( [^()]+ \)) \s* # name in parentheses
\( (.*) \) # arguments
(\s+const)? $ # const specifier
''', re.VERBOSE)
c_funcptr_arg_sig_re = re.compile(
r'''^\s*([^(,]+?) # return type
\( ([^()]+) \) \s* # name in parentheses
\( (.*) \) # arguments
(\s+const)? # const specifier
\s*(?=$|,) # end with comma or end of string
''', re.VERBOSE)
c_funcptr_name_re = re.compile(r'^\(\s*\*\s*(.*?)\s*\)$')
class CObject(ObjectDescription):
"""
Description of a C language object.
"""
doc_field_types = [
TypedField('parameter', label=_('Parameters'),
names=('param', 'parameter', 'arg', 'argument'),
typerolename='type', typenames=('type',)),
Field('returnvalue', label=_('Returns'), has_arg=False,
names=('returns', 'return')),
Field('returntype', label=_('Return type'), has_arg=False,
names=('rtype',)),
]
# These C types aren't described anywhere, so don't try to create
# a cross-reference to them
stopwords = {
'const', 'void', 'char', 'wchar_t', 'int', 'short',
'long', 'float', 'double', 'unsigned', 'signed', 'FILE',
'clock_t', 'time_t', 'ptrdiff_t', 'size_t', 'ssize_t',
'struct', '_Bool',
}
def _parse_type(self, node | , ctype):
# type: (nodes.Element, str) -> None
# add cross-ref nodes for all words
for part in [_f for _f in wsplit_re.split(ctype) if _f]:
tnode = nodes.Text(part, part)
if part[0] in string.ascii_letters + '_' and \
part not in self.sto | pwords:
pnode = addnodes.pending_xref(
'', refdomain='c', reftype='type', reftarget=part,
modname=None, classname=None)
pnode += tnode
node += pnode
else:
node += tnode
def _parse_arglist(self, arglist):
# type: (str) -> Iterator[str]
while True:
m = c_funcptr_arg_sig_re.match(arglist)
if m:
yield m.group()
arglist = c_funcptr_arg_sig_re.sub('', arglist)
if ',' in arglist:
_, arglist = arglist.split(',', 1)
else:
break
else:
if ',' in arglist:
arg, arglist = arglist.split(',', 1)
yield arg
else:
yield arglist
break
def handle_signature(self, sig, signode):
# type: (str, addnodes.desc_signature) -> str
"""Transform a C signature into RST nodes."""
# first try the function pointer signature regex, it's more specific
m = c_funcptr_sig_re.match(sig)
if m is None:
m = c_sig_re.match(sig)
if m is None:
raise ValueError('no match')
rettype, name, arglist, const = m.groups()
desc_type = addnodes.desc_type('', '')
signode += desc_type
self._parse_type(desc_type, rettype)
try:
classname, funcname = name.split('::', 1)
classname += '::'
signode += addnodes.desc_addname(classname, classname)
signode += addnodes.desc_name(funcname, funcname)
# name (the full name) is still both parts
except ValueError:
signode += addnodes.desc_name(name, name)
# clean up parentheses from canonical name
m = c_funcptr_name_re.match(name)
if m:
name = m.group(1)
typename = self.env.ref_context.get('c:type')
if self.name == 'c:member' and typename:
fullname = typename + '.' + name
else:
fullname = name
if not arglist:
if self.objtype == 'function' or \
self.objtype == 'macro' and sig.rstrip().endswith('()'):
# for functions, add an empty parameter list
signode += addnodes.desc_parameterlist()
if const:
signode += addnodes.desc_addname(const, const)
return fullname
paramlist = addnodes.desc_parameterlist()
arglist = arglist.replace('`', '').replace('\\ ', '') # remove markup
# this messes up function pointer types, but not too badly ;)
for arg in self._parse_arglist(arglist):
arg = arg.strip()
param = addnodes.desc_parameter('', '', noemph=True)
try:
m = c_funcptr_arg_sig_re.match(arg)
if m:
self._parse_type(param, m.group(1) + '(')
param += nodes.emphasis(m.group(2), m.group(2))
self._parse_type(param, ')(' + m.group(3) + ')')
if m.group(4):
param += addnodes.desc_addname(m.group(4), m.group(4))
else:
ctype, argname = arg.rsplit(' ', 1)
self._parse_type(param, ctype)
# separate by non-breaking space in the output
param += nodes.emphasis(' ' + argname, '\xa0' + argname)
except ValueError:
# no argument name given, only the type
self._parse_type(param, arg)
paramlist += param
signode += paramlist
if const:
signode += addnodes.desc_addname(const, const)
return fullname
def get_index_text(self, name):
# type: (str) -> str
if self.objtype == 'function':
return _('%s (C function)') % name
elif self.objtype == 'member':
return _('%s (C member)') % name
elif self.objtype == 'macro':
return _('%s (C macro)') % name
elif self.objtype == 'type':
return _('%s (C type)') % name
elif self.objtype == 'var':
return _('%s (C variable)') % name
else:
return ''
def add_target_and_index(self, name, sig, signode):
# type: (str, str, addnodes.desc_signature) -> None
# for C API items we add a prefix since names are usually not qualified
# by a module name and so easily clash with e.g. section titles
targetname = 'c.' + name
if targetname not in self.state.document.ids:
signode['names'].append(targetname)
signode['ids'].append(targetname)
signode['first'] = (not self.names)
self.state.document.note_explicit_target(signode)
inv = self.env.domaindata['c']['objects']
if name in inv:
self.state_machine.reporter.warning(
'duplicate C object description of %s, ' % name +
'other instance in ' + self.env.doc2path(inv[name][0]),
line=self.lineno)
inv[name] = (self.env.docname, |
DataONEorg/d1_python | gmn/src/d1_gmn/tests/test_scimeta.py | Python | apache-2.0 | 7,205 | 0.002637 | #!/usr/bin/env python
# This work was created by participants in the DataONE project, and is
# jointly copyrighted by participating institutions in DataONE. For
# more information on DataONE, see our web site at http://dataone.org.
#
# Copyright 2009-2019 DataONE
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the Lic | ense.
import io
import pytest
import responses
import d1_common.types.exceptions
import django.test
import d1_gm | n.tests.gmn_test_case
import d1_test.d1_test_case
import d1_test.instance_generator.identifier
import d1_test.instance_generator.system_metadata
@d1_test.d1_test_case.reproducible_random_decorator("TestSciMeta")
class TestSciMeta(d1_gmn.tests.gmn_test_case.GMNTestCase):
def _create_and_check_scimeta(self, client, pid, format_id, xml_str):
sysmeta_pyxb = d1_test.instance_generator.system_metadata.generate_from_file(
client,
io.BytesIO(xml_str.encode("utf-8")),
{"identifier": pid, "formatId": format_id, "replica": None},
)
self.call_d1_client(
client.create, pid, io.BytesIO(xml_str.encode("utf-8")), sysmeta_pyxb
)
self.get_obj(client, pid)
@responses.activate
def test_1000(self, gmn_client_v1_v2):
"""MNStorage.create(SciMeta): Uninstalled schema causes validation to be
silently skipped."""
self._create_and_check_scimeta(
gmn_client_v1_v2,
d1_test.instance_generator.identifier.generate_pid("PID_SCIMETA_"),
"http://www.icpsr.umich.edu/DDI",
"not a valid XML doc",
)
@responses.activate
def test_1010(self, gmn_client_v1_v2):
"""MNStorage.create(SciMeta): Unknown formatId causes validation to be silently
skipped."""
self._create_and_check_scimeta(
gmn_client_v1_v2,
d1_test.instance_generator.identifier.generate_pid("PID_SCIMETA_"),
"unknown_format_id",
"not a valid XML doc",
)
@responses.activate
def test_1020(self, gmn_client_v1_v2):
"""MNStorage.create(SciMeta): onedcx does not validate as EML."""
with pytest.raises(
d1_common.types.exceptions.InvalidRequest,
match="XML document does not validate",
):
self._create_and_check_scimeta(
gmn_client_v1_v2,
d1_test.instance_generator.identifier.generate_pid("PID_SCIMETA_"),
"eml://ecoinformatics.org/eml-2.1.1",
self.test_files.load_xml_to_str("scimeta_dc_1.xml"),
)
@responses.activate
def test_1030(self, gmn_client_v1_v2):
"""MNStorage.create(SciMeta): onedcx validates successfully as DataONE Dublin
Core Extended."""
self._create_and_check_scimeta(
gmn_client_v1_v2,
d1_test.instance_generator.identifier.generate_pid("PID_SCIMETA_"),
"http://ns.dataone.org/metadata/schema/onedcx/v1.0",
self.test_files.load_xml_to_str("scimeta_dc_1.xml"),
)
@responses.activate
def test_1040(self, gmn_client_v1_v2):
"""MNStorage.create(SciMeta): ISO/TC 211 does not validate as Dryad."""
with pytest.raises(
d1_common.types.exceptions.InvalidRequest,
match="XML document does not validate",
):
self._create_and_check_scimeta(
gmn_client_v1_v2,
d1_test.instance_generator.identifier.generate_pid("PID_SCIMETA_"),
"http://datadryad.org/profile/v3.1",
self.test_files.load_xml_to_str("isotc211/nsidc.xml"),
)
@responses.activate
def test_1050(self, gmn_client_v1_v2):
"""MNStorage.create(SciMeta): Valid EML 2.1.1."""
self._create_and_check_scimeta(
gmn_client_v1_v2,
d1_test.instance_generator.identifier.generate_pid("PID_SCIMETA_"),
"eml://ecoinformatics.org/eml-2.1.1",
self.test_files.load_xml_to_str("scimeta_eml_valid.xml"),
)
@responses.activate
def test_1060(self, gmn_client_v1_v2):
"""MNStorage.create(SciMeta): Invalid EML 2.1.1: Unexpected element."""
with pytest.raises(
d1_common.types.exceptions.InvalidRequest, match="unexpectedElement"
):
self._create_and_check_scimeta(
gmn_client_v1_v2,
d1_test.instance_generator.identifier.generate_pid("PID_SCIMETA_"),
"eml://ecoinformatics.org/eml-2.1.1",
self.test_files.load_xml_to_str("scimeta_eml_invalid_1.xml"),
)
@responses.activate
def test_1070(self, gmn_client_v1_v2):
"""MNStorage.create(SciMeta): Invalid EML 2.1.1: Missing child element."""
with pytest.raises(
d1_common.types.exceptions.InvalidRequest, match="Missing child element"
):
self._create_and_check_scimeta(
gmn_client_v1_v2,
d1_test.instance_generator.identifier.generate_pid("PID_SCIMETA_"),
"eml://ecoinformatics.org/eml-2.1.1",
self.test_files.load_xml_to_str("scimeta_eml_invalid_2.xml"),
)
@responses.activate
def test_1080(self, gmn_client_v1_v2):
"""MNStorage.create(SciMeta): Test settings SCIMETA_VALIDATION_MAX_SIZE and
SCIMETA_VALIDATION_OVER_SIZE_ACTION = 'reject'"""
with django.test.override_settings(
SCIMETA_VALIDATION_MAX_SIZE=10, SCIMETA_VALIDATION_OVER_SIZE_ACTION="reject"
):
with pytest.raises(
d1_common.types.exceptions.InvalidRequest,
match="above size limit for validation",
):
self._create_and_check_scimeta(
gmn_client_v1_v2,
d1_test.instance_generator.identifier.generate_pid("PID_SCIMETA_"),
"eml://ecoinformatics.org/eml-2.1.1",
self.test_files.load_xml_to_str("scimeta_eml_invalid_2.xml"),
)
@responses.activate
def test_1090(self, gmn_client_v1_v2):
"""MNStorage.create(SciMeta): Test settings SCIMETA_VALIDATION_MAX_SIZE and
SCIMETA_VALIDATION_OVER_SIZE_ACTION = 'accept'"""
with django.test.override_settings(
SCIMETA_VALIDATION_MAX_SIZE=10, SCIMETA_VALIDATION_OVER_SIZE_ACTION="accept"
):
self._create_and_check_scimeta(
gmn_client_v1_v2,
d1_test.instance_generator.identifier.generate_pid("PID_SCIMETA_"),
"eml://ecoinformatics.org/eml-2.1.1",
self.test_files.load_xml_to_str("scimeta_eml_invalid_2.xml"),
)
|
dart-lang/sdk | runtime/tools/bin_to_assembly.py | Python | bsd-3-clause | 5,566 | 0.000719 | #!/usr/bin/env python3
#
# Copyright (c) 2017, the Dart project authors. Please see the AUTHORS file
# for details. All rights reserved. Use of this source code is governed by a
# BSD-style license that can be found in the LICENSE file.
# Generates an assembly source file the defines a symbol with the bytes from
# a given file.
import os
import sys
from optparse import OptionParser
def Main():
parser = OptionParser()
parser.add_option(
"--output",
action="store",
type="string",
help="output assembly file name")
parser.add_option(
"--input", action="store", type="string", help="input binary blob file")
parser.add_option("--symbol_name", action="store", type="s | tring")
parser.add_option("--executable", action="store_true", default=False)
parser.add_option("--target_os", action="store", type="string | ")
parser.add_option("--size_symbol_name", action="store", type="string")
parser.add_option("--target_arch", action="store", type="string")
parser.add_option("--incbin", action="store_true", default=False)
(options, args) = parser.parse_args()
if not options.output:
sys.stderr.write("--output not specified\n")
parser.print_help()
return -1
if not options.input:
sys.stderr.write("--input not specified\n")
parser.print_help()
return -1
if not os.path.isfile(options.input):
sys.stderr.write("input file does not exist: %s\n" % options.input)
parser.print_help()
return -1
if not options.symbol_name:
sys.stderr.write("--symbol_name not specified\n")
parser.print_help()
return -1
if not options.target_os:
sys.stderr.write("--target_os not specified\n")
parser.print_help()
return -1
with open(options.output, "w") as output_file:
if options.target_os in ["mac", "ios"]:
if options.executable:
output_file.write(".text\n")
else:
output_file.write(".const\n")
output_file.write(".global _%s\n" % options.symbol_name)
output_file.write(".balign 32\n")
output_file.write("_%s:\n" % options.symbol_name)
elif options.target_os in ["win"]:
output_file.write("ifndef _ML64_X64\n")
output_file.write(".model flat, C\n")
output_file.write("endif\n")
if options.executable:
output_file.write(".code\n")
else:
output_file.write(".const\n")
output_file.write("public %s\n" % options.symbol_name)
output_file.write("%s label byte\n" % options.symbol_name)
else:
if options.executable:
output_file.write(".text\n")
output_file.write(".type %s STT_FUNC\n" % options.symbol_name)
else:
output_file.write(".section .rodata\n")
output_file.write(".type %s STT_OBJECT\n" % options.symbol_name)
output_file.write(".global %s\n" % options.symbol_name)
output_file.write(".balign 32\n")
output_file.write("%s:\n" % options.symbol_name)
size = 0
with open(options.input, "rb") as input_file:
if options.target_os in ["win"]:
for byte in input_file.read():
output_file.write("byte %d\n" % (byte if isinstance(byte, int) else ord(byte)))
size += 1
else:
incbin = options.incbin
for byte in input_file.read():
size += 1
if not incbin:
output_file.write(
".byte %d\n" %
(byte if isinstance(byte, int) else ord(byte)))
if incbin:
output_file.write(".incbin \"%s\"\n" % options.input)
if options.target_os not in ["mac", "ios", "win"]:
output_file.write(".size {0}, .-{0}\n".format(options.symbol_name))
if options.size_symbol_name:
if not options.target_arch:
sys.stderr.write("--target_arch not specified\n")
parser.print_help()
return -1
is64bit = 0
if options.target_arch:
if options.target_arch in ["arm64", "x64"]:
is64bit = 1
if options.target_os in ["win"]:
output_file.write("public %s\n" % options.size_symbol_name)
output_file.write("%s label byte\n" % options.size_symbol_name)
if (is64bit == 1):
output_file.write("qword %d\n" % size)
else:
output_file.write("dword %d\n" % size)
else:
if options.target_os in ["mac", "ios"]:
output_file.write(
".global _%s\n" % options.size_symbol_name)
output_file.write("_%s:\n" % options.size_symbol_name)
else:
output_file.write(".global %s\n" % options.size_symbol_name)
output_file.write("%s:\n" % options.size_symbol_name)
if (is64bit == 1):
output_file.write(".quad %d\n" % size)
else:
output_file.write(".long %d\n" % size)
if options.target_os in ["win"]:
output_file.write("end\n")
return 0
if __name__ == "__main__":
sys.exit(Main())
|
Geotexan/calculinn | prototype/muro_geotexan.py | Python | gpl-3.0 | 10,868 | 0 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Prototipo de cálculo y recomendación para la aplicación de muros.
"""
from __future__ import print_function
import sys
import argparse
import csv
import re
def main():
"""
Abre los ficheros fuente de cálculo de parámetros y de recomendación de
producto para la aplicación en muros.
Lee los seis parámetros de entrada desde la línea de comandos y recorre
todas las filas de los ficheros fuente determinando si algún valor
cumple con las entradas. En caso contrario, muestra error.
"""
# NOTE: Solo acepta enteros como entrada para los valores. De momento nada
# de cadenas (en carreteras se usa) ni floats (algunos valores son float
# en realidad, incluso aquí en muros).
parser = argparse.ArgumentParser(description="Prototipo cálculo muros.")
parser.add_argument("-e", "--espesor", "--tongada", dest="e",
help="Espesor tongada de relleno (cm) t",
default=None, type=int)
parser.add_argument("-a", "--angulo", "--rozamiento", dest="a",
help="Ángulo de rozamiento de relleno (grados) φ",
default=None, type=int)
parser.add_argument("-l", "--altura", dest="h",
help="Altura de muro (m) H",
default=None, type=int)
parser.add_argument("-i", "--inclinacion", dest="i",
help="Inclinación de muro (grados) α",
default=None, type=int)
parser.add_argument("-d", "--densidad", dest="d",
help="Densidad relleno (t/m³) Υ",
default=None, type=int)
parser.add_argument("-s", "--sobrecarga", dest="s",
help="Sobrecarga (kN/m²) P",
default=None, type=int)
if len(sys.argv) < 7:
parser.print_help()
sys.exit(1)
else:
args = parser.parse_args()
calculo, header_calculo = parse_calculo()
catalogo, header_recomendacion = parse_recomendacion()
out_calculo = calcular(args.e, args.a, args.h, args.i, args.d, args.s,
calculo)
out_recomendacion = recomendar(args.e, args.a, args.h, args.i, args.d,
args.s, catalogo)
entradas = (args.e, args.a, args.h, args.i, args.d, args.s)
if out_calculo:
dump(entradas, out_calculo, header_calculo,
nombre_calculo="Cálculo Geotexan")
else:
print("Combinación de parámetros incorrecta para cálculo.",
file=sys.stderr)
sys.exit(2)
if out_recomendacion:
dump(entradas, out_recomendacion, header_recomendacion,
nombre_calculo="Geocompuesto recomendado")
else:
print("Combinación de parámetros incorrecta para recomendación.",
file=sys.stderr)
sys.exit(3)
sys.exit(0)
def parse_calculo(nomfichero="400 muro_Geotexan (muro-contencion.html).csv"):
"""
Abre el fichero de la tabla de cálculo y lee todas las líneas para
devolver una lista de tuplas de la siguiente forma:
[
("50", "[25..35)", "[0..5)", "[50..60)", ...),
("50", "[25..35)", ...),
...
]
Cada valor de esas listas contiene un número o un rango (como cadena) para
comparar con los valores de entrada recibidos (e, a, h, i, d, s), que
están en el mismo orden en el fichero de entrada. El resto de valores (del
séptimo en adelante) se consideran valores de salida.
También devuelve una tupla con los nombres de los parámetros en el mismo
orden.
"""
res = []
cabecera = ()
try:
file_in = open(nomfichero)
except IOError:
print("El fichero {} no existe.".format(nomfichero))
sys.exit(4)
reader = csv.reader(file_in)
for fila in reader:
if es_cabecera(fila):
# La segunda cabecera, la de verdad, machacará a la de (In, Out).
cabecera = fila
continue
res.append(tuple(fila))
file_in.close()
return res, cabecera
def es_cabecera(lista):
"""
Devuelve por heurística si es una lista correspondiente a la cabecera de
la hoja de cálculo (True).
"""
# Podría hacerse simplemente mirando si es la fila 0 o la 1, pero no me
# fío de que no cambien las tablas y soy así de rebuscado.
# NOTE: Si alguna cabecera de columna tiene un número entre el texto,
# **no** se considerará cabecera.
res = False # res es True si hay algún valor numérico, = NO cabecera.
for valor in lista:
hay_al_menos_un_valor_numerico = False
for caracter in valor:
if caracter.isdigit():
hay_al_menos_un_valor_numerico = (
hay_al_menos_un_valor_numerico or True)
res = res or hay_al_menos_un_valor_numerico
if res: # OPTIMIZACIÓN: Si he encontrado uno, me salgo.
break
return not res
def parse_recomendacion(nomfi="401 muro_geomalla (muro-contencion.html).csv"):
"""
"Parsea" y devuelve una lista de tuplas similar a la de parse_calculo.
"""
return parse_calculo(nomfi)
# pylint: disable=invalid-name, too-many-arguments, too-many-locals
def calcular(e, a, h, i, d, s, tabla):
"""
Compara la combinación de los seis valores de entrada (e, a, h, i, d, s)
con cada una de las listas del array bidimensional «tabla». Devuelve la
primer lista con la que coincida o None si no hay coincidencia.
Las listas pueden contener números enteros, un valor de cadena o un
rango de valores como string (por ejemplo: "[25..35)"). En este último
caso se parsea la cadena y se obtiene el rango correspondiente con el que
se compara el valor.
El orden de los parámetros recibidos y el de la lista con los rangos a
comparar **debe ser el mismo**.
El cálculo se hace por fuerza bruta y es O(n). Podría hacerse O(log_n) si
se implementara como árbol de decisión, PERO NO HAY TIEMPO Y ESTO ES SÓLO
UN PROTOTIPO que a | cabará en JavaScript.
"""
res = None
for fila in tabla:
aciertos = []
espesor, angulo, altura, inclinacion, densidad, sobrecarga = fila[:6]
for valor, referencia in ((e, espesor),
(a, angulo),
(h, altura),
(i, inclinacion) | ,
(d, densidad),
(s, sobrecarga)):
if referencia.isdigit(): # OJO: Solo vale para enteros.
referencia = int(referencia)
resultado = comparar(valor, referencia)
aciertos.append(resultado)
if check_aciertos(aciertos):
res = fila
break
return res
def comparar(valor, referencia):
    """Compare *valor* against an interpreted *referencia*.

    *referencia* may be:
    - a string containing ".." -> treated as a range spec like "[25..35)",
      parsed by ``parse_rango``, and membership is tested;
    - any other string -> plain string equality (e.g. road categories);
    - a number -> plain equality.
    """
    if isinstance(referencia, str) and ".." in referencia:
        # Range spec: test membership in the parsed interval.
        # NOTE: in JavaScript this detection will likely need another form.
        return valor in parse_rango(referencia)
    # Plain value (string or number): straight equality.
    return valor == referencia
def parse_rango(str_rango):
"""
Devuelve un rango numérico correspondiente al rango recibido como cadena.
La cadena siempre tiene la forma "[(x..y)]" donde el primer y el último
carácter indican si es un intervalo abierto «()» o cerrado «[]» en cada
extremo.
`x` es el extermo inferior.
`y` es el extremo superior (puede ser infinito: ∞)
""" |
timkrentz/SunTracker | IMU/VTK-6.2.0/Common/DataModel/Testing/Python/TestTemplates.py | Python | mit | 4,490 | 0.000445 | #!/usr/bin/env python
"""Test template support in VTK-Python
VTK-python decides which template specializations
to wrap according to which ones are used in typedefs
and which ones appear as superclasses of other classes.
In addition, the wrappers are hard-coded to wrap the
vtkDenseArray and vtkSparseArray classes over a broad
range of types.
Created on May 29, 2011 by David Gobbi
"""
import sys
import exceptions
import vtk
from vtk.test import Testing
# Template parameters exercised by the tests below: a mix of textual type
# names, native Python types and vtkVariant.  Each one selects a template
# specialization of the vtkDenseArray / vtkSparseArray wrappers.
arrayTypes = ['char', 'int8', 'uint8', 'int16', 'uint16',
              'int32', 'uint32', int, 'uint', 'int64', 'uint64',
              'float32', float, str, 'unicode', vtk.vtkVariant]
# Single-character struct-style type codes accepted as an alternative way
# of naming the same template specializations.
arrayCodes = ['c', 'b', 'B', 'h', 'H',
              'i', 'I', 'l', 'L', 'q', 'Q',
              'f', 'd']
class TestTemplates(Testing.vtkTest):

    def _check_roundtrip(self, template, t, bool_value):
        """Store one representative value of type *t* in *template*[t] and
        check that it reads back equal.  *bool_value* is the value used for
        boolean specializations (dense and sparse tests differ here)."""
        arr = template[t]()
        arr.Resize(1)
        coord = vtk.vtkArrayCoordinates(0)
        if t in ['bool', '?']:
            value = bool_value
        elif t in ['float32', 'float64', 'float', 'f', 'd']:
            value = 3.125
        elif t in ['char', 'c']:
            value = 'c'
        elif t in [str, 'str', 'unicode']:
            value = unicode("hello")
        elif t in ['vtkVariant', vtk.vtkVariant]:
            value = vtk.vtkVariant("world")
        else:
            value = 12
        arr.SetValue(coord, value)
        result = arr.GetValue(coord)
        self.assertEqual(value, result)

    def testDenseArray(self):
        """Test vtkDenseArray template"""
        for t in (arrayTypes + arrayCodes):
            self._check_roundtrip(vtk.vtkDenseArray, t, 1)

    def testSparseArray(self):
        """Test vtkSparseArray template"""
        for t in (arrayTypes + arrayCodes):
            self._check_roundtrip(vtk.vtkSparseArray, t, 0)

    def testArray(self):
        """Test array CreateArray"""
        created = vtk.vtkArray.CreateArray(vtk.vtkArray.DENSE, vtk.VTK_DOUBLE)
        self.assertEqual(created.__class__, vtk.vtkDenseArray[float])

    def testVector(self):
        """Test vector templates"""
        # make sure Rect inherits operators
        rect = vtk.vtkRectf(0, 0, 2, 2)
        self.assertEqual(rect[2], 2.0)
        color = vtk.vtkColor4ub(0, 0, 0)
        self.assertEqual(list(color), [0, 0, 0, 255])
        vec = vtk.vtkVector['float32', 3]([0.0, 1.0, 2.0])
        self.assertEqual(list(vec), [0.0, 1.0, 2.0])
        ivec = vtk.vtkVector3['i'](0)
        self.assertEqual(list(ivec), [0, 0, 0])
if __name__ == "__main__":
Testing.main([(TestTemplates, 'test')])
|
indrajitr/ansible | lib/ansible/modules/fetch.py | Python | gpl-3.0 | 3,790 | 0.00343 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2017, Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
# This is a virtual module that is entirely implemented as an action plugin and runs on the controller
from __future__ import absolute_import, division, print_function
__metaclass__ = type
DOCUMENTATION = r'''
---
module: fetch
short_description: Fetch files from remote nodes
description:
- This module works like M(copy), but in reverse.
- It is used for fetching files from remote machines and storing them locally in a file tree, organized by hostname.
- Files that already exist at I(dest) will be overwritten if they are different than the I(src).
- This module is also supported for Windows targets.
version_added: '0.2'
options:
src:
description:
- The file on the remote system to fetch.
- This I(must) be a file, not a directory.
- Recursive fetching may be supported in a later release.
required: yes
dest:
description:
- A directory to save the file into.
- For example, if the I(dest) directory is C(/backup) a I(src) file named C(/etc/profile) on host
C(host.example.com), would be saved into C(/backup/host.example.com/etc/profile).
The host name is based on the inventory name.
required: yes
fail_on_missing:
version_added: '1.1'
description:
- When set to C(yes), the task will fail if the remote file cannot be read for any reason.
- Prior to Ansible 2.5, setting this would only fail if the source file was missing.
- The default was changed to C(yes) in Ansible 2.5.
type: bool
default: yes
validate_checksum:
version_added: '1.4'
description:
- Verify that the source and destination checksums match after the files are fetched.
type: bool
default: yes
flat:
version_added: '1.2'
description:
- Allows you to override the default behavior of appending hostname/path/to/file to the destination.
- If C(dest) ends with '/', it will use the basename of the source file, similar to the copy module.
- This can be useful if working with a single host, or if retrieving files that are uniquely named per host.
- If using multiple hosts with the same filename, the file will be overwritten for each host.
type: bool
default: no
notes:
- When running fetch with C(become), the M(slurp) module will also be
used to fetch the contents of the file for determining the remote
checksum. This effectively doubles the transfer size, and
depending on the file size can consume all available memory on the
remote or local hosts causing a C(MemoryError). Due to this it is
advisable to run this module without C(become) whenever possible.
- Prior to Ansible 2.5 this module would not fail if reading the remote
file was impossible unless C(fail_on_missing) was set.
- In Ansible 2.5 or later, playbook authors are encouraged to use
C(fail_when) or C(ignore_errors) to get this ability. They may
also explicitly set C(fail_on_missing) to C(no) to get the
non-failing behaviour.
- This module is also supported for Windows targets.
seealso:
- module: copy
- module: slurp
author:
- Ansible Core Team
- Michael DeHaan
'''
EXAMPLES = r'''
- name: Store file into /tmp/fetched/host.example.com/tmp/somefile
fetch:
src: /tmp/somefile
dest: /tmp/fetched
- name: Specifying a path directly
fetch:
src: /tmp/somefile
dest: /tmp/prefix-{{ inventory_hostname }}
flat: yes
- name: Specifying a destination path
fetch:
src: /tmp/uniquefile
dest: /tmp/special/
flat: yes
- name: Storing in a path relative to the playbook
fetch:
src: /tmp/uniquefile
dest: special/prefix-{{ inventory_hostname }}
flat: yes
'''
|
shubhamdhama/zulip | tools/lib/test_script.py | Python | apache-2.0 | 3,557 | 0.00253 | import glob
import os
import subprocess
import sys
from distutils.version import LooseVersion
from typing import Iterable, List, Optional, Tuple
from scripts.lib.zulip_tools import get_dev_uuid_var_path
from version import PROVISION_VERSION
ZULIP_PATH = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
def get_major_version(v: str) -> int:
    """Return the integer major component of a dotted version string."""
    major, _, _ = v.partition('.')
    return int(major)
def get_version_file() -> str:
    """Absolute path of the file recording the last-provisioned version."""
    return os.path.join(get_dev_uuid_var_path(), 'provision_version')
# Explanation prepended to every version-mismatch message (two %s slots:
# the recorded version and the expected PROVISION_VERSION).
PREAMBLE = '''
Before we run tests, we make sure your provisioning version
is correct by looking at var/provision_version, which is at
version %s, and we compare it to the version in source
control (version.py), which is %s.
'''

def preamble(version: str) -> str:
    text = PREAMBLE % (version, PROVISION_VERSION)
    text += '\n'
    return text

# Shown when the checked-out branch expects OLDER dependencies than the
# ones that were provisioned last.
NEED_TO_DOWNGRADE = '''
It looks like you checked out a branch that expects an older
version of dependencies than the version you provisioned last.
This may be ok, but it's likely that you either want to rebase
your branch on top of upstream/master or re-provision your VM.

Do this: `./tools/provision`
'''

# Shown when the checked-out branch expects NEWER dependencies than the
# ones that were provisioned last.
NEED_TO_UPGRADE = '''
It looks like you checked out a branch that has added
dependencies beyond what you last provisioned.  Your command
is likely to fail until you add dependencies by provisioning.

Do this: `./tools/provision`
'''
def get_provisioning_status() -> Tuple[bool, Optional[str]]:
    """Check the recorded provision version against version.py.

    Returns ``(True, None)`` when it is safe to proceed, otherwise
    ``(False, message)`` where *message* explains how to fix the mismatch.
    """
    version_file = get_version_file()
    if not os.path.exists(version_file):
        # No record of a previous provision: assume the developer manages
        # dependencies by hand and skip all safety checks.
        return True, None

    with open(version_file) as f:
        recorded = f.read().strip()

    if recorded == PROVISION_VERSION:
        # Normal path for people that provision -- we're all good!
        return True, None

    if LooseVersion(recorded) <= LooseVersion(PROVISION_VERSION):
        # The branch expects newer dependencies than were provisioned.
        return False, preamble(recorded) + NEED_TO_UPGRADE

    # Over-provisioned relative to the branch: acceptable as long as the
    # major version hasn't changed.
    if get_major_version(recorded) == get_major_version(PROVISION_VERSION):
        return True, None
    return False, preamble(recorded) + NEED_TO_DOWNGRADE
def assert_provisioning_status_ok(force: bool) -> None:
    """Exit with status 1 unless provisioning is current (or *force* is set)."""
    if force:
        return
    ok, msg = get_provisioning_status()
    if ok:
        return
    print(msg)
    print('If you really know what you are doing, use --force to run anyway.')
    sys.exit(1)
def find_js_test_files(test_dir: str, files: Iterable[str]) -> List[str]:
    """Resolve user-given names to absolute test-file paths.

    Each entry in *files* may be a bare prefix of a file in *test_dir*, a
    path relative to the current directory, or a name inside *test_dir*.
    With no entries at all, every ``*.js`` file in *test_dir* is returned
    in sorted order.
    """
    resolved = []
    for name in files:
        # Expand a bare prefix ("foo") to the first matching entry of
        # test_dir ("foo.js").  NOTE(review): os.listdir order is
        # arbitrary, so an ambiguous prefix picks an unspecified match --
        # same behaviour as before.
        for candidate in os.listdir(test_dir):
            if candidate.startswith(name):
                name = candidate
                break
        if not os.path.exists(name):
            name = os.path.join(test_dir, name)
        resolved.append(os.path.abspath(name))
    if not resolved:
        resolved = sorted(glob.glob(os.path.join(test_dir, '*.js')))
    return resolved
def prepare_puppeteer_run() -> None:
    """Ready the repo for a puppeteer run: browser installed, stale shots gone."""
    os.chdir(ZULIP_PATH)
    # Make sure the Chromium build pinned by node_modules/puppeteer exists.
    subprocess.check_call(['node', 'node_modules/puppeteer/install.js'])
    os.makedirs('var/puppeteer', exist_ok=True)
    # Drop failure screenshots left over from previous runs.
    for stale in glob.glob('var/puppeteer/puppeteer-failure*.png'):
        os.remove(stale)
|
ionelmc/python-nameless | ci/appveyor-download.py | Python | bsd-2-clause | 3,820 | 0.002094 | #!/usr/bin/env python
"""
Use the AppVeyor API to download Windows artifacts.
Taken from: https://bitbucket.org/ned/coveragepy/src/tip/ci/download_appveyor.py
# Licensed under the Apache Lic | ense: http://www.apache.org/licenses/LICENSE-2.0
# For details: https://bitbucket.org/ned/coveragepy/src/default/NOTICE.txt
"""
from __future__ import unicode_literals
import argparse
import os
import zipfile
import requests
def make_auth_headers():
    """Build the Authorization header dict for the AppVeyor API.

    The bearer token is read from ``~/.appveyor.token``; a missing file is
    reported with instructions on how to obtain a token.
    """
    token_path = os.path.expanduser("~/.appveyor.token")
    if not os.path.exists(token_path):
        raise RuntimeError(
            "Please create a file named `.appveyor.token` in your home directory. "
            "You can get the token from https://ci.appveyor.com/api-token"
        )
    with open(token_path) as fobj:
        token = fobj.read().strip()
    return {'Authorization': 'Bearer {}'.format(token)}
def download_latest_artifacts(account_project, build_id):
    """Download all the artifacts from the latest build."""
    if build_id is None:
        url = "https://ci.appveyor.com/api/projects/{}".format(account_project)
    else:
        url = "https://ci.appveyor.com/api/projects/{}/build/{}".format(account_project, build_id)
    build = requests.get(url, headers=make_auth_headers()).json()
    jobs = build['build']['jobs']
    print(u"Build {0[build][version]}, {1} jobs: {0[build][message]}".format(build, len(jobs)))

    for job in jobs:
        _download_job_artifacts(job)


def _download_job_artifacts(job):
    """Fetch every artifact of one AppVeyor job, unpacking zip files in place."""
    name = job['name']
    print(u"  {0}: {1[status]}, {1[artifactsCount]} artifacts".format(name, job))

    url = "https://ci.appveyor.com/api/buildjobs/{}/artifacts".format(job['jobId'])
    response = requests.get(url, headers=make_auth_headers())
    artifacts = response.json()

    for artifact in artifacts:
        is_zip = artifact['type'] == "Zip"
        filename = artifact['fileName']
        print(u"  {0}, {1} bytes".format(filename, artifact['size']))

        url = "https://ci.appveyor.com/api/buildjobs/{}/artifacts/{}".format(job['jobId'], filename)
        download_url(url, filename, make_auth_headers())

        if is_zip:
            # Zips are expanded in place and the archive itself discarded.
            unpack_zipfile(filename)
            os.remove(filename)
def ensure_dirs(filename):
    """Create the parent directories of `filename` if they are missing."""
    parent = os.path.dirname(filename)
    # A bare filename has no directory component; nothing to create then.
    if parent and not os.path.exists(parent):
        os.makedirs(parent)
def download_url(url, filename, headers):
    """Stream `url` into `filename`, creating parent directories as needed."""
    ensure_dirs(filename)
    response = requests.get(url, headers=headers, stream=True)
    if response.status_code != 200:
        print(u"  Error downloading {}: {}".format(url, response))
        return
    with open(filename, 'wb') as out:
        # 16 KiB chunks keep memory flat for large artifacts.
        for chunk in response.iter_content(16 * 1024):
            out.write(chunk)
def unpack_zipfile(filename):
    """Extract every member of `filename` into the current directory,
    keeping the paths stored in the archive."""
    with open(filename, 'rb') as fzip:
        archive = zipfile.ZipFile(fzip)
        for member in archive.namelist():
            print(u"  extracting {}".format(member))
            ensure_dirs(member)
            archive.extract(member)
# Command-line interface: a project id (with a default) and an optional
# build id; omitting the build downloads the artifacts of the latest one.
parser = argparse.ArgumentParser(description='Download artifacts from AppVeyor.')
parser.add_argument('--id',
                    metavar='PROJECT_ID',
                    default='ionelmc/python-nameless',
                    help='Project ID in AppVeyor.')
parser.add_argument('build',
                    nargs='?',
                    metavar='BUILD_ID',
                    help='Build ID in AppVeyor. Eg: master-123')

if __name__ == "__main__":
    args = parser.parse_args()
    download_latest_artifacts(args.id, args.build)
|
vipins/ccccms | env/Lib/site-packages/cms/models/__init__.py | Python | bsd-3-clause | 3,419 | 0.006727 | # -*- coding: utf-8 -*-
from django.conf import settings as d_settings
from django.core.exceptions import ImproperlyConfigured
from django.core.urlresolvers import get_resolver, get_script_prefix, \
NoReverseMatch
from django.utils.encoding import iri_to_uri
from moderatormodels import *
from pagemodel import *
from permissionmodels import *
from placeholdermodel import *
from pluginmodel import *
from titlemodels import *
import django.core.urlresolvers
# must be last
from cms import signals as s_import
def validate_settings():
    """Fail fast with ImproperlyConfigured when required Django settings are missing."""
    if "django.core.context_processors.request" not in d_settings.TEMPLATE_CONTEXT_PROCESSORS:
        raise ImproperlyConfigured('django-cms needs django.core.context_processors.request in settings.TEMPLATE_CONTEXT_PROCESSORS to work correctly.')
    if 'mptt' not in d_settings.INSTALLED_APPS:
        raise ImproperlyConfigured('django-cms needs django-mptt installed.')
    # The multilingual middleware supersedes Django's LocaleMiddleware;
    # having both active breaks URL resolution.
    middleware = d_settings.MIDDLEWARE_CLASSES
    if ('cms.middleware.multilingual.MultilingualURLMiddleware' in middleware and
            'django.middleware.locale.LocaleMiddleware' in middleware):
        raise ImproperlyConfigured('django-cms MultilingualURLMiddleware replaces django.middleware.locale.LocaleMiddleware! Please remove django.middleware.locale.LocaleMiddleware from your MIDDLEWARE_CLASSES settings.')
def validate_dependencies():
    """Ensure the installed reversion release is recent enough for django-cms."""
    # Only relevant when the optional reversion integration is enabled.
    if 'reversion' not in d_settings.INSTALLED_APPS:
        return
    from reversion.admin import VersionAdmin
    # get_urls only exists in sufficiently new reversion releases.
    if not hasattr(VersionAdmin, 'get_urls'):
        raise ImproperlyConfigured('django-cms requires never version of reversion (VersionAdmin must contain get_urls method)')
def remove_current_root(url):
    """Strip the active language prefix ("/xx/") from *url*, keeping one slash."""
    current_root = "/%s/" % get_language()
    if url.startswith(current_root):
        # Drop the prefix but keep its trailing slash so the path stays rooted.
        return url[len(current_root) - 1:]
    return url
def monkeypatch_reverse():
if hasattr(django.core.urlresolvers.reverse, 'cms_monkeypatched'):
return
django.core.urlresolvers.old_reverse = django.core.urlresolvers.reverse
def new_reverse(viewname, urlconf=None, args=None, kwargs=None, prefix=None, current_app=None):
| url = ''
i18n = 'cms.middleware.multilingual.MultilingualURLMiddleware' in settings.MIDDLEWA | RE_CLASSES
lang = None
if isinstance(viewname, basestring) and viewname.split(":")[0] in dict(settings.LANGUAGES).keys():
lang = viewname.split(":")[0]
try:
url = django.core.urlresolvers.old_reverse(viewname, urlconf=urlconf, args=args, kwargs=kwargs, prefix=prefix, current_app=current_app)
if lang:
url = "/%s%s" % (lang, url)
except NoReverseMatch, e:
if i18n:
if not lang:
try:
lang = get_language()
ml_viewname = "%s:%s" % ( lang, viewname)
url = django.core.urlresolvers.old_reverse(ml_viewname, urlconf=urlconf, args=args, kwargs=kwargs, prefix=prefix, current_app=current_app)
return url
except NoReverseMatch:
pass
raise e
url = remove_current_root(url)
return url
new_reverse.cms_monkeypatched = True
django.core.urlresolvers.reverse = new_reverse
# Run the sanity checks and apply the reverse() patch at import time.
validate_dependencies()
validate_settings()
monkeypatch_reverse()
|
RackSec/ansible | lib/ansible/modules/network/avi/avi_sslkeyandcertificate.py | Python | gpl-3.0 | 5,751 | 0.001217 | #!/usr/bin/python
#
# Created on Aug 25, 2016
# @author: Gaurav Rastogi (grastogi@avinetworks.com)
# Eric Anderson (eanderson@avinetworks.com)
# module_check: supported
# Avi Version: 17.1.1
#
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: avi_sslkeyandcertificate
author: Gaurav Rastogi (grastogi@avinetworks.com)
short_description: Module for setup of SSLKeyAndCertificate Avi RESTful Object
description:
- This module is used to configure SSLKeyAndCertificate object
- more examples at U(https://github.com/avinetworks/devops)
requirements: [ avisdk ]
version_added: "2.3"
options:
state:
description:
- The state that should be applied on the entity.
default: present
choices: ["absent","present"]
ca_certs:
description:
- Ca certificates in certificate chain.
certificate:
description:
- Sslcertificate settings for sslkeyandcertificate.
required: true
certificate_management_profile_ref:
description:
- It is a reference to an object of type certificatemanagementprofile.
created_by:
description:
- Creator name.
dynamic_params:
description:
- Dynamic parameters needed for certificate management profile.
enckey_base64:
description:
- Encrypted private key corresponding to the private key (e.g.
- Those generated by an hsm such as thales nshield).
enckey_name:
description:
- Name of the encrypted private key (e.g.
- Those generated by an hsm such as thales nshield).
hardwaresecuritymodulegroup_ref:
description:
- It is a reference to an object of type hardwaresecuritymodulegroup.
key:
description:
        - Private key.
key_params:
description:
- Sslkeyparams settings for sslkeyandcertificate.
name:
description:
- Name of the object.
required: true
status:
description:
        - Enum options - ssl_certificate_finished, ssl_certificate_pending.
- Default value when not specified in API or module is interpreted by Avi Controller as SSL_CERTIFICATE_FINISHED.
tenant_ref:
description:
- It is a reference to an object of type tenant.
type:
description:
- Enum options - ssl_certificate_type_virtualservice, ssl_certificate_type_system, ssl_certificate_type_ca.
- Default value when not specified in API or module is interpreted by Avi Controller as SSL_CERTIFICATE_TYPE_VIRTUALSERVICE.
url:
description:
- Avi controller URL of the object.
uuid:
description:
- Unique object identifier of the object.
extends_documentation_fragment:
- avi
'''
EXAMPLES = '''
- name: Create a SSL Key and Certificate
avi_sslkeyandcertificate:
controller: 10.10.27.90
username: admin
password: AviNetworks123!
key: |
-----BEGIN PRIVATE KEY-----
....
-----END PRIVATE KEY-----
certificate:
self_signed: true
certificate: |
-----BEGIN CERTIFICATE-----
....
-----END CERTIFICATE-----
type: SSL_CERTIFICATE_TYPE_VIRTUALSERVICE
name: MyTestCert
'''
RETURN = '''
obj:
description: SSLKeyAndCertificate (api/sslkeyandcertificate) object
returned: success, changed
type: dict
'''
from ansible.module_utils.basic import AnsibleModule
try:
from ansible.module_utils.avi import (
avi_common_argument_spec, HAS_AVI, avi_ansible_api)
except ImportError:
HAS_AVI = False
def main():
    """Ansible entry point: declare the module arguments and run the API call."""
    # Argument spec mirrors the DOCUMENTATION block above; all Avi object
    # fields are optional except `name` and `certificate`.
    argument_specs = dict(
        state=dict(default='present',
                   choices=['absent', 'present']),
        ca_certs=dict(type='list',),
        certificate=dict(type='dict', required=True),
        certificate_management_profile_ref=dict(type='str',),
        created_by=dict(type='str',),
        dynamic_params=dict(type='list',),
        enckey_base64=dict(type='str',),
        enckey_name=dict(type='str',),
        hardwaresecuritymodulegroup_ref=dict(type='str',),
        # no_log keeps the private key out of Ansible logs and output.
        key=dict(type='str', no_log=True,),
        key_params=dict(type='dict',),
        name=dict(type='str', required=True),
        status=dict(type='str',),
        tenant_ref=dict(type='str',),
        type=dict(type='str',),
        url=dict(type='str',),
        uuid=dict(type='str',),
    )
    # Add the common connection options (controller, username, password, ...).
    argument_specs.update(avi_common_argument_spec())
    module = AnsibleModule(
        argument_spec=argument_specs, supports_check_mode=True)
    if not HAS_AVI:
        return module.fail_json(msg=(
            'Avi python API SDK (avisdk>=17.1) is not installed. '
            'For more details visit https://github.com/avinetworks/sdk.'))
    # The set names fields to scrub from the data returned to the user.
    return avi_ansible_api(module, 'sslkeyandcertificate',
                           set(['key']))


if __name__ == '__main__':
    main()
|
caleb531/cache-simulator | cachesimulator/word_addr.py | Python | mit | 316 | 0 | #!/usr/bin/env python3
class WordAddress(int):
    """A memory word address; behaves like an int with cache helpers."""

    def get_consecutive_words(self, num_words_per_block):
        """Return every word address in this address's block, itself included.

        The block is the aligned run of `num_words_per_block` consecutive
        word addresses containing this address.
        """
        block_start = self - (self % num_words_per_block)
        return list(range(block_start, block_start + num_words_per_block))
|
MichaelYusko/PyGiphy | setup.py | Python | mit | 871 | 0 | from distutils.core import setup
import pygiphy

# Version and author live in the package itself so they are defined once.
VERSION = pygiphy.__version__
AUTHOR = pygiphy.__author__

setup_kwargs = {
    'name': 'pygiphy',
    'version': VERSION,
    'url': 'https://github.com/MichaelYusko/PyGiphy',
    'license': 'MIT',
    'author': AUTHOR,
    'author_email': 'freshjelly12@yahoo.com',
    'description': 'Python interface for the Giphy API',
    'packages': ['pygiphy'],
    'classifiers': [
        'Development Status :: 2 - Pre-Alpha',
        'Environment :: Web Environment',
        'Programming Language :: Python :: 3.5',
        'Programming Language :: Python :: 3.6',
        'License :: OSI Approved :: MIT License'
    ],
}

setup_kwargs['install_requires'] = ['requests>=2.13.0']

setup(**setup_kwargs)

print(u"\n\n\t\t "
      "PyGiphy version {} installation succeeded.\n".format(VERSION))
|
openpolis/op-accesso | project/accesso/users/views.py | Python | bsd-3-clause | 819 | 0.001221 | from django.contrib.auth import get_user_model
from django.views.generic import DetailView
from rest_framework import viewsets
from rest_framework import permissions
from rest_framework.decorators import detail_route, list_route
from rest_framework.response import Response
from .serializers import UserSerializer
class UserProfileView(DetailView):
    """Detail page showing the profile of the currently logged-in user."""

    model = get_user_model()

    def get_object(self, queryset=None):
        # Always display the requesting user, regardless of URL arguments.
        return self.request.user
# ViewSets define the view behavior.
class UserViewSet(viewsets.ModelViewSet):
    """CRUD API over users; every endpoint requires authentication."""

    permission_classes = [permissions.IsAuthenticated, ]
    model = get_user_model()
    serializer_class = UserSerializer

    @list_route(methods=['get', ])
    def me(self, request):
        """Return the serialized profile of the requesting user."""
        data = self.get_serializer(request.user).data
        return Response(data)
oasisvali/pythonchallenge | ch3.py | Python | apache-2.0 | 390 | 0.020513 | import re
filename = "equality.html"
f = open(filename,'r')
dump = f.read()
results = re.findall(r'<!--(.*?)-->',dump,re.DOTALL)
print results[1]
chrlist = re.findall(r'[a-z][A-Z]{3}([a-z])[A-Z]{3}[a-z]',results[1])
chrdict = {} |
for charac in chrlist:
if charac in chrdict:
chrdict[charac] +=1
else:
| chrdict[charac] = 1
print ''.join(chrdict.keys()+['i','l'])
f.close()
|
jgliss/pyplis | scripts/ex03_plume_background.py | Python | gpl-3.0 | 11,609 | 0 | # -*- coding: utf-8 -*-
#
# Pyplis is a Python library for the analysis of UV SO2 camera data
# Copyright (C) 2017 Jonas Gliß (jonasgliss@gmail.com)
#
# This program is free software: you can redistribute it and/or
# modify it under the terms of the GNU General Public License a
# published by the Free Software Foundation, either version 3 of
# the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""Pyplis example script no. 3 - Plume background analysis.
This example script introduces features related to plume background modelling
and tau image calculations.
"""
from __future__ import (absolute_import, division)
from SETTINGS import check_version
import numpy as np
from os.path import join
import pyplis
from matplotlib.pyplot import show, subplots, close
# IMPORT GLOBAL SETTINGS
from SETTINGS import SAVEFIGS, SAVE_DIR, FORMAT, DPI, IMG_DIR, OPTPARSE
# Check script version
check_version()
# SCRIPT OPTIONS
# If this is True, then sky reference areas are set in auto mode (note that
# in this case, the tests at the end of the script will fail!)
USE_AUTO_SETTINGS = False
# intensity threshold to init mask for bg surface fit
POLYFIT_2D_MASK_THRESH = 2600
# Choose the background correction modes you want to use
BG_CORR_MODES = [0, # 2D poly surface fit (without sky radiance image)
1, # Scaling of sky radiance image
4, # Scaling + linear gradient correction in x & y direction
6] # Scaling + quadr. gradient correction in x & y direction
# Image file paths relevant for this script
PLUME_FILE = join(IMG_DIR, 'EC2_1106307_1R02_2015091607065477_F01_Etna.fts')
BG_FILE = join(IMG_DIR, 'EC2_1106307_1R02_2015091607022602_F01_Etna.fts')
OFFSET_FILE = join(IMG_DIR, 'EC2_1106307_1R02_2015091607064723_D0L_Etna.fts')
DARK_FILE = join(IMG_DIR, 'EC2_1106307_1R02_2015091607064865_D1L_Etna.fts')
# SCRIPT FUNCTION DEFINITIONS
def init_background_model():
    """Set up a PlumeBackgroundModel with hand-picked sky reference areas."""
    model = pyplis.plumebackground.PlumeBackgroundModel()

    # Gas-free rectangles [x0, y0, x1, y1] used to sample pure sky.
    rect_w = rect_h = 40
    model.scale_rect = [1280, 20, 1280 + rect_w, 20 + rect_h]
    model.xgrad_rect = [20, 20, 20 + rect_w, 20 + rect_h]
    model.ygrad_rect = [1280, 660, 1280 + rect_w, 660 + rect_h]

    # Horizontal profile line for sky-gradient correction: its row index
    # plus the start / stop columns over which the correction is applied.
    model.xgrad_line_rownum = 40
    model.xgrad_line_startcol = 20
    model.xgrad_line_stopcol = 1323

    # Vertical profile line: column index plus start / stop rows.
    model.ygrad_line_colnum = 1300
    model.ygrad_line_startrow = 10
    model.ygrad_line_stoprow = 700
    # Polynomial order of the fit along the vertical profile line.
    model.ygrad_line_polyorder = 2

    return model
def load_and_prepare_images():
    """Load example images and prepare them for the background analysis.

    Returns
    -------
    - Img, plume image
    - Img, plume image vignetting corrected
    - Img, sky radiance image
    """
    # Custom loader for the ECII camera FITS format.
    load = pyplis.custom_image_import.load_ecII_fits
    plume = pyplis.Img(PLUME_FILE, load)
    bg = pyplis.Img(BG_FILE, load)
    dark = pyplis.Img(DARK_FILE, load)
    offset = pyplis.Img(OFFSET_FILE, load)

    # Model a dark frame matching each image's exposure time and subtract it.
    plume.subtract_dark_image(
        pyplis.image.model_dark_image(plume.meta["texp"], dark, offset))
    bg.subtract_dark_image(
        pyplis.image.model_dark_image(bg.meta["texp"], dark, offset))

    # Light smoothing (sigma = 1) reduces shot noise before the fits.
    plume.add_gaussian_blurring(1)
    bg.add_gaussian_blurring(1)

    # Vignetting mask from the sky image (may still include x/y gradients).
    vign = bg.img / bg.img.max()
    plume_vigncorr = pyplis.Img(plume.img / vign)
    return plume, plume_vigncorr, bg
def autosettings_vs_manual_settings(bg_model):
    """Compare automatically found sky reference areas with the manual ones.

    Plots both configurations side by side and returns the automatically
    retrieved parameter dict together with the comparison figure.
    """
    auto_params = pyplis.plumebackground.find_sky_reference_areas(plume)
    manual_params = bg_model.settings_dict()

    fig, (ax_manual, ax_auto) = subplots(1, 2, figsize=(16, 6))
    ax_manual.set_title("Manually set parameters")
    pyplis.plumebackground.plot_sky_reference_areas(plume, manual_params,
                                                    ax=ax_manual)
    pyplis.plumebackground.plot_sky_reference_areas(plume, auto_params,
                                                    ax=ax_auto)
    ax_auto.set_title("Automatically set parameters")
    return auto_params, fig
def plot_pcs_profiles_4_tau_images(tau0, tau1, tau2, tau3, pcs_line):
    """Plot the PCS profile of each of the four tau images in one axes."""
    fig, ax = subplots(1, 1)
    # One curve per background-correction mode, labelled with the profile mean.
    for mode, img in zip(BG_CORR_MODES, (tau0, tau1, tau2, tau3)):
        profile = pcs_line.get_line_profile(img)
        ax.plot(profile, "-", label=r"Mode %d: $\phi=%.3f$"
                % (mode, np.mean(profile)))

    ax.grid()
    ax.set_ylabel(r"$\tau_{on}$", fontsize=20)
    ax.set_xlim([0, pcs_line.length()])
    ax.set_xticklabels([])
    ax.set_xlabel("PCS", fontsize=16)
    ax.legend(loc="best", fancybox=True, framealpha=0.5, fontsize=12)
    return fig
# SCRIPT MAIN FUNCTION
if __name__ == "__main__":
close("all")
# Create a background model with relevant sky reference areas
bg_model = init_background_model()
# Define exemplary plume cross section line
pcs_line = pyplis.LineOnImage(x0=530,
y0=730,
x1=890,
y1=300,
line_id="example PCS",
color="lime")
plume, plume_vigncorr, bg = load_and_prepare_images()
auto_params, fig0 = autosettings_vs_manual_settings(bg_model)
# Script option
if USE_AUTO_SETTINGS:
bg_model.update(**auto_params)
# Model 4 exemplary tau images
# list to store figures of tau plotted tau images
_tau_figs = []
# mask for corr mode 0 (i.e. 2D polyfit)
mask = np.ones(plume_vigncorr.img.shape, dtype=np.float32)
mask[plume_vigncorr.img < POLYFIT_2D_MASK_THRESH] = 0
# First method: retrieve tau image using poly surface fit
tau0 = bg_model.get_tau_image(plume_vigncorr,
mode=BG_CORR_MODES[0],
surface_fit_mask=mask,
surface_fit_polyorder=1)
# Plot the result and append the figure to _tau_figs
_tau_figs.append(bg_model.plot_tau_result(tau0, PCS=pcs_line))
# Second method: scale background image to plume image in "scale" rect
tau1 = bg_model.get_tau_image(plume, bg, mode=BG_CORR_MODES[1])
_tau_figs.append(bg_model.plot_tau_result(tau1, PCS=pcs_line))
# Third method: Linear correction for radiance differences based on two
# rectangles (scale, ygrad)
tau2 = bg_model.get_tau_image(plume, bg, mode=BG_CORR_MODES[2])
_tau_figs.append(bg_model.plot_tau_result(tau2, PCS=pcs_line))
# 4th method: 2nd order polynomial fit along vertical profile line
# For this method, determine tau on tau off and AA image
tau3 = |
jaeilepp/mne-python | mne/channels/tests/test_montage.py | Python | bsd-3-clause | 22,545 | 0 | # Author: Teon Brooks <teon.brooks@gmail.com>
#
# License: BSD (3-clause)
import os.path as op
import warnings
from nose.tools import assert_equal, assert_true, assert_raises
import numpy as np
from scipy.io import savemat
from numpy.testing import (assert_array_equal, assert_almost_equal,
assert_allclose, assert_array_almost_equal,
assert_array_less)
from mne.tests.common import assert_dig_allclose
from mne.channels.montage import read_montage, _set_montage, read_dig_montage
from mne.utils import _TempDir, run_tests_if_main
from mne import create_info, EvokedArray, read_evokeds
from mne.bem import _fit_sphere
from mne.coreg import fit_matched_points
from mne.transforms import apply_trans, get_ras_to_neuromag_trans
from mne.io.constants import FIFF
from mne.io.meas_info import _read_dig_points
from mne.io.kit import read_mrk
from mne.io import read_raw_brainvision, read_raw_egi, read_raw_fif
from mne.datasets import testing
data_path = testing.data_path(download=False)
fif_dig_montage_fname = op.join(data_path, 'montage', 'eeganes07.fif')
egi_dig_montage_fname = op.join(data_path, 'montage', 'coordinates.xml')
egi_raw_fname = op.join(data_path, 'montage', 'egi_dig_test.raw')
egi_fif_fname = op.join(data_path, 'montage', 'egi_dig_raw.fif')
locs_montage_fname = op.join(data_path, 'EEGLAB', 'test_chans.locs')
evoked_fname = op.join(data_path, 'montage', 'level2_raw-ave.fif')
io_dir = op.join(op.dirname(__file__), '..', '..', 'io')
kit_dir = op.join(io_dir, 'kit', 'tests', 'data')
elp = op.join(kit_dir, 'test_elp.txt')
hsp = op.join(kit_dir, 'test_hsp.txt')
hpi = op.join(kit_dir, 'test_mrk.sqd')
bv_fname = op.join(io_dir, 'brainvision', 'tests', 'data', 'test.vhdr')
fif_fname = op.join(io_dir, 'tests', 'data', 'test_raw.fif')
def test_montage():
"""Test making montages."""
tempdir = _TempDir()
inputs = dict(
sfp='FidNz 0 9.071585155 -2.359754454\n'
'FidT9 -6.711765 0.040402876 -3.251600355\n'
'very_very_very_long_name -5.831241498 -4.494821698 4.955347697\n'
'Cz 0 0 8.899186843',
csd='// MatLab Sphere coordinates [degrees] Cartesian coordinates\n' # noqa: E501
'// Label Theta Phi Radius X Y Z off sphere surface\n' # noqa: E501
'E1 37.700 -14.000 1.000 0.7677 0.5934 -0.2419 -0.00000000000000011\n' # noqa: E501
'E3 51.700 11.000 1.000 0.6084 0.7704 0.1908 0.00000000000000000\n' # noqa: E501
'E31 90.000 -11.000 1.000 0.0000 0.9816 -0.1908 0.00000000000000000\n' # noqa: E501
'E61 158.000 -17.200 1.000 -0.8857 0.3579 -0.2957 -0.00000000000000022', # noqa: E501
mm_elc='# ASA electrode file\nReferenceLabel avg\nUnitPosition mm\n' # noqa:E501
'NumberPositions= 68\n'
'Positions\n'
'-86.0761 -19.9897 -47.9860\n'
'85.7939 -20.0093 -48.0310\n'
'0.0083 86.8110 -39.9830\n'
'-86.0761 -24.9897 -67.9860\n'
'Labels\nLPA\nRPA\nNz\nDummy\n',
m_elc='# ASA electrode file\nReferenceLabel avg\nUnitPosition m\n'
'NumberPositions= 68\nPositions\n-.0860761 -.0199897 -.0479860\n' # noqa:E501
'.0857939 -.0200093 -.0480310\n.0000083 .00868110 -.0399830\n'
'.08 -.02 -.04\n'
'Labels\nLPA\nRPA\nNz\nDummy\n',
txt='Site Theta Phi\n'
'Fp1 -92 -72\n'
'Fp2 92 72\n'
'very_very_very_long_name -92 72\n'
'O2 92 -90\n',
elp='346\n'
'EEG\t F3\t -62.027\t -50.053\t 85\n'
'EEG\t Fz\t 45.608\t 90\t 85\n'
'EEG\t F4\t 62.01\t 50.103\t 85\n'
'EEG\t FCz\t 68.01\t 58.103\t 85\n',
hpts='eeg Fp1 -95.0 -3. -3.\n'
'eeg AF7 -1 -1 -3\n'
'eeg A3 -2 -2 2\n'
'eeg A 0 0 0',
bvef='<?xml version="1.0" encoding="UTF-8" standalone="yes"?>\n'
'<!-- Generated by EasyCap Configurator 19.05.2014 -->\n'
'<Electrodes defaults="false">\n'
' <Electrode>\n'
' <Name>Fp1</Name>\n'
' <Theta>-90</Theta>\n'
' <Phi>-72</Phi>\n'
' <Radius>1</Radius>\n'
' <Number>1</Number>\n'
' </Electrode>\n'
' <Electrode>\n'
' <Name>Fz</Name>\n'
' <Theta>45</Theta>\n'
' <Phi>90</Phi>\n'
' <Radius>1</Radius>\n'
' <Number>2</Number>\n'
' </Electrode>\n'
' <Electrode>\n'
' <Name>F3</Name>\n'
' <Theta>-60</Theta>\n'
' <Phi>-51</Phi>\n'
' <Radius>1</Radius>\n'
' <Number>3</Number>\n'
' </Electrode>\n'
' <Electrode>\n'
' <Name>F7</Name>\n'
' <Theta>-90</Theta>\n'
' <Phi>-36</Phi>\n'
' <Radius>1</Radius>\n'
' <Number>4</Number>\n'
' </Electrode>\n'
'</Electrodes>',
)
# Get actual positions and save them for checking
# csd comes from the string above, all others come from commit 2fa35d4
poss = dict(
sfp=[[0.0, 9.07159, -2.35975], [-6.71176, 0.0404, -3.2516],
[-5.83124, -4.49482, 4.95535], [0.0, 0.0, 8.89919]],
mm_elc=[[-0.08608, -0.01999, -0.04799], [0.08579, -0.02001, -0.04803],
[1e-05, 0.08681, -0.03998], [-0.08608, -0.02499, -0.06799]],
m_elc=[[-0.08608, -0.01999, -0.04799], [0.08579, -0.02001, -0.04803],
[1e-05, 0.00868, -0.03998], [0.08, -0.02, -0.04]],
txt=[[-26.25044, 80.79056, -2.96646], [26.25044, 80.79056, -2.96646],
[-26.25044, -80.79056, -2.96646], [0.0, -84.94822, -2.96646]],
elp=[[-48.20043, 57.55106, 39.86971], [0.0, 60.73848, 59.4629],
[48.1426, 57.58403, 39.89198], [41.64599, 66.91489, 31.8278]],
hpts=[[-95, -3, -3], [-1, -1., -3.], [-2, -2, 2.], [0, 0, 0]],
bvef=[[-26.266444, 80.839803, 5.204748e-15],
[3.680313e-15, 60.104076, 60.104076],
[-46.325632, 57.207392, 42.500000],
[-68.766444, 49.961746, 5.204748e-15]],
)
for key, text in inputs.items():
kind = key.split('_')[-1]
fname = op.join(tempdir, 'test.' + kind)
| with open(fname, 'w') as fid:
fid.write(text)
montage = read_montage(fname)
if kind in ('sfp', 'txt'):
assert_true('very_very_very_long_name' in montage.ch_names)
assert_equal(len(montage.ch_names), 4)
assert_equal(len(montage.ch_names), len(montage.pos))
assert_equal(montage.pos.shape, (4, 3))
assert_equal(montage.kind, 'test')
if kind == 'csd':
d | type = [('label', 'S4'), ('theta', 'f8'), ('phi', 'f8'),
('radius', 'f8'), ('x', 'f8'), ('y', 'f8'), ('z', 'f8'),
('off_sph', 'f8')]
try:
table = np.loadtxt(fname, skip_header=2, dtype=dtype)
except TypeError:
table = np.loadtxt(fname, skiprows=2, dtype=dtype)
poss['csd'] = np.c_[table['x'], table['y'], table['z']]
if kind == 'elc':
# Make sure points are reasonable distance from geometric centroid
centroid = np.sum(montage.pos, axis=0) / montage.pos.shape[0]
distance_from_centroid = np.apply_along_axis(
np.linalg.norm, 1,
montage.pos - centroid)
assert_array_less(distance_from_centroid, 0.2)
assert_array_less(0.01, distance_from_centroid)
assert_array_almost_equal(poss[key], montage.pos, 4, err_msg=key)
# Test reading in different letter case.
ch_names = ["F3", "FZ", |
OBIGOGIT/etch | binding-python/runtime/src/main/python/etch/binding/util/StrStrHashMapSerializer.py | Python | apache-2.0 | 2,642 | 0.006435 | """
# Licensed to the Apache Software Foundation (ASF) under one *
# or more contributor license agreements. See the NOTICE file *
# distributed with this work for additional information *
# regarding copyright ownership. The ASF licenses this file *
# to you under the Apache License, Version 2.0 (the *
# "License"); you may not use this file except in compliance *
# with the License. You may obtain a copy of the License at *
# *
# http://www.apache.org/licenses/LICENSE-2.0 *
# *
# Unless required by applicable law or agreed to in writing, *
# software distributed under the License is distributed on an *
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY *
# KIND, either express or implied. See the License for the *
# specific language governing permissions and limitations *
# under the License.
"""
from __future__ import absolute_import
from .StrStrHashMap import *
from ..msg.Field import *
from ..msg.ImportExportHelper import *
from ..msg.StructValue import *
from ..msg.Type import *
from ..msg.ValueFactory import *
from ..support.Class2TypeMap import *
from ..support.Validator_object import *
class StrStrHashMapSerializer(ImportExportHelper):
"""
etch serializer for StrStrHashMap
"""
FIELD_NAME = "keysAndValues"
@classmethod
def init(cls, typ, class2type):
"""
Defines custom fields in the value factory so that the importer can fin | d them
@param typ
@param class2type
"""
field = typ.getField(cls.FIELD_NAME)
class2type.put( StrStrHashMap , typ )
| typ.setComponentType( StrStrHashMap )
typ.setImportExportHelper( StrStrHashMapSerializer(typ, field))
typ.putValidator(field, Validator_object.get(1))
typ.lock()
def __init__(self, typ, field):
self.__type = typ
self.__field = field
def importHelper(self, struct):
m = StrStrHashMap()
keysAndValues = struct.get(self.__field)
for i in range(0, len(keysAndValues), 2):
m[keysAndValues[i]] = keysAndValues[i+1]
return m
def exportValue(self, vf, value):
m = StrStrHashMap(value)
keysAndValues = []
for i in m.keys():
keysAndValues.append(i)
keysAndValues.append(m[i])
struct = StructValue(self.__type, vf)
struct.put(self.__field, keysAndValues)
return struct
|
faunalia/processing_addons | ogr2ogrdifference.py | Python | agpl-3.0 | 6,648 | 0.003008 | # -*- coding: utf-8 -*-
"""
***************************************************************************
clipbypolygon.py
---------------------
Date : November 2012
Copyright : (C) 2012 by Victor Olaya
Email : volayaf at gmail dot com
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
__author__ = 'Victor Olaya'
__date__ = 'November 2012'
__copyright__ = '(C) 2012, Victor Olaya'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'
import os
from PyQt4.QtCore import *
from PyQt4.QtGui import *
from qgis.core import *
from processing.core.parameters import ParameterVector
from processing.core.parameters import ParameterString
from processing.core.parameters import ParameterNumber
from processing.core.parameters import ParameterBoolean
from processing.core.parameters import ParameterTableField
from processing.core.outputs import OutputVector
from processing.core.outputs import OutputHTML
from processing.tools.system import *
from processing.tools import dataobjects
from processing.algs.gdal.OgrAlgorithm import OgrAlgorithm
from processing.algs.gdal.GdalUtils import GdalUtils
class ogr2ogrdifference(OgrAlgorithm):
INPUT_LAYER_A = 'INPUT_LAYER_A'
INPUT_LAYER_B = 'INPUT_LAYER_B'
FIELD_A = 'FIELD_A'
FIELD_B = 'FIELD_B'
FIELDS_A = 'FIELDS_A'
TABLE = 'TABLE'
SCHEMA = 'SCHEMA'
MULTI = 'MULTI'
OPTIONS = 'OPTIONS'
OUTPUT = 'OUTPUT'
def getIcon(self):
return QIcon(os.path.dirname(__file__) + '/icons/postgis.png')
def defineCharacteristics(self):
self.name = 'Polygon Difference (non symmetrical)'
self.group = '[OGR] PostGIS Geoprocessing'
self.addParameter(ParameterVector(self.INPUT_LAYER_A, 'Input layer',
[ParameterVector.VECTOR_TYPE_POLYGON], False))
self.addParameter(ParameterTableField(self.FIELD_A, 'First input layer ID',
self.INPUT_LAYER_A, optional=False))
self.addParameter(ParameterString(self.FIELDS_A, 'Fields/attributes of input layer to be kept in results (comma separated list)',
'', optional=False))
self.addParameter(ParameterVector(self.INPUT_LAYER_B, 'Layer to be subtracted',
[ParameterVector.VECTOR_TYPE_POLYGON], False))
self.addParameter(ParameterTableField(self.FIELD_B, 'Second input layer ID',
self.INPUT_LAYER_B, optional=False))
self.addParameter(ParameterString(self.SCHEMA, 'Output schema',
'public', optional=False))
self.addParameter(ParameterString(self.TABLE, 'Output table name',
'difference', optional=False))
self.addParameter(ParameterBoolean(self.MULTI,
'Output as multipart geometries?', True))
self.addParameter(ParameterString(self.OPTIONS, 'Additional creation options (see ogr2ogr manual)',
'', optional=True))
self.addOutput(OutputHTML(self.OUTPUT, 'Output log'))
def processAlgorithm(self, progress):
inLayerA = self.getParameterValue(self.INPUT_LAYER_A)
ogrLayerA = self.ogrConnectionString(inLayerA)
layernameA = self.ogrLayerName(inLayerA)
inLayerB = self.getParameterValue(self.INPUT_LAYER_B)
ogrLayerB = self.ogrConnectionString(inLayerB)
layernameB = self.ogrLayerName(inLayerB)
fieldA = unicode(self.getParameterValue(self.FIELD_A))
fieldB = unicode(self.getParameterValue(self.FIELD_B))
fieldsA = unicode(self.getParameterValue(self.FIELDS_A))
dsUriA = QgsDataSourceURI(self.getParameterValue(self.INPUT_LAYER_A))
geomColumnA = dsUriA.geometryColumn()
dsUriB = QgsDataSourceURI(self.getParameterValue(self.INPUT_LAYER_B))
geomColumnB = dsUriB.geometryColumn()
schema = unicode(self.getParameterValue(self.SCHEMA))
table = unicode(self.getParameterValue(self.TABLE))
multi = self.getParameterValue(self.MULTI)
if len(fieldsA) > 0:
fieldstring = "," + fieldsA
else | :
fieldstring = ""
if multi:
sqlstring = "-sql \"SELECT (ST_Multi(ST_Differe | nce(g1." + geomColumnA + ",ST_Union(g2." + geomColumnB + "))))::geometry(MultiPolygon) AS geom, g1. " + fieldA + " AS id_input" + fieldstring + " FROM " + layernameA + " AS g1, " + layernameB + " AS g2 GROUP BY g1." + geomColumnA + ",g1." + fieldA + "\""" -nln " + table + " -lco SCHEMA=" + schema + " -lco FID=gid -nlt MULTIPOLYGON -lco GEOMETRY_NAME=geom --config PG_USE_COPY YES"
else:
sqlstring = "-sql \"SELECT (ST_Dump(ST_Difference(g1." + geomColumnA + ",ST_Union(g2." + geomColumnB + ")))).geom::geometry(Polygon) AS geom, g1. " + fieldA + " AS id_input" + fieldstring + " FROM " + layernameA + " AS g1, " + layernameB + " AS g2 GROUP BY g1." + geomColumnA + ",g1." + fieldA + "\""" -nln " + table + " -lco SCHEMA=" + schema + " -lco FID=gid -nlt POLYGON -lco GEOMETRY_NAME=geom --config PG_USE_COPY YES"
options = unicode(self.getParameterValue(self.OPTIONS))
arguments = []
arguments.append('-f')
arguments.append('PostgreSQL')
arguments.append(ogrLayerA)
arguments.append(ogrLayerA)
arguments.append(sqlstring)
arguments.append('-overwrite')
if len(options) > 0:
arguments.append(options)
commands = []
if isWindows():
commands = ['cmd.exe', '/C ', 'ogr2ogr.exe',
GdalUtils.escapeAndJoin(arguments)]
else:
commands = ['ogr2ogr', GdalUtils.escapeAndJoin(arguments)]
GdalUtils.runGdal(commands, progress)
output = self.getOutputValue(self.OUTPUT)
f = open(output, 'w')
f.write('<pre>')
for s in GdalUtils.getConsoleOutput()[1:]:
f.write(unicode(s))
f.write('</pre>')
f.close() |
ella/mypage | mypage/widgets/forms.py | Python | bsd-3-clause | 384 | 0.005208 | from django import forms
class BaseDisplayForm(forms.Form):
pass
class BaseConfig | Form(forms.Form):
pass
class FieldChoice(object):
def __init__(self, choice, is_checked):
self.choice = choice
self.is_checked = is_checked
@property
def name(self):
| return self.choice[1]
@property
def value(self):
return self.choice[0]
|
ianmilliken/rwf | backend/apps/farmwork/forms.py | Python | apache-2.0 | 1,542 | 0.001297 | #
# farmwork/forms.py
#
from django import forms
from django.utils.text import slugify
from .models import Farmwork
# ========================================================
# FARMWORK FORM
# ========================================================
class FarmworkForm(forms.ModelForm):
def __init__(self, *args, **kwargs):
super(FarmworkForm, self).__init__(*args, **kwargs)
class Meta:
model = Farmwork
fields = [
'job_role',
'job_fruit',
'job_ | pay',
'job | _pay_type',
'job_start_date',
'job_duration',
'job_duration_type',
'job_description',
'con_first_name',
'con_surname',
'con_number',
'con_email',
'con_description',
'acc_variety',
'acc_price',
'acc_price_type',
'acc_description',
'loc_street_address',
'loc_city',
'loc_state',
'loc_post_code',
]
# --
# AUTO GENERATE SLUG ON SAVE
# Credit: https://keyerror.com/blog/automatically-generating-unique-slugs-in-django
# --
def save(self):
if self.instance.pk:
return super(FarmworkForm, self).save()
instance = super(FarmworkForm, self).save(commit=False)
instance.slug = slugify(instance.get_job_fruit_display() + '-' + instance.get_job_role_display() + '-in-' + instance.loc_city)
instance.save()
return instance
|
hoffmabc/OpenBazaar | features/steps/ws.py | Python | mit | 2,130 | 0 | import logging
from behave import given, then, when
from node.openbazaar_daemon import MarketApplication
from test_util import (
get_db_path,
ip_address,
node_uri,
node_to_ws_port,
set_store_description,
storeDescription,
ws_connect,
ws_receive_myself,
ws_send
)
@given('there is a node')
def step_impl(context):
create_nodes(context, 1)
@when('we connect')
def step_impl(context):
context.response = ws_connect(0)
@then('it will introduce itself')
def step_impl(context):
assert context.response['result']['type'] == u'myself'
def create_nodes(context, num_nodes):
app = []
for i in range(num_nodes):
app.append(
MarketApplication(
ip_address(i),
12345,
i, db_path=get_db_path(i),
dev_mode=True
)
)
app[i].listen(node_to_ws_port(i))
set_store_description(i)
context.app = app
def create_connected_nodes(context, num_nodes):
create_nodes(context, num_nodes)
for i in range(num_nodes - 1):
ws_send(i, 'connect', {'uri': node_uri(i + 1)})
@given('{num_nodes} connected nodes')
def step_impl(context, num_nod | es):
create_connected_nodes(context, int(num_nodes))
@given('{num_nodes} nodes')
def step_impl(context, num_nodes):
create_nodes(context, int(num_nodes))
@when('node {i} connects to node {j}')
def step_impl(context, i, j):
ws_send(int(i), 'connect', {'uri': node_uri(int(j))})
@then('node {i} is conn | ected to node {j}')
def step_impl(context, i, j):
i = int(i)
j = int(j)
response = ws_receive_myself(i)['result']
assert response['type'] == 'myself'
assert(node_uri(j) in [x['uri'] for x in response['peers']])
@then('node {i} can query page of node {j}')
def step_impl(context, i, j):
guid_j = ws_connect(int(j))[u'result']['settings']['guid']
response = ws_send(int(i), 'query_page', {'findGUID': guid_j})[u'result']
logging.getLogger().info('response %s' % response)
assert response['type'] == u'page'
assert response['text'] == storeDescription(j)
|
insequent/quark | quark/tests/functional/plugin_modules/test_subnets.py | Python | apache-2.0 | 12,282 | 0 | # Copyright 2013 Openstack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY | KIND, either express or implied. See the
# License for# the specific language governing permissions and limitations
# under the License.
import mock
import netaddr
from neutron.common import exceptions
from oslo.config import cfg
impor | t contextlib
from quark.db import api as db_api
import quark.ipam
# import below necessary if file run by itself
from quark import plugin # noqa
import quark.plugin_modules.ip_policies as policy_api
import quark.plugin_modules.networks as network_api
import quark.plugin_modules.subnets as subnet_api
from quark.tests.functional.base import BaseFunctionalTest
CONF = cfg.CONF
class QuarkGetSubnets(BaseFunctionalTest):
@contextlib.contextmanager
def _stubs(self, network, subnet):
self.ipam = quark.ipam.QuarkIpamANY()
with self.context.session.begin():
net_mod = db_api.network_create(self.context, **network)
subnet["network"] = net_mod
sub1 = db_api.subnet_create(self.context, **subnet)
subnet["id"] = 2
sub2 = db_api.subnet_create(self.context, do_not_use=True,
**subnet)
yield net_mod, sub1, sub2
def test_get_subnet_do_not_use_not_returned(self):
network = dict(name="public", tenant_id="fake", network_plugin="BASE")
subnet = dict(id=1, ip_version=4, next_auto_assign_ip=2,
cidr="0.0.0.0/24", first_ip=0, last_ip=255,
ip_policy=None, tenant_id="fake")
with self._stubs(network, subnet) as (net, sub1, sub2):
subnets = db_api.subnet_find_ordered_by_most_full(self.context,
net["id"]).all()
self.assertEqual(len(subnets), 1)
self.assertEqual(subnets[0][0]["id"], "1")
class QuarkGetSubnetsFromPlugin(BaseFunctionalTest):
@contextlib.contextmanager
def _stubs(self, network, subnet):
self.ipam = quark.ipam.QuarkIpamANY()
with contextlib.nested(mock.patch("neutron.common.rpc.get_notifier")):
net = network_api.create_network(self.context, network)
subnet['subnet']['network_id'] = net['id']
sub1 = subnet_api.create_subnet(self.context, subnet)
yield net, sub1
def test_toggle_ip_policy_id_from_subnet_view(self):
cidr = "192.168.1.0/24"
ip_network = netaddr.IPNetwork(cidr)
network = dict(name="public", tenant_id="fake", network_plugin="BASE")
network = {"network": network}
subnet = dict(id=1, ip_version=4, next_auto_assign_ip=2,
cidr=cidr, first_ip=ip_network.first,
last_ip=ip_network.last, ip_policy=None,
tenant_id="fake")
subnet = {"subnet": subnet}
original = cfg.CONF.QUARK.show_subnet_ip_policy_id
with self._stubs(network, subnet) as (net, sub1):
cfg.CONF.set_override('show_subnet_ip_policy_id', True, "QUARK")
subnet = subnet_api.get_subnet(self.context, 1)
self.assertTrue('ip_policy_id' in subnet)
cfg.CONF.set_override('show_subnet_ip_policy_id', False, "QUARK")
subnet = subnet_api.get_subnet(self.context, 1)
self.assertFalse('ip_policy_id' in subnet)
cfg.CONF.set_override('show_subnet_ip_policy_id', original, "QUARK")
class QuarkCreateSubnets(BaseFunctionalTest):
@contextlib.contextmanager
def _stubs(self, network, subnet):
self.ipam = quark.ipam.QuarkIpamANY()
with contextlib.nested(mock.patch("neutron.common.rpc.get_notifier")):
net = network_api.create_network(self.context, network)
subnet['subnet']['network_id'] = net['id']
sub1 = subnet_api.create_subnet(self.context, subnet)
yield net, sub1
def test_create_allocation_pools_over_quota_fail(self):
original_pool_quota = cfg.CONF.QUOTAS.quota_alloc_pools_per_subnet
cidr = "1.1.1.0/8"
ip_network = netaddr.IPNetwork(cidr)
network = dict(name="public", tenant_id="fake", network_plugin="BASE")
network = {"network": network}
pools = [{"start": "1.0.1.2", "end": "1.0.2.0"},
{"start": "1.0.2.2", "end": "1.0.3.0"}]
subnet = dict(id=1, ip_version=4, next_auto_assign_ip=2,
cidr=cidr, first_ip=ip_network.first,
last_ip=ip_network.last, ip_policy=None,
tenant_id="fake", allocation_pools=pools)
subnet = {"subnet": subnet}
with self.assertRaises(exceptions.OverQuota):
cfg.CONF.set_override('quota_alloc_pools_per_subnet', 1, "QUOTAS")
with self._stubs(network, subnet) as (net, sub):
self.assertTrue(sub)
cfg.CONF.set_override('quota_alloc_pools_per_subnet',
original_pool_quota, "QUOTAS")
def test_create_allocation_pools_under_quota_pass(self):
original_pool_quota = cfg.CONF.QUOTAS.quota_alloc_pools_per_subnet
cidr = "1.1.1.0/8"
ip_network = netaddr.IPNetwork(cidr)
network = dict(name="public", tenant_id="fake", network_plugin="BASE")
network = {"network": network}
pools = [{"start": "1.0.1.2", "end": "1.0.2.0"}]
subnet = dict(id=1, ip_version=4, next_auto_assign_ip=2,
cidr=cidr, first_ip=ip_network.first,
last_ip=ip_network.last, ip_policy=None,
tenant_id="fake", allocation_pools=pools)
subnet = {"subnet": subnet}
cfg.CONF.set_override('quota_alloc_pools_per_subnet', 1, "QUOTAS")
with self._stubs(network, subnet) as (net, sub):
self.assertTrue(sub)
cfg.CONF.set_override('quota_alloc_pools_per_subnet',
original_pool_quota, "QUOTAS")
def test_create_allocation_pools_empty(self):
cidr = "192.168.1.0/24"
ip_network = netaddr.IPNetwork(cidr)
network = dict(name="public", tenant_id="fake", network_plugin="BASE")
network = {"network": network}
pools = []
subnet = dict(id=1, ip_version=4, next_auto_assign_ip=2,
cidr=cidr, first_ip=ip_network.first,
last_ip=ip_network.last, ip_policy=None,
tenant_id="fake", allocation_pools=pools)
subnet = {"subnet": subnet}
with self._stubs(network, subnet) as (net, sub1):
self.assertEqual(sub1["allocation_pools"], [])
def test_create_allocation_pools_none(self):
cidr = "192.168.1.0/24"
ip_network = netaddr.IPNetwork(cidr)
network = dict(name="public", tenant_id="fake", network_plugin="BASE")
network = {"network": network}
pools = None
subnet = dict(id=1, ip_version=4, next_auto_assign_ip=2,
cidr=cidr, first_ip=ip_network.first,
last_ip=ip_network.last, ip_policy=None,
tenant_id="fake", allocation_pools=pools)
subnet = {"subnet": subnet}
with self._stubs(network, subnet) as (net, sub1):
self.assertEqual(sub1["allocation_pools"],
[dict(start="192.168.1.1", end="192.168.1.254")])
def test_create_allocation_pools_full(self):
cidr = "192.168.1.0/24"
ip_network = netaddr.IPNetwork(cidr)
network = dict(name="public", tenant_id="fake", network_plugin="BASE")
network = {"network": network}
pools = [dict(start="192.168.1.0", end="192.168.1.255")]
subnet = dict(id=1, ip_version=4, next_auto_assign_ip=2,
cidr=cidr, f |
heibanke/python_do_something | Code/Chapter4/homework4-2_ex.py | Python | apache-2.0 | 2,080 | 0.025784 | #!/usr/bin/env python
# coding: utf-8
#copyRight by heibanke
import csv
import re
import pprint
def readData():
csvfile = open('beijing_jt.csv','r')
reader = csv.reader(csvfile)
reader.next()
result={}
while True:
try:
jt_info = reader.next()
except:
break
#print jt_info[1].decode('utf-8')
# convert stations info format
station_pattern = (r'(?P<number>[0-9]+)\s(?P<name>\D+)')
station_list = []
stations = re.findall(station_pattern,jt_info[-1].decode('utf-8'))
for t | mp in stations:
#print tmp[0],tmp[1].strip()
station_list.append(tmp[1].strip())
result[jt_info[1]]=station_list
csvfile.close()
return result
def find_station(s,stations):
line_list=[]
for k,v in stations.iteritems():
if unicode(s,'utf-8') in v:
print k,s
line_list.append((k,v))
return line_list
def print_lines(lines):
for l in lines:
pri | nt unicode(l[0],'utf-8'),unicode(l[1],'utf-8'),u"中转站:",l[2]
if __name__=="__main__":
stations=readData()
print u"请输入你想查询的起始站名:"
start_station = raw_input()
start_lines=find_station(start_station,stations)
print u"请输入你想查询的终点站名:"
end_station = raw_input()
end_lines=find_station(end_station,stations)
#无需换乘
your_selects=[]
for i in start_lines:
if i in end_lines:
your_selects.append(i[0])
print u"直达的公交:"
for l in your_selects:
print unicode(l,'utf-8')
#换乘一次
huancheng_one=[]
for i in start_lines:
for j in end_lines:
for mid_station in i[1]:
if mid_station in j[1]:
huancheng_one.append((i[0],j[0],mid_station))
break
print u"换乘一次的公交:"
print_lines(huancheng_one)
|
cruor99/KivyMD | kivymd/spinner.py | Python | mit | 4,907 | 0.000408 | # -*- coding: utf-8 -*-
from kivy.lang import Builder
from kivy.uix.widget import Widget
from kivy.properties import NumericProperty, ListProperty, BooleanProperty
from kivy.animation import Animation
from kivymd.theming import ThemableBehavior
Builder.load_string('''
<MDSpinner>:
canvas.before:
PushMatrix
Rotate:
angle: self._rotation_angle
origin: self.center
canvas:
Color:
rgba: self.color
a: self._alpha
SmoothLine:
circle: self.center_x, self.center_y, self.width / 2, \
self._angle_start, self._angle_end
cap: 'square'
width: dp(2.25)
canvas.after:
PopMatrix
''')
class MDSpinner(ThemableBehavior, Widget):
""":class:`MDSpinner` is an implementation of the circular progress
indicator in Google's Material Design.
It can be used either as an indeterminate indicator that loops while
the user waits for something to happen, or as a determinate indicator.
Set :attr:`determinate` to **True** to activate determinate mode, and
:attr:`determinate_time` to set the duration of the animation.
"""
determinate = BooleanProperty(False)
""":attr:`determinate` is a :class:`~kivy.properties.BooleanProperty` and
defaults to False
"""
determinate_time = NumericProperty(2)
""":attr:`determinate_time` is a :class:`~kivy.properties.NumericProperty`
and defaults to 2
"""
active = BooleanProperty(True)
"""Use :attr:`active` to start or stop the spinner.
:attr:`active` is a :class:`~kivy.properties.BooleanProperty` and
defaults to True
"""
color = ListProperty([])
""":attr:`color` is a :class:`~kivy.properties.ListProperty` and
defaults to 'self.theme_cls.primary_color'
"""
_alpha = NumericProperty(0)
_rotation_angle = NumericProperty(360)
_angle_start = NumericProperty(0)
_angle_end = NumericProperty(8)
def __init__(self, **kwargs):
super(MDSpinner, self).__init__(**kwargs)
self.color = self.theme_cls.primary_color
self._alpha_anim_in = Animation(_alpha=1, duration=.8, t='out_quad')
self._alpha_anim_out = Animation(_alpha=0, duration=.3, t='out_quad')
self._alpha_anim_out.bind(on_complete=self._reset)
self.theme_cls.bind(primary_color=self._update_color)
if self.determinate:
self._start_determinate()
else:
self._start_loop()
def _update_color(self, *args):
self.color = self.theme_cls.primary_color
def _start_determinate(self, *args):
self._alpha_anim_in.start(self)
_rot_anim = Animation(_rotation_angle=0,
duration=self.determinate_time * .7,
t='out_quad')
_rot_anim.start(self)
_angle_start_anim = Animation(_angle_end=360,
duration=self | .determinate_time,
t='in_out_quad')
_angle_start_anim.bind(on_complete=lambda *x: \
self._alpha_anim_out.start(self | ))
_angle_start_anim.start(self)
def _start_loop(self, *args):
if self._alpha == 0:
_rot_anim = Animation(_rotation_angle=0,
duration=2,
t='linear')
_rot_anim.start(self)
self._alpha = 1
self._alpha_anim_in.start(self)
_angle_start_anim = Animation(_angle_end=self._angle_end + 270,
duration=.6,
t='in_out_cubic')
_angle_start_anim.bind(on_complete=self._anim_back)
_angle_start_anim.start(self)
def _anim_back(self, *args):
_angle_back_anim = Animation(_angle_start=self._angle_end - 8,
duration=.6,
t='in_out_cubic')
_angle_back_anim.bind(on_complete=self._start_loop)
_angle_back_anim.start(self)
def on__rotation_angle(self, *args):
if self._rotation_angle == 0:
self._rotation_angle = 360
if not self.determinate:
_rot_anim = Animation(_rotation_angle=0,
duration=2)
_rot_anim.start(self)
def _reset(self, *args):
Animation.cancel_all(self, '_angle_start', '_rotation_angle',
'_angle_end', '_alpha')
self._angle_start = 0
self._angle_end = 8
self._rotation_angle = 360
self._alpha = 0
self.active = False
def on_active(self, *args):
if not self.active:
self._reset()
else:
if self.determinate:
self._start_determinate()
else:
self._start_loop()
|
AssembleSoftware/IoTPy | IoTPy/agent_types/actuators_simple.py | Python | bsd-3-clause | 930 | 0.004301 | def print_from_queue(q): |
"""
prints values read from queue q to
standard out.
"""
while True:
v = q.get()
if v is None:
# exit loop
return
else:
print (str(v))
class queue_to_file(object):
"""
self.actuate(a) puts values from a queue q
into the file called self.filename
"""
def __init__(self, filename, timeout=0):
self.f | ilename = filename
self.timeout = timeout
def actuate(self, q):
with open(self.filename, 'w') as the_file:
while True:
try:
v = q.get(timeout=self.timeout)
except:
# No more input for this actuator
return
if v is None:
# exit loop
return
else:
the_file.write(str(v) + '\n')
|
philanthropy-u/edx-platform | openedx/features/course_duration_limits/migrations/0001_initial.py | Python | agpl-3.0 | 1,719 | 0.004654 | # -*- coding: utf-8 -*-
# Generated by Django 1.11.16 on 2018-11-08 19:43
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
('course_overviews', '0014_courseoverview_certificate_available_date'),
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('sites', '0002_alter_domain_unique'),
]
operations = [
migrations.CreateModel(
name='CourseDurationLimitConfig',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('change_date', models.DateTimeField(auto_now_add=True, verbose_name='Change date')),
('enabled', models.NullBooleanField(default=None, verbose_name='Enabled')),
('org', models.CharField(blank=True, db_index=True, max_length=255, null=True)),
('enabled_as_of', models.DateField(blank=True, default=None, null=True, verbose_name='Enabled As Of')),
('changed_by', models.ForeignKey(editable=False, null=True, on_delete=django.db.models.deletion | .PROTECT, to=settings.AUTH_USER_MODEL, verbose_name='Changed by')),
('course', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, to='course_overviews.CourseOverview')),
('site', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='sites.Site')),
],
options={
| 'abstract': False,
},
),
]
|
tejoesperanto/pasportaservo | hosting/forms/visibility.py | Python | agpl-3.0 | 11,039 | 0.002446 | from django import forms
from django.db.models import (
BinaryField, BooleanField, Case, CharField, Q, Value, When,
)
from django.utils.functional import cached_property
from django.utils.html import format_html
from django.utils.translation import ugettext_lazy as _
from ..models import (
Place, VisibilitySettings, VisibilitySettingsForFamilyMembers,
VisibilitySettingsForPhone, VisibilitySettingsForPlace,
VisibilitySettingsForPublicEmail,
)
from ..utils import value_without_invalid_marker
class VisibilityForm(forms.ModelForm):
class Meta:
model = VisibilitySettings
fields = [
'visible_online_public',
'visible_online_authed',
'visible_in_book',
]
# The textual note (for assistive technologies), placed before the data.
hint = forms.CharField(required=False, disabled=True)
# Whether to nudge the data to the right, creating a second level.
indent = forms.BooleanField(required=False, disabled=True)
def __init__(self, *args, **kwargs):
self.requested_pk = kwargs.pop('request_pk', None)
self.profile = kwargs.pop('request_profile', None)
read_only = kwargs.pop('read_only', False)
super().__init__(*args, **kwargs)
if read_only:
for f in self.fields:
self.fields[f].disabled = True
widget_settings = {
'data-toggle': 'toggle',
'data-on': _("Yes"),
'data-off': _("No"),
'data-size': 'mini',
'data-on-ajax-setup': 'updateVisibilitySetup',
'data-on-ajax-success': 'updateVisibilityResult',
'data-on-ajax-error': 'updatePrivacyFailure',
# autocomplete attribute is required for Firefox to drop
# caching and refresh the checkbox on each page reload.
'autocomplete': 'off',
}
widget_classes = ' ajax-on-change'
for venue in self.venues():
attrs = venue.field.widget.attrs
attrs.update(widget_settings)
attrs['class'] = attrs.get('class', '') + widget_classes
if venue.venue_name == 'in_book' and not self.obj.visibility.printable:
venue.field.disabled = True
venue.field.initial = False
self.initial[venue.field_name] = False
if isinstance(self.obj, Place):
venue.hosting_notification = True
elif not read_only:
venue.field.disabled = not self.obj.visibility.rules[venue.venue_name]
if venue.venue_name.startswith('online_'):
attrs.update({'data-tied': str(self.obj.visibility.rules.get('tied_online', False))})
def venues(self, restrict_to=''):
"""
Generator of bound fields corresponding to the visibility venues:
online_public, online_authed, and in_book. Each bound field is updated
to include the name of the venue.
"""
for f in self.fields:
if f.startswith(self._meta.model._PREFIX+restrict_to):
bound_field = self[f]
bound_field.field_name, bound_field.venue_name = f, f[len(self._meta.model._PREFIX):]
yield bound_field
@cached_property
def obj(self):
"""
Returns the object itself or a wrapper (in the case of a field), for
simplified and unified access to the visibility object in templates.
"""
if self.is_bound and self.instance.model_type == self.instance.DEFAULT_TYPE:
raise ValueError("Form is bound but no visibility settings object was provided, "
"for key {pk} and {profile!r}. This most likely indicates tampering."
.format(pk=self.requested_pk, profile=self.profile))
wrappers = {
VisibilitySettingsForFamilyMembers.type(): self.FamilyMemberAsset,
VisibilitySettingsForPublicEmail.type(): self.EmailAsset,
}
return wrappers.get(self.instance.model_type, lambda c: c)(self.instance.content_object)
class FamilyMemberAsset:
"""
Wrapper for the `family_members` field of a Place.
"""
def __init__(self, for_place):
self.title = for_place._meta.get_field('family_members').verbose_name
self.visibility = for_place.family_members_visibility
@property
def icon(self):
template = ('<span class="fa ps-users" title="{title}" '
' data-toggle="tooltip" data-placement="left"></span>')
return format_html(template, title=_("family members").capitalize())
def __str__(self):
return str(self.title)
class EmailAsset:
"""
Wrapper for the `email` field of a Profile.
"""
def __init__(self, for_profile):
self.data = for_profile.email
self.visibility = for_profile.email_visibility
@property
def icon(self):
template = ('<span class="fa fa-envelope" title="{title}" '
' data-toggle="tooltip" data-placement="bottom"></span>')
return format_html(template, title=_("public email").capitalize())
def __str__(self) | :
return value_without_invalid_marker(self.data)
def clean_visible_in_book(self):
"""
The in_book venue is manipulated manually in form init, so that the
checkbox appears as "off" when place is not offered for accommodation,
independently of its actual value.
The clean method ensures that the value is restored to the actual one | ;
otherwise the database will be updated with the "off" value.
"""
venue = next(self.venues('in_book'))
if venue.field.disabled:
return self.obj.visibility[venue.venue_name]
else:
return self.cleaned_data['visible_in_book']
def save(self, commit=True):
"""
Adds a bit of magic to the saving of the visibility object.
When data is made visible in public, it will automatically become
visible also to the authorized users. And when it is made hidden for
authorized users, it will automatically become hidden for public.
"""
visibility = super().save(commit=False).as_specific()
venue, field_name = None, ''
for field in self.venues('online'):
if field.field_name in self.changed_data:
venue, field_name = field.venue_name, field.field_name
value = self.cleaned_data.get(field_name)
counterparty = {
'online_public': 'online_authed', 'online_authed': 'online_public', None: '',
}[venue]
ripple = [
(value) and venue == 'online_public',
(not value) and venue == 'online_authed',
(not value) and venue == 'online_public' and visibility.rules.get('tied_online'),
]
if any(ripple):
visibility[counterparty] = value
if commit:
visibility.save(update_fields=[v.field_name for v in self.venues()])
return visibility
class VisibilityFormSetBase(forms.BaseModelFormSet):
"""
Provides a unified basis for a FormSet of the visibility models, linked to
a specific profile. The linkage is to all relevant objects, such as places
and phones, and to fields with selective display-ability.
"""
def __init__(self, *args, **kwargs):
self.profile = kwargs.pop('profile')
self.read_only = kwargs.pop('read_only', False)
self.modified_venue = kwargs.pop('dirty', None)
super().__init__(*args, **kwargs)
PLACE, FAMILY_MEMBERS, PHONE, PUBLIC_EMAIL = (vis.type() for vis in [
VisibilitySettingsForPlace, VisibilitySettingsForFamilyMembers,
VisibilitySettingsForPhone, VisibilitySettingsForPublicEmail,
])
# Gathering all data items linked to the profile.
what = Q()
owned_places = self.profile.owned_places.exclude(deleted=True).prefetch_related('family_members')
what |= Q(model_t |
acdha/django-modeltranslation | modeltranslation/utils.py | Python | bsd-3-clause | 4,065 | 0.001968 | # -*- coding: utf-8 -*-
from contextlib import contextmanager
from django.utils.encoding import force_unicode
from django.utils.translation import get_language as _get_language
from django.utils.functional import lazy
from modeltranslation import settings
def get_language():
    """
    Return an active language code that is guaranteed to be in
    settings.LANGUAGES (Django does not seem to guarantee this for us).
    """
    active = _get_language()
    if active in settings.AVAILABLE_LANGUAGES:
        return active
    # Fall back to the base of a regionalized code, e.g. 'de-at' -> 'de'.
    if '-' in active:
        base = active.split('-')[0]
        if base in settings.AVAILABLE_LANGUAGES:
            return base
    return settings.DEFAULT_LANGUAGE
def get_translation_fields(field):
    """Return the localized field name of ``field`` for every available language."""
    return [
        build_localized_fieldname(field, lang_code)
        for lang_code in settings.AVAILABLE_LANGUAGES
    ]
def build_localized_fieldname(field_name, lang):
    """Return ``'<field_name>_<lang>'`` with dashes in *lang* turned into underscores.

    E.g. ``('title', 'en-us')`` -> ``'title_en_us'``.  The ``str()`` wrapper
    keeps the result a native string under Python 2 (repaired from a
    text-corrupted ``repla | ce`` in the source dump).
    """
    return str('%s_%s' % (field_name, lang.replace('-', '_')))
def _build_localized_verbose_name(verbose_name, lang):
    # Append the language code to a field's verbose name, e.g. u'title [de]'.
    # force_unicode resolves lazy/byte-string verbose names (Python 2 Django).
    return u'%s [%s]' % (force_unicode(verbose_name), lang)
# Lazy wrapper so evaluation (and translation of verbose_name) is deferred
# until the string is actually rendered; declares a ``unicode`` result type.
build_localized_verbose_name = lazy(_build_localized_verbose_name, unicode)
def _join_css_class(bits, offset):
    # Given a fieldname already split on '_', test whether the trailing
    # ``offset`` bits form a known language code when re-joined with '-'
    # (e.g. ['zh', 'tw'] -> 'zh-tw'); 'en-us' is accepted unconditionally.
    # On a match return '<field>-<lang>' with underscores inside each part,
    # otherwise the empty string so callers can fall back.
    if '-'.join(bits[-offset:]) in settings.AVAILABLE_LANGUAGES + ['en-us']:
        return '%s-%s' % ('_'.join(bits[:len(bits) - offset]), '_'.join(bits[-offset:]))
    return ''
def build_css_class(localized_fieldname, prefix=''):
    """
    Build an easily splittable css class from ``localized_fieldname``,
    supporting both short ('de') and regionalized ('es-ar') language codes.

    Takes an optional ``prefix`` which is prepended to the returned string.
    """
    def _decorate(css_class):
        # Prepend the prefix only when one was supplied.
        return '%s-%s' % (prefix, css_class) if prefix else css_class

    bits = localized_fieldname.split('_')
    if len(bits) == 1:
        # No language suffix at all, e.g. 'foo'.
        return _decorate(str(localized_fieldname))
    if len(bits) == 2:
        # Fieldname plus short language code:
        # 'foo_de' -> 'foo-de', 'bar_en' -> 'bar-en'
        return _decorate('-'.join(bits))
    # More than two bits: first try a regionalized code
    # ('foo_es_ar' -> 'foo-es_ar', 'foo_bar_zh_tw' -> 'foo_bar-zh_tw'),
    # then fall back to a short code
    # ('foo_bar_de' -> 'foo_bar-de', 'foo_bar_baz_de' -> 'foo_bar_baz-de').
    return _decorate(_join_css_class(bits, 2) or _join_css_class(bits, 1))
def unique(seq):
    """
    Lazily yield each item of *seq* once, preserving first-seen order.

    >>> list(unique([1, 2, 3, 2, 2, 4, 1]))
    [1, 2, 3, 4]
    """
    seen = set()
    for item in seq:
        if item not in seen:
            seen.add(item)
            yield item
def resolution_order(lang, override=None):
    """
    Return order of languages which should be checked for parameter language.
    First is always the parameter language, later are fallback languages.
    Override parameter has priority over FALLBACK_LANGUAGES.
    """
    if override is None:
        override = {}
    # Language-specific fallback chain: an entry in ``override`` wins over
    # the settings map; default to an empty tuple when neither defines one.
    fallback_for_lang = override.get(lang, settings.FALLBACK_LANGUAGES.get(lang, ()))
    # Global 'default' chain is appended after the language-specific one.
    fallback_def = override.get('default', settings.FALLBACK_LANGUAGES['default'])
    order = (lang,) + fallback_for_lang + fallback_def
    # Drop duplicates while keeping the first occurrence of each code.
    return tuple(unique(order))
@contextmanager
def auto_populate(mode='all'):
    """
    Overrides translation fields population mode (population mode decides which
    unprovided translations will be filled during model construction / loading).

    Example:

        with auto_populate('all'):
            s = Slugged.objects.create(title='foo')
            s.title_en == 'foo' // True
            s.title_de == 'foo' // True

    This method may be used to ensure consistency loading untranslated fixtures,
    with non-default language active:

        with auto_populate('required'):
            call_command('loaddata', 'fixture.json')

    NOTE: this swaps a module-global setting for the duration of the block,
    so any concurrently running code will also observe the temporary mode.
    """
    current_population_mode = settings.AUTO_POPULATE
    settings.AUTO_POPULATE = mode
    try:
        yield
    finally:
        # Always restore the previous mode, even if the body raised.
        settings.AUTO_POPULATE = current_population_mode
|
AlienCowEatCake/ImageViewer | src/ThirdParty/Exiv2/exiv2-0.27.5-Source/tests/tiff_test/test_tag_compare.py | Python | gpl-3.0 | 6,645 | 0.003612 | # -*- coding: utf-8 -*-
import system_tests
class OutputTagExtract(metaclass=system_tests.CaseMeta):
"""
Test whether exiv2 -pa $file and exiv2 -pS $file produces the same output.
"""
def parse_pa(self, stdout):
"""
Parse the output of exiv2 -pa $file, which looks like this:
Exif.Image.NewSubfileType Long 1 Primary image
into a list of dictionaries with the keys:
tag: last word of the first column (here NewSubfileType)
type: lowercase second column
len: third column
val: fourth column
It is furthermore checked that the first column begins with 'Exif.Image'
"""
data = []
for line in stdout:
tmp = line.split()
exif, image, tag = tmp[0].split('.')
self.assertEquals(exif, "Exif")
self.assertEquals(image, "Image")
data.append({
"tag": tag,
"type": tmp[1].lower(),
"len": int(tmp[2]),
"val": " ".join(tmp[3:])
})
return data
def parse_pS(self, stdout):
"""
Parse the output of exiv2 -pS $file, which looks like this:
STRUCTURE OF TIFF FILE (II): $file
address | tag | type | count | offset | value
254 | 0x00fe NewSubfileType | LONG | 1 | | 0
...
END $file
into a list of dictionaries with the following keys:
tag: the string after the hex number in the second column
type: lowercase third column
len: fourth column
val: fifth column
The first two lines and the last line are ignored, as they contain
explanatory output.
"""
data = []
for i, line in enumerate(stdout):
if i < 2 or i == len(stdout) - 1:
continue
tmp = line.split(" | ")
data.append({
"tag": tmp[1].split()[1],
"type": tmp[2].replace(' ', '').lower(),
"len": int(tmp[3].replace(' ', '')),
"val": tmp[5]
})
return data
def compare_pS_pa(self):
"""
Compares the output from self.parse_pa() and self.parse_pS() (saved in
self.pa_data & self.pS_data respectively).
All dictionaries in the lists are compared for equality for the keys
tag, len and type but only some for val. This is due to differently
processed output (exiv2 -pa produces more readable output,
e.g. compression is written in words and not as a number as it is by
exiv2 -pS)
"""
for pa_elem, pS_elem in zip(self.pa_data, self.pS_data):
for key in ["tag", "type", "len"]:
self.assertEquals(pa_elem[key], pS_elem[key])
if pa_elem["tag"] in [
"ImageWidth", "ImageLength", "BitsPerSample",
"DocumentName", "ImageDescription", "StripOffsets",
"SamplesPerPixel", "StripByteCounts"]:
self.assertEquals(pa_elem["val"], pS_elem["val"])
def compare_stdout(self, i, command, got_stdout, expected_stdout):
super().compare_stdout(i, command, got_stdout, expected_stdout)
if '-pa' in command:
self.pa_data = self.parse_pa(got_stdout.splitlines())
if '-pS' in command:
self.pS_data = self.parse_pS(got_stdout.splitlines())
if i == 1:
self.compare_pS_pa()
commands = [
"$exiv2 %s $data_path/mini9.tif" % (opt) for opt in ["-pa", "-pS"]
]
stderr = [""] * 2
retval = [0] * 2
stdout = [
"""Exif.Image.NewSubfileType Long 1 Primary image
Exif.Image.ImageWidth Short 1 9
Exif.Image.ImageLength Short 1 9
Exif.Image.BitsPerSample Short 3 8 8 8
Exif.Image.Compression Short 1 Uncompressed
Exif.Image.PhotometricInterpretation | Short 1 RGB
Exif.Image.DocumentName Ascii 24 /home/ahuggel/mini9.tif
Exif.Image.ImageDescription Ascii 18 Created with GIMP
Exif.Image.StripOffsets Long 1 8
Exif.Image.Orientation Short 1 top, left
Exif.Image.SamplesPerPixel S | hort 1 3
Exif.Image.RowsPerStrip Short 1 64
Exif.Image.StripByteCounts Long 1 243
Exif.Image.XResolution Rational 1 72
Exif.Image.YResolution Rational 1 72
Exif.Image.PlanarConfiguration Short 1 Chunky
Exif.Image.ResolutionUnit Short 1 inch
""",
"""STRUCTURE OF TIFF FILE (II): $data_path/mini9.tif
address | tag | type | count | offset | value
254 | 0x00fe NewSubfileType | LONG | 1 | | 0
266 | 0x0100 ImageWidth | SHORT | 1 | | 9
278 | 0x0101 ImageLength | SHORT | 1 | | 9
290 | 0x0102 BitsPerSample | SHORT | 3 | 462 | 8 8 8
302 | 0x0103 Compression | SHORT | 1 | | 1
314 | 0x0106 PhotometricInterpretation | SHORT | 1 | | 2
326 | 0x010d DocumentName | ASCII | 24 | 468 | /home/ahuggel/mini9.tif
338 | 0x010e ImageDescription | ASCII | 18 | 492 | Created with GIMP
350 | 0x0111 StripOffsets | LONG | 1 | | 8
362 | 0x0112 Orientation | SHORT | 1 | | 1
374 | 0x0115 SamplesPerPixel | SHORT | 1 | | 3
386 | 0x0116 RowsPerStrip | SHORT | 1 | | 64
398 | 0x0117 StripByteCounts | LONG | 1 | | 243
410 | 0x011a XResolution | RATIONAL | 1 | 510 | 1207959552/16777216
422 | 0x011b YResolution | RATIONAL | 1 | 518 | 1207959552/16777216
434 | 0x011c PlanarConfiguration | SHORT | 1 | | 1
446 | 0x0128 ResolutionUnit | SHORT | 1 | | 2
END $data_path/mini9.tif
"""]
|
GigaSpaces-ProfessionalServices/cloudify-openstack-plugin | nova_plugin/userdata.py | Python | apache-2.0 | 1,696 | 0.00059 | #########
# Copyright (c) 2015 GigaSpaces Technologies Ltd. All rights reserved
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# * See the License for the specific language governing permissions and
# * limitations under the License.
import requests
from cloudify import compute
from cloudify import exceptions
from cloudify import ctx
def handle_userdata(server):
    """Populate ``server['userdata']`` from the existing spec and/or agent script.

    ``server`` is the nova server-creation dict.  Its optional ``userdata``
    entry may be a plain string or a dict spec resolved through
    ``userdata_handlers`` (keyed by the spec's ``type``).  When both an
    existing userdata and a Cloudify agent init script are present, they are
    merged into a single multi-mimetype payload.

    :raises exceptions.NonRecoverableError: for an unknown dict spec type.
    """
    existing_userdata = server.get('userdata')
    install_agent_userdata = ctx.agent.init_script()
    if not (existing_userdata or install_agent_userdata):
        # Nothing to inject; leave the server dict untouched.
        return
    if isinstance(existing_userdata, dict):
        # Dict form: resolve through the handler registry
        # (repaired text-corrupted ``ud_t | ype`` token from the source dump).
        ud_type = existing_userdata['type']
        if ud_type not in userdata_handlers:
            # Fixed stray ')' that was embedded in the original message.
            raise exceptions.NonRecoverableError(
                "Invalid type '{0}' for server userdata".format(ud_type))
        existing_userdata = userdata_handlers[ud_type](existing_userdata)
    if not existing_userdata:
        final_userdata = install_agent_userdata
    elif not install_agent_userdata:
        final_userdata = existing_userdata
    else:
        # Both present: wrap into one multi-part MIME document.
        final_userdata = compute.create_multi_mimetype_userdata(
            [existing_userdata, install_agent_userdata])
    server['userdata'] = final_userdata
# Resolvers that turn a dict-style userdata spec into the actual userdata
# string, keyed by the spec's 'type' field.  'http' fetches the payload
# from the URL given in the spec's 'url' key.
userdata_handlers = {
    'http': lambda params: requests.get(params['url']).text
}
|
milisarge/toxfs | webserver.py | Python | gpl-3.0 | 3,418 | 0.043924 | # -*- coding: utf-8 -*-
from flask import Flask
from flask import Flask,jsonify, request, Response, session,g,redirect, url_for,abort, render_template, flash
from islem import *
from bot import *
import sys
import time
import datetime
reload(sys)
sys.setdefaultencoding("utf-8")
app = Flask(__name__)
toxbot = tox_factory(ProfileHelper.open_profile("tox_save.tox"))
sonek=str(toxbot.self_get_address())[0:2]
karsi_dosyalar="gelen_cevaplar"+sonek
komut_dosyasi="gelen_komutlar"+sonek
@app.route('/')
def indeks():
    # Landing page: an HTML table with the bot itself (row -1: name, status
    # message, Tox address) followed by one row per friend (number, name,
    # status message, public key), each friend number linking to /toxsys.
    arkadaslar=""
    for num in toxbot.self_get_friend_list():
        # NOTE(review): friend names/status messages are concatenated into
        # HTML without escaping -- remote-controlled input, XSS risk.
        arkadaslar+="<tr><td><a href=/toxsys?fno="+str(num)+">"+str(num)+"</td><td>"+toxbot.friend_get_name(num)+"</td><td>"+str(toxbot.friend_get_status_message(num))+"</td><td>"+str(toxbot.friend_get_public_key(num))+"</td></tr>"
    return '''<html>
<h2>Tox Yönetim Sayfası</h2>
<table border=1>
<tr><td>no</td><td>isim</td><td>publickey</td></tr>
<tr><td>-1</td><td>'''+toxbot.self_get_name()+'''</td><td>'''+toxbot.self_get_status_message()+'''</td><td>'''+toxbot.self_get_address()+'''</td></tr>
'''+arkadaslar+'''
</tr></table>
<a href="/toxfs">toxfs</a>
</html>'''
@app.route('/toxfs', methods = ['GET','POST'])
def toxfs():
    # Reads fno/tip/mesaj query parameters, wraps them in an Islem
    # ("operation") record and appends it to the command file that the bot
    # process polls (komut_dosyasi).  Responds with a fixed acknowledgement.
    # localhost:2061
    #if request.method == 'GET':
    islem=Islem()
    islem.fno = request.args.get('fno')
    islem.tip = request.args.get('tip')
    islem.mesaj = request.args.get('mesaj')
    islem.komut="---"  # placeholder: no bot command for plain messages
    print "islem icerik:"
    islem.icerik()
    islem.dosyala(komut_dosyasi)
    return "komut icra edildi."
    #else:
    #return '''<html>
    #paremetreyle gönderin</html>'''
@app.route('/toxsys', methods = ['GET','POST'])
def toxsys():
dosyalar_html=""
# localhost:2061
#if request.method == 'GET':
islem=Islem()
if 'fno' in request.args and 'dosya' not in request.args:
islem.fno = request.args.get('fno')
islem.tip = "komut"
islem.mesaj = "x"
islem.komut = "@100@dlist"
print "islem icerik:"
islem.icerik()
islem.dosyala(komut_dosyasi)
cevap_geldi=False
dosya_bek_bas = datetime.datetime.now()
#6sn bekle cevap icin
t_end = time.time() + 6
while not cevap_geldi :
if os.path.exists(karsi_dosyalar):
time.sleep(1)
cevaplar=open(karsi_dosyalar,"r").read()
cevaplar=cevaplar.split("\n")
for dosya in cevaplar:
dosyalar_html+="<tr><td><a href=/toxsys?fno="+str(islem.fno)+"&dosya="+dosya+">"+dosya+"</td><td></tr>"
os.remove(karsi_dosyalar)
cevap_geldi=True
return '''<html>
<h3>dosyalar</h3>
<table border=1>
'''+dosyalar_html+'''
</tr>
<a href="./"> | anasayfa</a>
</html>'''
dosya_bek_son = datetime.datetime.now()
krono=dosya_bek_son-dosya_bek_bas
if krono.total_seconds() > 6 :
break
else:
print "dlist sonucu bekleniyor.",krono.total_seconds()
if 'fno' in request.args and 'dosya' in request.args:
islem.fno = request.args.get('fno')
dosya = request.args.get('dosya')
islem.tip = "komut"
islem.mesaj = "x"
islem.komut = "@102@"+dosya |
islem.dosyala(komut_dosyasi)
cevap_geldi=False
while not cevap_geldi:
time.sleep(0.5)
#md5sum kontrol
if os.path.exists(karsi_dosyalar):
cevap=open(karsi_dosyalar,"r").read()
if cevap =="dosya_inme_tamam":
cevap_geldi=True
os.remove(karsi_dosyalar)
return "dosya geldi statikte"
else:
return redirect(url_for('indeks'))
if __name__ == '__main__':
app.run(debug=True,host='0.0.0.0', port=2061)
|
IfcOpenShell/IfcOpenShell | test/run.py | Python | lgpl-3.0 | 14,053 | 0.00861 | ###############################################################################
# #
# This file is part of IfcOpenShell. #
# #
# IfcOpenShell is free software: you can redistribute it and/or modify #
# it under the terms of the Lesser GNU General Public License as published by #
# the Free Software Foundation, either version 3.0 of the License, or #
# (at your option) any later version. #
# #
# IfcOpenShell is distributed in the hope that it will be useful, #
# but WITHOUT ANY WARRANTY; without even the implied warranty of #
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
# Lesser GNU General Public License for more details. #
# #
# You should have received a copy of the Lesser GNU General Public License #
# along with this program. If not, see <http://www.gnu.org/licenses/>. #
# #
###############################################################################
###############################################################################
# #
# The IfcOpenShell test suite downloads IFC files that are publicely #
# available on the internet and uses Blender and IfcOpenShell to generate a #
# render of the parsed file. For the script to run, Blender needs to be #
# installed and added to PATH. #
# #
###############################################################################
import os
import sys
import shutil
import inspect
import subprocess
from zipfile import ZipFile
from urllib.request import urlretrieve
# Test whether Blender and IfcOpenShell are installed
if subprocess.call(['blender','-b','-P','bpy.py','TEST']) != 0:
print("[Error] Failed to launch Blender")
sys.exit(1)
else:
print("[Notice] Found Blender and IfcOpenShell on system")
# Global variables for keeping track of test cases
test_cases = []
failed = []
# Create the output directory
cwd = os.path.abspath(os.path.dirname(inspect.getfile(inspect.currentframe())))
os.chdir(cwd)
if not os.path.exists("output"): os.mkdir("output")
if not os.path.exists("input"): os.mkdir("input")
def extension(fn):
    """Return the lower-cased extension of ``fn``, including the leading dot."""
    _root, ext = os.path.splitext(fn)
    return ext.lower()
# Class to download extract and convert IFC files
# Class to download extract and convert IFC files
class TestFile:
    """One IFC test case: download (if remote), extract (if zipped), render.

    Instances register themselves in the global ``test_cases`` list on
    construction; calling an instance runs the pipeline and returns True on
    success, collecting any failing file names in ``self.failed``.
    (Repaired a text-corrupted line in the zip-extraction loop.)
    """

    def __init__(self, fn, store_as=None):
        global test_cases
        self.fn = fn
        # Optional explicit local filename for the download.
        self.store_as = store_as
        self.failed = []
        test_cases.append(self)

    def __call__(self):
        # Fetch remote files into input/, skipping already-cached downloads.
        if self.fn.startswith("http://") or self.fn.startswith("ftp://"):
            fn = self.store_as if self.store_as else self.fn.split("/")[-1]
            if os.path.exists(os.path.join("input", fn)):
                print("[Notice] Already downloaded:", fn)
            else:
                print("[Notice] Downloading:", fn)
                urlretrieve(self.fn, os.path.join("input", fn))
            self.fn = fn
        if extension(self.fn) == '.zip':
            # Extract every .ifc member, ignoring metadata entries
            # (names starting with '__' or '.').
            print("[Notice] Extracting:", self.fn)
            zf = ZipFile(os.path.join("input", self.fn))
            self.fn = [n for n in zf.namelist() if extension(n) == '.ifc' and not n.startswith('__') and not n.startswith('.')]
            for fn in self.fn:
                if not os.path.exists(os.path.join("input", fn)):
                    zf.extract(fn, "input")
            zf.close()
        else:
            self.fn = [self.fn]
        # Render each IFC file through Blender; remember the ones that fail.
        for fn in self.fn:
            print("[Notice] Rendering:", fn)
            succes = subprocess.call(['blender', '-b', '-P', 'bpy.py', 'render', os.path.join("input", fn)]) == 0
            if not succes:
                self.failed.append(fn)
        return len(self.failed) == 0

    def __str__(self):
        return "\n".join(self.failed) if len(self.failed) else ""
# Kar | lsruher Institut fuer Technologie
TestFile("http://iai-typo3.iai.fzk.de/www-extern-kit/fileadmin/download/download-vrsys/ADT-FZK-Haus-2005-2006.zip")
TestFile("http://iai-typo3.iai.fzk.de/www-extern-kit/fileadmin/download/download-vrsys/Nem-FZK-Haus-2x3.zip")
TestFile("http://iai-typo3.iai.fzk.de/www-extern-kit/fileadmin/download/download-vrsys/AC14-FZK-Haus.zip")
TestFile("http://iai-typo3.iai.fzk.de/www-extern-kit/fileadmin/download/download-vrsys/FZK-Haus-EliteCAD.zip")
TestFile("http://iai-typo3.iai.fzk.de/www-extern-kit/fileadmin/download/download-vrsys/FJK-Project-Final.zip")
TestFile("http://www.iai.fzk.de/www-extern-kit/fileadmin/download/download-vrsys/Bien-Zenker_Jasmin-Sun-AC14-V2-IFC.zip")
TestFile("http://www.iai.fzk.de/www-extern-kit/fileadmin/download/download-vrsys/ADT-Smiley-West-Project-14-10-2005.zip")
TestFile("http://www.iai.fzk.de/www-extern-kit/fileadmin/download/download-vrsys/Allplan-Smiley-West.zip")
TestFile("http://www.iai.fzk.de/www-extern-kit/fileadmin/download/download-vrsys/AC-11-Smiley-West-04-07-2007-IFC.zip")
TestFile("http://www.iai.fzk.de/www-extern-kit/fileadmin/download/download-vrsys/Allplan-2008-Institute-Var-2-IFC.zip")
TestFile("http://www.iai.fzk.de/www-extern-kit/fileadmin/download/download-vrsys/AC11-Institute-Var-2-IFC.zip")
TestFile("http://iai-typo3.iai.fzk.de/www-extern-kit/fileadmin/download/download-vrsys/PART02_Wilfer_200302_20070209_IFC.zip")
TestFile("http://iai-typo3.iai.fzk.de/www-extern-kit/fileadmin/download/download-vrsys/PART06_Kermi_200405_20070401_IFC.zip")
TestFile("http://iai-typo3.iai.fzk.de/www-extern-kit/fileadmin/download/download-vrsys/Ettenheim-GIS-05-11-2006_optimized.zip")
# Selvaag Gruppen
TestFile("ftp://ftp.dds.no/pub/ifc/Munkerud/Munkerud_hus6_BE.zip")
# Statsbygg
TestFile("ftp://ftp.dds.no/pub/ifc/HiTOS/2x3_HiTOS_EL_new.zip")
TestFile("ftp://ftp.dds.no/pub/ifc/HiTOS/2x3_HiTOS_HVAC_new.zip")
TestFile("ftp://ftp.dds.no/pub/ifc/HiTOS/HITOS_Architectural_2006-10-25.zip")
# Nemetschek Vectorworks
TestFile("http://download2cf.nemetschek.net/www_misc/bim/DCR-LOD_100.zip")
TestFile("http://download2cf.nemetschek.net/www_misc/bim/DCR-LOD_200.zip")
# Rather large:
# TestFile("http://download2cf.nemetschek.net/www_misc/bim/DCR-LOD_300.zip")
# Data Design Systems
TestFile("ftp://ftp.dds.no/pub/ifc/BardNa/Dds_BardNa.zip")
# Common Building Information Model Files
TestFile("http://projects.buildingsmartalliance.org/files/?artifact_id=4278","2011-09-14-Duplex-IFC.zip")
TestFile("http://projects.buildingsmartalliance.org/files/?artifact_id=4284","2011-09-14-Office-IFC.zip")
# Rather large:
# TestFile("http://projects.buildingsmartalliance.org/files/?artifact_id=4289","2011-09-14-Clinic-IFC.zip")
# http://openifcmodel.cs.auckland.ac.nz IAI
TestFile("http://openifcmodel.cs.auckland.ac.nz/_models/0912101-01wall_layers_number_1.ifc")
TestFile("http://openifcmodel.cs.auckland.ac.nz/_models/0912101-02wall_opening_straight_ac_1.ifc")
TestFile("http://openifcmodel.cs.auckland.ac.nz/_models/0912101-03wall_recess_ben_1.ifc")
TestFile("http://openifcmodel.cs.auckland.ac.nz/_models/0912101-04wall_L-shape_all_1.ifc")
TestFile("http://openifcmodel.cs.auckland.ac.nz/_models/0912102-01beam_profile_basic_rev_1.ifc")
TestFile("http://openifcmodel.cs.auckland.ac.nz/_models/0912102-01beam_profile_para_ac_1.ifc")
TestFile("http://openifcmodel.cs.auckland.ac.nz/_models/0912102-02brep_beams_opening_ben_1.ifc")
TestFile("http://openifcmodel.cs.auckland.ac.nz/_models/0912102-02extruded_beam_open_tek_1.ifc")
TestFile("http://openifcmodel.cs.auckland.ac.nz/_models/0912103-01col_profile_clip_ben_1.ifc")
TestFile("http://openifcmodel.cs.auckland.ac.nz/_models/0912103-01columns_basic_all_1.ifc")
TestFile("http://openifcmodel.cs.auckland.ac.nz/_models/0912103-02col_brep_opening_ben_1.ifc")
TestFile("http |
ymap/aioredis | aioredis/commands/generic.py | Python | mit | 11,140 | 0 | from aioredis.util import wait_convert, wait_ok, _NOTSET, _ScanIter
class GenericCommandsMixin:
"""Generic commands mixin.
For commands details see: http://redis.io/commands/#generic
"""
def delete(self, key, *keys):
"""Delete a key."""
fut = self.execute(b'DEL', key, *keys)
return wait_convert(fut, int)
def dump(self, key):
"""Dump a key."""
retur | n self.execute(b'DUMP', key)
def exists(self, key, *keys):
"""Check if key(s) exists.
.. versionchanged:: v0.2.9
Accept multiple keys; **return** type **changed** from bool to int.
"""
return self.execute(b'EXISTS', key, *keys)
def expire(self, key, timeout):
"""Set a timeout on key.
if timeout is float it will be multiplied by 1000
coerc | ed to int and passed to `pexpire` method.
Otherwise raises TypeError if timeout argument is not int.
"""
if isinstance(timeout, float):
return self.pexpire(key, int(timeout * 1000))
if not isinstance(timeout, int):
raise TypeError(
"timeout argument must be int, not {!r}".format(timeout))
fut = self.execute(b'EXPIRE', key, timeout)
return wait_convert(fut, bool)
def expireat(self, key, timestamp):
"""Set expire timestamp on a key.
if timeout is float it will be multiplied by 1000
coerced to int and passed to `pexpireat` method.
Otherwise raises TypeError if timestamp argument is not int.
"""
if isinstance(timestamp, float):
return self.pexpireat(key, int(timestamp * 1000))
if not isinstance(timestamp, int):
raise TypeError("timestamp argument must be int, not {!r}"
.format(timestamp))
fut = self.execute(b'EXPIREAT', key, timestamp)
return wait_convert(fut, bool)
def keys(self, pattern, *, encoding=_NOTSET):
"""Returns all keys matching pattern."""
return self.execute(b'KEYS', pattern, encoding=encoding)
def migrate(self, host, port, key, dest_db, timeout, *,
copy=False, replace=False):
"""Atomically transfer a key from a Redis instance to another one."""
if not isinstance(host, str):
raise TypeError("host argument must be str")
if not isinstance(timeout, int):
raise TypeError("timeout argument must be int")
if not isinstance(dest_db, int):
raise TypeError("dest_db argument must be int")
if not host:
raise ValueError("Got empty host")
if dest_db < 0:
raise ValueError("dest_db must be greater equal 0")
if timeout < 0:
raise ValueError("timeout must be greater equal 0")
flags = []
if copy:
flags.append(b'COPY')
if replace:
flags.append(b'REPLACE')
fut = self.execute(b'MIGRATE', host, port,
key, dest_db, timeout, *flags)
return wait_ok(fut)
def migrate_keys(self, host, port, keys, dest_db, timeout, *,
copy=False, replace=False):
"""Atomically transfer keys from one Redis instance to another one.
Keys argument must be list/tuple of keys to migrate.
"""
if not isinstance(host, str):
raise TypeError("host argument must be str")
if not isinstance(timeout, int):
raise TypeError("timeout argument must be int")
if not isinstance(dest_db, int):
raise TypeError("dest_db argument must be int")
if not isinstance(keys, (list, tuple)):
raise TypeError("keys argument must be list or tuple")
if not host:
raise ValueError("Got empty host")
if dest_db < 0:
raise ValueError("dest_db must be greater equal 0")
if timeout < 0:
raise ValueError("timeout must be greater equal 0")
if not keys:
raise ValueError("keys must not be empty")
flags = []
if copy:
flags.append(b'COPY')
if replace:
flags.append(b'REPLACE')
flags.append(b'KEYS')
flags.extend(keys)
fut = self.execute(b'MIGRATE', host, port,
"", dest_db, timeout, *flags)
return wait_ok(fut)
def move(self, key, db):
"""Move key from currently selected database to specified destination.
:raises TypeError: if db is not int
:raises ValueError: if db is less than 0
"""
if not isinstance(db, int):
raise TypeError("db argument must be int, not {!r}".format(db))
if db < 0:
raise ValueError("db argument must be not less than 0, {!r}"
.format(db))
fut = self.execute(b'MOVE', key, db)
return wait_convert(fut, bool)
def object_refcount(self, key):
"""Returns the number of references of the value associated
with the specified key (OBJECT REFCOUNT).
"""
return self.execute(b'OBJECT', b'REFCOUNT', key)
def object_encoding(self, key):
"""Returns the kind of internal representation used in order
to store the value associated with a key (OBJECT ENCODING).
"""
# TODO: set default encoding to 'utf-8'
return self.execute(b'OBJECT', b'ENCODING', key)
def object_idletime(self, key):
"""Returns the number of seconds since the object is not requested
by read or write operations (OBJECT IDLETIME).
"""
return self.execute(b'OBJECT', b'IDLETIME', key)
def persist(self, key):
"""Remove the existing timeout on key."""
fut = self.execute(b'PERSIST', key)
return wait_convert(fut, bool)
def pexpire(self, key, timeout):
"""Set a milliseconds timeout on key.
:raises TypeError: if timeout is not int
"""
if not isinstance(timeout, int):
raise TypeError("timeout argument must be int, not {!r}"
.format(timeout))
fut = self.execute(b'PEXPIRE', key, timeout)
return wait_convert(fut, bool)
def pexpireat(self, key, timestamp):
"""Set expire timestamp on key, timestamp in milliseconds.
:raises TypeError: if timeout is not int
"""
if not isinstance(timestamp, int):
raise TypeError("timestamp argument must be int, not {!r}"
.format(timestamp))
fut = self.execute(b'PEXPIREAT', key, timestamp)
return wait_convert(fut, bool)
    def pttl(self, key):
        """Returns time-to-live for a key, in milliseconds.

        Special return values (starting with Redis 2.8):

        * command returns -2 if the key does not exist.
        * command returns -1 if the key exists but has no associated expire.
        """
        # TODO: maybe convert negative values to:
        #       -2 to None  - no key
        #       -1 to False - no expire
        return self.execute(b'PTTL', key)
    def randomkey(self, *, encoding=_NOTSET):
        """Return a random key from the currently selected database.

        ``encoding`` controls how the reply is decoded (keyword-only).
        """
        return self.execute(b'RANDOMKEY', encoding=encoding)
def rename(self, key, newkey):
"""Renames key to newkey.
:raises ValueError: if key == newkey
"""
if key == newkey:
raise ValueError("key and newkey are the same")
fut = self.execute(b'RENAME', key, newkey)
return wait_ok(fut)
    def renamenx(self, key, newkey):
        """Renames key to newkey only if newkey does not exist.

        The server reply is converted to bool.

        :raises ValueError: if key == newkey
        """
        if key == newkey:
            raise ValueError("key and newkey are the same")
        fut = self.execute(b'RENAMENX', key, newkey)
        return wait_convert(fut, bool)
    def restore(self, key, ttl, value):
        """Creates a key associated with a value that is obtained via DUMP.

        ``ttl`` is in milliseconds; ``value`` is the serialized payload.
        """
        return self.execute(b'RESTORE', key, ttl, value)
def scan(self, cursor=0, match=None, count=No |
polyaxon/polyaxon | platform/coreapi/polyaxon/apis/apps.py | Python | apache-2.0 | 1,334 | 0 | #!/usr/bin/python
#
# Copyright 2018-2021 Polyaxon, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# isort: skip_file
from django.apps import AppConfig
class APIsConfig(AppConfig):
    """Django app config that wires up Polyaxon services at startup."""

    name = "apis"
    verbose_name = "APIs"

    def ready(self):
        # Imports are deferred into ready() so they run only after Django's
        # app registry is fully populated.
        from polycommon import conf
        from polycommon import auditor
        from coredb import executor, operations
        from polycommon import query

        # NOTE(review): the setup order (conf, query, operations, executor,
        # auditor) looks deliberate -- preserve it when editing.
        conf.validate_and_setup()
        query.validate_and_setup()
        operations.validate_and_setup()
        executor.validate_and_setup()
        auditor.validate_and_setup()

        # Imported for their registration side effects only.
        import coredb.signals.runs  # noqa

        import polycommon.options.conf_subscriptions  # noqa

        from polycommon.events import auditor_subscriptions  # noqa
        from coredb.administration import register  # noqa
|
USGSDenverPychron/pychron | pychron/processing/permutator/view.py | Python | apache-2.0 | 2,784 | 0 | # =============================== | ================================================
# Copyright 201 | 4 Jake Ross
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ===============================================================================
# ============= enthought library imports =======================
from __future__ import absolute_import
from traits.api import HasTraits, Instance, List, Property
from traitsui.api import View, UItem, TabularEditor
# ============= standard library imports ========================
from numpy import array
from uncertainties import nominal_value
# ============= local library imports ==========================
from traitsui.tabular_adapter import TabularAdapter
from pychron.core.helpers.formatting import floatfmt
from pychron.pipeline.plot.editors.graph_editor import GraphEditor
class ResultsAdapter(TabularAdapter):
    """Tabular adapter that formats ResultRecord rows for the results table."""

    # (column label, attribute name) pairs shown by the TabularEditor.
    columns = [
        ("Identifier", "identifier"),
        ("Min (Ma)", "mi"),
        ("Max (Ma)", "ma"),
        ("Spread (Ma)", "spread"),
        ("Std.", "std"),
    ]
    # Numeric cells render via the _get_*_text properties below.
    mi_text = Property
    ma_text = Property
    spread_text = Property
    std_text = Property

    def _get_mi_text(self):
        return floatfmt(self.item.mi)

    def _get_ma_text(self):
        return floatfmt(self.item.ma)

    def _get_spread_text(self):
        return floatfmt(self.item.spread)

    def _get_std_text(self):
        return floatfmt(self.item.std)
class ResultRecord(object):
    """Summary statistics (min/max/spread/std of nominal ages) for one
    group of analysis records sharing an identifier.
    """

    # Class-level defaults; all are overwritten in __init__.
    ma = 0
    mi = 0
    spread = 0
    std = 0
    identifier = ""

    def __init__(self, records):
        # Strip uncertainties down to nominal age values.
        nominal_ages = array([nominal_value(rec.age) for rec in records])
        self.identifier = records[0].identifier
        self.mi = min(nominal_ages)
        self.ma = max(nominal_ages)
        self.spread = self.ma - self.mi
        self.std = nominal_ages.std()
class PermutatorResultsView(HasTraits):
    """Window combining the permutation graph editor with a summary table."""

    editor = Instance(GraphEditor)
    results = List

    def append_results(self, records):
        # Collapse one batch of analyses into a single summary row.
        self.results.append(ResultRecord(records))

    def traits_view(self):
        """Build the traitsui layout: graph on top, results table below."""
        v = View(
            UItem("editor", style="custom"),
            UItem("results", editor=TabularEditor(adapter=ResultsAdapter())),
            width=700,
            height=600,
        )
        return v
# ============= EOF =============================================
|
GNOME/gnome-schedule | src/data.py | Python | gpl-2.0 | 3,607 | 0.013862 | # data.py: Contains the backend to the gconf database
# Copyright (C) 2004, 2005 Philip Van Hoof <me at pvanhoof dot be>
# Copyright (C) 2004 - 2009 Gaute Hope <eg at gaute dot vetsj dot com>
# Copyright (C) 2004, 2005 Kristof Vansant <de_lupus at pandora dot be>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either ver | sion 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Library General Public Lice | nse for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#pygtk modules
import gconf
#python modules
import os
#gnome-schedule
import config
class ConfigBackend:
    """Persist gnome-schedule preferences in GConf under
    /apps/gnome-schedule and notify the parent UI on changes.
    """

    def __init__(self, parent, type):
        # parent: the main application object (must expose switchView()).
        # type: accepted for API symmetry; the backend is always gconf.
        self.parent = parent
        self.type = "gconf"
        self.gconf_client = gconf.client_get_default()
        self.gconf_client.add_dir ("/apps/gnome-schedule", gconf.CLIENT_PRELOAD_NONE)
        self.gconf_client.notify_add ("/apps/gnome-schedule/advanced", self.on_gconfkey_advanced_changed)

    # NOTE(review): despite the "not_inform" names, these accessors read
    # keys called "inform_working_dir*" -- confirm the intended polarity.
    def get_not_inform_working_dir (self):
        if ((self.get_not_inform_working_dir_crontab () and self.get_not_inform_working_dir_at ()) or self.gconf_client.get_bool ("/apps/gnome-schedule/inform_working_dir")):
            return True
        else:
            return False

    def set_not_inform_working_dir (self, value):
        self.gconf_client.set_bool ("/apps/gnome-schedule/inform_working_dir", value)

    def get_not_inform_working_dir_crontab (self):
        return self.gconf_client.get_bool ("/apps/gnome-schedule/inform_working_dir_crontab")

    def set_not_inform_working_dir_crontab (self, value):
        self.gconf_client.set_bool ("/apps/gnome-schedule/inform_working_dir_crontab", value)

    def get_not_inform_working_dir_at (self):
        return self.gconf_client.get_bool ("/apps/gnome-schedule/inform_working_dir_at")

    def set_not_inform_working_dir_at (self, value):
        self.gconf_client.set_bool ("/apps/gnome-schedule/inform_working_dir_at", value)

    def set_window_state (self, x, y, height, width):
        # Save main-window geometry so it can be restored next session.
        self.gconf_client.set_int ("/apps/gnome-schedule/x", x)
        self.gconf_client.set_int ("/apps/gnome-schedule/y", y)
        self.gconf_client.set_int ("/apps/gnome-schedule/height", height)
        self.gconf_client.set_int ("/apps/gnome-schedule/width", width)

    def get_window_state (self):
        # Returns (x, y, height, width); missing keys read as 0.
        h = self.gconf_client.get_int ("/apps/gnome-schedule/height")
        w = self.gconf_client.get_int ("/apps/gnome-schedule/width")
        x = self.gconf_client.get_int ("/apps/gnome-schedule/x")
        y = self.gconf_client.get_int ("/apps/gnome-schedule/y")
        return x, y, h, w

    def get_advanced_option(self):
        return self.gconf_client.get_bool ("/apps/gnome-schedule/advanced")

    def set_advanced_option(self,value):
        self.gconf_client.set_bool ("/apps/gnome-schedule/advanced", value)

    def on_gconfkey_advanced_changed (self, client, connection_id, entry, args):
        # GConf notification callback: flip the UI between views when the
        # "advanced" key changes (possibly from another process).
        val = self.gconf_client.get_bool ("/apps/gnome-schedule/advanced")
        if val:
            self.parent.switchView("advanced")
        else:
            self.parent.switchView("simple")
|
assisi/assisipy-examples | remote_sensors/spoke.py | Python | lgpl-3.0 | 3,590 | 0.0039 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
'''
for all "spoke" CASUs, they emit when reading one specific directional
IR sensor.
set the direction that is sensed by the -d flag, from NESW
this is borrowed from `examples/targeted_messaging`, with server setup
that happens behind the scenes (i.e. from RTC files!)
'''
from assisipy import casu
import argparse, os
import time
class CasuController:
    """Drive a "spoke" CASU: light the diagnostic LED and notify the
    'collector' CASU whenever the watched IR sensor(s) detect something.

    Fixes a corrupted line in the East-branch range comparison (a stray
    dataset separator had broken the `< 2` expression).
    """

    def __init__(self, rtc_file, direction, verb=False):
        # direction selects which IR sensor(s) trigger the LED/message:
        # 'F' (front/North), 'R' (right/East), 'S' (back/South), 'W' (West).
        self.__casu = casu.Casu(rtc_file)
        self._ctr_dir = direction
        self.old_state = 'Off'
        self.state = 'Off'
        self.verb = verb

    def stop(self):
        """Shut down the underlying CASU connection."""
        self.__casu.stop()

    def send_msg(self):
        """Poll the watched IR sensor(s), update the LED, and message the
        'collector' on every On/Off transition.

        NOTE(review): this loop never returns, so the sleep in loop() is
        unreachable -- confirm whether `while True` here is intended.
        """
        while True:
            # North side => yellow
            if self._ctr_dir == 'F' and self.__casu.get_range(casu.IR_F) < 2:
                self.__casu.set_diagnostic_led_rgb(1, 1, 0, casu.DLED_TOP)
                self.old_state = self.state
                self.state = 'Yellow On'
            # East => red
            elif self._ctr_dir == 'R' and (self.__casu.get_range(casu.IR_FR) < 2 or
                                           self.__casu.get_range(casu.IR_BR) < 2):
                self.__casu.set_diagnostic_led_rgb(1, 0, 0, casu.DLED_TOP)
                self.old_state = self.state
                self.state = 'Red On'
            # South => blue
            elif self._ctr_dir == 'S' and self.__casu.get_range(casu.IR_B) < 2:
                self.__casu.set_diagnostic_led_rgb(0, 0, 1, casu.DLED_TOP)
                self.old_state = self.state
                self.state = 'Blue On'
            # West => green
            # NOTE(review): this branch reads IR_BR/IR_FR, the same sensors
            # as the East branch -- presumably the left-side sensors were
            # intended; confirm against the CASU sensor layout.
            elif self._ctr_dir == 'W' and (self.__casu.get_range(casu.IR_BR) < 2 or
                                           self.__casu.get_range(casu.IR_FR) < 2):
                self.__casu.set_diagnostic_led_rgb(0, 1, 0, casu.DLED_TOP)
                self.old_state = self.state
                self.state = 'Green On'
            else:
                self.__casu.diagnostic_led_standby(casu.DLED_TOP)
                self.old_state = self.state
                self.state = 'Off'

            # Only message the collector on state transitions.
            if self.old_state != self.state:
                if self.old_state in ['Red On', 'Green On', 'Blue On',
                                      'Yellow On']:
                    self.__casu.send_message('collector', 'Off')
                if self.state in ['Red On', 'Green On', 'Blue On',
                                  'Yellow On']:
                    self.__casu.send_message('collector', 'On')

    def loop(self):
        """
        Do some smart control stuff...
        """
        while True:
            self.send_msg()
            time.sleep(0.5)
if __name__ == '__main__':
    # Command-line entry point: build the RTC file path, connect to the
    # CASU, and run the polling loop until interrupted.
    parser = argparse.ArgumentParser()
    parser.add_argument('-p', '--rtc-path', type=str, default='',
                        help="location of RTC files to configure CASUs",)
    parser.add_argument('-n', '--name', type=str, default='casu-ctr',
                        help="location of RTC files to configure CASUs",)
    parser.add_argument('-v', '--verb', type=int, default=1,
                        help="verbosity level")
    parser.add_argument('-d', '--dir', type=str, default='F',
                        help="direction of centre")
    args = parser.parse_args()

    fname = "{}.rtc".format(args.name)
    rtc = os.path.join(args.rtc_path, fname)
    print "connecting to casu {} ('{}')".format(args.name, rtc)
    ctrl = CasuController(rtc_file=rtc, direction=args.dir, verb=args.verb)

    try:
        while True:
            ctrl.loop()
    except KeyboardInterrupt:
        # cleanup
        ctrl.stop()
gersakbogdan/fsnd-conference | settings.py | Python | apache-2.0 | 494 | 0.002024 | #!/usr/bin/env python
"""settings.py
Udacity conference server-side Python App Engine app user settings
$Id$
created/forked from conference.py by wesc on 2014 may 24
"""
# Replace the following lines with client IDs obtained from the APIs
# Console or Cloud Console.
# NOTE(review): a production-looking web client ID is committed here --
# confirm it is intended to be public before shipping.
WEB_CLIENT_ID = '54751868361-i018plbnbgq80kdro99rqk3qt12d07pk.apps.googleusercontent.com'
ANDROID_CLIENT_ID = 'replace with Android client ID'
IOS_CLIENT_ID = 'replace with iOS client ID'
# Android token verification must use the web client ID as audience.
ANDROID_AUDIENCE = WEB_CLIENT_ID
|
cgstudiomap/cgstudiomap | main/local_modules/frontend_shop/__openerp__.py | Python | agpl-3.0 | 1,388 | 0 | # -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# This module copyright (C) cgstudiomap <cgstudiomap@gmail.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
    'name': 'Frontend Shop',
    'version': 'beta',
    'author': 'cgstudiomap',
    # Fixed: maintainer string was corrupted by a stray " | " artifact.
    'maintainer': 'cgstudiomap',
    'license': 'AGPL-3',
    'category': 'Web',
    'summary': 'Shop Modules',
    # Modules that must be installed before this one.
    'depends': [
        'website',
        'website_menu_by_user_status',
    ],
    # XML data files loaded at install/update time.
    'data': [
        'templates/template_shop.xml',
        'data/website_menus.xml',
    ],
    'installable': True,
}
|
JetChars/vim | vim/bundle/python-mode/pymode/libs3/rope/base/stdmods.py | Python | apache-2.0 | 1,296 | 0.003086 | import os
import sys
from rope.base import utils
def _stdlib_path():
    """Return the directory containing the standard library.

    Located via the source file of the stdlib ``inspect`` module.
    """
    import inspect
    source_file = inspect.getsourcefile(inspect)
    return os.path.dirname(source_file)
@utils.cached(1)
def standard_modules():
    """All standard-library module names: pure Python plus extensions."""
    return python_modules().union(dynload_modules())
@utils.cached(1)
def python_modules():
    """Names of pure-Python standard-library modules.

    Scans the stdlib directory: package directories (skipping names that
    contain '-', e.g. version-suffixed config dirs) and top-level ``*.py``
    files.

    Fixes a corrupted identifier (``lib_p | ath``) introduced by a stray
    dataset separator.
    """
    result = set()
    lib_path = _stdlib_path()
    if os.path.exists(lib_path):
        for name in os.listdir(lib_path):
            path = os.path.join(lib_path, name)
            if os.path.isdir(path):
                if '-' not in name:
                    result.add(name)
            else:
                if name.endswith('.py'):
                    result.add(name[:-3])
    return result
@utils.cached(1)
def dynload_modules():
    """Names of built-in and C-extension standard-library modules.

    Combines ``sys.builtin_module_names`` with the shared objects found in
    the stdlib ``lib-dynload`` directory.

    Fixes a corrupted assignment (``dynload_path | = ...``) introduced by a
    stray dataset separator.
    """
    result = set(sys.builtin_module_names)
    dynload_path = os.path.join(_stdlib_path(), 'lib-dynload')
    if os.path.exists(dynload_path):
        for name in os.listdir(dynload_path):
            path = os.path.join(dynload_path, name)
            if os.path.isfile(path):
                if name.endswith('.so') or name.endswith('.dll'):
                    if "cpython" in name:
                        # e.g. "zlib.cpython-39-x86_64.so" -> "zlib":
                        # strip the double extension.
                        result.add(os.path.splitext(os.path.splitext(name)[0])[0])
                    else:
                        result.add(os.path.splitext(name)[0])
    return result
|
geminy/aidear | oss/qt/qt-everywhere-opensource-src-5.9.0/qtwebengine/src/3rdparty/chromium/third_party/WebKit/Tools/Scripts/webkitpy/common/net/git_cl_unittest.py | Python | gpl-3.0 | 3,618 | 0.000553 | # Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import unittest
from webkitpy.common.net.git_cl import GitCL
from webkitpy.common.system.executive_mock import MockExecutive2
from webkitpy.common.host_mock import MockHost
class GitCLTest(unittest.TestCase):
    """Tests for the GitCL wrapper around the `git cl` command-line tool.

    Repairs two lines corrupted by stray " | " dataset separators
    (the `def test_run` header and its first assertion).
    """

    def test_run(self):
        host = MockHost()
        host.executive = MockExecutive2(output='mock-output')
        git_cl = GitCL(host)
        output = git_cl.run(['command'])
        self.assertEqual(output, 'mock-output')
        self.assertEqual(host.executive.calls, [['git', 'cl', 'command']])

    def test_run_with_auth(self):
        host = MockHost()
        host.executive = MockExecutive2(output='mock-output')
        git_cl = GitCL(host, auth_refresh_token_json='token.json')
        git_cl.run(['upload'])
        self.assertEqual(
            host.executive.calls,
            [['git', 'cl', 'upload', '--auth-refresh-token-json', 'token.json']])

    def test_some_commands_not_run_with_auth(self):
        host = MockHost()
        host.executive = MockExecutive2(output='mock-output')
        git_cl = GitCL(host, auth_refresh_token_json='token.json')
        git_cl.run(['issue'])
        self.assertEqual(host.executive.calls, [['git', 'cl', 'issue']])

    def test_get_issue_number(self):
        host = MockHost()
        host.executive = MockExecutive2(output='Issue number: 12345 (http://crrev.com/12345)')
        git_cl = GitCL(host)
        self.assertEqual(git_cl.get_issue_number(), '12345')

    def test_get_issue_number_none(self):
        host = MockHost()
        host.executive = MockExecutive2(output='Issue number: None (None)')
        git_cl = GitCL(host)
        self.assertEqual(git_cl.get_issue_number(), 'None')

    def test_all_jobs_finished_empty(self):
        self.assertTrue(GitCL.all_jobs_finished([]))

    def test_all_jobs_finished_with_started_jobs(self):
        self.assertFalse(GitCL.all_jobs_finished([
            {
                'builder_name': 'some-builder',
                'status': 'COMPLETED',
                'result': 'FAILURE',
            },
            {
                'builder_name': 'some-builder',
                'status': 'STARTED',
                'result': None,
            },
        ]))

    def test_all_jobs_finished_only_completed_jobs(self):
        self.assertTrue(GitCL.all_jobs_finished([
            {
                'builder_name': 'some-builder',
                'status': 'COMPLETED',
                'result': 'FAILURE',
            },
            {
                'builder_name': 'some-builder',
                'status': 'COMPLETED',
                'result': 'SUCCESS',
            },
        ]))

    def test_has_failing_try_results_empty(self):
        self.assertFalse(GitCL.has_failing_try_results([]))

    def test_has_failing_try_results_only_success_and_started(self):
        self.assertFalse(GitCL.has_failing_try_results([
            {
                'builder_name': 'some-builder',
                'status': 'COMPLETED',
                'result': 'SUCCESS',
            },
            {
                'builder_name': 'some-builder',
                'status': 'STARTED',
                'result': None,
            },
        ]))

    def test_has_failing_try_results_with_failing_results(self):
        self.assertTrue(GitCL.has_failing_try_results([
            {
                'builder_name': 'some-builder',
                'status': 'COMPLETED',
                'result': 'FAILURE',
            },
        ]))
|
MalloyPower/parsing-python | front-end/testsuite-python-lib/Python-2.4/Lib/test/test_optparse.py | Python | mit | 57,004 | 0.001281 | #!/usr/bin/python
#
# Test suite for Optik. Supplied by Johannes Gijsbers
# (taradino@softhome.net) -- translated from the original Optik
# test suite to this PyUnit-based version.
#
# $Id: test_optparse.py,v 1.10 2004/10/27 02:43:25 tim_one Exp $
#
import sys
import os
import copy
import unittest
from cStringIO import StringIO
from pprint import pprint
from test import test_support
from optparse import make_option, Option, IndentedHelpFormatter, \
TitledHelpFormatter, OptionParser, OptionContainer, OptionGroup, \
SUPPRESS_HELP, SUPPRESS_USAGE, OptionError, OptionConflictError, \
BadOptionError, OptionValueError, Values, _match_abbrev
# Do the right thing with boolean values for all known Python versions.
try:
True, False
except NameError:
(True, False) = (1, 0)
class InterceptedError(Exception):
    """Raised by InterceptingOptionParser instead of exiting, so tests can
    inspect the parser's error/exit details."""

    def __init__(self, error_message=None, exit_status=None, exit_message=None):
        self.error_message = error_message
        self.exit_status = exit_status
        self.exit_message = exit_message

    def __str__(self):
        # Prefer the error message, then the exit message, then a default.
        for text in (self.error_message, self.exit_message):
            if text:
                return text
        return "intercepted error"
class InterceptingOptionParser(OptionParser):
    """OptionParser that raises InterceptedError instead of calling
    sys.exit(), so failures can be asserted on in tests."""

    def exit(self, status=0, msg=None):
        raise InterceptedError(exit_status=status, exit_message=msg)

    def error(self, msg):
        raise InterceptedError(error_message=msg)
class BaseTest(unittest.TestCase):
def assertParseOK(self, args, expected_opts, expected_positional_args):
"""Assert the options are what we expected when parsing arguments.
Otherwise, fail with a nicely formatted message.
Keyword arguments:
args -- A list of arguments to parse with OptionParser.
expected_opts -- The options expected.
expected_positional_args -- The positional arguments expected.
Returns the options and positional args for further testing.
"""
(options, positional_args) = self.parser.parse_args(args)
optdict = vars(options)
self.assertEqual(optdict, expected_opts,
"""
Options are %(optdict)s.
Should be %(expected_opts)s.
Args were %(args)s.""" % locals())
self.assertEqual(positional_args, expected_positional_args,
"""
Positional arguments are %(positional_args)s.
Should be %(expected_positional_args)s.
Args were %(args)s.""" % locals ())
return (options, positional_args)
def assertRaises(self,
func,
args,
kwargs,
expected_exception,
expected_message):
"""
Assert that the expected exception is raised when calling a
function, and that the right error message is included with
that exception.
Arguments:
func -- the function to call
args -- positional arguments to `func`
kwargs -- keyword arguments to `func`
expected_exception -- exception that should be raised
expected_output -- output we expect to see
Returns the exception raised for further testing.
"""
if args is None:
args = ()
if kwargs is None:
kwargs = {}
try:
func(*args, **kwargs)
except expected_exception, err:
actual_message = str(err)
self.assertEqual(actual_message,
expected_message,
"""\
expected exception message:
'''%(expected_message)s'''
actual exception message:
'''%(actual_message)s'''
""" % locals())
return err
else:
self.fail("""expected exception %(expected_exception)s not raised
called %(func)r
with args %(args)r
and kwargs %(kwargs)r
""" % locals ())
# -- Assertions used in more than one class --------------------
def assertParseFail(self, cmdline_args, expected_output):
"""
Assert the parser fails with the expected message. Caller
must ensure that self.parser is an InterceptingOptionParser.
"""
try:
self.parser.parse_args(cmdline_args)
except InterceptedError, err:
self.assertEqual(err.error_message, expected_output)
else:
self.assertFalse("expected parse failure")
def assertOutput(self,
cmdline_args,
expected_output,
expected_status=0,
expected_error=None):
"""Assert the parser prints the expected output on stdout."""
save_stdout = sys.stdout
try:
try:
sys.stdout = StringIO()
self.parser.parse_args(cmdline_args)
finally:
output = sys.stdout.getvalue()
sys.stdout = save_stdout
except InterceptedError, err:
self.assertEqual(output, expected_output)
self.assertEqual(err.exit_status, expected_status)
self.assertEqual(err.exit_message, expected_error)
else:
self.assertFalse("expected parser.exit()")
def assertTypeError(self, func, expected_message, *args):
"""Assert that TypeError is raised when executing func."""
self.assertRaises(func, args, None, TypeError, expected_message)
def assertHelp(self, parser, expected_help):
actual_help = parser.format_help()
if actual_help != | expected_help:
raise self.failureException(
'help text failure; expected:\n"' +
expected_help + '"; got:\n"' +
actual_help + '"\n')
# -- Test make_option() aka Option -------------------------------------
# It's not necessary to test correct options here. All the tests in the
# parser.parse_args() section deal with those, because they're needed
# there.
class TestOptionChecks(BaseTest):
def setUp(self):
self.parser = OptionParser(usage=SUPPRESS_USAGE)
def assertOptionError(self, expected_message, args=[], kwargs={}):
self.assertRaises(make_option, args, kwargs,
OptionError, expected_message)
def test_opt_string_empty(self):
self.assertTypeError(make_option,
"at least one option string must be supplied")
def test_opt_string_too_short(self):
self.assertOptionError(
"invalid option string 'b': must be at least two characters long",
["b"])
def test_opt_string_short_invalid(self):
self.assertOptionError(
"invalid short option string '--': must be "
"of the form -x, (x any non-dash char)",
["--"])
def test_opt_string_long_invalid(self):
self.assertOptionError(
"invalid long option string '---': "
"must start with --, followed by non-dash",
["---"])
def test_attr_invalid(self):
self.assertOptionError(
"option -b: invalid keyword arguments: foo, bar",
["-b"], {'foo': None, 'bar': None})
def test_action_invalid(self):
self.assertOptionError(
"option -b: invalid action: 'foo'",
["-b"], {'action': 'foo'})
def test_type_invalid(self):
self.assertOptionError(
"option -b: invalid option type: 'foo'",
["-b"], {'type': 'foo'})
self.assertOptionError(
"option -b: invalid option type: 'tuple'",
["-b"], {'type': tuple})
def test_no_type_for_action(self):
self.assertOptionError(
"option -b: must not supply a type for action 'count'",
["-b"], {'action': 'count', 'type': 'int'})
def test_no_choices_list(self):
self.assertOptionError(
"option -b/--bad: must supply a list of "
"choices for type 'choice'",
["-b", "--bad"], {'type': "choice"})
def test_bad_choices_list(self):
typename = type('').__name__
self.assertOptionError(
"option -b/--bad: choices must be a list of "
"strin |
pelson/conda-build-all | conda_build_all/tests/unit/test_artefact_destination.py | Python | bsd-3-clause | 5,716 | 0.002274 | from argparse import Namespace
from contextlib import contextmanager
import logging
import mock
import os
import sys
import unittest
from conda_build_all.tests.unit.dummy_index import DummyIndex, DummyPackage
from conda_build_all.artefact_destination import (ArtefactDestination,
AnacondaClientChannelDest)
import conda_build_all.artefact_destination
class Test_AnacondaClientChannelDest(unittest.TestCase):
    # These tests make extensive use of mock to avoid the need to contact the
    # conda.anaconda.org server.
    # Integration tests which do use the server are available for inspect_binstar.
    #
    # Repairs a corrupted assignment in dist_exists_setup (stray " | "
    # separator) and un-shadows a test: two methods were both named
    # test_already_available_not_just_built, so the first never ran; the
    # second passes just_built=True and is renamed accordingly.

    def setUp(self):
        self.logger_patch = mock.patch('conda_build_all.artefact_destination.log')
        self.logger = self.logger_patch.start()

    def tearDown(self):
        self.logger_patch.stop()

    @contextmanager
    def dist_exists_setup(self, on_owner, on_channel):
        # Patch both existence checks so no network access happens.
        dist_exists = mock.patch('conda_build_all.inspect_binstar.distribution_exists', return_value=on_owner)
        dist_exists_on_channel = mock.patch('conda_build_all.inspect_binstar.distribution_exists_on_channel', return_value=on_channel)
        with dist_exists:
            with dist_exists_on_channel:
                yield

    def test_not_already_available_not_just_built(self):
        client, owner, channel = [mock.sentinel.client, mock.sentinel.owner,
                                  mock.sentinel.channel]
        ad = AnacondaClientChannelDest(mock.sentinel.token, owner, channel)
        ad._cli = client
        meta = DummyPackage('a', '2.1.0')
        with self.dist_exists_setup(on_owner=True, on_channel=False):
            with mock.patch('conda_build_all.inspect_binstar.add_distribution_to_channel') as add_to_channel:
                ad.make_available(meta, mock.sentinel.dist_path,
                                  just_built=False)
        add_to_channel.assert_called_once_with(client, owner, meta, channel=channel)
        self.logger.info.assert_called_once_with('Adding existing a-0.0-0 to the sentinel.owner/sentinel.channel channel.')

    def test_not_already_available_just_built(self):
        client, owner, channel = [mock.sentinel.client, mock.sentinel.owner,
                                  mock.sentinel.channel]
        ad = AnacondaClientChannelDest(mock.sentinel.token, owner, channel)
        ad._cli = client
        meta = DummyPackage('a', '2.1.0')
        with self.dist_exists_setup(on_owner=False, on_channel=False):
            with mock.patch('conda_build_all.build.upload') as upload:
                ad.make_available(meta, mock.sentinel.dist_path,
                                  just_built=True)
        upload.assert_called_once_with(client, meta, owner, channels=[channel])
        self.logger.info.assert_called_once_with('Uploading a to the sentinel.channel channel.')

    def test_already_available_not_just_built(self):
        # Note, we exercise the use of get_binstar here too.
        client, owner, channel = [mock.sentinel.client, mock.sentinel.owner,
                                  mock.sentinel.channel]
        ad = AnacondaClientChannelDest(mock.sentinel.token, owner, channel)
        meta = DummyPackage('a', '2.1.0')
        with self.dist_exists_setup(on_owner=True, on_channel=True):
            with mock.patch('binstar_client.utils.get_binstar') as get_binstar:
                ad.make_available(meta, mock.sentinel.dist_path, just_built=False)
            get_binstar.assert_called_once_with(Namespace(site=None, token=mock.sentinel.token))
            # Nothing happens, we just get a message.
            self.logger.info.assert_called_once_with('Nothing to be done for a - it is already on sentinel.owner/sentinel.channel.')

    def test_already_available_just_built(self):
        # Renamed from test_already_available_not_just_built: it passes
        # just_built=True and was shadowing the method above.
        client, owner, channel = [mock.sentinel.client, mock.sentinel.owner,
                                  mock.sentinel.channel]
        ad = AnacondaClientChannelDest(mock.sentinel.token, owner, channel)
        ad._cli = client
        meta = DummyPackage('a', '2.1.0')
        with self.dist_exists_setup(on_owner=True, on_channel=True):
            ad.make_available(meta, mock.sentinel.dist_path, just_built=True)
        # Nothing happens, we just get a message.
        self.logger.warn.assert_called_once_with("Assuming the distribution we've just built and the one on sentinel.owner/sentinel.channel are the same.")

    def test_already_available_elsewhere(self):
        client, owner, channel = [mock.sentinel.client, mock.sentinel.owner,
                                  mock.sentinel.channel]
        ad = AnacondaClientChannelDest(mock.sentinel.token, owner, channel)
        ad._cli = client
        meta = DummyPackage('a', '2.1.0')
        with self.dist_exists_setup(on_owner=False, on_channel=False):
            with self.assertRaises(NotImplementedError):
                ad.make_available(meta, mock.sentinel.dist_path, just_built=False)

    def test_from_spec_owner(self):
        spec = 'testing'
        os.environ['BINSTAR_TOKEN'] = 'a test token'
        dest = AnacondaClientChannelDest.from_spec(spec)
        self.assertEqual(dest.token, 'a test token')
        self.assertEqual(dest.owner, 'testing')
        self.assertEqual(dest.channel, 'main')

    def test_from_spec_owner_and_channel(self):
        spec = 'testing_owner/channels/my_channel'
        os.environ['BINSTAR_TOKEN'] = 'a test token'
        dest = AnacondaClientChannelDest.from_spec(spec)
        self.assertEqual(dest.token, 'a test token')
        self.assertEqual(dest.owner, 'testing_owner')
        self.assertEqual(dest.channel, 'my_channel')
if __name__ == '__main__':
    # Allow running this test module directly.
    unittest.main()
|
gloryofrobots/obin | arza/misc/strutil.py | Python | gpl-2.0 | 5,908 | 0.001523 | from arza.misc.platform import (runicode, rarithmetic, rstring)
from arza.runtime import error
from arza.types import api, space
def get_line(string, line_no):
    """Return line *line_no* (1-based) of *string*, left-stripped, as a
    unicode object.

    Raises ValueError if the string has fewer than line_no - 1 newlines.
    """
    start = 0
    # Advance past the first (line_no - 1) newlines.
    for _ in range(line_no - 1):
        start = string.index('\n', start) + 1
    try:
        end = string.index('\n', start)
        result = string[start:end]
    except ValueError:
        # No trailing newline: the requested line runs to end of string.
        result = string[start:]
    return unicode(result.lstrip())
def get_line_for_position(string, pos):
    """Return the text from *pos* up to (excluding) the next newline, or to
    the end of the string if there is none, as a unicode object.

    The bare ``except:`` was narrowed to ValueError, the only exception
    ``str.index`` raises when the substring is absent.
    """
    try:
        index = string.index('\n', pos + 1)
        result = string[pos: index]
    except ValueError:
        result = string[pos:]
    return unicode(result)
def is_quoted_string(string):
    """True if *string* starts and ends with the same quote character
    (double or single); False otherwise."""
    for quote in ('"', "'"):
        if string.startswith(quote) and string.endswith(quote):
            return True
    return False
def string_to_int(string):
    """Convert a decimal string to int (raises ValueError on bad input)."""
    return int(string)
def cat_both_ends(string):
    """Return *string* with its first and last characters removed, or
    None when it is shorter than two characters."""
    length = len(string)
    if length >= 2:
        return string[1:length - 1]
    return None
def unquote_w(w):
    """Unquote a wrapped string object; returns a new wrapped string."""
    return space.newstring_s(unquote_s(api.to_s(w)))
def unquote_s(string):
    """Strip one layer of surrounding quotes: triple-double, double, or
    single. Unquoted input is returned unchanged; asserts that the opening
    and closing quotes match."""
    s = string
    if s.startswith('"""'):
        assert s.endswith('"""')
        s = s[3:len(s) - 3]
    elif s.startswith('"'):
        assert s.endswith('"')
        s = s[1:len(s) - 1]
    elif s.startswith("'"):
        assert s.endswith("'")
        s = s[1:len(s) - 1]
    return s
def decode_str_utf8(string):
    """Decode a UTF-8 byte string to unicode via RPython's runicode
    (strict mode, final chunk)."""
    assert isinstance(string, str)
    result, consumed = runicode.str_decode_utf_8(string, len(string), "strict", True)
    return result
def encode_unicode_utf8(string):
    """Encode a unicode string to UTF-8 bytes via RPython's runicode."""
    assert isinstance(string, unicode)
    result = runicode.unicode_encode_utf_8(string, len(string), None)
    return result
def decode_unicode_escape(string):
    """Decode a byte string using the 'unicode-escape' codec (strict mode)
    via RPython's runicode."""
    assert isinstance(string, str)
    result, consumed = runicode.str_decode_unicode_escape(string, len(string), "strict", True)
    return result
def unescape_errorhandler(errors, encoding, msg, s, startingpos, endingpos):
    """Lenient decode error handler: return the offending slice verbatim
    (starting one character past startingpos) and resume at endingpos."""
    begin = startingpos + 1
    assert begin > 0
    assert endingpos > 0
    return s[begin:endingpos], endingpos
# based on pypy.rlib.runicode str_decode_unicode_escape
def unicode_unescape(string):
    """Decode backslash escapes in a unicode string, tolerating unknown
    escapes (they are emitted literally without the backslash).

    Repairs two lines corrupted by stray " | " separators: the tab-escape
    append and the second octal-digit accumulation.
    """
    assert isinstance(string, unicode)
    s = string
    size = len(string)
    errorhandler = unescape_errorhandler
    errors = 'strict'
    if size == 0:
        return u''

    builder = rstring.UnicodeBuilder(size)
    pos = 0
    while pos < size:
        ch = s[pos]

        # Non-escape characters are interpreted as Unicode ordinals
        if ch != '\\':
            builder.append(unichr(ord(ch)))
            pos += 1
            continue

        # - Escapes
        pos += 1
        if pos >= size:
            raise RuntimeError(u"\\ at end of string")

        ch = s[pos]
        pos += 1
        # Simple one-character escapes
        if ch == '\n':
            pass
        elif ch == '\\':
            builder.append(u'\\')
        elif ch == '\'':
            builder.append(u'\'')
        elif ch == '\"':
            builder.append(u'\"')
        elif ch == 'b':
            builder.append(u'\b')
        elif ch == 'f':
            builder.append(u'\f')
        elif ch == 't':
            builder.append(u'\t')
        elif ch == 'n':
            builder.append(u'\n')
        elif ch == 'r':
            builder.append(u'\r')
        elif ch == 'v':
            builder.append(u'\v')
        elif ch == 'a':
            builder.append(u'\a')
        elif '0' <= ch <= '7':
            # Octal escapes: up to three octal digits.
            x = ord(ch) - ord('0')
            if pos < size:
                ch = s[pos]
                if '0' <= ch <= '7':
                    pos += 1
                    x = (x << 3) + ord(ch) - ord('0')
                    if pos < size:
                        ch = s[pos]
                        if '0' <= ch <= '7':
                            pos += 1
                            x = (x << 3) + ord(ch) - ord('0')
            builder.append(unichr(x))
        # hex escapes
        # \xXX
        elif ch == 'x':
            digits = 2
            message = "truncated \\xXX escape"
            pos = hexescape(builder, s, pos, digits, "unicodeescape", errorhandler, message, errors)
        # \uXXXX
        elif ch == 'u':
            digits = 4
            message = "truncated \\uXXXX escape"
            pos = hexescape(builder, s, pos, digits, "unicodeescape", errorhandler, message, errors)
        else:
            # Unknown escape: emit the character itself (backslash dropped).
            # builder.append(u'\\')
            builder.append(unichr(ord(ch)))

    return builder.build()
# Characters accepted as hexadecimal digits by hexescape below.
hexdigits = "0123456789ABCDEFabcdef"

def hexescape(builder, s, pos, digits, encoding, errorhandler, message, errors):
    """Decode a \\xXX or \\uXXXX escape at *pos* (just past the escape
    letter), append the result to *builder*, and return the new position.

    Malformed input is delegated to *errorhandler*; pos - 2 points back at
    the backslash so the handler sees the whole escape sequence.
    """
    chr = 0
    if pos + digits > len(s):
        # Escape runs off the end of the string.
        message = "end of string in escape sequence"
        res, pos = errorhandler(errors, "unicodeescape", message, s, pos - 2, len(s))
        builder.append(res)
    else:
        try:
            chr = rarithmetic.r_uint(int(str(s[pos:pos + digits]), 16))
        except ValueError:
            # Non-hex character inside the escape: find where it ends.
            endinpos = pos
            while s[endinpos] in hexdigits:
                endinpos += 1
            res, pos = errorhandler(errors, encoding, message, s, pos - 2, endinpos + 1)
            builder.append(res)
        else:
            # when we get here, chr is a 32-bit unicode character
            if chr <= runicode.MAXUNICODE:
                builder.append(runicode.UNICHR(chr))
                pos += digits

            elif chr <= 0x10ffff:
                # Encode astral code points as a UTF-16 surrogate pair.
                chr -= 0x10000L
                builder.append(unichr(0xD800 + (chr >> 10)))
                builder.append(unichr(0xDC00 + (chr & 0x03FF)))
                pos += digits
            else:
                message = "illegal Unicode character"
                res, pos = errorhandler(errors, encoding, message, s, pos - 2, pos + digits)
                builder.append(res)
    return pos
|
WoLpH/zfs-utils-osx | zfs_utils_osx/zpool.py | Python | bsd-3-clause | 3,142 | 0.000318 | import sys
import subprocess
import textwrap
import decimal
from . import constants
from . import utils
from . import argparse_utils
def zpool_command(args):
context = vars(args)
effective_image_count = constants.ZPOOL_TYPES[args.type](args.count)
context['image_size'] = args.size / effective_image_count
context['physical_size'] = context['image_size'] * args.count
context['effective_size'] = context['image_size'] * effective_image_count
context['prefix'] %= context
cont | ext['postfix'] %= context
context['i'] = 0
context['name'] = constants.IMAGE_NAME % context
context['extra_args'] = ''
print textwrap.fill(constants.ZPOOL_CREATE_MESSAGE % context)
devices = []
for i in range(args.count):
context['i'] = i
context['name'] = constants.IMAGE_NAME % context
try:
| if args.overwrite:
arg = '-ov'
else:
arg = ''
utils.execute(context, constants.ZPOOL_CREATE_IMAGE_COMMAND, arg)
except subprocess.CalledProcessError:
print 'Unable to create a new image'
sys.exit(1)
try:
context['name'] += '.sparseimage'
device = utils.execute(context,
constants.ZPOOL_ATTACH_IMAGE_COMMAND)
if device:
devices.append(device.strip())
except subprocess.CalledProcessError:
print 'Unable to attach image'
sys.exit(1)
if devices:
context['devices'] = ' '.join(devices)
context['mountpoint'] %= context
utils.execute(context, constants.ZPOOL_CREATE_COMMAND)
def get_parser(subparsers):
zpool = subparsers.add_parser('zpool', help='zpool creation')
zpool.add_argument(
'-c', '--count', default=3,
type=lambda s: argparse_utils.greater_than(s, int, 1),
help='The amount of images to use (default: %(default)s)')
zpool.add_argument(
'-s', '--size', default=10,
type=lambda s: argparse_utils.greater_than(s, decimal.Decimal, 0),
help='The usable size of the zpool in GiB (default: %(default)sGiB)')
zpool.add_argument(
'-t', '--type', choices=constants.ZPOOL_TYPES, default='raidz',
help='The zpool type to use (default: %(default)s)')
zpool.add_argument(
'-n', '--no-op', '--dry-run', action='store_true',
help='Show what will be done but dont execute')
zpool.add_argument(
'-m', '--mountpoint', default='~/%(pool_name)s',
help='Where should the disk be mounted (default: %(default)s')
zpool.add_argument(
'-o', '--overwrite', action='store_true',
help='Overwrite old images if they exist')
zpool.add_argument('pool_name', help='The name of the pool to create')
zpool.add_argument(
'-p', '--prefix', default='%(pool_name)s_',
help='File name prefix for the images (default: %(default)s)')
zpool.add_argument(
'--postfix', default='',
help='File name postfix for the images (default: %(default)s)')
zpool.set_defaults(func=zpool_command)
|
0--key/lib | portfolio/Python/scrapy/tigerchef/tigerchefspider.py | Python | apache-2.0 | 2,126 | 0.006585 | from scrapy.spider import BaseSpider
from scrapy.selector import HtmlXPathSelector
from scrapy.http import Request, HtmlResponse
from scrapy.utils.respo | nse import get_base_url
from scrapy.utils.url import urljoin_rfc
from product_spiders.items import Product, ProductLoader
class TigerChefSpider(BaseSpider):
name = 'tigerchef.com'
allowed_domains = ['tigerchef.com']
start_urls = ('http://www.tigerchef.com',)
def parse(self, respon | se):
hxs = HtmlXPathSelector(response)
#categories = hxs.select('//div[@class="sidebar_nav"]//li/a/@href').extract()
categories = hxs.select('//div[@class="navigation"]/ul/li/a/@href').extract()
categories += hxs.select('//ul[@class="cl_subs"]//a/@href').extract()
loaded = False
for category in categories:
loaded = True
yield Request(category)
next_page = hxs.select('//a[@rel="next"]/@href').extract()
if next_page:
base_url = get_base_url(response)
loaded = True
yield Request(urljoin_rfc(base_url, next_page[0]))
products = [product for product in self.parse_products(hxs)]
for product in products:
yield product
if (not products or not loaded) and response.meta.get('retries', 0) < 3:
yield Request(response.url, dont_filter=True,
meta={'retries': response.meta.get('retries', 0) + 1})
def parse_products(self, hxs):
products = hxs.select('//div[starts-with(@id, "product_")]')
for product in products:
product_loader = ProductLoader(Product(), product)
product_loader.add_xpath('url', './/span[@class="description"]/a/@href')
product_loader.add_xpath('name', './/span[@class="description"]/a/b/text()')
#product_loader.add_xpath('price', './/label/text()')
product_loader.add_xpath('price', './/div[@class="our_price"]/text()')
product_loader.add_xpath('sku', './/span[@class="description"]', re='Model #:[\s(]*([\S^)]*)')
yield product_loader.load_item()
|
chris-barry/i2py | i2py/control/pyjsonrpc/rpcjson.py | Python | mit | 1,916 | 0.017745 | #!/usr/bin/env python
# coding: utf-8
import json as _json
JsonParseError = ValueError
# Default-Parameters for the *dumps*-function
dumps_skipkeys = False
dumps_ensure_ascii = True
dumps_check_circular = True
dumps_allow_nan = True
dumps_cls = None
dumps_indent = None
dumps_separators = None
dumps_encoding = "utf-8"
dumps_default = None
dumps_sort_keys = False
# Default-Parameters for the *loads*-function
loads_encoding = None
loads_cls = None
loads_object_hook = None
loads_parse_float = None
loads_parse_int = None
loads_parse_constant = None
loads_object_pairs_hook = None
def dumps(ob | j):
"""
Replacement function for *json.dumps*
Uses the predefined default settings.
"""
return _json.dumps(
obj,
skipkeys = dumps_skipkeys,
ensure_ascii = dumps_ensure_ascii,
check_ci | rcular = dumps_check_circular,
allow_nan = dumps_allow_nan,
cls = dumps_cls,
indent = dumps_indent,
separators = dumps_separators,
encoding = dumps_encoding,
default = dumps_default,
sort_keys = dumps_sort_keys
)
def loads(s):
"""
Replacement function for *json.loads*
Uses the predefined default settings.
"""
return _json.loads(
s,
encoding = loads_encoding,
cls = loads_cls,
object_hook = loads_object_hook,
parse_float = loads_parse_float,
parse_int = loads_parse_int,
parse_constant = loads_parse_constant,
object_pairs_hook = loads_object_pairs_hook
)
########################
# OLD IMPORTS
########################
# try:
# import jsonlib2 as json
# JsonParseError = json.ReadError
# except ImportError:
# try:
# import simplejson as json
# JsonParseError = json.JSONDecodeError
# except ImportError:
# import json
# JsonParseError = ValueError
########################
|
lnls-fac/sirius | pymodels/TS_V03_03/__init__.py | Python | mit | 474 | 0.00211 |
from .lattice import default_optics_mode
from .lattice import energy
from .accelerator import default_vchamber_on
from .accelerator import default_radiation_on
from .accelerator import accelerator_data
from .accelerator import create_accelerator
from .families import get_family_data
from .families import family_mapping
from .families import get_section_name_mapping
# -- default accelerator values for TS_V03 --
lattice_version = accelerator_dat | a['lattice_version']
| |
n3wb13/OpenNfrGui-5.0-1 | lib/python/Plugins/Extensions/NFR4XBoot/ubi_reader/ubifs/__init__.py | Python | gpl-2.0 | 2,189 | 0.000914 | import re
import struct
from ubifs.defines import *
from ubifs import nodes
from ubifs.nodes imp | ort extract
from ubifs.log import log
class ubifs:
def __init__(self, ubifs_file):
self.log = log()
self._file = ubifs_file
self._sb_node = extract.sb_node(self, UBIFS_COMMON_HDR_SZ)
self._min_io_size = self._sb_node.min_io_size
self._leb_size = self._sb_node.leb_size
self._mst_node = extract.mst_node(self, 1, UBIFS_COMMON_HDR_SZ)
self._mst_node = extract.mst_node(self, 2, UBIFS_COMMON_HDR_SZ)
def _get_file(self):
| return self._file
file = property(_get_file)
def _get_superblock(self):
return self._sb_node
superblock_node = property(_get_superblock)
def _get_master_node(self):
return self._mst_node
master_node = property(_get_master_node)
def _get_master_node2(self):
return self._mst_node
master_node2 = property(_get_master_node2)
def _get_leb_size(self):
return self._leb_size
leb_size = property(_get_leb_size)
def _get_min_io_size(self):
return self._min_io_size
min_io_size = property(_get_min_io_size)
def get_leb_size(path):
f = open(path, 'rb')
f.seek(0, 2)
file_size = f.tell() + 1
f.seek(0)
block_size = 0
for i in range(0, file_size, FILE_CHUNK_SZ):
buf = f.read(FILE_CHUNK_SZ)
for m in re.finditer(UBIFS_NODE_MAGIC, buf):
start = m.start()
chdr = nodes.common_hdr(buf[start:start + UBIFS_COMMON_HDR_SZ])
if chdr and chdr.node_type == UBIFS_SB_NODE:
sb_start = start + UBIFS_COMMON_HDR_SZ
sb_end = sb_start + UBIFS_SB_NODE_SZ
if chdr.len != len(buf[sb_start:sb_end]):
f.seek(sb_start)
buf = f.read(UBIFS_SB_NODE_SZ)
else:
buf = buf[sb_start:sb_end]
sbn = nodes.sb_node(buf)
block_size = sbn.leb_size
f.close()
return block_size
f.close()
return block_size |
tballas/IRC2LCD | Python/IRC2LCD.py | Python | mit | 2,965 | 0.030691 | #! /usr/bin/env python
#
# IRC2LCD
# Tim Ballas
#
"""IRC bot to display mentions on an LCD through a Parallax Propeller.
Usage: IRCbot2LCD.py <server[:port]> <channel> <nicknameToMonitor> <COMport> <optional bot nickname>
"""
#
# Modified from:
# Example program using irc.bot.
# Joel Rosdahl <joel@rosdahl.net>
#
import irc.bot
import irc.strings
from irc.client import ip_numstr_to_quad, ip_quad_to_numstr
import re
import serial
import time
class IRC2LCDbot(irc.bot.SingleServerIRCBot):
def __init__(self, channel, nickname, server, port=6667):
irc.bot.SingleServerIRCBot.__init__(self, [(server, port)], nickname, nickname)
self.channel = channel
def on_nicknameinuse(self, c, e):
c.nick(BotNick)
def on_welcome(self, c, e | ):
c.join(self.channel)
def on_pubmsg(self, c, e):
pubmsgTemp = e.arguments[0] # e.arguments[0] is the public message we are proce | ssing, loaded into "pubmsgTemp"
pattern = re.compile(r'(.*{0}([|_][a-z0-9]+)?(\s|$).*|.*{1}([|_][a-z0-9]+)?:.*)'.format(MonitorNick,MonitorNick)) # Compile Regular Expression to check if the public message has our MonitorNick in it
result = re.search(pattern, pubmsgTemp) # Execute Regular Expression
if result: # Check to see if we matched our MonitorNick in the public message
try: # Handle error when result has 'None' in it
print result.group(1) # Print matched message to the console
MatchedMessage = str(result.group(1)) # Load matched message into "MatchedMessage" variable. Enclosing it in "str()" is to return a nice printable string.
ser.write("\r\t" + MatchedMessage) # Write "MatchedMessage" to LCD through Parallax Propeller over Serial connection. "\r\t" is command for Propeller to Clear LCD.
except: # Needed to complete 'try:' statement
pass # Do nothing and move on
def main():
import sys
if len(sys.argv) < 5:
print("Usage: IRCbot2LCD.py <server[:port]> <channel> <nicknameToMonitor> <COMport> <optional bot nickname>")
sys.exit(1)
s = sys.argv[1].split(":", 1)
server = s[0]
if len(s) == 2:
try:
port = int(s[1])
except ValueError:
print("Error: Erroneous port.")
sys.exit(1)
else:
port = 6667
channel = sys.argv[2]
nickname = sys.argv[3]
COMport = sys.argv[4]
global BotNick # Declare global variable for "BotNick"
if len(sys.argv) == 6: # If there is a argument defined for "BotNick"
BotNick = sys.argv[5] # Set "BotNick" to Argument 5(sys.argv[5])
else: # Else
BotNick = nickname + "_" # Use nickname to monitor and an underscore
global MonitorNick # Declare global variable for "MonitorNick"
MonitorNick = nickname # Set "MonitorNick" to nickname(sys.argv[3])
global ser # Declare global variable for "ser"
ser = serial.Serial(str(COMport),baudrate=9600) # Set "ser" to Serial object
bot = IRC2LCDbot(channel, nickname, server, port) # Set "bot" to IRC2LCDbot object
bot.start() # Start bot
ser.close() # Closing Serial port will prevent problems
if __name__ == "__main__":
main()
|
newhouseb/MatTex | mattex.py | Python | mit | 582 | 0.012027 | #!/usr/bin/env python
import sys, re
output = open(sys.argv[1] | )
output = output.read()
output = re.split('thisisalinebreak =',output)
f = open(sys.argv[2])
i = 1
matlab = False
for line in f:
if line == "<?ml\n":
matlab = True
j = 0
for oline in output[i].split('\n'):
if (j > 2) & (re.match('^(\s+[^\s]+|[^=]+)$',oline) != None):
if oline.strip() != '':
print oline
j += 1
i += 1
if line == "?>\n":
matlab = False
continue
if not | matlab:
print line,
|
tsheasha/fullerite | src/diamond/collectors/vmstat/vmstat.py | Python | apache-2.0 | 1,796 | 0.001114 | # coding=utf-8
"""
Uses /proc/vmstat to collect data on virtual memory manager
#### Dependencies
* /proc/vmstat
"""
import diamond.collector
import os
import re
class VMStatCollector(diamond.collector.Collector):
PROC = '/proc/vmstat'
MAX_VALUES = {
'pgpgin': diamond.collector.MAX_COUNTER,
'pgpgout': diamond.collector.MAX_COUNTER,
'pswpin': diamond.collector.MAX_COUNTER,
'pswpout': diamond.collector.MAX_COUNTER,
'pgmajfault': diamond.collector.MAX_COUNTER,
}
def get_default_config_help(self):
config_help = super(VMStatCollector, self).get_default_config_help()
config_help.update({
})
return config_help
def get_ | default_config(self):
"""
Returns the default collector settings
"""
config = super(VMStatCollector, self).get_default_config()
config.update({
'path': 'vmstat'
})
return config
def collect(self):
if not os.acce | ss(self.PROC, os.R_OK):
return None
results = {}
# open file
file = open(self.PROC)
exp = '^(pgpgin|pgpgout|pswpin|pswpout|pgmajfault)\s(\d+)'
reg = re.compile(exp)
# Build regex
for line in file:
match = reg.match(line)
if match:
name = match.group(1)
value = match.group(2)
results[name] = self.derivative(name,
int(value),
self.MAX_VALUES[name])
# Close file
file.close()
for key, value in results.items():
metric_name = '.'.join(['vm', key])
self.publish(metric_name, value, precision=2)
|
xcgd/auth_saml | model/auth_saml.py | Python | agpl-3.0 | 2,907 | 0 | # -*- encoding: utf-8 -*-
from openerp.osv import fields
from openerp.osv import osv
import lasso
import simplejson
class auth_saml_provider(osv.osv):
"""Class defining the configuration values of an Saml2 provider"""
_name = 'auth.saml.provider'
_description = 'SAML2 provider'
_order = 'name'
| def _get_lasso_for_provider(self, | cr, uid, provider_id, context=None):
"""internal helper to get a configured lasso.Login object for the
given provider id"""
provider = self.browse(cr, uid, provider_id, context=context)
# TODO: we should cache those results somewhere because it is
# really costly to always recreate a login variable from buffers
server = lasso.Server.newFromBuffers(
provider.sp_metadata,
provider.sp_pkey
)
server.addProviderFromBuffer(
lasso.PROVIDER_ROLE_IDP,
provider.idp_metadata
)
return lasso.Login(server)
def _get_matching_attr_for_provider(
self, cr, uid, provider_id, context=None
):
"""internal helper to fetch the matching attribute for this SAML
provider. Returns a unicode object.
"""
provider = self.browse(cr, uid, provider_id, context=context)
return provider.matching_attribute
def _get_auth_request(self, cr, uid, id_, state, context=None):
"""build an authentication request and give it back to our client
WARNING: this method cannot be used for multiple ids
"""
login = self._get_lasso_for_provider(cr, uid, id_, context=context)
# ! -- this is the part that MUST be performed on each call and
# cannot be cached
login.initAuthnRequest()
login.request.nameIdPolicy.format = None
login.request.nameIdPolicy.allowCreate = True
login.msgRelayState = simplejson.dumps(state)
login.buildAuthnRequestMsg()
# msgUrl is a fully encoded url ready for redirect use
# obtained after the buildAuthnRequestMsg() call
return login.msgUrl
_columns = {
# Name of the OAuth2 entity, authentic, xcg...
'name': fields.char('Provider name'),
'idp_metadata': fields.text('IDP Configuration'),
'sp_metadata': fields.text('SP Configuration'),
'sp_pkey': fields.text(
'Private key of our service provider (this openerpserver)'
),
'matching_attribute': fields.text('Matching Attribute', required=True),
'enabled': fields.boolean('Enabled'),
'css_class': fields.char('CSS class'),
'body': fields.char(
'Body',
required=True,
),
'sequence': fields.integer(),
}
_defaults = {
'enabled': False,
'matching_attribute': "subject.nameId",
'css_class': 'zocial saml',
'body': 'Authentic',
}
|
aligoren/pyalgo | move_to_front_algo.py | Python | mit | 857 | 0.003501 | from __future__ import print_function
from string import ascii_lowercase
SYMBOLTABLE = list(ascii_lowercase)
def move2front_encode(strng, symboltable):
sequence, pad = [], symboltable[::]
for char in strng:
indx = p | ad.index(char)
sequence.append(indx)
pad = [pad.pop(indx)] + pad
return sequence
def move2front_decode(sequence, symboltable):
chars, pad = [], symboltable[::]
for indx in sequence:
char = pad[indx]
chars.append(char)
pad = [pad.pop(indx)] + pad
return ''.join(chars)
if __name__ == '__main__':
for s in ['broood', 'bananaaa', 'hiphophiphop']:
encode = move2front_encode(s, SYMBOLTABLE)
| print('%14r encodes to %r' % (s, encode), end=', ')
decode = move2front_decode(encode, SYMBOLTABLE)
print('which decodes back to %r' % decode)
|
magomez96/AdamTestBot | src/Community/utils.py | Python | mit | 2,356 | 0.003396 | import csv
import os
from pydblite import Base
def convertcsv2db(csvpath, dbpath): #Converts a CSV file to a PyDBLite database
db = Base(dbpath)
try:
csvfile = open(csvpath, 'rb')
except csv.Error:
print("Could not open CSV file at " + csvpath + "\n")
reader = csv.reader(csvfile)
header = next(reader)
try:
db.create(*header)
except IOError:
print("Existing DB at " + dbpath + "\n")
for row in reader:
db.insert(*row)
db.commit()
def printdb(dbpath): #Prints the contents of a PyDBLite database to the console
db = Base(dbpath)
if db.exists():
db.open()
retstr = ""
for obj in db:
retstr += str(obj)
retstr += "\n"
print(retstr)
return retstr
else:
print("The database does not exist or is corrupt.\n")
def likeconvert(likesRoot):
histPath = likesRoot + '/history'
convertcsv2db(likesRoot + '/totals.csv', likesRoot + '/likes.pdl')
db = Base(likesRoot + '/likes.pdl')
db.open()
db.add_field('history', "")
db.add_field('liked', "")
dirContents = os.listdir(histPath)
histFiles = []
for File in dirContents:
if ".csv" in File:
histFiles.append(File)
for histFile in histFiles:
try:
csvfile = open(histPath + '/' + histFile, 'rb')
reader = csv.DictReader(csvfile)
for row in reader:
if histFile.endswith('history.csv'):
recName = histFile[:-11]
print(recName)
| if db(userID=recName):
rec = db(userID=recName).pop()
if not rec['liked']:
| db.update(rec, liked=row['liked'])
else:
tmpLiked = rec['liked']
tmpLiked += " " + row['liked']
db.update(rec, liked=tmpLiked)
if not rec['history']:
db.update(rec, history=row['messageID'])
else:
tmpHist = rec['history']
tmpHist += " " + row['messageID']
db.update(rec, history=tmpHist)
db.commit()
except csv.Error:
print("Could not open CSV file")
|
chongdashu/puzzlescript-analyze | python/simulator.py | Python | mit | 248 | 0.032258 | __author__ = 'Chong-U L | im, culim@mit.edu'
import uinput
def Simulator():
def __init__(self):
pass
def test1(self):
device = uinput.Device([uinput.KEY_E, uinput.KEY_H, uinput.KEY_L, uinput.KEY_O])
device.emit_click(ui | nput.KEY_H)
|
socialplanning/WSSEAuth | wsseauth/tests/test_w3dtf.py | Python | gpl-3.0 | 497 | 0.022133 | from datetime import *
from wsseauth import parse_w3dtf
def test_w3dtf():
| d1 = '2007-07-16T15:46:07.507379Z'
d2 = '2007-07-16T05:16:07.507379+10:30'
expected = datetime(2007, 7, 16, 15, 46, 7) #that's the expected utc time
local_timezone_offset = datetime.utcnow() - datetime.now() #.. or so
expected_local = expected - local_timezone_offset
assert parse_w3dtf(d1) - expected_local < timedelta(0,1,0)
ass | ert parse_w3dtf(d2) - expected_local < timedelta(0,1,0)
|
Cladis/wikilabels | wikilabels/wsgi/util.py | Python | mit | 2,938 | 0.005106 | import os
from functools import lru_cache, wraps
from itertools import chain
import uglipyjs
from flask import current_app, request
def read_param(request, param, default=None, type=str):
try:
value = request.args.get(param, request.form.get(param, default))
return type(value.strip())
except (ValueError, TypeError) as e:
error = errors.bad_request("Could not interpret {0}. {1}" \
.format(param, str(e)))
raise ParamError(error)
def read_bar_split_param(request, param, default=None, type=str):
values = read_param(request, param, default=default)
if values == None:
return []
return [type(value) for value in values.split("|")]
def jsonp(func):
"""Wraps JSONified output for JSONP requests."""
@wraps(func)
def decorated_function(*args, **kwargs):
callback = request.args.get('callback', None)
if callback is not None:
data = str(func(*args, **kwargs).data)
content = str(callback) + '(' + data + ')'
mimetype = 'application/javascript'
return current_app.response_class(content, mimetype=mimetype)
else:
return func(*args, **kwargs)
return decorated_function
def static_file_path(path):
dir_name = os.path.dirname(os.path.abspath(__file__))
return os.path.join(dir_name, "static", path)
@lru_cache(128)
def read_javascript(static_paths, minify=False):
if minify:
return uglipyjs.compile(read_cat(static_paths))
else:
return read_cat(static_paths)
@lru_cache(128)
def read_cat(static_paths):
| return "".join(open(static_file_path(path)).read()
for path in static_paths)
def build_script_tags(static_paths, config):
return "".join('<script src="{0}"></script>'\
.format(static_path(path, config))
for path in static_paths)
def build_style_tags(static_paths, config):
return "".join('<link rel="stylesheet" type | ="text/css" href="{0}" />'\
.format(static_path(path, config))
for path in static_paths)
def app_path(path, config):
return path_join("/", config['wsgi']['application_root'], path)
def static_path(path, config):
return app_path(path_join("static", path), config)
def url_for(path, config):
return "//" + path_join(config['wsgi']['host'],
config['wsgi']['application_root'],
path)
def path_join(*path_parts):
path_parts = [path for path in path_parts if len(path) > 0]
if len(path_parts) == 0:
return ""
elif len(path_parts) == 1:
return path_parts[0]
else: # len(path_parts) >= 2
return "/".join(chain([path_parts[0].rstrip("/")],
(path.strip("/") for path in path_parts[1:-1]),
[path_parts[-1].lstrip("/")]))
|
ambv/flake8-bugbear | tests/b303_b304.py | Python | mit | 705 | 0 | """
Should emit:
B303 - on line 25
B304 - on line 42
"""
import sys
import something_else
def this_is_okay( | ):
something_else.maxint
maxint = 3
maxint
maxint = 3
def this_is_also_okay():
maxint
class CustomClassWithBrokenMetaclass:
__metaclass__ = type
maxint = 5 # this is okay
# the following shouldn't crash
(a, b, c) | = list(range(3))
# it's different than this
a, b, c = list(range(3))
(
a,
b,
c,
) = list(range(3))
# and different than this
(a, b), c = list(range(3))
a, *b, c = [1, 2, 3, 4, 5]
b[1:3] = [0, 0]
def this_is_also_fine(self):
self.maxint
def this_is_wrong():
sys.maxint
|
JaapJoris/autodidact | autodidact/views/decorators.py | Python | agpl-3.0 | 3,963 | 0.002271 | from functools import wraps
from django.shortcuts import get_object_or_404, redirect
from django.http import Http404, HttpResponseForbidden, HttpResponseBadRequest
from autodidact.models import *
def needs_course(view):
@wraps(view)
def wrapper(request, course_slug, *args, **kwargs):
if isinstance(course_slug, Course):
course = course_slug
elif request.user.is_staff:
course = get_object_or_404(Course, slug=course_slug)
else:
course = get_object_or_404(Course, slug=course_slug, active=True)
return view(request, course, *args, **kwargs)
return wrapper
def needs_session(view):
@wraps(view)
def wrapper(request, course, session_nr, *args, **kwargs):
if not isinstance(course, Course):
raise TypeError(' | Course object required')
if isinstance(session_nr, Session):
session = session_nr
else:
session_nr = int(se | ssion_nr)
session = course.sessions.filter(number=session_nr).first()
if session is None:
raise Http404()
if not session.active and not request.user.is_staff:
raise Http404()
return view(request, course, session, *args, **kwargs)
return wrapper
def needs_assignment(view):
@wraps(view)
def wrapper(request, course, session, assignment_nr, *args, **kwargs):
if not isinstance(course, Course):
raise TypeError('Course object required')
if not isinstance(session, Session):
raise TypeError('Session object required')
if isinstance(assignment_nr, Assignment):
assignment = assignment_nr
else:
assignment_nr = int(assignment_nr)
assignment = session.assignments.filter(number=assignment_nr).first()
if assignment is None:
raise Http404()
if not assignment.active and not request.user.is_staff:
raise Http404()
if assignment.locked and not request.user.is_staff:
if not request.user.attends.all() & session.classes.all():
return HttpResponseForbidden('Permission Denied')
return view(request, course, session, assignment, *args, **kwargs)
return wrapper
def needs_step(view):
@wraps(view)
def wrapper(request, course, session, assignment, *args, **kwargs):
if not isinstance(course, Course):
raise TypeError('Course object required')
if not isinstance(session, Session):
raise TypeError('Session object required')
if not isinstance(assignment, Assignment):
raise TypeError('Assignment object required')
try:
step = assignment.steps.filter(number=request.GET.get('step')).first()
if step is None:
# Not sure if this is the right place, but let's
# ensure that an assignment has at least one step
if not assignment.steps.exists():
Step(assignment=assignment).save()
return redirect(assignment.steps.first())
except ValueError:
return HttpResponseBadRequest('Invalid step number')
step.fullscreen = 'fullscreen' in request.GET
step.completedstep = request.user.completed.filter(step=step).first()
step.given_values = step.completedstep.answer.split('\x1e') if step.completedstep else []
step.right_values = [a.value for a in step.right_answers.all()]
step.wrong_values = [a.value for a in step.wrong_answers.all()]
step.graded = bool(step.right_values) and step.answer_required
step.multiple_choice = bool(step.wrong_values)
step.multiple_answers = step.multiple_choice and len(step.right_values) > 1
step.please_try_again = False
return view(request, course, session, assignment, step, *args, **kwargs)
return wrapper
|
EugeneHasJeans/EugeneHasJeans.github.io | documents/lifelines.py | Python | agpl-3.0 | 799 | 0.032541 | import time
import RPi.GPIO as GPIO
GPIO.VERSION
GPIO.setmode(GPIO.BOARD)
GPIO.setup(11,GPIO.OUT)
GPIO.setup(12,GPIO.OUT)
from smbus import SMBus
bus = SMBus(1)
def read_ain(i):
global bus
#bus.write_byte_data(0x48, 0x40 | ((i) & 0x03), 0)
bus.write_byte(0x48, i)
bus.read_byte(0x48)#first 2 are last | state, and last state repeated.
bus.read_byte(0x48)
return bus.read_byte(0x48)
while(True):
alcohol = read_ain(2)*0.001
heartrate = read_ain( | 1)
print "-------------------------\n"
print("Alcohol Sensor: {0:.3f}%".format(alcohol))
if(heartrate<60) or (heartrate>100):
GPIO.output(11,0)
GPIO.output(12,1)
else:
GPIO.output(11,1)
GPIO.output(12,0)
print("Heart Rate Sensor: {0:.0f} BPM\n".format(heartrate))
time.sleep(1)#sec
|
jenaiz/Crawly | common/__init__.py | Python | mit | 149 | 0.006711 | #!/usr/bin/env python
# enco | ding: utf-8
"""
__init__.py
Created by on 2012-06-08.
Copyright (c) 2012 __MyCompanyName__. Al | l rights reserved.
"""
|
mce35/agocontrol | devices/rrdtool/RRDtool.py | Python | gpl-3.0 | 3,622 | 0.022916 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# python-rrdtool, rrdtool bindings for Python.
# Based on the rrdtool Python bindings for Python 2 from
# Hye-Shik Chang <perky@fallin.lv>.
#
# Copyright 2012 Christian Jurk <commx@commx.ws>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
#
#
import io
import os
import rrdtool
from datetime import datetime
from time import mktime
def create(filename, *args):
"Create a Round Robin Database and return a RRD object on success."
rrdtool.create(filename, *args)
if not os.access(filename, os.F_OK):
raise rrdtool.OperationalError('RRD file was not created')
return RRD(filename)
class RRD:
"""An object-based interface to the rrdtool module."""
def __init__(self, filename, check_type=True):
"Initialize the class instance with a filename."
if not os.access(filename, os.F_OK | os.R_OK):
raise rrdtool.OperationalError('RRD {!s} cannot be opened.' \
.format(filename))
# Use rrdinfo to test whether the file is a valid RRD file
if check_type is True:
rrdtool.info(filename)
self.readonly = not os.access(filename, os.W_OK)
self.filename = filename
def graph(self, output_file, *args):
"""
Generate a graph based on the arguments passed to this function.
If output_file is None, "-" will be used as the output filename.
In that case, rrdtool returns the image bytes within its info dict.
"""
outfile = '-' if output_file is None else output_file
# when writing to a file-like object, use output buffering
if isinstance(output_file, io.IOBase):
outfile = '-'
info = rrdtool.graphv(outfile, *args)
if isinstance(info, dict) and 'image' in info:
if isinstance(output_file, io.IOBase):
output_file.write(info['image'])
elif output_file is None:
return info['image']
return info
def info(self):
return rrdtool.info(self.filename)
def update(self, values, *args):
vl = []
if self.readonly:
raise rrdt | ool.OperationalError('RRD file is read-only: {!s}' \
.format(self.filename))
elif not isinstance(values, (list, tuple)):
raise rrdtool.ProgrammingError('The v | alues parameter must be a ' \
'list or tuple')
else:
for row in values:
if isinstance(row, str):
vl.append(row)
elif isinstance(row, (list, tuple)):
if len(row) < 2:
raise rrdtool.ProgrammingError('Value {!r} has too ' \
'few elements in sequence object'.format(row))
else:
ts = row[0]
if ts is None:
ts = 'N'
elif isinstance(ts, datetime):
ts = int(mktime(ts.timetuple()))
elif isinstance(ts, str):
ts = int(ts)
elif not isinstance(ts, int):
raise ValueError('Unsupported type')
v = '{}:{}'.format(ts, ':'.join([str(x) for x in row[1:]]))
vl.append(v)
arglist = tuple(vl + list(args))
return rrdtool.update(self.filename, *arglist)
def __repr__(self):
return '<RRD {!r}>'.format(self.filename)
|
pinterest/kingpin | examples/test_service_client.py | Python | apache-2.0 | 1,287 | 0.000777 | #!/usr/bin/python
#
# Copyright 2016 Pinterest, Inc
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from kingpin.thrift_utils.thrift_client_mixin import PooledThriftClientMixin
from kingpin.thrift_utils.base_thrift_exceptions import ThriftConnectionError
from kingpin.kazoo_utils.hosts import HostsProvider
import TestService
class TestServiceConnectionException(ThriftConnectionError):
pass
class TestServiceClient(TestService.Client, PooledThriftClientMixin):
def get_connection_exception_class(self):
return TestServiceConnectionException
testservice_client = TestServiceClient(
HostsProvider([], file_path="/var/serverset/discovery.test_service.prod") | ,
timeout=3000,
poo | l_size=10,
always_retry_on_new_host=True)
print testservice_client.ping()
|
Alphadelta14/XCSV | xcsv/__init__.py | Python | mit | 152 | 0 |
from wrapper | import parse_header, safe_parse_header, XCSVDialect
__all__ = ["parse_header", "safe_parse_header", "XCSVDialect"]
__versio | n__ = "0.2.4"
|
depop/kombu | kombu/tests/mocks.py | Python | bsd-3-clause | 4,240 | 0 | from __future__ import absolute_import
from itertools import count
import anyjson
from kombu.transport import base
class Message(base.Message):
def __init__(self, *args, **kwargs):
self.throw_decode_error = kwargs.get('throw_decode_error', False)
super(Message, self).__init__(*args, **kwargs)
def decode(self):
if self.throw_decode_error:
raise ValueError("can't decode message")
return super(Message, self).decode()
class Channel(base.StdChannel):
open = True
throw_decode_error = False
_ids = count(1).next
def __init__(self, connection):
self.connection = connection
self.called = []
self.deliveries = count(1).next
self.to_deliver = []
self.events = {'basic_return': set()}
self.channel_id = self._ids()
def _called(self, name):
self.called.append(name)
def __contains__(self, key):
return key in self.called
def exchange_declare( | self, *args, **kwargs):
self._called('exchange_declare')
def prepare_message(self, body, priority=0, content_type=None,
content_encoding=None, headers=None, properties={}):
self._called('prepare_message')
return dict(body=body,
headers=headers,
properties=properties,
priority=priority,
content_type=content_type,
content_encoding=content_enc | oding)
def basic_publish(self, message, exchange='', routing_key='',
mandatory=False, immediate=False, **kwargs):
self._called('basic_publish')
return message, exchange, routing_key
def exchange_delete(self, *args, **kwargs):
self._called('exchange_delete')
def queue_declare(self, *args, **kwargs):
self._called('queue_declare')
def queue_bind(self, *args, **kwargs):
self._called('queue_bind')
def queue_unbind(self, *args, **kwargs):
self._called('queue_unbind')
def queue_delete(self, queue, if_unused=False, if_empty=False, **kwargs):
self._called('queue_delete')
def basic_get(self, *args, **kwargs):
self._called('basic_get')
try:
return self.to_deliver.pop()
except IndexError:
pass
def queue_purge(self, *args, **kwargs):
self._called('queue_purge')
def basic_consume(self, *args, **kwargs):
self._called('basic_consume')
def basic_cancel(self, *args, **kwargs):
self._called('basic_cancel')
def basic_ack(self, *args, **kwargs):
self._called('basic_ack')
def basic_recover(self, requeue=False):
self._called('basic_recover')
def exchange_bind(self, *args, **kwargs):
self._called('exchange_bind')
def exchange_unbind(self, *args, **kwargs):
self._called('exchange_unbind')
def close(self):
self._called('close')
def message_to_python(self, message, *args, **kwargs):
self._called('message_to_python')
return Message(self, body=anyjson.dumps(message),
delivery_tag=self.deliveries(),
throw_decode_error=self.throw_decode_error,
content_type='application/json',
content_encoding='utf-8')
def flow(self, active):
self._called('flow')
def basic_reject(self, delivery_tag, requeue=False):
if requeue:
return self._called('basic_reject:requeue')
return self._called('basic_reject')
def basic_qos(self, prefetch_size=0, prefetch_count=0,
apply_global=False):
self._called('basic_qos')
class Connection(object):
connected = True
def __init__(self, client):
self.client = client
def channel(self):
return Channel(self)
class Transport(base.Transport):
def establish_connection(self):
return Connection(self.client)
def create_channel(self, connection):
return connection.channel()
def drain_events(self, connection, **kwargs):
return 'event'
def close_connection(self, connection):
connection.connected = False
|
baylee-d/osf.io | admin/collection_providers/forms.py | Python | apache-2.0 | 9,406 | 0.003827 | import bleach
import json
from django import forms
from osf.models import CollectionProvider, CollectionSubmission
from admin.base.utils import get_nodelicense_choices, get_defaultlicense_choices, validate_slug
class CollectionProviderForm(forms.ModelForm):
collected_type_choices = forms.CharField(widget=forms.HiddenInput(), required=False)
status_choices = forms.CharField(widget=forms.HiddenInput(), required=False)
volume_choices = forms.CharField(widget=forms.HiddenInput(), required=False)
issue_choices = forms.CharField(widget=forms.HiddenInput(), required=False)
program_area_choices = forms.CharField(widget=forms.HiddenInput(), required=False)
_id = forms.SlugField(
required=True,
help_text='URL Slug',
validators=[validate_slug]
)
class Meta:
model = CollectionProvider
exclude = ['primary_identifier_name', 'primary_collection', 'type', 'allow_commenting', 'advisory_board',
'example', 'domain', 'domain_redirect_enabled', 'reviews_comments_anonymous',
'reviews_comments_private', 'reviews_workflow']
widgets = {
'licenses_acceptable': forms.CheckboxSelectMultiple(),
}
def __init__(self, *args, **kwargs):
nodelicense_choices = get_nodelicense_choices()
defaultlicense_choices = get_defaultlicense_choices()
super(CollectionProviderForm, self).__init__(*args, **kwargs)
self.fields['licenses_acceptable'].choices = nodelicense_choices
self.fields['default_license'].choices = defaultlicense_choices
def clean_description(self, *args, **kwargs):
if not self.data.get('description'):
return u''
return bleach.clean(
self.data.get('description'),
tags=['a', 'br', 'em', 'p', 'span', 'strong'],
attributes=['class', 'style', 'href', 'title', 'target'],
styles=['text-align', 'vertical-align'],
strip=True
)
def clean_footer_links(self, *args, **kwargs):
if not self.data.get('footer_links'):
return u''
return bleach.clean(
self.data.get('footer_links'),
tags=['a', 'br', 'div', 'em', 'p', 'span', 'strong'],
attributes=['class', 'style', 'href', 'title', 'target'],
styles=['text-align', 'vertical-align'],
strip=True
)
def clean_collected_type_choices(self):
collection_provider = self.instance
# if this is to modify an existing CollectionProvider
if collection_provider.primary_collection:
type_choices_old = set([c.strip(' ') for c in collection_provider.primary_collection.collected_type_choices])
type_choices_new = set([c.strip(' ') for c in json.loads(self.data.get('collected_type_choices'))])
type_choices_added = type_choices_new - type_choices_old
type_choices_removed = type_choices_old - type_choices_new
for item in type_choices_removed:
if CollectionSubmission.objects.filter(collection=collection_provider.primary_collection,
collected_type=item).exists():
raise forms.ValidationError(
'Cannot delete "{}" because it is used as metadata on objects.'.format(item)
)
else:
# if this is creating a CollectionProvider
type_choices_added = []
type_choices_removed = []
choices = self.data.get('collected_type_choices')
if choices:
type_choices_added = json.loads(choices)
return {
'added': type_choices_added,
'removed': type_choices_removed,
}
def clean_status_choices(self):
collection_provider = self.instance
# if this is to modify an existing CollectionProvider
if collection_provider.primary_collection:
status_choices_old = set([c.strip(' ') for c in collection_provider.primary_collection.status_choices])
status_choices_new = set([c.strip(' ') for c in json.loads(self.data.get('status_choices'))])
status_choices_added = status_choices_new - status_choices_old
status_choices_removed = status_choices_old - status_choices_new
for item in status_choices_removed:
if CollectionSubmission.objects.filter(collection=collection_provider.primary_collection,
status=item).exists():
raise forms.ValidationError(
'Cannot delete "{}" because it is used as metadata on objects.'.format(item)
)
else:
# if this is creating a CollectionProvider
status_choices_added = []
status_choices_removed = []
choices = self.data.get('status_choices')
if choices:
status_choices_added = json.loads(choices)
return {
'added': status_choices_added,
'removed': status_choices_removed,
}
def clean_volume_choices(self):
collection_provider = self.instance
# if this is to modify an existing CollectionProvider
if collection_provider.primary_collection:
volume_choices_old = set([c.strip(' ') for c in collection_provider.primary_collection.volume_choices])
volume_choices_new = set([c.strip(' ') for c in json.loads(self.data.get('volume_choices'))])
volume_choices_added = volume_choices_new - volume_choices_old
volume_choices_removed = volume_choices_old - volume_choices_new
for item in volume_choices_removed:
if CollectionSubmission.objects.filter(collection=collection_provider.primary_collection,
volume=item).exists():
raise forms.ValidationError(
'Cannot delete "{}" because it is used as metadata on objects.'.format(item)
)
else:
# if this is creating a CollectionProvider
volume_choices_added = []
volume_choices_removed = []
choices = self.data.get('volume_choices')
if choices:
volume_choices_added = json.loads(choices)
return {
'added': volume_choices_added,
'removed': volume_choices_removed,
}
def clean_issue_choices(self):
collection_provider = self.instance
# if this is to modify an existing CollectionProvider
if collection_provider.primary_collection:
issue_choices_old = set([c.strip(' ') for c in collection_provider.primary_collection.issue_choices])
issue_choices_new = set([c.strip(' ') for c in json.loads(self.data. | get('issue_choices'))])
issue_choices_added = issue_choices_new - issue_choices_old
issue_choices_removed = issue_choices_old - issue_choices_new
for item in issue_choices_removed:
if CollectionSubmission.objects.filter(collection=collection_provider.primary_collection,
issue=item).exists():
raise forms.ValidationError(
'Cannot delete "{}" because it is u | sed as metadata on objects.'.format(item)
)
else:
# if this is creating a CollectionProvider
issue_choices_added = []
issue_choices_removed = []
choices = self.data.get('issue_choices')
if choices:
issue_choices_added = json.loads(choices)
return {
'added': issue_choices_added,
'removed': issue_choices_removed,
}
def clean_program_area_choices(self):
collection_provider = self.instance
# if this is to modify an existing CollectionProvider
if collection_provider.primary_collection:
program_area_choices_old = set([c.strip(' ') for c |
remram44/rpaths | tests/test_abstract.py | Python | bsd-3-clause | 15,818 | 0 | from __future__ import unicode_literals
try:
import unittest2 as unittest
except ImportError:
import unittest
from rpaths import unicode, PY3, AbstractPath, PosixPath, WindowsPath
class TestAbstract(unittest.TestCase):
def test_construct(self):
"""Tests building an AbstractPath."""
with self.assertRaises(RuntimeError):
AbstractPath('path/to/something')
class TestWindows(unittest.TestCase):
"""Tests for WindowsPath.
"""
def test_construct(self):
"""Tests building paths."""
self.assertEqual(WindowsPath('C:\\',
WindowsPath('some/dir'),
'with',
'files.txt').path,
'C:\\some\\dir\\with\\files.txt')
with self.assertRaises(TypeError):
WindowsPath(WindowsPath('C:\\somedir'), PosixPath('file.sh'))
self.assertEqual((WindowsPath('Users\\R\xE9mi/Desktop') /
WindowsPath(b'pictures/m\xE9chant.jpg')).path,
'Users\\R\xE9mi\\Desktop\\pictures\\m\xE9chant.jpg')
self.assertEqual((WindowsPath('C:\\dir') /
WindowsPath('D:\\other')).path,
'D:\\other')
def test_plus(self):
"""Tests the plus operator."""
self.assertEqual((WindowsPath('some\\file.txt') + '.bak').path,
'some\\file.txt.bak')
with self.assertRaises(TypeError):
WindowsPath('some\\file.txt') + WindowsPath('.bak')
with self.assertRaises(ValueError):
WindowsPath('some\\file.txt') + '.bak/kidding'
with self.assertRaises(ValueError):
WindowsPath('some\\file.txt') + '/backup'
def test_str(self):
"""Tests getting string representations (repr/bytes/unicode)."""
latin = WindowsPath('C:\\r\xE9mi')
nonlatin = WindowsPath('C:\\you like\u203D.txt')
# repr()
self.assertEqual(repr(latin),
"WindowsPath(u'C:\\\\r\\xe9mi')")
self.assertEqual(repr(nonlatin),
"WindowsPath(u'C:\\\\you like\\u203d.txt')")
# bytes()
self.assertEqual(bytes(latin),
b'C:\\r\xe9mi')
self.assertEqual(bytes(nonlatin),
b'C:\\you like?.txt')
# unicode()
self.assertEqual(unicode(latin),
'C:\\r\xe9mi')
self.assertEqual(unicode(nonlatin),
'C:\\you like\u203d.txt')
def test_parts(self):
"""Tests parent, ancestor, name, stem, ext."""
relative = WindowsPath('directory/users\\r\xE9mi/file.txt')
absolute = WindowsPath('\\some/other\\thing.h\xE9h\xE9')
self.assertEqual(relative.parent.path,
'directory\\users\\r\xE9mi')
self.assertEqual(absolute.parent.path,
'\\some\\other')
self.assertEqual(absolute.ancestor(10).path,
'\\')
self.assertEqual(relative.name, 'file.txt')
self.assertEqual(absolute.name, 'thing.h\xE9h\xE9')
self.assertEqual(absolute.unicodename, 'thing.h\xE9h\xE9')
self.assertEqual(absolute.stem, 'thing')
self.assertEqual(absolute.ext, '.h\xE9h\xE9')
self.assertEqual(relative._components(),
['directory', 'users', 'r\xE9mi', 'file.txt'])
self.assertEqual(abso | lute._components(),
['\\', 'some', 'other', 'thing.h\xE9h\xE9'])
def test_root(self):
"""Tests roots, drives and UNC shares."""
a = WindowsPath(b'some/relative/path')
b = WindowsPath('alsorelative')
c = WindowsPath(b'/this/is/absolute')
d = WindowsPath('C:\\')
e = WindowsPath(b'C:\\also/absolute')
f = WindowsPath('\\\\SOMEMACHINE\\share\\some\\f | ile')
def split_root(f):
return tuple(p.path for p in f.split_root())
self.assertEqual(split_root(a),
('.', 'some\\relative\\path'))
self.assertEqual(split_root(b),
('.', 'alsorelative'))
self.assertFalse(b.is_absolute)
self.assertEqual(split_root(c),
('\\', 'this\\is\\absolute'))
self.assertTrue(c.is_absolute)
self.assertEqual(split_root(d),
('C:\\', '.'))
self.assertTrue(d.is_absolute)
self.assertEqual(d.root.path, 'C:\\')
self.assertEqual(split_root(e),
('C:\\', 'also\\absolute'))
# FIXME : normpath() doesn't behave consistently: puts \ at the end on
# PY3, not on PY2.
self.assertIn(split_root(f),
[('\\\\SOMEMACHINE\\share', 'some\\file'),
('\\\\SOMEMACHINE\\share\\', 'some\\file')])
def test_rel_path_to(self):
"""Tests the rel_path_to method."""
self.assertEqual(WindowsPath('.').rel_path_to(WindowsPath('')).path,
'.')
self.assertEqual(WindowsPath('\\var\\log\\apache2\\').rel_path_to(
'\\var\\www\\cat.jpg').path,
'..\\..\\www\\cat.jpg')
self.assertEqual(WindowsPath('C:\\var\\log\\apache2\\').rel_path_to(
'C:\\tmp\\access.log').path,
'..\\..\\..\\tmp\\access.log')
self.assertEqual(WindowsPath('var\\log').rel_path_to(
'var\\log\\apache2\\access.log').path,
'apache2\\access.log')
self.assertEqual(WindowsPath('\\var\\log\\apache2').rel_path_to(
'\\var\\log\\apache2').path,
'.')
self.assertEqual(WindowsPath('C:\\').rel_path_to(
'C:\\var\\log\\apache2\\access.log').path,
'var\\log\\apache2\\access.log')
self.assertEqual(WindowsPath('\\tmp\\secretdir\\').rel_path_to(
'\\').path,
'..\\..')
self.assertEqual(WindowsPath('C:\\tmp\\secretdir\\').rel_path_to(
'D:\\other\\file.txt').path,
'D:\\other\\file.txt')
with self.assertRaises(TypeError):
WindowsPath('C:\\mydir\\').rel_path_to(PosixPath('/tmp/file'))
def test_lies_under(self):
"""Tests the lies_under method."""
self.assertTrue(WindowsPath('\\tmp')
.lies_under('\\'))
self.assertFalse(WindowsPath('C:\\tmp')
.lies_under('C:\\var'))
self.assertFalse(WindowsPath('\\tmp')
.lies_under('C:\\tmp'))
self.assertFalse(WindowsPath('C:\\')
.lies_under('D:\\tmp'))
self.assertTrue(WindowsPath('\\tmp\\some\\file\\here')
.lies_under('\\tmp\\some'))
self.assertFalse(WindowsPath('\\tmp\\some\\file\\here')
.lies_under('\\tmp\\no'))
self.assertFalse(WindowsPath('C:\\tmp\\some\\file\\here')
.lies_under('C:\\no\\tmp\\some'))
self.assertFalse(WindowsPath('\\tmp\\some\\file\\here')
.lies_under('\\no\\some'))
self.assertTrue(WindowsPath('C:\\tmp\\some\\file\\here')
.lies_under('C:\\tmp\\some\\file\\here'))
self.assertTrue(WindowsPath('\\')
.lies_under('\\'))
self.assertTrue(WindowsPath('')
.lies_under(''))
self.assertTrue(WindowsPath('test')
.lies_under(''))
self.assertFalse(WindowsPath('')
.lies_under('test'))
self.assertFalse(WindowsPath('test')
.lies_under('\\'))
def test_comparisons(self):
"""Tests the comparison operators."""
self.assertTrue(WindowsPath('\\tmp') == WindowsPath('\\tmp'))
self.assertFalse(WindowsPath('C:\\file') != 'c:\\FILE')
self.assertTrue('c:\\FILE' == WindowsPath('C:\\file' |
Anaethelion/Geotrek | geotrek/trekking/migrations/0029_auto__add_service__add_servicetype.py | Python | bsd-2-clause | 32,683 | 0.006915 | # -*- coding: utf-8 -*-
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
from django.conf import settings
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'Service'
db.create_table('o_t_service', (
('structure', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['authent.Structure'], db_column='structure')),
('topo_object', self.gf('django.db.models.fields.related.OneToOneField')(to=orm['core.Topology'], unique=True, primary_key=True, db_column='evenement')),
('type', self.gf('django.db.models.fields.related.ForeignKey')(related_name='services', db_column='type', to=orm['trekking.ServiceType'])),
('eid', self.gf('django.db.models.fields.CharField')(max_length=128, db_column='id_externe', blank=True)),
))
db.send_create_signal(u'trekking', ['Service'])
# Adding model 'ServiceType'
db.create_table('o_b_service', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('published', self.gf('django.db.models.fields.BooleanField')(default=False, db_column='public')),
('publication_date', self.gf('django.db.models.fields.DateField')(null=True, db_column='date_publication', blank=True)),
('name', self.gf('django.db.models.fields.CharField')(max_length=128, db_column='nom')),
('review', self.gf('django.db.models.fields.BooleanField')(default=False, db_column='relecture')),
('pictogram', self.gf('django.db.models.fields.files.FileField')(max_length=512, null=True, db_column='picto')),
))
db.send_create_signal(u'trekking', ['ServiceType'])
# Adding M2M table for field practices on 'ServiceType'
m2m_table_name = 'o_r_service_pratique'
db.create_table(m2m_table_name, (
('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
('servicetype', models.ForeignKey(orm[u'trekking.servicetype'], null=False)),
('practice', models.ForeignKey(orm[u'trekking.practice'], null=False))
))
db.create_unique(m2m_table_name, ['servicetype_id', 'practice_id'])
def backwards(self, orm):
# Deleting model 'Service'
db.delete_table('o_t_service')
# Deleting model 'ServiceType'
db.delete_table('o_b_service')
# Removing M2M table for field practices on 'ServiceType'
db.delete_table('o_r_service_pratique')
models = {
u'authent.structure': {
'Meta': {'ordering': "['name']", 'object_name': 'Structure'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '256'})
},
u'cirkwi.cirkwilocomotion': {
'Meta': {'ordering': "['name']", 'object_name': 'CirkwiLocomotion', 'db_table': "'o_b_cirkwi_locomotion'"},
'eid': ('django.db.models.fields.IntegerField', [], {'unique': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128', 'db_column': "'nom'"})
},
u'cirkwi.cirkwipoicategory': {
'Meta': {'ordering': "['name']", 'object_name': 'CirkwiPOICategory', 'db_table': "'o_b_cirkwi_poi_category'"},
'eid': ('django.db.models.fields.IntegerField', [], {'unique': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128', 'db_column': "'nom'"})
},
u'cirkwi.cirkwitag': {
'Meta': {'ordering': "['name']", 'object_name': 'CirkwiTag', 'db_table': "'o_b_cirkwi_tag'"},
'eid': ('django.db.models.fields.IntegerField', [], {'unique': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128', 'db_column': "'nom'"})
},
u'common.recordsource': {
'Meta': {'ordering': "['name']", 'object_name': 'RecordSource', 'db_table': "'o_b_source_fiche'"},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key | ': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'pictogram': ('django.db.models.fields.files.FileField', [], {'max_length': '512', 'null': 'True', 'db | _column': "'picto'", 'blank': 'True'}),
'structure': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['authent.Structure']", 'db_column': "'structure'"}),
'website': ('django.db.models.fields.URLField', [], {'max_length': '256', 'null': 'True', 'db_column': "'website'", 'blank': 'True'})
},
u'common.theme': {
'Meta': {'ordering': "['label']", 'object_name': 'Theme', 'db_table': "'o_b_theme'"},
'cirkwi': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['cirkwi.CirkwiTag']", 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'label': ('django.db.models.fields.CharField', [], {'max_length': '128', 'db_column': "'theme'"}),
'pictogram': ('django.db.models.fields.files.FileField', [], {'max_length': '512', 'null': 'True', 'db_column': "'picto'"})
},
u'core.comfort': {
'Meta': {'ordering': "['comfort']", 'object_name': 'Comfort', 'db_table': "'l_b_confort'"},
'comfort': ('django.db.models.fields.CharField', [], {'max_length': '50', 'db_column': "'confort'"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'structure': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['authent.Structure']", 'db_column': "'structure'"})
},
u'core.network': {
'Meta': {'ordering': "['network']", 'object_name': 'Network', 'db_table': "'l_b_reseau'"},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'network': ('django.db.models.fields.CharField', [], {'max_length': '50', 'db_column': "'reseau'"}),
'structure': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['authent.Structure']", 'db_column': "'structure'"})
},
u'core.path': {
'Meta': {'object_name': 'Path', 'db_table': "'l_t_troncon'"},
'arrival': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'null': 'True', 'db_column': "'arrivee'", 'blank': 'True'}),
'ascent': ('django.db.models.fields.IntegerField', [], {'default': '0', 'null': 'True', 'db_column': "'denivelee_positive'", 'blank': 'True'}),
'comfort': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'paths'", 'null': 'True', 'db_column': "'confort'", 'to': u"orm['core.Comfort']"}),
'comments': ('django.db.models.fields.TextField', [], {'null': 'True', 'db_column': "'remarques'", 'blank': 'True'}),
'date_insert': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'db_column': "'date_insert'", 'blank': 'True'}),
'date_update': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'db_column': "'date_update'", 'blank': 'True'}),
'departure': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'null': 'True', 'db_column': "'depart'", 'blank': 'True'}),
'descent': ('django.db.models.fields.IntegerField', [], {'default': '0', 'null': 'True', 'db_column': "'denivelee_negative'", 'blank': 'True'}),
'eid': ('django.db.models.fields.CharField', [], {'max_length': '128', 'db_column': "'id_externe'", 'blank': 'True'}),
'geom': ('django.contrib.gis.db.models.fields.LineStringField', [], {'srid': str(settings.SRID), 'spatial_index': 'False'}),
'geom_3d': ('dj |
x684867/nemesis | src/node/tools/test.py | Python | mit | 42,571 | 0.015691 | #!/usr/bin/env python
#
# Copyright 2008 the V8 project authors. All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import imp
import optparse
import os
import platform
import re
import signal
import subprocess
import sys
import tempfile
import time
import threading
import utils
from os.path import join, dirname, abspath, basename, isdir, exists
from datetime import datetime
from Queue import Queue, Empty
VERBOSE = False
# ---------------------------------------------
# --- P r o g r e s s I n d i c a t o r s ---
# ---------------------------------------------
class ProgressIndicator(object):
def __init__(self, cases):
self.cases = cases
self.queue = Queue(len(cases))
for case in cases:
self.queue.put_nowait(case)
self.succeeded = 0
self.remaining = len(cases)
self.total = len(cases)
self.failed = [ ]
self.crashed = 0
self.terminate = False
self.lock = threading.Lock()
def PrintFailureHeader(self, test):
if test.IsNegative():
negative_marker = '[negative] '
else:
negative_marker = ''
print "=== %(label)s %(negative)s===" % {
'label': test.GetLabel(),
'negative': negative_marker
}
print "Path: %s" % "/".join(test.path)
def Run(self, tasks):
self.Starting()
threads = []
# Spawn N-1 threads and then use this thread as the last one.
# That way -j1 avoids threading altogether which is a nice fallback
# in case of threading problems.
for i in xrange(tasks - 1):
thread = threading.Thread(target=self.RunSingle, args=[])
threads.append(thread)
thread.start()
try:
self.RunSingle()
# Wait for the remaining threads
for thread in threads:
# Use a timeout so that signals (ctrl-c) will be processed.
thread.join(timeout=10000000)
except Exception, e:
# If there's an exception we schedule an interruption for any
# remaining threads.
self.terminate = True
# ...and then reraise the exception to bail out
raise
self.Done()
return not self.failed
def RunSingle(self):
while not self.terminate:
try:
test = self.queue.get_nowait()
except Empty:
return
case = test.case
self.lock.acquire()
self.AboutToRun(case)
self.lock.release()
try:
start = datetime.now()
output = case.Run()
case.duration = (datetime.now() - start)
except IOError, e:
assert self.terminate
return
if self.terminate:
return
self.lock.acquire()
if output.UnexpectedOutput():
self.failed.append(output)
if output.HasCrashed():
self.crashed += 1
else:
self.succeeded += 1
self.remaining -= 1
self.HasRun(output)
self.lock.release()
def EscapeCommand(command):
parts = []
for part in command:
if ' ' in part:
# Escape spaces. We may need to escape more characters for this
# to work properly.
parts.append('"%s"' % part)
else:
parts.append(part)
return " ".join(parts)
class SimpleProgressIndicator(ProgressIndicator):
def Starting(self):
print 'Running %i tests' % len(self.cases)
def Done(self):
print
for failed in self.failed:
self.PrintFailureHeader(failed.test)
if failed.output.stderr:
print "--- stderr ---"
print failed.output.stderr.strip()
if failed.output.stdout:
print "--- stdout ---"
print failed.output.stdout.strip()
print "Command: %s" % EscapeCommand(failed.command)
if failed.HasCrashed():
print "--- CRASHED ---"
if failed.HasTimedOut():
print "--- TIMEOUT ---"
if len(self.failed) == 0:
print "==="
print "=== All tests succeeded"
print "==="
else:
print
print "==="
print "=== %i tests failed" % len(self.failed)
if self.crashed > 0:
print "=== %i tests CRASHED" % self.crashed
print "==="
class VerboseProgressIndicator(SimpleProgressIndicator):
def AboutToRun(self, case):
print 'Starting %s...' % case.GetLabel()
sys.stdout.flush()
def HasRun(self, output):
if output.UnexpectedOutput():
if output.HasCrashed():
outcome = 'CRASH'
else:
outcome = 'FAIL'
else:
outcome = 'pass'
print 'Done running %s: %s' % (output.test.GetLabel(), outcome)
class DotsProgressIndicator(SimpleProgressIndicator):
def AboutToRun(self, case):
pass
def HasRun(self, output):
total = self.succeeded + len(self.failed)
if (total > 1) and (total % 50 == 1):
sys.stdout.write('\n')
if output.UnexpectedOutput():
if output.HasCrashed():
sys.stdout.write('C')
sys.stdout.flush()
elif output.HasTimedOut():
sys.stdout.write('T')
sys.stdout.flush()
else:
sys.stdout.write('F')
sys.stdout.flush()
else:
sys.stdout.write('.')
sys.stdout.flush()
class TapProgressIndicator(SimpleProgressIndicator):
| def Starting(self):
print '1..%i' % len(self.cases)
self._done = 0
def AboutToRun(self, case):
pass
def HasRun(self, output):
self._done += 1
command = basename(output.command[-1])
if output.UnexpectedOutput():
print 'not ok %i - %s' % (self._done, command)
for l in output.output.stderr.splitlines():
print '#' + l
for l in output.output.stdout.splitlines():
print '#' + l
| else:
print 'ok %i - %s' % (self._done, command)
duration = output.test.duration
# total_seconds() was added in 2.7
total_seconds = (duration.microseconds +
(duration.seconds + duration.days * 24 * 3600) * 10**6) / 10**6
print ' ---'
print ' duration_ms: %d.%d' % (total_seconds, duration.microseconds / 1000)
print ' ...'
def Done(self):
pass
class CompactProgressIndicator(ProgressIndicator):
def __init__(self, cases, templates):
super(CompactProgressIndicator, self).__init__(cases)
self.templates = templates
self.last_status_length = 0
self.start_time = time.time()
def Starting(self):
pass
def Done(self):
self.PrintProgress('Done')
def AboutToRun(self, case):
self.PrintProgress(case.GetLabel())
def HasRun(self, output):
if output.UnexpectedOutput():
self.ClearLine(self.last_status_length)
self.PrintFailureHeader(output.test)
stdout = output.output.stdout.strip()
if len(stdout):
print self.templates['stdout'] % stdout
stderr = output.output.stderr.strip()
if len(stderr):
print self.templates['stderr'] % stderr
|
LighthouseHPC/lighthouse | src/LAPACK341/sort341/sing.py | Python | mit | 2,743 | 0.010208 | import urllib, shutil, csv
from time import time
import os
parentdir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
os.sys.path.insert(0,parentdir)
import summary.summary
print "----------------- Sort the sing routines in v3.4.1 -----------------"
###----------- get new_list
new_list = summary.summary.New_List()
###------------ find sing routines in v3.4.1
wr_single = csv.writer(open('./routines/sing_341_single.txt', 'w'), delimiter=';')
wr_double = csv.writer(open('./routines/sing_341_double.txt', 'w'), delimiter=';')
wr_complex = csv.writer(open('./routines/sing_341_complex.txt', 'w'), delimiter=';')
wr_complex16 = csv.writer(open('./routines/sing_341_complex16.txt', 'w'), delimiter=';')
wr_aux = csv.writer(open('./routines/sing_341_aux.txt', 'w'), delimiter=';')
sing_single = []
sing_double = []
sing_complex = []
sing_complex16 = []
sing_aux = []
i=0
j=0
k=0
l=0
m=0
start = time()
f = open("./routines/routines_341_list.txt", 'r')
for line in f:
routineName = line.split(' ')[1]
category = line.split(' ')[2]
#print category
if "sing" in category:
print routineName, "------->", category
if category[0:3] == "aux":
m += 1
sing_aux.append(routineName)
wr_aux.writerow([m, routineName[0], routineName[1:-2], "http://www.netlib.org/lapack/lapack_routine/"+routineName])
#print category
elif category[0:4] == "real":
i += 1
sing_single.append(routineName)
wr_single.writerow([i, routineName[0], routineName[1:-2], "http://www.netlib.org/lapack/lapack_routine/"+routineName])
elif category[0:6] == "double":
j += 1
sing_double.append(routineName)
wr_double.writerow([j, routineName[0], routineName[1:-2], "http://www.netlib.org/lapack/lapack_routine/"+routineName])
elif category[0:9] == "complex16":
l += 1
sing_complex16.append(routineName)
wr_complex16.writerow([l, routineName[0], routineName[1:-2], "http://www.netlib.org/lapack/lapack_routine/"+routineName])
else:
k += 1
sing_complex.append(routineName)
wr_complex.writerow([k, routineName[0], routineName[1:-2], "http://www.netlib.org/lapack/lapack_routine/"+routineName])
f.close()
print "Singular value decomposition (SVD) single: ", len(sing_single)
print "Singular value decomposition (SVD) double: ", len(sing_double)
print "Singular value decomposition (SVD) complex: ", len(sing_ | complex)
print "Singular valu | e decomposition (SVD) complex16: ", len(sing_complex16)
print "Singular value decomposition (SVD) auxiliary: ", len(sing_aux)
print "total time: ", time()-start
|
EmanueleCannizzaro/scons | test/Platform.py | Python | mit | 2,185 | 0.004119 | #!/usr/bin/env python
#
# Copyright (c) 2001 - 2016 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTI | ON
# OF CONTRACT, TORT | OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "test/Platform.py rel_2.5.1:3735:9dc6cee5c168 2016/11/03 14:02:02 bdbaddog"
import TestSCons
test = TestSCons.TestSCons()
test.write('SConstruct', """
env = Environment()
Platform('cygwin')(env)
print "'%s'" % env['PROGSUFFIX']
assert env['SHELL'] == 'sh'
Platform('os2')(env)
print "'%s'" % env['PROGSUFFIX']
env.Platform('posix')
print "'%s'" % env['PROGSUFFIX']
Platform('win32')(env)
print "'%s'" % env['PROGSUFFIX']
SConscript('SConscript')
""")
test.write('SConscript', """
env = Environment()
Platform('cygwin')(env)
print "'%s'" % env['LIBSUFFIX']
Platform('os2')(env)
print "'%s'" % env['LIBSUFFIX']
env.Platform('posix')
print "'%s'" % env['LIBSUFFIX']
Platform('win32')(env)
print "'%s'" % env['LIBSUFFIX']
""")
expect = test.wrap_stdout(read_str = """'.exe'
'.exe'
''
'.exe'
'.a'
'.lib'
'.a'
'.lib'
""", build_str = "scons: `.' is up to date.\n")
test.run(arguments = ".", stdout = expect)
test.pass_test()
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
|
paultag/aiodocker | aiodocker/constants.py | Python | mit | 29 | 0 | ST | REAM_HEADER_SIZ | E_BYTES = 8
|
lkishline/expyfun | expyfun/_eyelink_controller.py | Python | bsd-3-clause | 28,789 | 0.000035 | """Tools for controlling eyelink communication"""
# Authors: Eric Larson <larsoner@uw.edu>
# Dan McCloy <drmccloy@uw.edu>
#
# License: BSD (3-clause)
import numpy as np
import datetime
from distutils.version import LooseVersion
import os
from os import path as op
import sys
import subprocess
import time
from .visual import FixationDot, Circle, RawImage, Line, Text
from ._utils import get_config, verbose_dec, logger, string_types
# Constants
TRIAL_OK = 0
IN_RECORD_MODE = 4
CR_HAIR_COLOR = 1
PUPIL_HAIR_COLOR = 2
PUPIL_BOX_COLOR = 3
SEARCH_LIMIT_BOX_COLOR = 4
MOUSE_CURSOR_COLOR = 5
def dummy_fun(*args, **kwargs):
"""A dummy function used by EL dummy mode"""
return TRIAL_OK
# don't prevent basic functionality for folks who don't use EL
try:
import pylink
cal_super_class = pylink.EyeLinkCustomDisplay
openGraphicsEx = pylink.openGraphicsEx
except ImportError:
pylink = None
cal_super_class = object
openGraphicsEx = dummy_fun
eye_list = ['LEFT_EYE', 'RIGHT_EYE', 'BINOCULAR'] # Used by eyeAvailable
def _get_key_trans_dict():
"""Helper to translate pyglet keys to pylink codes"""
from pyglet.window import key
key_trans_dict = {str(key.F1): pylink.F1_KEY,
str(key.F2): pylink.F2_KEY,
str(key.F3): pylink.F3_KEY,
str(key.F4): pylink.F4_KEY,
str(key.F5): pylink.F5_KEY,
str(key.F6): pylink.F6_KEY,
str(key.F7): pylink.F7_KEY,
str(key.F8): pylink.F8_KEY,
str(key.F9): pylink.F9_KEY,
str(key.F10): pylink.F10_KEY,
str(key.PAGEUP): pylink.PAGE_UP,
str(key.PAGEDOWN): pylink.PAGE_DOWN,
str(key.UP): pylink.CURS_UP,
str(key.DOWN): pylink.CURS_DOWN,
str(key.LEFT): pylink.CURS_LEFT,
str(key.RIGHT): pylink.CURS_RIGHT,
str(key.BACKSPACE): '\b',
str(key.RETURN): pylink.ENTER_KEY,
str(key.ESCAPE): pylink.ESC_KEY,
str(key.NUM_ADD): key.PLUS,
str(key.NUM_SUBTRACT): key.MINUS,
}
return key_trans_dict
def _get_color_dict():
"""Helper to translate pylink colors to pyglet"""
color_dict = {str(CR_HAIR_COLOR): (1.0, 1.0, 1.0),
str(PUPIL_HAIR_COLOR): (1.0, 1.0, 1.0),
str(PUPIL_BOX_COLOR): (0.0, 1.0, 0.0),
str(SEARCH_LIMIT_BOX_COLOR): (1.0, 0.0, 0.0),
str(MOUSE_CURSOR_COLOR): (1.0, 0.0, 0.0)}
return color_dict
def _check(val, msg, out='error'):
"""Helper to check output"""
if val != TRIAL_OK:
msg = msg.format(val)
if out == 'warn':
logger.warn(msg)
else:
raise RuntimeError()
_dummy_names = [
'setSaccadeVelocityThreshold', 'setAccelerationThreshold',
'setUpdateInterval', 'setFixationUpdateAccumulate', 'setFileEventFilter',
'setLinkEventFilter', 'setFileSampleFilter', 'setLinkSampleFilter',
'setPupilSizeDiameter', 'setAcceptTargetFixationButton',
'openDataFile', 'startRecording', 'waitForModeReady',
'isRecording', 'stopRecording', 'closeDataFile', 'doTrackerSetup',
'receiveDataFile', 'close', 'eyeAvailable', 'sendCommand',
]
class DummyEl(object):
def __init__(self):
for name in _dummy_names:
setattr(self, name, dummy_fun)
self.getTrackerVersion = lambda: 'Dummy'
self.getDummyMode = lambda: True
self.getCurrentMode = lambda: IN_RECORD_MODE
self.waitForBlockStart = lambda a, b, c: 1
def sendMessage(self, msg):
if not isinstance(msg, string_types):
raise TypeError('msg must be str')
return TRIAL_OK
class EyelinkController(object):
"""Eyelink communi | cation and control methods
Parameters
----------
ec : instance of ExperimentController | None
ExperimentContr | oller instance to interface with. Necessary for
doing calibrations.
link : str | None
If 'default', the default value will be read from EXPYFUN_EYELINK.
If None, dummy (simulation) mode will be used. If str, should be
the network location of eyelink (e.g., "100.1.1.1").
fs : int
Sample rate to use. Must be one of [250, 500, 1000, 2000].
verbose : bool, str, int, or None
If not None, override default verbose level (see expyfun.verbose).
Returns
-------
el_controller : instance of EyelinkController
The Eyelink control interface.
Notes
-----
The data will be saved to the ExperimentController ``output_dir``.
If this was `None`, data will be saved to the current working dir.
"""
@verbose_dec
def __init__(self, ec, link='default', fs=1000, verbose=None):
if link == 'default':
link = get_config('EXPYFUN_EYELINK', None)
if link is not None and pylink is None:
raise ImportError('Could not import pylink, please ensure it '
'is installed correctly to use the EyeLink')
valid_fs = (250, 500, 1000, 2000)
if fs not in valid_fs:
raise ValueError('fs must be one of {0}'.format(list(valid_fs)))
output_dir = ec._output_dir
if output_dir is None:
output_dir = os.getcwd()
if not isinstance(output_dir, string_types):
raise TypeError('output_dir must be a string')
if not op.isdir(output_dir):
os.mkdir(output_dir)
self._output_dir = output_dir
self._ec = ec
if 'el_id' in self._ec._id_call_dict:
raise RuntimeError('Cannot use initialize EL twice')
logger.info('EyeLink: Initializing on {}'.format(link))
ec.flush()
if link is not None:
iswin = (sys.platform == 'win32')
cmd = 'ping -n 1 -w 100' if iswin else 'fping -c 1 -t100'
cmd = subprocess.Popen('%s %s' % (cmd, link),
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
if cmd.returncode:
raise RuntimeError('could not connect to Eyelink @ %s, '
'is it turned on?' % link)
self._eyelink = DummyEl() if link is None else pylink.EyeLink(link)
self._file_list = []
self._size = np.array(self._ec.window_size_pix)
self._ec._extra_cleanup_fun += [self._close]
self._ec.flush()
self._setup(fs)
self._ec._id_call_dict['el_id'] = self._stamp_trial_id
self._ec._ofp_critical_funs.append(self._stamp_trial_start)
self._ec._on_trial_ok.append(self._stamp_trial_ok)
self._fake_calibration = False # Only used for testing
self._closed = False # to prevent double-closing
self._current_open_file = None
logger.debug('EyeLink: Setup complete')
self._ec.flush()
def _setup(self, fs=1000):
"""Start up Eyelink
Executes automatically on init, and needs to be run after
el_save() if further eye tracking is desired.
Parameters
----------
fs : int
The sample rate to use.
"""
# map the gaze positions from the tracker to screen pixel positions
res = self._size
res_str = '0 0 {0} {1}'.format(res[0] - 1, res[1] - 1)
logger.debug('EyeLink: Setting display coordinates and saccade levels')
self._command('screen_pixel_coords = ' + res_str)
self._message('DISPLAY_COORDS ' + res_str)
# set calibration parameters
self.custom_calibration()
# set parser (conservative saccade thresholds)
self._eyelink.setSaccadeVelocityThreshold(35)
self._eyelink.setAccelerationThreshold(9500)
self._eyelink.setUpdateInterval(50)
self._eyelink.setFixationUpdateAccumulate(50)
self._command('sample_rate = {0}'.format(fs))
# retrieve tracker version and tracke |
jileiwang/CJ-Glo | tools/distance.py | Python | apache-2.0 | 2,385 | 0.002096 | import argparse
import numpy as np
import sys
def generate():
parser = argparse.ArgumentParser()
parser.add_argument('--vocab_file', default='vocab.txt', type=str)
parser.add_argument('--vectors_file', default='vectors.txt', type=str)
args = parser.parse_args()
with open(args.vocab_file, 'r') as f:
words = [x.rstrip().split(' ')[0] for x in f.readlines()]
with open(args.vectors_file, 'r') as f:
vectors = {}
for line in f:
vals = line.rstrip().split(' ')
vectors[vals[0]] = [float(x) for x in vals[1:]]
vocab_size = len(words)
vocab = {w: idx for idx, w in enumerate(words)}
ivocab = {idx: w for idx, w in enumerate(words)}
vector_dim = len(vectors[ivocab[0]])
W = np.zeros((vocab_size, vector_dim))
for word, v in vectors.items():
if word == '<unk>':
continue
W[vocab[word], :] = v
# normalize each word vector to unit variance
W_norm = np.zeros(W.shape)
d = (np.sum(W ** 2, 1) ** (0.5))
W_norm = (W.T / d).T
return (W_norm, vocab, ivocab)
def distance(W, vocab, ivocab, input_term):
for idx, term in enumerate(input_term.split(' ')):
if term in vocab:
print('Word: %s Position in vocabulary: %i' % (term, vocab[term]))
if idx == 0:
vec_result = np.copy(W[vocab[term], :])
else:
vec_result += W[vocab[term], :]
else:
print('Word: %s Out of dictionary!\n' % term)
return
vec_norm = np.zeros(vec_result.shape)
d = (np.sum(vec_result ** 2,) ** (0.5))
vec_norm = (vec_result.T / d).T
dist = np.dot(W, vec_norm.T)
for term in input_term.split(' '):
index = vocab[term]
dist[index] = -np.Inf
a = np.argsort(-dist)[:N]
print("\n Word Cosine distance\n")
print("-------------------------------------------- | -------------\n")
for x in a:
print("%35s\t\t%f" % (ivocab[x], dist[x]))
if __name__ == "__main__":
N = 20; # number of closest words that will be shown
W, vocab, ivocab = generate()
while True:
input_term = raw_input("\nEnter word or sentence (EXIT to break): ")
if input_term == 'EXIT':
break
else:
distance(W, | vocab, ivocab, input_term)
|
Arlefreak/MaloBarba | storeApi/migrations/0013_productimages.py | Python | mit | 1,161 | 0.005168 | # -*- coding: utf-8 -* | -
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('storeApi', '0012_product_tags'),
]
| operations = [
migrations.CreateModel(
name='ProductImages',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('name', models.CharField(default=b'', max_length=140, verbose_name=b'Name')),
('image', models.ImageField(upload_to=b'', verbose_name=b'Image')),
('order', models.IntegerField(default=0, verbose_name=b'Order')),
('date', models.DateField(auto_now_add=True, verbose_name=b'Date added')),
('updated', models.DateField(auto_now=True, verbose_name=b'Date updated')),
('product', models.ForeignKey(to='storeApi.Product')),
],
options={
'ordering': ['order', 'date'],
'verbose_name': 'image',
'verbose_name_plural': 'images',
},
),
]
|
fparma/events | website/errorhandler.py | Python | mit | 363 | 0 | from flask import render_template
from website import app
@app.errorhandler(403)
def not_authorized(path):
return | render_template('status/403.html'), 403
@app.errorhandler(404)
def page_not_found(path):
return render_template('status | /404.html'), 404
@app.errorhandler(410)
def resource_gone(path):
return render_template('status/410.html'), 410
|
tuskar/tuskar-ui | openstack_dashboard/dashboards/admin/info/tests.py | Python | apache-2.0 | 3,281 | 0.003657 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012 Nebula, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.core.urlresolvers import reverse
from django import http
from mox import IsA
from openstack_dashboard import api
from openstack_dashboard.test import helpers as test
INDEX_URL = reverse('horizon:admin:info:index')
class ServicesViewTests(test.BaseAdminViewTests):
@test.create_stubs({api.nova: ('service_list',)})
def test_index(self):
self.mox.StubOutWithMock(api.nova, 'default_quota_get')
self.mox.StubOutWithMock(api.cinder, 'default_quota_get')
api.nova.default_quota_get(IsA(http.HttpRequest),
self.tenant.id).AndReturn(self.quotas.nova)
api.cinder.default_quota_get(IsA(http.HttpRequest), self.tenant.id) \
.AndReturn(self.cinder_quotas.first())
services = self.services.list()
api.nova.service_list(IsA(http.HttpRequest)).AndReturn(services)
self.mox.ReplayAll()
res = self.client.get(INDEX_URL)
self.assertTemplateUsed(res, 'admin/info/index.html')
services_tab = res.context['tab_group'].get_tab('services')
self.assertQuerysetEqual(services_tab._tables['services'].data,
['<Service: compute>',
'<Service: volume>',
'<Service: image>',
'<Service: identity (native backend)>',
'<Service: object-store>',
'<Service: network>',
'<Service: ec2>',
'<Service: orchestration>'])
quotas_tab = res.context['tab_group'].get_tab('quotas')
self.assertQuerysetEqual(quotas_tab._tables['quotas'].data,
['<Quota | : (injected_file_content_bytes, 1)>',
'<Quota: (metadata_items, 1)>',
'<Quota: (injected_files, 1)>',
'<Quota: (gigabytes, 1000)>',
'<Quota: (ram, 10000)>',
| '<Quota: (floating_ips, 1)>',
'<Quota: (fixed_ips, 10)>',
'<Quota: (instances, 10)>',
'<Quota: (snapshots, 1)>',
'<Quota: (volumes, 1)>',
'<Quota: (cores, 10)>',
'<Quota: (security_groups, 10)>',
'<Quota: (security_group_rules, 20)>'],
ordered=False)
|
yosi-dediashvili/SubiT | tests/api/providers/torec/test_hamster.py | Python | gpl-3.0 | 1,257 | 0.005569 | import sys
sys.path.append("..\\..")
import os
import time
from api.providers.torec.hamster import TorecHashCodesHamster
from api.requestsmanager import RequestsManager
import unittest
class TestTorecHashCodeHamster(unittest.TestCase):
def setUp(self):
self.hamster = TorecHashCodesHamster(RequestsManager())
def test_remove_after_max_time_passed(self):
self.hamster.add_sub_id("23703")
self.hamster.add_sub_id("2638")
self.assertEquals(len(self.hamster._records), 2)
time.sleep(10)
self. | assertEquals(len(self.hamster._records), 2)
time.sleep(120)
self.assertEquals(len(self.hamster._records), 0)
def test_remove_after_after_request(self):
self.hamster.add_sub_id("23703")
self.hamster.add_sub_id("2638")
self.assertEquals(len(self.hamster._records), 2)
self.hamster.remove_sub_id("2638")
self.assertEquals( | len(self.hamster._records), 1)
self.assertEquals(self.hamster._records.keys()[0], "23703")
def run_tests():
test_runner = unittest.TextTestRunner(verbosity=0)
tests = unittest.defaultTestLoader.loadTestsFromTestCase(
TestTorecHashCodeHamster)
test_runner.run(tests) |
michelle/sink | 164/tml.py | Python | mit | 733 | 0.004093 | """Functions for TML layout that are used in the gram | mar to construct DOM-like
node objects used in the 164 layout engine.
"""
def createNode(name, attributes=None, children=None):
"""Creates a DOM-like node obje | ct, using the 164 representation so that
the node can be processed by the 164 layout engine.
"""
node = dict(attributes)
node['name'] = name
# Represent the list of child nodes as a dict with numeric keys.
node['children'] = dict(enumerate(children)) if children else {}
return node
def createWordNodes(text):
"""Returns a Python list of DOM-like nodes, one for each word in the given
text.
"""
return [createNode('Word', {'word': word + ' '}) for word in text.split()]
|
paineliu/tflearn | rnn02.py | Python | apache-2.0 | 2,697 | 0.00482 | # coding = utf-8
import os
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '2'
import tensorflow as | tf
import numpy as np
from tensorflow.examples.tutorials.mnist import input_data
mnist = input_data.read_data_sets("MN | IST_data/", one_hot=True)
tf.set_random_seed(1)
np.random.seed(1)
# Hyper Parameters
BATCH_SIZE = 128
TIME_STEP = 28 # rnn time step / image height
INPUT_SIZE = 28 # rnn input size / image width
LR = 0.01 # learning rate
# data
mnist = input_data.read_data_sets('./mnist', one_hot=True) # they has been normalized to range (0,1)
test_x = mnist.test.images[:2000]
test_y = mnist.test.labels[:2000]
# plot one example
print(mnist.train.images.shape) # (55000, 28 * 28)
print(mnist.train.labels.shape) # (55000, 10)
# tensorflow placeholders
tf_x = tf.placeholder(tf.float32, [None, TIME_STEP * INPUT_SIZE]) # shape(batch, 784)
image = tf.reshape(tf_x, [-1, TIME_STEP, INPUT_SIZE]) # (batch, height, width, channel)
tf_y = tf.placeholder(tf.int32, [None, 10]) # input y
# RNN
rnn_cell = tf.contrib.rnn.BasicLSTMCell(num_units=BATCH_SIZE)
outputs, (h_c, h_n) = tf.nn.dynamic_rnn(
rnn_cell, # cell you have chosen
image, # input
initial_state=None, # the initial hidden state
dtype=tf.float32, # must given if set initial_state = None
time_major=False, # False: (batch, time step, input); True: (time step, batch, input)
)
output = tf.layers.dense(outputs[:, -1, :], 10) # output based on the last output step
loss = tf.losses.softmax_cross_entropy(onehot_labels=tf_y, logits=output) # compute cost
train_op = tf.train.AdamOptimizer(LR).minimize(loss)
accuracy = tf.metrics.accuracy( # return (acc, update_op), and create 2 local variables
labels=tf.argmax(tf_y, axis=1), predictions=tf.argmax(output, axis=1))[1]
sess = tf.Session()
init_op = tf.group(tf.global_variables_initializer(), tf.local_variables_initializer()) # the local var is for accuracy_op
sess.run(init_op) # initialize var in graph
for step in range(1200): # training
b_x, b_y = mnist.train.next_batch(BATCH_SIZE)
_, loss_ = sess.run([train_op, loss], {tf_x: b_x, tf_y: b_y})
if step % 50 == 0: # testing
accuracy_ = sess.run(accuracy, {tf_x: test_x, tf_y: test_y})
print('train loss: %.4f' % loss_, '| test accuracy: %.2f' % accuracy_)
# print 10 predictions from test data
test_output = sess.run(output, {tf_x: test_x[:10]})
pred_y = np.argmax(test_output, 1)
print(pred_y, 'prediction number')
print(np.argmax(test_y[:10], 1), 'real number')
|
uclouvain/osis_louvain | base/forms/search/search_tutor.py | Python | agpl-3.0 | 1,699 | 0.001178 | ##############################################################################
#
# OSIS stands for Open Student Information System. It's an application
# designed to manage the core business of higher education institutions,
# such as universities, faculties, institutes and professional schools.
# The core business involves the administration of students, teachers,
# courses, programs and so on.
#
# Copyright (C) 2015-2018 Université catholique de Louvain (http://www.uclouvain.be)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hop | e that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# A copy of this license - GNU General Public License - is available
# at the root of the source code of this program. If not,
# see http://www.gnu.org/lice | nses/.
#
##############################################################################
from django import forms
from django.utils.translation import ugettext_lazy as _
from base.forms.search.search_form import BaseSearchForm
from base.models import tutor
class TutorSearchForm(BaseSearchForm):
name = forms.CharField(max_length=40,
label=_("name"))
def search(self):
return tutor.search(**self.cleaned_data).order_by("person__last_name", "person__first_name")
|
alexforencich/verilog-ethernet | tb/test_ip_demux_64_4.py | Python | mit | 25,346 | 0.00075 | #!/usr/bin/env python
"""
Copyright (c) 2014-2018 Alex Forencich
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
from | myhdl import *
import os
import ip_ep
module = 'ip_demux'
testbench = 'test_%s_64_4' % module
srcs = []
srcs.append("../rtl/%s.v" % module)
srcs.append("%s.v" % testbench)
src = ' '.join(srcs)
build_cmd = "iverilog -o %s.vvp %s" % (testbench, src)
def bench():
# Parameters
M_COUNT = 4
DATA_WIDTH = 64
KEEP_ENABLE = (DATA_WIDTH>8)
KEEP_WIDTH = (DATA_WIDTH/8)
ID_ENABLE = 1
ID_WIDTH = 8
DEST_ENABLE = 1
DEST_WIDTH = 8
USER_ENA | BLE = 1
USER_WIDTH = 1
# Inputs
clk = Signal(bool(0))
rst = Signal(bool(0))
current_test = Signal(intbv(0)[8:])
s_ip_hdr_valid = Signal(bool(0))
s_eth_dest_mac = Signal(intbv(0)[48:])
s_eth_src_mac = Signal(intbv(0)[48:])
s_eth_type = Signal(intbv(0)[16:])
s_ip_version = Signal(intbv(0)[4:])
s_ip_ihl = Signal(intbv(0)[4:])
s_ip_dscp = Signal(intbv(0)[6:])
s_ip_ecn = Signal(intbv(0)[2:])
s_ip_length = Signal(intbv(0)[16:])
s_ip_identification = Signal(intbv(0)[16:])
s_ip_flags = Signal(intbv(0)[3:])
s_ip_fragment_offset = Signal(intbv(0)[13:])
s_ip_ttl = Signal(intbv(0)[8:])
s_ip_protocol = Signal(intbv(0)[8:])
s_ip_header_checksum = Signal(intbv(0)[16:])
s_ip_source_ip = Signal(intbv(0)[32:])
s_ip_dest_ip = Signal(intbv(0)[32:])
s_ip_payload_axis_tdata = Signal(intbv(0)[DATA_WIDTH:])
s_ip_payload_axis_tkeep = Signal(intbv(1)[KEEP_WIDTH:])
s_ip_payload_axis_tvalid = Signal(bool(0))
s_ip_payload_axis_tlast = Signal(bool(0))
s_ip_payload_axis_tid = Signal(intbv(0)[ID_WIDTH:])
s_ip_payload_axis_tdest = Signal(intbv(0)[DEST_WIDTH:])
s_ip_payload_axis_tuser = Signal(intbv(0)[USER_WIDTH:])
m_ip_hdr_ready_list = [Signal(bool(0)) for i in range(M_COUNT)]
m_ip_payload_axis_tready_list = [Signal(bool(0)) for i in range(M_COUNT)]
m_ip_hdr_ready = ConcatSignal(*reversed(m_ip_hdr_ready_list))
m_ip_payload_axis_tready = ConcatSignal(*reversed(m_ip_payload_axis_tready_list))
enable = Signal(bool(0))
drop = Signal(bool(0))
select = Signal(intbv(0)[2:])
# Outputs
s_ip_hdr_ready = Signal(bool(0))
s_ip_payload_axis_tready = Signal(bool(0))
m_ip_hdr_valid = Signal(intbv(0)[M_COUNT:])
m_eth_dest_mac = Signal(intbv(0)[M_COUNT*48:])
m_eth_src_mac = Signal(intbv(0)[M_COUNT*48:])
m_eth_type = Signal(intbv(0)[M_COUNT*16:])
m_ip_version = Signal(intbv(0)[M_COUNT*4:])
m_ip_ihl = Signal(intbv(0)[M_COUNT*4:])
m_ip_dscp = Signal(intbv(0)[M_COUNT*6:])
m_ip_ecn = Signal(intbv(0)[M_COUNT*2:])
m_ip_length = Signal(intbv(0)[M_COUNT*16:])
m_ip_identification = Signal(intbv(0)[M_COUNT*16:])
m_ip_flags = Signal(intbv(0)[M_COUNT*3:])
m_ip_fragment_offset = Signal(intbv(0)[M_COUNT*13:])
m_ip_ttl = Signal(intbv(0)[M_COUNT*8:])
m_ip_protocol = Signal(intbv(0)[M_COUNT*8:])
m_ip_header_checksum = Signal(intbv(0)[M_COUNT*16:])
m_ip_source_ip = Signal(intbv(0)[M_COUNT*32:])
m_ip_dest_ip = Signal(intbv(0)[M_COUNT*32:])
m_ip_payload_axis_tdata = Signal(intbv(0)[M_COUNT*DATA_WIDTH:])
m_ip_payload_axis_tkeep = Signal(intbv(0xf)[M_COUNT*KEEP_WIDTH:])
m_ip_payload_axis_tvalid = Signal(intbv(0)[M_COUNT:])
m_ip_payload_axis_tlast = Signal(intbv(0)[M_COUNT:])
m_ip_payload_axis_tid = Signal(intbv(0)[M_COUNT*ID_WIDTH:])
m_ip_payload_axis_tdest = Signal(intbv(0)[M_COUNT*DEST_WIDTH:])
m_ip_payload_axis_tuser = Signal(intbv(0)[M_COUNT*USER_WIDTH:])
m_ip_hdr_valid_list = [m_ip_hdr_valid(i) for i in range(M_COUNT)]
m_eth_dest_mac_list = [m_eth_dest_mac((i+1)*48, i*48) for i in range(M_COUNT)]
m_eth_src_mac_list = [m_eth_src_mac((i+1)*48, i*48) for i in range(M_COUNT)]
m_eth_type_list = [m_eth_type((i+1)*16, i*16) for i in range(M_COUNT)]
m_ip_version_list = [m_ip_version((i+1)*4, i*4) for i in range(M_COUNT)]
m_ip_ihl_list = [m_ip_ihl((i+1)*4, i*4) for i in range(M_COUNT)]
m_ip_dscp_list = [m_ip_dscp((i+1)*6, i*6) for i in range(M_COUNT)]
m_ip_ecn_list = [m_ip_ecn((i+1)*2, i*2) for i in range(M_COUNT)]
m_ip_length_list = [m_ip_length((i+1)*16, i*16) for i in range(M_COUNT)]
m_ip_identification_list = [m_ip_identification((i+1)*16, i*16) for i in range(M_COUNT)]
m_ip_flags_list = [m_ip_flags((i+1)*3, i*3) for i in range(M_COUNT)]
m_ip_fragment_offset_list = [m_ip_fragment_offset((i+1)*13, i*13) for i in range(M_COUNT)]
m_ip_ttl_list = [m_ip_ttl((i+1)*8, i*8) for i in range(M_COUNT)]
m_ip_protocol_list = [m_ip_protocol((i+1)*8, i*8) for i in range(M_COUNT)]
m_ip_header_checksum_list = [m_ip_header_checksum((i+1)*16, i*16) for i in range(M_COUNT)]
m_ip_source_ip_list = [m_ip_source_ip((i+1)*32, i*32) for i in range(M_COUNT)]
m_ip_dest_ip_list = [m_ip_dest_ip((i+1)*32, i*32) for i in range(M_COUNT)]
m_ip_payload_axis_tdata_list = [m_ip_payload_axis_tdata((i+1)*DATA_WIDTH, i*DATA_WIDTH) for i in range(M_COUNT)]
m_ip_payload_axis_tkeep_list = [m_ip_payload_axis_tkeep((i+1)*KEEP_WIDTH, i*KEEP_WIDTH) for i in range(M_COUNT)]
m_ip_payload_axis_tvalid_list = [m_ip_payload_axis_tvalid(i) for i in range(M_COUNT)]
m_ip_payload_axis_tlast_list = [m_ip_payload_axis_tlast(i) for i in range(M_COUNT)]
m_ip_payload_axis_tid_list = [m_ip_payload_axis_tid((i+1)*ID_WIDTH, i*ID_WIDTH) for i in range(M_COUNT)]
m_ip_payload_axis_tdest_list = [m_ip_payload_axis_tdest((i+1)*DEST_WIDTH, i*DEST_WIDTH) for i in range(M_COUNT)]
m_ip_payload_axis_tuser_list = [m_ip_payload_axis_tuser((i+1)*USER_WIDTH, i*USER_WIDTH) for i in range(M_COUNT)]
# sources and sinks
source_pause = Signal(bool(0))
sink_pause_list = []
sink_list = []
sink_logic_list = []
source = ip_ep.IPFrameSource()
source_logic = source.create_logic(
clk,
rst,
ip_hdr_ready=s_ip_hdr_ready,
ip_hdr_valid=s_ip_hdr_valid,
eth_dest_mac=s_eth_dest_mac,
eth_src_mac=s_eth_src_mac,
eth_type=s_eth_type,
ip_version=s_ip_version,
ip_ihl=s_ip_ihl,
ip_dscp=s_ip_dscp,
ip_ecn=s_ip_ecn,
ip_length=s_ip_length,
ip_identification=s_ip_identification,
ip_flags=s_ip_flags,
ip_fragment_offset=s_ip_fragment_offset,
ip_ttl=s_ip_ttl,
ip_protocol=s_ip_protocol,
ip_header_checksum=s_ip_header_checksum,
ip_source_ip=s_ip_source_ip,
ip_dest_ip=s_ip_dest_ip,
ip_payload_tdata=s_ip_payload_axis_tdata,
ip_payload_tkeep=s_ip_payload_axis_tkeep,
ip_payload_tvalid=s_ip_payload_axis_tvalid,
ip_payload_tready=s_ip_payload_axis_tready,
ip_payload_tlast=s_ip_payload_axis_tlast,
ip_payload_tuser=s_ip_payload_axis_tuser,
pause=source_pause,
name='source'
)
for k in range(M_COUNT):
s = ip_ep.IPFrameSink()
p = Signal(bool(0))
sink_list.append(s)
sink_pause_list.append(p)
sink_logic_list.append(s.create_logic(
clk,
rst,
ip_hdr_ready=m_ip_hdr_ready_list[k],
ip_hdr_va |
jcamachor/hive | ql/src/gen/thrift/gen-py/queryplan/ttypes.py | Python | apache-2.0 | 43,635 | 0.002177 | #
# Autogenerated by Thrift Compiler (0.14.1)
#
# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
#
# options string: py
#
from thrift.Thrift import TType, TMessageType, TFrozenDict, TException, TApplicationException
from thrift.protocol.TProtocol import TProtocolException
from thrift.TRecursive import fix_spec
import sys
from thrift.transport import TTransport
all_structs = []
class AdjacencyType(object):
CONJUNCTIVE = 0
DISJUNCTIVE = 1
_VALUES_TO_NAMES = {
0: "CONJUNCTIVE",
1: "DISJUNCTIVE",
}
_NAMES_TO_VALUES = {
"CONJUNCTIVE": 0,
"DISJUNCTIVE": 1,
}
class NodeType(object):
OPERATOR = 0
STAGE = 1
_VALUES_TO_NAMES = {
0: "OPERATOR",
1: "STAGE",
}
_NAMES_TO_VALUES = {
"OPERATOR": 0,
"STAGE": 1,
}
class OperatorType(object):
    """Generated Thrift enum listing every operator kind in a query plan."""

    JOIN = 0
    MAPJOIN = 1
    EXTRACT = 2
    FILTER = 3
    FORWARD = 4
    GROUPBY = 5
    LIMIT = 6
    SCRIPT = 7
    SELECT = 8
    TABLESCAN = 9
    FILESINK = 10
    REDUCESINK = 11
    UNION = 12
    UDTF = 13
    LATERALVIEWJOIN = 14
    LATERALVIEWFORWARD = 15
    HASHTABLESINK = 16
    HASHTABLEDUMMY = 17
    PTF = 18
    MUX = 19
    DEMUX = 20
    EVENT = 21
    ORCFILEMERGE = 22
    RCFILEMERGE = 23
    MERGEJOIN = 24
    SPARKPRUNINGSINK = 25
    TOPNKEY = 26

    # Values are contiguous from 0, so both lookup tables can be generated
    # from one ordered tuple of names instead of two hand-written dicts.
    _VALUES_TO_NAMES = dict(enumerate((
        "JOIN", "MAPJOIN", "EXTRACT", "FILTER", "FORWARD", "GROUPBY",
        "LIMIT", "SCRIPT", "SELECT", "TABLESCAN", "FILESINK", "REDUCESINK",
        "UNION", "UDTF", "LATERALVIEWJOIN", "LATERALVIEWFORWARD",
        "HASHTABLESINK", "HASHTABLEDUMMY", "PTF", "MUX", "DEMUX", "EVENT",
        "ORCFILEMERGE", "RCFILEMERGE", "MERGEJOIN", "SPARKPRUNINGSINK",
        "TOPNKEY",
    )))

    _NAMES_TO_VALUES = {name: value for value, name in _VALUES_TO_NAMES.items()}
class TaskType(object):
    """Generated Thrift enum: coarse classification of an execution task."""

    MAP = 0
    REDUCE = 1
    OTHER = 2

    # Forward table (value -> symbolic name); reverse table derived from it.
    _VALUES_TO_NAMES = {
        0: "MAP",
        1: "REDUCE",
        2: "OTHER",
    }

    _NAMES_TO_VALUES = {name: value for value, name in _VALUES_TO_NAMES.items()}
class StageType(object):
    """Generated Thrift enum listing every stage kind in a query plan."""

    CONDITIONAL = 0
    COPY = 1
    DDL = 2
    MAPRED = 3
    EXPLAIN = 4
    FETCH = 5
    FUNC = 6
    MAPREDLOCAL = 7
    MOVE = 8
    STATS = 9
    DEPENDENCY_COLLECTION = 10
    COLUMNSTATS = 11
    REPL_DUMP = 12
    REPL_BOOTSTRAP_LOAD = 13
    REPL_STATE_LOG = 14
    REPL_TXN = 15
    REPL_INCREMENTAL_LOAD = 16
    SCHEDULED_QUERY_MAINT = 17
    ACK = 18
    RANGER_DUMP = 19
    RANGER_LOAD = 20
    ATLAS_DUMP = 21
    ATLAS_LOAD = 22

    # Values are contiguous from 0, so both lookup tables can be generated
    # from one ordered tuple of names instead of two hand-written dicts.
    _VALUES_TO_NAMES = dict(enumerate((
        "CONDITIONAL", "COPY", "DDL", "MAPRED", "EXPLAIN", "FETCH", "FUNC",
        "MAPREDLOCAL", "MOVE", "STATS", "DEPENDENCY_COLLECTION",
        "COLUMNSTATS", "REPL_DUMP", "REPL_BOOTSTRAP_LOAD", "REPL_STATE_LOG",
        "REPL_TXN", "REPL_INCREMENTAL_LOAD", "SCHEDULED_QUERY_MAINT", "ACK",
        "RANGER_DUMP", "RANGER_LOAD", "ATLAS_DUMP", "ATLAS_LOAD",
    )))

    _NAMES_TO_VALUES = {name: value for value, name in _VALUES_TO_NAMES.items()}
class Adjacency(object):
    """Generated Thrift struct: one adjacency record of a query-plan graph.

    Attributes:
     - node: string id of the parent node
     - children: list of string ids adjacent to ``node``
     - adjacencyType: an AdjacencyType value (CONJUNCTIVE / DISJUNCTIVE)
    """

    def __init__(self, node=None, children=None, adjacencyType=None,):
        self.node = node
        self.children = children
        self.adjacencyType = adjacencyType

    def read(self, iprot):
        """Populate this struct's fields from the Thrift protocol *iprot*."""
        # Fast path: C-accelerated decoder when available and the spec is bound.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRING:
                    self.node = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.LIST:
                    self.children = []
                    (_etype3, _size0) = iprot.readListBegin()
                    for _i4 in range(_size0):
                        _elem5 = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString()
                        self.children.append(_elem5)
                    iprot.readListEnd()
                else:
                    iprot.skip(ftype)
            elif fid == 3:
                if ftype == TType.I32:
                    self.adjacencyType = iprot.readI32()
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip so newer writers stay compatible.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to the Thrift protocol *oprot*.

        ``None`` fields are treated as unset and not written.
        """
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('Adjacency')
        if self.node is not None:
            oprot.writeFieldBegin('node', TType.STRING, 1)
            oprot.writeString(self.node.encode('utf-8') if sys.version_info[0] == 2 else self.node)
            oprot.writeFieldEnd()
        if self.children is not None:
            oprot.writeFieldBegin('children', TType.LIST, 2)
            oprot.writeListBegin(TType.STRING, len(self.children))
            for iter6 in self.children:
                oprot.writeString(iter6.encode('utf-8') if sys.version_info[0] == 2 else iter6)
            oprot.writeListEnd()
            oprot.writeFieldEnd()
        if self.adjacencyType is not None:
            # NOTE(review): the source dump had corrupted tokens here
            # ("writeField | Begin" and a stray "oprot |"); reconstructed to
            # the canonical thrift-generated form.
            oprot.writeFieldBegin('adjacencyType', TType.I32, 3)
            oprot.writeI32(self.adjacencyType)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # All fields are optional in the generated IDL; nothing to enforce.
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class Graph(object):
"""
Attributes:
- nodeType
- roots
- adjacencyList
"""
def __init__(self, nodeType=None, roots=None, adjacencyList=None,):
self.nodeType = nodeType
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.