repo_name stringlengths 5 100 | path stringlengths 4 231 | language stringclasses 1 value | license stringclasses 15 values | size int64 6 947k | score float64 0 0.34 | prefix stringlengths 0 8.16k | middle stringlengths 3 512 | suffix stringlengths 0 8.17k |
|---|---|---|---|---|---|---|---|---|
edx/edx-notifications | testserver/forms.py | Python | agpl-3.0 | 1,437 | 0.007655 | import re
from django import forms
from django.utils.translation import ugettext_lazy as _
from django.contrib.auth.models import User
class RegistrationForm(forms.Form):
username = forms.RegexField(regex=r'^\w+$', widget=forms.TextInput(attrs=dict(required=True, max_length=30)), label=_("Username"), error_messages={ 'invalid': _("This value must contain only letters, numbers and underscores.") })
email = forms.EmailField(widget=forms.TextInput(attrs=dict(required=True, max_length=30)), label=_("Email address") | )
password1 = forms.CharField(widget=forms.PasswordInput(attrs=dict(required=True, max_length=30, re | nder_value=False)), label=_("Password"))
password2 = forms.CharField(widget=forms.PasswordInput(attrs=dict(required=True, max_length=30, render_value=False)), label=_("Password (again)"))
def clean_username(self):
try:
user = User.objects.get(username__iexact=self.cleaned_data['username'])
except User.DoesNotExist:
return self.cleaned_data['username']
raise forms.ValidationError(_("The username already exists. Please try another one."))
def clean(self):
if 'password1' in self.cleaned_data and 'password2' in self.cleaned_data:
if self.cleaned_data['password1'] != self.cleaned_data['password2']:
raise forms.ValidationError(_("The two password fields did not match."))
return self.cleaned_data
|
c4sc/arividam | config/wsgi.py | Python | mit | 1,445 | 0 | """
WSGI config for arividam project.
This module contains the WSGI application used by Django's development server
and any production WSGI deployments. It should expose a module-level variable
named ``application``. Django's ``runserver`` and ``runfcgi`` commands discover
this application via the ``WSGI_APPLICATION`` setting.
Usually you will have the standard Django WSGI application here, but it also
might make sense to replace the whole Django WSGI application with a custom one
that later delegates to the Django one. For example, you could introduce WSGI
middleware here, or combine a Django application with an application of another
framework.
"""
import os
from django.core.wsgi import get_wsgi_application
# We defer to a DJANGO_SETTINGS_MODULE already in the environment. This breaks
# if running multiple sites in the same mod_wsgi process. To fix this, use
# mod_wsgi daemon mode with each site in its own daemon process, or use
# os.environ["DJANGO_SETTINGS_MODULE"] = "config.settings.production"
o | s.environ.setdefault("DJANGO_SETTINGS_MODULE", "config.settings.production")
# This application object is used by any WSGI server configured to use this
# file. This includes Django's development server, if the WSGI_APPLICATION
# setting points here.
application = get_wsgi_application()
# Ap | ply WSGI middleware here.
# from helloworld.wsgi import HelloWorldApplication
# application = HelloWorldApplication(application)
|
3dfxsoftware/cbss-addons | smile_access_control/res_group.py | Python | gpl-2.0 | 4,958 | 0.003025 | # -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2011 Smile (<http://www.smile.fr>). All Rights Reserved
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##################################################### | #########################
from osv import osv
class IrModel(osv.osv):
_inherit = 'ir.model'
def _get_first_level_relations(self, cr, uid, ids, context):
field_obj = self.pool.get('ir.model.fields')
field_ids = field_obj.search(cr, uid, [
('ttype', 'in', ('many2one', 'one2many', 'many2many')),
('model_id', 'in', ids),
], context=context)
if field_ids:
models = [field['relation'] for field in field_obj.read(cr, uid, field_id | s, ['relation'], context=None)]
return self.search(cr, uid, [('model', 'in', models)], context=context)
return []
def get_relations(self, cr, uid, ids, level=1, context=None):
"""
Return models linked to models given in params
If you don't want limit the relations level, indicate level = -1
"""
if isinstance(ids, (int, long)):
ids = [ids]
relation_ids, model_ids = list(ids), list(ids)
while model_ids and level:
model_ids = self._get_first_level_relations(cr, uid, model_ids, context)
model_ids = list(set(model_ids) - set(relation_ids))
relation_ids.extend(model_ids)
level -= 1
return list(set(relation_ids) - set(ids))
IrModel()
class IrModelAccess(osv.osv):
_inherit = 'ir.model.access'
def get_name(self, cr, uid, model_id, group_id=False):
model = self.pool.get('ir.model').read(cr, uid, model_id, ['model'])['model']
group = group_id and self.pool.get('res.groups').read(cr, uid, group_id, ['name'])['name'].lower() or 'all'
return '%s %s' % (model, group)
IrModelAccess()
class ResGroup(osv.osv):
_inherit = 'res.groups'
def button_complete_access_controls(self, cr, uid, ids, context=None):
"""Create access rules for the first level relation models of access rule models not only in readonly"""
context = context or {}
if isinstance(ids, (int, long)):
ids = [ids]
access_obj = self.pool.get('ir.model.access')
for group in self.browse(cr, uid, ids, context):
model_ids = [access_rule.model_id.id for access_rule in group.model_access
if access_rule.perm_write or access_rule.perm_create or access_rule.perm_unlink]
relation_model_ids = self.pool.get('ir.model').get_relations(cr, uid, model_ids, context.get('relations_level', 1), context)
for relation_model_id in relation_model_ids:
access_obj.create(cr, uid, {
'name': access_obj.get_name(cr, uid, relation_model_id, group.id),
'model_id': relation_model_id,
'group_id': group.id,
'perm_read': True,
'perm_write': False,
'perm_create': False,
'perm_unlink': False,
}, context)
return True
def _update_users(self, cr, uid, vals, context=None):
if vals.get('users'):
user_profile_ids = []
user_obj = self.pool.get('res.users')
for item in vals['users']:
user_ids = []
if item[0] == 6:
user_ids = item[2]
elif item[0] == 4:
user_ids = [item[1]]
for user in user_obj.read(cr, uid, user_ids, ['user_profile', 'user_profile_id'], context, '_classic_write'):
if user['user_profile']:
user_profile_ids.append(user['id'])
else:
user_profile_ids.append(user['user_profile_id'])
if user_profile_ids:
user_obj.write(cr, uid, list(set(user_profile_ids)), {}, context) # Update users linked to profiles
def write(self, cr, uid, ids, vals, context=None):
self._update_users(cr, uid, vals, context)
return super(ResGroup, self).write(cr, uid, ids, vals, context)
ResGroup()
|
landism/pants | src/python/pants/java/nailgun_client.py | Python | apache-2.0 | 7,580 | 0.010818 | # coding=utf-8
# Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
import errno
import logging
import os
import signal
import socket
import sys
from pants.java.nailgun_io import NailgunStreamReader
from pants.java.nailgun_protocol import ChunkType, NailgunProtocol
from pants.util.socket import RecvBufferedSocket
logger = logging.getLogger(__name__)
class NailgunClientSession(NailgunProtocol):
"""Handles a single nailgun client session."""
def __init__(self, sock, in_fd, out_fd, err_fd, exit_on_broken_pipe=False):
self._sock = sock
self._input_reader = NailgunStreamReader(in_fd, self._sock) if in_fd else None
self._stdout = out_fd
self._stderr = err_fd
self._exit_on_broken_pipe = exit_on_broken_pipe
self.remote_pid = None
def _maybe_start_input_reader(self):
if self._input_reader:
self._input_reader.start()
def _maybe_stop_input_reader(self):
if self._input_reader:
self._input_reader.stop()
def _write_flush(self, fd, payload=None):
"""Write a payload to a given fd (if provided) and flush the fd."""
try:
if payload:
fd.write(payload)
fd.flush()
except (IOError, OSError) as e:
# If a `Broken Pipe` is encountered during a stdio fd write, we're headless - bail.
if e.errno == errno.EPIPE and self._exit_on_broken_pipe:
sys.exit()
# Otherwise, re-raise.
raise
def _process_session(self):
"""Process the outputs of the nailgun session."""
try:
for chunk_type, payload in self.iter_chunks(self._sock, return_bytes=True):
if chunk_type == ChunkType.STDOUT:
self._write_flush(self._stdout, payload)
elif chunk_type == ChunkType.STDERR:
self._write_flush(self._stderr, payload)
elif chunk_type == ChunkType.EXIT:
self._write_flush(self._stdout)
self._write_flush(self._stderr)
return int(payload)
elif chunk_type == ChunkType.PID:
self.remote_pid = int(payload)
elif chunk_type == ChunkType.START_READING_INPUT:
self._maybe_start_input_reader()
else:
raise self.ProtocolError('received unexpected chunk {} -> {}'.format(chunk_type, payload))
finally:
# Bad chunk types received from the server can throw NailgunProtocol.ProtocolError in
# NailgunProtocol.iter_chunks(). This ensures the NailgunStreamReader is always stopped.
self._maybe_stop_input_reader()
def execute(self, working_dir, main_class, *arguments, **environment):
# Send the nailgun request.
self.send_request(self._sock, working_dir, main_class, *arguments, **environment)
# Process the remainder of the nailgun session.
return self._process_session()
class NailgunClient(object):
"""A python nailgun client (see http://martiansoftware.com/nailgun for more info)."""
class NailgunError(Exception):
"""Indicates an error interacting with a nailgun server."""
class NailgunConnectionError(NailgunError):
"""Indicates an error upon initial connect to the nailgun server."""
# For backwards compatibility with nails expecting the ng c client special env vars.
ENV_DEFAULTS = dict(NAILGUN_FILESEPARATOR=os.sep, NAILGUN_PATHSEPARATOR=os.pathsep)
DEFAULT_NG_HOST = '127.0.0.1'
DEFAULT_NG_PORT = 2113
def __init__(self, host=DEFAULT_NG_HOST, port=DEFAULT_NG_PORT, ins=sys.stdin, out=None, err=None,
workdir=None, exit_on_broken_pipe=False):
"""Creates a nailgun client that can be used to issue zero or more nailgun commands.
:param string host: the nailgun server to contact (defaults to '127.0.0.1')
:param int port: the port the nailgun server is listening on (defaults to the default nailgun
port: 2113)
:param file ins: a file to read command standard input from (defaults to stdin) - can be None
in which case no input is read
:param file out: a stream to write command standard output to (defaults to stdout)
:param file err: a stream to write command standard error to (defaults to stderr)
:param string workdir: the default working directory for all nailgun commands (defaults to CWD)
:param bool exit_on_broken_pipe: whether or not to exit when `Broken Pipe` errors are encountered.
"""
self._host = host
self._port = port
self._stdin = ins
self._stdout = out or sys.stdout
self._stderr = err or sys.stderr
self._workdir = workdir or os.path.abspath(os.path.curdir)
self._exit_on_broken_pipe = exit_on_broken_pipe
self._session = None
def try_connect(self):
"""Creates a socket, connects it to the nailgun and returns the connected socket.
:returns: a connected `socket.socket`.
:raises: `NailgunClient.NailgunConnectionError` on failure to connect.
"""
sock = RecvBufferedSocket(socket.socket(socket.AF_INET, socket.SOCK_STREAM))
try:
sock.connect((self._host, self._port))
except (socket.error, socket.gaierror) as e:
logger.debug('Encountered socket exception {!r} when attempting connect to nailgun'.format(e))
sock.close()
raise self.NailgunConnectionError(
'Problem connecting to nailgun server at {}:{}: {!r}'.format(self._host, self._port, e))
else:
return sock
def send_control_c(self):
"""Sends SIGINT to a nailgun server using pid information from the active session."""
if self._session and sel | f._session.remote_pid is not None:
os.kill(self._session.remote_pid, signal.SIGINT)
def exec | ute(self, main_class, cwd=None, *args, **environment):
"""Executes the given main_class with any supplied args in the given environment.
:param string main_class: the fully qualified class name of the main entrypoint
:param string cwd: Set the working directory for this command
:param list args: any arguments to pass to the main entrypoint
:param dict environment: an env mapping made available to native nails via the nail context
:returns: the exit code of the main_class.
"""
environment = dict(self.ENV_DEFAULTS.items() + environment.items())
cwd = cwd or self._workdir
# N.B. This can throw NailgunConnectionError (catchable via NailgunError).
sock = self.try_connect()
self._session = NailgunClientSession(sock,
self._stdin,
self._stdout,
self._stderr,
self._exit_on_broken_pipe)
try:
return self._session.execute(cwd, main_class, *args, **environment)
except socket.error as e:
raise self.NailgunError('Problem communicating with nailgun server at {}:{}: {!r}'
.format(self._host, self._port, e))
except NailgunProtocol.ProtocolError as e:
raise self.NailgunError('Problem in nailgun protocol with nailgun server at {}:{}: {!r}'
.format(self._host, self._port, e))
finally:
sock.close()
self._session = None
def __repr__(self):
return 'NailgunClient(host={!r}, port={!r}, workdir={!r})'.format(self._host,
self._port,
self._workdir)
|
plotly/python-api | packages/python/plotly/plotly/validators/densitymapbox/colorbar/_ticksuffix.py | Python | mit | 485 | 0.002062 | import _pl | otly_utils.basevalidators
class TicksuffixValidator(_plotly_utils.basevalidators.StringValidator):
def __init__(
self, plotly_name="ticksuffix", parent_name="densitymapbox.colorbar", **kwargs
):
super(TicksuffixValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
edit_type=kwargs.pop("edit_type", "colorbars"),
role=kwargs.pop("role", "style"),
**kwa | rgs
)
|
jniemann66/ReSampler | android/src/libsndfile/Scripts/cstyle.py | Python | lgpl-2.1 | 8,137 | 0.057638 | #!/usr/bin/python -tt
#
# Copyright (C) 2005-2017 Erik de Castro Lopo <erikd@mega-nerd.com>
#
# Released under the 2 clause BSD license.
"""
This program checks C code for compliance to coding standards used in
libsndfile and other projects I run.
"""
import re
import sys
class Preprocessor:
"""
Preprocess lines of C code to make it easier for the CStyleChecker class to
test for correctness. Preprocessing works on a single line at a time but
maintains state between consecutive lines so it can preprocessess multi-line
comments.
Preprocessing involves:
- Strip C++ style comments from a line.
- Strip C comments from a series of lines. When a C comment starts and
ends on the same line it will be replaced with 'comment'.
- Replace arbitrary C strings with the zero length string.
- Replace '#define f(x)' with '#define f (c)' (The C #define requires that
there be no space between defined macro name and the open paren of the
argument list).
Used by the CStyleChecker class.
"""
def __init__ (self):
self.comment_nest = 0
self.leading_space_re = re.compile ('^(\t+| )')
self.trailing_space_re = re.compile ('(\t+| )$')
self.define_hack_re = re.compile ("(#\s*define\s+[a-zA-Z0-9_]+)\(")
def comment_nesting (self):
"""
Return the currect comment nesting. At the start and end of the file,
this value should be zero. Inside C comments it should be 1 or
(possibly) more.
"""
return self.comment_nest
def __call__ (self, line):
"""
Strip the provided line of C and C++ comments. Stripping of multi-line
C comments works as expected.
"""
line = self.define_hack_re.sub (r'\1 (', line)
line = self.process_strings (line)
# Strip C++ style comments.
if self.comment_nest == 0:
line = re.sub ("( |\t*)//.*", '', line)
# Strip C style comments.
open_comment = line.find ('/*')
close_comment = line.find ('*/')
if self.comment_nest > 0 and close_comment < 0:
# Inside a comment block that does not close on this line.
return ""
if open_comment >= 0 and close_comment < 0:
# A comment begins on this line but doesn't close on this line.
self.comment_nest += 1
return self.trailing_space_re.sub ('', line [:open_comment])
if open_comment < 0 and close_comment >= 0:
# Currently open comment ends on this line.
self.comment_nest -= 1
return self.trailing_space_re.sub ('', line [close_comment + 2:])
if open_comment >= 0 and close_comment > 0 and self.comment_nest == 0:
# Comment begins and ends on this line. Replace it with 'comment'
# so we don't need to check whitespace before and after the comment
# we're removing.
newline = line [:open_comment] + "comment" + line [close_comment + 2:]
return self.__call__ (newline)
return line
def process_strings (self, line):
"""
Given a line of C code, return a string where all literal C strings have
been replaced with the empty string literal "".
"""
for k in range (0, len (line)):
if line [k] == '"':
start = k
for k in range (start + 1, len (line)):
if line [k] == '"' and line [k - 1] != '\\':
return line [:start + 1] + '"' + self.process_strings (line [k + 1:])
return line
class CStyleChecker:
"""
A class for checking the whitespace and layout of a C code.
"""
def __init__ (self, debug):
self.debug = debug
self.filename = None
self.error_count = 0
self.line_num = 1
self.orig_line = ''
self.trailing_newline_re = re.compile ('[\r\n]+$')
self.indent_re = re.compile ("^\s*")
self.last_line_indent = ""
self.last_line_indent_curly = False
self.re_checks = \
[ ( re.compile (" "), "multiple space instead of tab" )
, ( re.compile ("\t "), "space after tab" )
, ( re.compile ("[^ ];"), "missing space before semi-colon" )
, ( re.compile ("{[^\s}]"), "missing space after open brace" )
, ( re.compile ("[^{\s]}"), "missing space before close brace" )
, ( re.compile ("[ \t]+$"), "contains trailing whitespace" )
, ( re.compile (",[^\s\n]"), "missing space after comma" )
, ( re.compile (";[^\s]"), "missing space after semi-colon" )
, ( re.compile ("=[^\s\"'=]"), "missing space after assignment" )
# Open and close parenthesis.
, ( re.compile ("[^\s\(\[\*&']\("), "missing space before open parenthesis" )
, ( re.compile ("\)(-[^>]|[^,'\s\n\)\]-])"), "missing space after close parenthesis" )
, ( re.compile ("\s(do|for|if|when)\s.*{$"), "trailing open parenthesis at end of line" )
, ( re.compile ("\( [^;]"), "space after open parenthesis" )
, ( re.compile ("[^;] \)"), "space before close parenthesis" )
# Open and close square brace.
, ( re.compile ("[^\s\(\]]\["), "missing space before open square brace" )
, ( re.compile ("\][^,\)\]\[\s\.-]"), " | missing space after close square brace" )
, ( re.compile ("\[ "), "space after open square brace" )
, ( re.compile (" \]"), "space before close square brace" )
# Space around operators.
, ( re.compile ("[^\s] | [\*/%+-][=][^\s]"), "missing space around opassign" )
, ( re.compile ("[^\s][<>!=^/][=]{1,2}[^\s]"), "missing space around comparison" )
# Parens around single argument to return.
, ( re.compile ("\s+return\s+\([a-zA-Z0-9_]+\)\s+;"), "parens around return value" )
# Parens around single case argument.
, ( re.compile ("\s+case\s+\([a-zA-Z0-9_]+\)\s+:"), "parens around single case argument" )
# Open curly at end of line.
, ( re.compile ("\)\s*{\s*$"), "open curly brace at end of line" )
# Pre and post increment/decrment.
, ( re.compile ("[^\(\[][+-]{2}[a-zA-Z0-9_]"), "space after pre increment/decrement" )
, ( re.compile ("[a-zA-Z0-9_][+-]{2}[^\)\,]]"), "space before post increment/decrement" )
]
def get_error_count (self):
"""
Return the current error count for this CStyleChecker object.
"""
return self.error_count
def check_files (self, files):
"""
Run the style checker on all the specified files.
"""
for filename in files:
self.check_file (filename)
def check_file (self, filename):
"""
Run the style checker on the specified file.
"""
self.filename = filename
cfile = open (filename, "r")
self.line_num = 1
preprocess = Preprocessor ()
while 1:
line = cfile.readline ()
if not line:
break
line = self.trailing_newline_re.sub ('', line)
self.orig_line = line
self.line_checks (preprocess (line))
self.line_num += 1
cfile.close ()
self.filename = None
# Check for errors finding comments.
if preprocess.comment_nesting () != 0:
print ("Weird, comments nested incorrectly.")
sys.exit (1)
return
def line_checks (self, line):
"""
Run the style checker on provided line of text, but within the context
of how the line fits within the file.
"""
indent = len (self.indent_re.search (line).group ())
if re.search ("^\s+}", line):
if not self.last_line_indent_curly and indent != self.last_line_indent:
None # self.error ("bad indent on close curly brace")
self.last_line_indent_curly = True
else:
self.last_line_indent_curly = False
# Now all the regex checks.
for (check_re, msg) in self.re_checks:
if check_re.search (line):
self.error (msg)
if re.search ("[a-zA-Z0-9][<>!=^/&\|]{1,2}[a-zA-Z0-9]", line):
if not re.search (".*#include.*[a-zA-Z0-9]/[a-zA-Z]", line):
self.error ("missing space around operator")
self.last_line_indent = indent
return
def error (self, msg):
"""
Print an error message and increment the error count.
"""
print ("%s (%d) : %s" % (self.filename, self.line_num, msg))
if self.debug:
print ("'" + self.orig_line + "'")
self.error_count += 1
#-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-
if len (sys.argv) < 1:
print ("Usage : yada yada")
sys.exit (1)
# Create a new CStyleChecker object
if sys.argv [1] == '-d' or sys.argv [1] == '--debug':
cstyle = CStyleChecker (True)
cstyle.check_files (sys.argv [2:])
else:
cstyle = CStyleChecker (False)
cstyle.check_files (sys.argv [1:])
if cstyle.get_error_count ():
sys.exit (1)
sys.exit (0)
|
kalaspuff/tomodachi | tests/test_invalid_services.py | Python | mit | 1,285 | 0.003891 | from typing import Any
import pytest
from run_test_service_helper import start_service
def test_invalid_filename(monkeypatch: Any, capsys: Any, loop: Any) -> None:
with pytest.raises(SystemExit):
services, future = start_service("tests/services/no_servi | ce_existing.py", monkeypatch)
out, err = capsys.readouterr()
assert "Invalid service, no such service" in err
def test_invalid_service(monkeypatch: Any, capsys: Any, loop: Any) -> None:
with pytest.raises(NameError):
services, future = start_service("tests/services/invalid_se | rvice.py", monkeypatch)
out, err = capsys.readouterr()
assert "Unable to load service file" in err
def test_syntax_error_service(monkeypatch: Any, capsys: Any, loop: Any) -> None:
with pytest.raises(SyntaxError):
services, future = start_service("tests/services/syntax_error_service.py", monkeypatch)
out, err = capsys.readouterr()
assert "Unable to load service file" in err
def test_import_error(monkeypatch: Any, capsys: Any, loop: Any) -> None:
with pytest.raises(ImportError):
services, future = start_service("tests/services/import_error_service.py", monkeypatch)
out, err = capsys.readouterr()
assert "Invalid service, unable to load service file" in err
|
loles/solar | examples/riak/riaks.py | Python | apache-2.0 | 9,183 | 0.003594 | #!/usr/bin/env python
# To run:
# python example-riaks.py deploy
# solar changes stage
# solar changes process
# solar orch run-once last
# python example-riaks.py add_haproxies
# solar changes stage
# solar changes process
# solar orch run-once last
import click
import sys
from solar.core import resource
from solar.core import signals
from solar.core import validation
from solar.core.resource import virtual_resource as vr
from solar import errors
from solar.dblayer.model import ModelMeta
from solar.events.controls import React, Dep
from solar.events.api import add_event
from solar.dblayer.solar_models import Resource
def setup_riak():
ModelMeta.remove_all()
resources = vr.create('nodes', 'templates/nodes.yaml', {'count': 3})
nodes = [x for x in resources if x.name.startswith('node')]
hosts_services = [x for x in resources if x.name.startswith('hosts_file')]
node1, node2, node3 = nodes
hosts_services = [x for x in resources if x.name.startswith('hosts_file')]
riak_services = []
ips = '10.0.0.%d'
for i in xrange(3):
num = i + 1
r = vr.create('riak_service%d' % num,
'resources/riak_node',
{'riak_self_name': 'riak%d' % num,
'storage_backend': 'leveldb',
'riak_hostname': 'riak_server%d.solar' % num,
'riak_name': 'riak%d@riak_server%d.solar' % (num, num)})[0]
riak_services.append(r)
for i, riak in enumerate(riak_services):
nodes[i].connect(riak)
for i, riak in enumerate(riak_services[1:]):
riak_services[0].connect(riak, {'riak_name': 'join_to'})
for riak in riak_services:
for hosts_file in hosts_services:
riak.connect_with_events(hosts_file,
{'riak_hostname': 'hosts:name',
'ip': 'hosts:ip'})
Resource.save_all_lazy()
errors = resource.validate_resources()
for r, error in errors:
click.echo('ERROR: %s: %s' % (r.name, error))
has_errors = False
if errors:
click.echo("ERRORS")
sys.exit(1)
events = [
Dep('hosts_file1', 'run', 'success', 'riak_service1', 'run'),
Dep('hosts_file2', 'run', 'success', 'riak_service2', 'run'),
Dep('hosts_file3', 'run', 'success', 'riak_service3', 'run'),
React('riak_service2', 'run', 'success', 'riak_service2', 'join'),
React('riak_service3', 'run', 'success', 'riak_service3', 'join'),
# Dep('riak_service1', 'run', 'success', 'riak_service2', 'join'),
# Dep('riak_service1', 'run', 'success', 'riak_service3', 'join'),
# React('riak_service2', 'join', 'error', 'riak_service2', 'leave'),
# React('riak_service3', 'join', 'error', 'riak_service3', 'leave'),
React('riak_service2', 'leave', 'success', 'riak_service2', 'join'),
React('riak_service3', 'leave', 'success', 'riak_service3', 'join'),
# React('riak_service2', 'leave', 'success', 'riak_service1', 'commit_leave'),
# React('riak_service3', 'leave', 'success', 'riak_service1', 'commit_leave'),
# Dep('riak_service1', 'commit_leave', 'success', 'riak_service2', 'join'),
# Dep('riak_service1', 'commit_leave', 'success', 'riak_service3', 'join'),
React('riak_service3', 'join', 'success', 'riak_service1', 'commit'),
React('riak_service2', 'join', 'success', 'riak_service1', 'commit')
]
for event in events:
add_event(event)
click.echo('Use solar changes process & orch')
sys.exit(0)
def setup_haproxies():
hps = []
hpc = []
hpsc_http = []
hpsc_pb = []
for i in xrange(3):
num = i + 1
hps.append(vr.create('haproxy_service%d' % num,
'resources/haproxy_service',
{})[0])
hpc.append(vr.create('haproxy_config%d' % num,
'resources/haproxy_config',
{})[0])
hpsc_http.append(vr.create('haproxy_service_config_http%d' % num,
'resources/haproxy_service_config',
{'listen_port': 8098,
'protocol': 'http',
'name': 'riak_haproxy_http%d' % num})[0])
hpsc_pb.append(vr.create('haproxy_service_config_pb%d' % num,
'resources/haproxy_service_config',
{'listen_port': 8087,
'protocol': 'tcp',
'name': 'riak_haproxy_pb%d' % num})[0])
riak1 = resource.load('riak_service1')
riak2 = resource.load('riak_service2')
riak3 = resource.load('riak_service3')
riaks = [riak1, riak2, riak3]
for single_hpsc in hpsc_http:
for riak in riaks:
riak.connect(single_hpsc, {
'riak_hostname': 'backends:server',
'riak_port_http': 'backends:port'})
for single_hpsc in hpsc_pb:
for riak in riaks:
riak.connect(single_hpsc,
{'riak_hostname': 'backends:server',
'riak_port_pb': 'backends:port'})
# haproxy config to haproxy service
for single_hpc, single_hpsc in zip(hpc, hp | sc_http):
single_hpsc.connect(single_hpc, {"backends": "config:backends",
"listen_po | rt": "config:listen_port",
"protocol": "config:protocol",
"name": "config:name"})
for single_hpc, single_hpsc in zip(hpc, hpsc_pb):
single_hpsc.connect(single_hpc, {"backends": "config:backends",
"listen_port": "config:listen_port",
"protocol": "config:protocol",
"name": "config:name"})
# assign haproxy services to each node
node1 = resource.load('node1')
node2 = resource.load('node2')
node3 = resource.load('node3')
nodes = [node1, node2, node3]
for single_node, single_hps in zip(nodes, hps):
single_node.connect(single_hps)
for single_node, single_hpc in zip(nodes, hpc):
single_node.connect(single_hpc)
has_errors = False
for r in locals().values():
# TODO: handle list
if not isinstance(r, resource.Resource):
continue
# print 'Validating {}'.format(r.name)
local_errors = validation.validate_resource(r)
if local_errors:
has_errors = True
print 'ERROR: %s: %s' % (r.name, local_errors)
if has_errors:
print "ERRORS"
sys.exit(1)
events = []
for node, single_hps, single_hpc in zip(nodes, hps, hpc):
# r = React(node.name, 'run', 'success', single_hps.name, 'install')
d = Dep(single_hps.name, 'run', 'success', single_hpc.name, 'run')
e1 = React(single_hpc.name, 'run', 'success', single_hps.name, 'apply_config')
e2 = React(single_hpc.name, 'update', 'success', single_hps.name, 'apply_config')
# events.extend([r, d, e1, e2])
events.extend([d, e1, e2])
for event in events:
add_event(event)
@click.command()
@click.argument('i', type=int, required=True)
def add_solar_agent(i):
solar_agent_transport = vr.create('solar_agent_transport%s' % i, 'resources/transport_solar_agent',
{'solar_agent_user': 'vagrant',
'solar_agent_password': 'password'})[0]
transports = resource.load('transports%s' % i)
ssh_transport = resource.load('ssh_transport%s' % i)
transports_for_solar_agent = vr.create('transports_for_solar_agent%s' % i, 'resources/transports')[0]
# install solar_agent with ssh
signals.connect(transports_for_solar_agent, solar_agent_transport, {})
signals.connect(ssh_transport, transports_for_solar_agent, {'ssh_key': 'transports:key',
|
bert/geda-gaf | xorn/src/python/geda/netlist/pp_hierarchy.py | Python | gpl-2.0 | 6,338 | 0.001104 | # xorn.geda.netlist - gEDA Netlist Extraction and Generation
# Copyright (C) 1998-2010 Ales Hvezda
# Copyright (C) 1998-2010 gEDA Contributors (see ChangeLog for details)
# Copyright (C) 2013-2017 Roland Lutz
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
## \namespace xorn.geda.netlist.pp_hierarchy
## Post-processing: Hierarchy traversal.
from gettext import gettext as _
import xorn.geda.attrib
def postproc_blueprints(netlist):
for schematic in netlist.schematics:
schematic.ports = {}
for component in schematic.components:
portname = component.get_attribute('portname', None)
if portname is None:
continue
if component.refdes is not None:
component.error(_("refdes= and portname= attributes "
"are mutually exclusive"))
if xorn.geda.attrib.search_all(component.ob, 'net'):
component.error(_("portname= and net= attributes "
"are mutually exclusive"))
if not component.pins:
component.error(_("I/O symbol doesn't have pins"))
if len(component.pins) > 1:
component.error(_("multiple pins on I/O symbol"))
for pin in component.pins:
if pin.number is not None or pin.ob.attached_objects():
pin.warn(_("pin attributes on I/O symbol are ignored"))
try:
ports = schematic.ports[portname]
except KeyError:
ports = schematic.ports[portname] = []
ports.append(component)
component.has_portname_attrib = True
## Connect subsheet I/O ports to the instantiating component's pins.
#
# Disconnect all connections from and to composite components as
# they have been replaced with the actual subschematics.
#
# remove all composite components and ports
def postproc_instances(netlist):
remove_components = set()
for component in netlist.components:
if not component.blueprint.composite_sources:
continue
# collect potential old-style ports
refdes_dict = {}
for subsheet in component.subsheets:
for potential_port in subsheet.components:
try:
l = refdes_dict[potential_port.blueprint.refdes]
except KeyError:
l = refdes_dict[potential_port.blueprint.refdes] = []
l.append(potential_port)
for cpin in component.cpins:
label = cpin.blueprint.get_attribute('pinlabel', None)
if label is None:
cpin.error(_("pin on composite component is missing a label"))
continue
dest_net = cpin.local_net.net
# search for the matching port
ports = [subsheet.components_by_blueprint[port]
for subsheet in component.subsheets
for port in subsheet.blueprint.ports.get(label, [])]
for port in refdes_dict.get(label, []):
# found an old-style port
if port.blueprint.has_netname_attrib:
port.error(_("netname= attribute can't be used "
"on an I/O symbol"))
if xorn.geda.attrib.search_all(port.blueprint.ob, 'net'):
port.error(_("net= attribute can't be used "
"on an I/O symbol"))
if port.blueprint.composite_sources:
port.error(_("I/O symbol can't be a subschematic"))
if port.blueprint.is_graphical:
port.error(_("I/O symbol can't be graphical"))
if not port.cpins:
port.error(_("I/O symbol doesn't have pins"))
continue
| if len(port.cpins) > 1:
port.error(_("multiple pins on I/O symbol"))
continue
ports.append(port)
if not ports:
cpin.warn(_("missing | I/O symbol for port `%s' "
"inside schematic") % label)
elif len(ports) > 1:
cpin.warn(_("multiple I/O symbols for port `%s' "
"inside schematic") % label)
for port in ports:
src_net = port.cpins[0].local_net.net
# merge nets
if src_net != dest_net:
src_net.merge_into(dest_net)
dest_net.component_pins += src_net.component_pins
del src_net.component_pins[:]
# remove port component
remove_components.add(port)
port.sheet.components.remove(port)
del port.sheet.components_by_blueprint[port.blueprint]
port.cpins[0].local_net.cpins.remove(port.cpins[0])
dest_net.component_pins.remove(port.cpins[0])
# After the pin has been connected, remove it.
cpin.local_net.cpins.remove(cpin)
dest_net.component_pins.remove(cpin)
# After all pins have been connected, remove the component.
remove_components.add(component)
component.sheet.components.remove(component)
del component.sheet.components_by_blueprint[component.blueprint]
netlist.components = [component for component in netlist.components
if component not in remove_components]
for component in netlist.components:
if component.blueprint.has_portname_attrib:
component.error(_("unmatched I/O symbol"))
|
xi4nyu/mango_task | tasks/demo2.py | Python | gpl-2.0 | 314 | 0.012739 | # coding: utf-8
"""Demo2
"""
from common.task_base import TaskBase
class DemoTa | sk2(TaskBase):
def __call__(self):
print "demo2 was called."
@property
def desc(self):
return "demo2"
@property
def cron(self):
return | dict(day_of_week="mon-fri", second=2)
|
thruflo/pyramid_torque_engine | setup.py | Python | unlicense | 1,368 | 0.019737 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
setup(
name = 'pyramid_torque_engine',
version = '0.5.4',
description = 'Pyramid and nTorque based dual queue work engine system.',
author = 'James Arthur',
author_email = 'username: thruflo, domain: gmail.com',
url = 'http://github.com/thruflo/pyramid_torque_engine',
classifiers = [
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: Public Domain',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Framework :: Pylons',
'Topic :: Internet :: WWW/HTTP :: WSGI',
],
packages = find_packages('src'),
package_dir = {'': 'src'},
include_package_data = True,
zip_safe = False,
install_requires=[
'fysom',
# 'ntorque',
'pyramid_basemodel',
'pyramid_simpleauth',
'transaction',
'zope.interface'
],
test | s_require = [
'coverage',
'nose',
'mock'
],
entry_points = {
'console_scripts': [
'engine_notification = pyramid_t | orque_engine.notification_table_executer:run'
]
}
)
|
miller-tamil/research_on_email_marketting | web1_ads/Line_separator.py | Python | apache-2.0 | 144 | 0.006944 | lines = open('line.txt', 'r').readlines()
lines_set = set(lines)
out = open | ('workfile.txt', 'w')
for line in lines_se | t:
out.write(line)
|
vimalkumarvelayudhan/riboplot | tests/test_riboplot.py | Python | bsd-3-clause | 7,454 | 0.003488 | import os
import shutil
import logging
import unittest
import tempfile
fro | m riboplot import ribocore, riboplot
# use testing configuration
CFG = riboplot.CONFIG = riboplot.config.TestingConfig()
logging.disable(logging.CRITICAL)
class CheckArgumentsTestCase(unittest.TestCase):
"""Check if all arguments sent on the command line are valid."""
parser = riboplot.create_parser()
def test_bedtools_missing(self):
"" | "If bedtools is not in PATH, raise an error."""
args = self.parser.parse_args(
['-b', CFG.RIBO_FILE, '-f', CFG.TRANSCRIPTOME_FASTA, '-t', CFG.TRANSCRIPT_NAME, '-n', CFG.RNA_FILE])
save_path = os.environ['PATH']
os.environ['PATH'] = ''
self.assertRaises(OSError, ribocore.check_rna_file, rna_file=args.rna_file)
os.environ['PATH'] = save_path
def test_valid_read_length(self):
"""Read length should be a valid integer."""
args = self.parser.parse_args(['-b', CFG.RIBO_FILE, '-f', CFG.TRANSCRIPTOME_FASTA,
'-t', CFG.TRANSCRIPT_NAME, '-l', '28'])
ribocore.check_read_lengths(ribo_file=args.ribo_file, read_lengths=args.read_lengths)
def test_invalid_read_length(self):
"""An error is raised if an invalid read length is used."""
args = self.parser.parse_args(['-b', CFG.RIBO_FILE, '-f', CFG.TRANSCRIPTOME_FASTA, '-t', CFG.TRANSCRIPT_NAME,
'-l', '-1']) # invalid read length -1
self.assertRaises(ribocore.ArgumentError, ribocore.check_read_lengths,
ribo_file=args.ribo_file, read_lengths=args.read_lengths)
args = self.parser.parse_args(['-b', CFG.RIBO_FILE, '-f', CFG.TRANSCRIPTOME_FASTA, '-t', CFG.TRANSCRIPT_NAME,
'-l', '100']) # invalid read length 100
self.assertRaises(ribocore.RiboPlotError, riboplot.main, args=args)
def test_invalid_read_offset(self):
"""Read offset should be positive."""
args = self.parser.parse_args(['-b', CFG.RIBO_FILE, '-f', CFG.TRANSCRIPTOME_FASTA, '-t', CFG.TRANSCRIPT_NAME,
'-s', '-1']) # invalid read offset -1
self.assertRaises(ribocore.ArgumentError, ribocore.check_read_offsets, read_offsets=args.read_offsets)
def test_valid_read_lengths_offsets(self):
"""If multiple read lengths, offsets given check if they are valid
i.e., Each read length must have a corresponding offset.
"""
args = self.parser.parse_args(['-b', CFG.RIBO_FILE, '-f', CFG.TRANSCRIPTOME_FASTA, '-t', CFG.TRANSCRIPT_NAME,
'-l', '28,29', '-s', '15,16'])
ribocore.check_read_lengths_offsets(read_lengths=args.read_lengths, read_offsets=args.read_offsets)
def test_invalid_read_lengths_offsets(self):
"""If multiple read lengths and offsets are given check if they are valid
i.e., Each read length must have a corresponding offset and vice-versa. If not, raise
an error
"""
args = self.parser.parse_args(['-b', CFG.RIBO_FILE, '-f', CFG.TRANSCRIPTOME_FASTA, '-t', CFG.TRANSCRIPT_NAME,
'-l', '28,29', '-s', '15'])
self.assertRaises(ribocore.ArgumentError, ribocore.check_read_lengths_offsets, read_lengths=args.read_lengths, read_offsets=args.read_offsets)
args = self.parser.parse_args(['-b', CFG.RIBO_FILE, '-f', CFG.TRANSCRIPTOME_FASTA, '-t', CFG.TRANSCRIPT_NAME,
'-l', '28', '-s', '15,16'])
self.assertRaises(ribocore.ArgumentError, ribocore.check_read_lengths_offsets, read_lengths=args.read_lengths, read_offsets=args.read_offsets)
def test_missing_transcript_in_fasta(self):
"""If a transcript is missing in FASTA, an error is raised."""
args = self.parser.parse_args(['-b', CFG.RIBO_FILE, '-f', CFG.TRANSCRIPTOME_FASTA, '-t', CFG.TRANSCRIPT_NAME]) # invalid read offset -1
self.assertRaises(ribocore.ArgumentError, ribocore.check_required_arguments,
args.ribo_file, args.transcriptome_fasta, 'hello')
def test_missing_transcript_in_bam(self):
"""If a transcript is missing in BAM, an error is raised."""
# testing with an unrelated BAM file
args = self.parser.parse_args(['-b', CFG.UNRELATED_BAM, '-f', CFG.TRANSCRIPTOME_FASTA,
'-t', CFG.TRANSCRIPT_NAME])
self.assertRaises(ribocore.ArgumentError, ribocore.check_required_arguments, args.ribo_file,
args.transcriptome_fasta, args.transcript_name)
class RNACountsTestCase(unittest.TestCase):
def test_get_rna_counts(self):
"""Test get RNA counts for transcript from RNA-Seq BAM file. Assumes bedtools is installed."""
counts = riboplot.get_rna_counts(CFG.RNA_FILE, CFG.TRANSCRIPT_NAME)
self.assertIsInstance(counts, dict)
self.assertTrue(len(counts) > 0)
def test_invalid_rna_file(self):
"""If an invalid RNA file is provided, generate an error message"""
# using transcriptome FASTA file as the invalid RNA file for test
parser = riboplot.create_parser()
args = parser.parse_args(['-b', CFG.RIBO_FILE, '-f', CFG.TRANSCRIPTOME_FASTA, '-t', CFG.TRANSCRIPT_NAME, '-n', CFG.TRANSCRIPTOME_FASTA])
self.assertRaises(ValueError, ribocore.check_rna_file, rna_file=args.rna_file)
class RiboPlotTestCase(unittest.TestCase):
def test_get_codon_positions(self):
"""Get codon positions in all frames given a sequence."""
# the positions on this sequence were calculated manually.
fasta = ('AACCGGAGCACCCAGAGAAAACCCACGCAAACGCAGGGAGAATTTGCAAACTCCACACA'
'GAAATGCCAGCTGATCCAGCCGAGCCTCGAGTCAGCATCCTTGCTTGTTGGATGCCTGA'
'TTGCAGTTCAACTCCAAACTCAGTTGGACCAGCTGATCAGTG')
codon_positions = riboplot.get_start_stops(fasta)
expected = {1: {'starts': [], 'stops': []},
2: {'starts': [], 'stops': [71, 116, 152]},
3: {'starts': [63, 111], 'stops': []}}
self.assertEqual(codon_positions, expected)
def test_valid_riboplot_run(self):
"""A good riboplot run"""
output_dir = tempfile.mkdtemp()
print 'Output path is {}'.format(output_dir)
parser = riboplot.create_parser()
args = parser.parse_args(['-b', CFG.RIBO_FILE, '-f', CFG.TRANSCRIPTOME_FASTA, '-t', CFG.TRANSCRIPT_NAME,
'-o', output_dir])
riboplot.main(args)
for fname in ('riboplot.png', 'riboplot.svg', 'RiboCounts.csv'):
self.assertTrue(os.path.exists(os.path.join(output_dir, fname)))
shutil.rmtree(output_dir)
def test_transcript_with_no_counts(self):
"""If the transcript has no ribocounts, no plot should be produced."""
transcript = 'gi|62955616|ref|NM_001017822.1|' # has no reads
output_dir = tempfile.mkdtemp()
parser = riboplot.create_parser()
args = parser.parse_args(['-b', CFG.RIBO_FILE, '-f', CFG.TRANSCRIPTOME_FASTA, '-t', transcript, '-o', output_dir])
self.assertRaises(ribocore.RiboPlotError, riboplot.main, args)
for fname in ('riboplot.png', 'riboplot.svg', 'RiboCounts.csv'):
self.assertFalse(os.path.exists(os.path.join(output_dir, fname)))
shutil.rmtree(output_dir)
|
rhempel/ev3dev-lang-python | ev3dev2/control/rc_tank.py | Python | mit | 1,496 | 0.004679 |
import logging
from ev3dev2.motor import MoveTank
from ev3dev2.sensor.lego import InfraredSensor
from time import sleep
log = logging.getLogger(__name__)
# ============
# Tank classes
# ============
class RemoteControlledTank(MoveTank):
def __init__(self, left_motor_port, right_motor_port, polarity='inversed', speed=400, channel=1):
MoveTank.__init__(self, left_motor_port, right_motor_port)
self.set_polarity(polarity)
left_motor = self.motors[left_motor_port]
right_motor = self.motors[right_motor_port]
self.speed_sp = speed
self.remote = InfraredSensor()
self.remote.on_channel1_top_left = self.make_move(left_motor, self.speed_sp)
self.remote.on_channel1_bottom_left | = self.make_move(left_motor, self.speed_sp* -1)
self.remote.on_channel1_top_right = self.make_move(right_motor, self.speed_sp)
self.remote.on_chan | nel1_bottom_right = self.make_move(right_motor, self.speed_sp * -1)
self.channel = channel
def make_move(self, motor, dc_sp):
def move(state):
if state:
motor.run_forever(speed_sp=dc_sp)
else:
motor.stop()
return move
def main(self):
try:
while True:
self.remote.process()
sleep(0.01)
# Exit cleanly so that all motors are stopped
except (KeyboardInterrupt, Exception) as e:
log.exception(e)
self.off()
|
kernevil/samba | python/samba/tests/param.py | Python | gpl-3.0 | 3,667 | 0.000273 | # Unix SMB/CIFS implementation.
# Copyright (C) Jelmer Vernooij <jelmer@samba.org> 2007
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
"""Tests for samba.param."""
from samba import param
import samba.tests
import os
class LoadParmTestCase(samba.tests.TestCaseInTempDir):
def setUp(self):
super(LoadParmTestCase, self).setUp()
self.tempf = os.path.join(self.tempdir, "test")
open(self.tempf, 'w').write("empty")
def tearDown(self):
os.unlink(self.tempf)
super(LoadParmTestCase, self).tearDown()
def test_init(self):
file = param.LoadParm()
self.assertTrue(file is not None)
def test_length(self):
file = param.LoadParm()
self.assertEqual(0, len(file))
def test_set_workgroup(self):
| file = param.LoadParm()
file.set("workgroup", "bla")
self.assertEqual("BLA", file.get("workgroup"))
def test_is_mydomain(self):
file = param.LoadParm()
file.set("workgroup", "bla")
self.assertTrue(file.is_mydomain("BLA"))
self.assertFalse(file.is_mydomain("FOOBAR"))
def test_is_myname(self):
file = param.LoadParm()
file.set("netbios name", "bla")
| self.assertTrue(file.is_myname("BLA"))
self.assertFalse(file.is_myname("FOOBAR"))
def test_load_default(self):
file = param.LoadParm()
file.load_default()
def test_section_nonexistent(self):
samba_lp = param.LoadParm()
samba_lp.load_default()
self.assertRaises(KeyError, samba_lp.__getitem__, "nonexistent")
def test_log_level(self):
samba_lp = param.LoadParm()
samba_lp.set("log level", "5 auth:4")
self.assertEqual(5, samba_lp.log_level())
def test_dump(self):
samba_lp = param.LoadParm()
# Just test successfull method execution (outputs to stdout)
self.assertEqual(None, samba_lp.dump())
def test_dump_to_file(self):
samba_lp = param.LoadParm()
self.assertEqual(None, samba_lp.dump(False, self.tempf))
content = open(self.tempf, 'r').read()
self.assertIn('[global]', content)
self.assertIn('interfaces', content)
def test_dump_a_parameter(self):
samba_lp = param.LoadParm()
samba_lp.load_default()
# Just test successfull method execution
self.assertEqual(None, samba_lp.dump_a_parameter('interfaces'))
def test_dump_a_parameter_to_file(self):
samba_lp = param.LoadParm()
samba_lp.load_default()
self.assertEqual(None,
samba_lp.dump_a_parameter('interfaces',
'global',
self.tempf))
content = open(self.tempf, 'r').read()
self.assertIn('10.53.57.', content)
def test_samdb_url(self):
samba_lp = param.LoadParm()
samdb_url = samba_lp.samdb_url()
self.assertTrue(samdb_url.startswith('tdb://'))
self.assertTrue(samdb_url.endswith('/sam.ldb'))
|
MuckRock/muckrock | muckrock/jurisdiction/migrations/0021_remove_jurisdiction_full_name.py | Python | agpl-3.0 | 368 | 0 | # -*- coding: utf-8 -*-
# Generated by Django 1.11.4 on 2018-03-26 15:34
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('jurisdiction', '0020_auto_20180312_1413 | '),
]
operations = [
migrations.Re | moveField(
model_name='jurisdiction',
name='full_name',
),
]
|
TinyTinni/OF_Plugin-PyMesh | openmesh-python/tests/test_trimesh_circulator_face_vertex.py | Python | bsd-3-clause | 1,723 | 0.006965 | import unittest
import openmesh
import numpy as np
class TriMeshCirculatorFaceVertex(unittest.TestCase):
def setUp(self):
self.mesh = openmesh.TriMesh()
# Add some vertices
self.vhandle = []
self.vhandle.append(self.mesh.add_vertex(np.array([0, 1, 0])))
self.vhandle.append(self.mesh.add_vertex(np.array([1, 0, 0])))
self.vhandle.append(self.mesh.add_vertex(np.array([2, 1, 0])))
self.vhandle.append(self.mesh.add_vertex(np.array([0,-1, 0])))
self.vhandle.append(self.mesh.add_vertex(np.array([2,-1, 0])))
# Add four faces
self.fh0 = self.mesh.add_face(self.vhandle[0], self.vhandle[1], self.vhandle[2])
self.mesh.add_face(self.vhandle[1], self.vhandle[3], self.vhandle[4])
self.mesh.add_face(self.vhandle[0], self.vhandle[3], self.vhandle[1])
| self.mesh.add_face(self.vhandle[2], self.vhandle[1], self.vhandle[4])
'''
Test setup:
0 ==== 2
|\ 0 /|
| \ / |
|2 1 3|
| | / \ |
|/ 1 \|
3 ==== 4
'''
def test_face_vertex_iter_without_increment(self):
self.assertEqual(self.fh0.idx(), 0)
# Iterate around face 0 at the top
fv_it = openmesh.FaceVertexIter(self.mesh, self.fh0)
self.assertEqual(next(fv_it).idx(), 0)
self.assertEqual(next(fv_it).idx(), 1)
self.assertEqual(next(fv_it).idx(), 2)
with self.assertRaises(StopIteration):
next(fv_it)
if __name__ == '__main__':
suite = unittest.TestLoader().loadTestsFromTestCase(TriMeshCirculatorFaceVertex)
unittest.TextTestRunner(verbosity=2).run(suite)
|
twankim/weaksemi | gen_data.py | Python | mit | 3,619 | 0.023211 | # -*- coding: utf-8 -*-
# @Author: twankim
# @Date: 2017-05-05 00:06:53
# @Last Modified by: twankim
# @Last Modified time: 2017-10-19 23:50:33
import numpy as np
from sklearn.datasets import make_blobs
class genData:
def __init__(self,n,m,k,min_gamma=1,max_gamma=1.25,std=1.0):
self.n = n
self.m = m
self.k = k
self.std = std
self.min_gamma = min_gamma
self.max_gamma = max_gamma
self.gamma = 0
self.ris = np.zeros(self.k)
def gen(self):
while (self.gamma <= self.min_gamma) or (self.gamma > self.max_gamma):
X,y = make_blobs(n_samples=self.n,n_features=self.m,centers=self.k,cluster_std = self.std)
X_means = [X[y==t,:].mean(axis=0) for t in xrange(self.k)]
gammas = []
for i in xrange(self.k):
ri = max(np.linalg.norm(X[y==i,:]-np.tile(X_means[i],(sum(y==i),1)),axis=1))
ra = min(np.linalg.norm(X[y!=i,:]-np.tile(X_means[i],(sum(y!=i),1)),axis=1))
gammas.append(ra/float(ri))
self.ris[i] = ri
y += 1
self.gamma = min(gammas)
self.X = X
self.y = y
self.X_means = X_means
return self.X,self.y,self.ris
def calc_eta(self,delta,weak='random',q=1.0,nu=None,rho=None):
assert (q >0) and (q<=1), "q must be in (0,1]"
assert weak in ['random','local','global'], \
"weak must be in ['random','local','global']"
if weak == 'random':
if q < 1:
return int(np.ceil(np.log(2.0*self.k*(self.m+1)/delta) / \
np.log(1.0/(1 - q**(self.k-1)*(1-np.exp(-(self.gamma-1)**2 /8.0))))
))
else:
return int(np.ceil( 8*np.log(2*self.k*(self.m+1)/delta) / (self.gamma-1)**2 ))
elif weak == 'local':
c_param = min(2*rho-1,self.gamma-nu+1)
qds = []
for i in xrange(self.k):
dists = np.linalg.norm(
self.X[self.y==i+1,:]-np.tile(self.X_means[i],(sum(self.y==i+1),1)),
axis=1)
qds.append(sum(dists<c_param*self.ris[i])/float(len(dists)))
q = min(qds)
return int(np.ceil(np.log(2.0*self.k*(self.m+1)/delta) / \
np.log(1.0/(1 - q**(self.k-1)*(1-np.exp(-(self.gamma-1)**2 /8.0))))
))
elif weak == 'global':
c_param = 2*rho-1
qds = []
for i in xrange(self.k):
dists = np.linalg.norm(
self.X[self.y==i+1,:]-np.tile(self.X_means[i],(sum(self.y==i+1),1)),
axis=1)
qds.append(sum(dists<c_param*self.ris[i])/float(len(dists)))
q = min(qds)
return int(np.ceil(np.log(2.0*self.k*(self.m+1)/delta) / \
np.log(1.0/(1 - q**(self.k-1 | )*(1-np.exp(-(self.gamma-1)**2 /8.0))))
))
else:
return int(np.ceil( 8*np.log(2*self.k*(self.m+1)/delta) / (self.gamma-1)**2 ))
def calc_beta(self,delta,weak='random',q=1.0,nu=None,rho=None):
assert (q >0) and (q<=1), "q must be in (0,1]"
assert weak in ['random','local','global'], | \
"weak must be in ['random','local','global']"
if weak == 'random':
if q < 1:
return int(np.ceil(np.log(2*self.k*np.log(self.n)/delta) / np.log(1.0/(1-q))))
return 1
|
olt/mapproxy | mapproxy/image/transform.py | Python | apache-2.0 | 12,700 | 0.002598 | # This file is part of the MapProxy project.
# Copyright (C) 2010 Omniscale <http://omniscale.de>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import division
from mapproxy.compat.image import Image, transform_uses_center
from mapproxy.image import ImageSource, image_filter
from mapproxy.srs import make_lin_transf, bbox_equals
class ImageTransformer(object):
"""
Transform images between different bbox and spatial reference systems.
:note: The transformation doesn't make a real transformation for each pixel,
but a mesh transformation (see `PIL Image.transform`_).
It will divide the target image into rectangles (a mesh). The
source coordinates for each rectangle vertex will be calculated.
The quadrilateral will then be transformed with the source coordinates
into the destination quad (affine).
The number of quads is calculated dynamically to keep the deviation in
the image transformation below one pixel.
.. _PIL Image.transform:
http://pillow.readthedocs.io/en/stable/reference/Image.html#PIL.Image.Image.transform
::
src quad dst quad
.----. <- coord- .----.
/ / transformation | |
/ / | |
.----. img-transformation -> .----.----
| | |
---------------.
large src image large dst image
"""
def __init__(self, src_srs, dst_srs, max_px_err=1):
"""
:param src_srs: the srs of the source image
:param dst_srs: the srs of the target image
:param resampling: the resampling method used for transformation
:type resampling: nearest|bilinear|bicubic
"""
self.src_srs = src_srs
self.dst_srs = dst_srs
self.dst_bbox = self.dst_size = None
self.max_px_err = max_px_err
def transform(self, src_img, src_bbox, dst_size, dst_bbox, image_opts):
"""
Transforms the `src_img` between the source and destination SRS
of this ``ImageTransformer`` instance.
When the ``src_srs`` and ``dst_srs`` are equal the image will be cropped
and not transformed. If the `src_bbox` and `dst_bbox` are equal,
the `src_img` itself will be returned.
:param src_img: the source image for the transformation
:param src_bbox: the bbox of the src_img
:param dst_size: the size of the result image (in pizel)
:type dst_size: ``(int(width), int(height))``
:param dst_bbox: the bbox of the result image
:return: the transformed image
:rtype: `ImageSource`
"""
if self._no_transformation_needed(src_img.size, src_bbox, dst_size, dst_bbox):
return src_img
if self.src_srs == self.dst_srs:
result = self._transform_simple(src_img, src_bbox, dst_size, dst_bbox,
image_opts)
else:
result = self._transform(src_img, src_bbox, dst_size, dst_bbox, image_opts)
result.cacheable = src_img.cacheable
return result
def _transform_simple(self, src_img, src_bbox, dst_size, dst_bbox, image_opts):
"""
Do a simple crop/extent transformation.
"""
src_quad = (0, 0, src_img.size[0], src_img.size[1])
to_src_px = make_lin_transf(src_bbox, src_quad)
minx, miny = to_src_px((dst_bbox[0], dst_bbox[3]))
maxx, maxy = to_src_px((dst_bbox[2], dst_bbox[1]))
src_res = ((src_bbox[0]-src_bbox[2])/src_img.size[0],
(src_bbox[1]-src_bbox[3])/src_img.size[1])
dst_res = ((dst_bbox[0]-dst_bbox[2])/dst_size[0],
(dst_bbox[1]-dst_bbox[3])/dst_size[1])
tenth_px_res = (abs(dst_res[0]/(dst_size[0]*10)),
abs(dst_res[1]/(dst_size[1]*10)))
if (abs(src_res[0]-dst_res[0]) < tenth_px_res[0] and
abs(src_res[1]-dst_res[1]) < tenth_px_res[1]):
# rounding might result in subpixel inaccuracy
# this exact resolutioni match should only happen in clients with
# fixed resolutions like OpenLayers
minx = int(round(minx))
miny = int(round(miny))
result = src_img.as_image().crop((minx, miny,
minx+dst_size[0], miny+dst_size[1]))
else:
img = img_for_resampling(src_img.as_image(), image_opts.resampling)
result = img.transform(dst_size, Image.EXTENT,
(minx, miny, maxx, maxy),
image_filter[image_opts.resampling])
return ImageSource(result, size=dst_size, image_opts=image_opts)
def _transform(self, src_img, src_bbox, dst_size, dst_bbox, image_opts):
"""
Do a 'real' transformation with a transformed mesh (see above).
"""
meshes = transform_meshes(
src_size=src_img.size,
src_bbox=src_bbox,
src_srs=self.src_srs,
dst_size=dst_size,
dst_bbox=dst_bbox,
dst_srs=self.dst_srs,
max_px_err=self.max_px_err,
)
img = img_for_resampling(src_img.as_image(), image_opts.resampling)
result = img.transform(dst_size, Image.MESH, meshes,
image_filter[image_opts.resampling])
if False:
# draw mesh for debuging
from PIL import ImageDraw
draw = ImageDraw.Draw(result)
for g, _ in meshes:
draw.rectangle(g, fill=None, outline=(255, 0, 0))
return ImageSource(result, size=dst_size, image_opts=image_opts)
def _no_transformation_needed(self, src_size, src_bbox, dst_size, dst_bbox):
"""
>>> src_bbox = (-2504688.5428486541, 1252344.271424327,
... -1252344.271424327, 2504688.5428486541)
>>> dst_bbox = (-2504688.5431999983, 1252344.2704,
... -1252344.2719999983, 2504688.5416000001)
>>> from mapproxy.srs import SRS
>>> t = ImageTransformer(SRS(900913), SRS(900913))
>>> t._no_transformation_needed((256, 256), src_bbox, (256, 256), dst_bbox)
True
"""
xres = (dst_bbox[2]-dst_bbox[0])/dst_size[0]
yres = (dst_bbox[3]-dst_bbox[1])/dst_size[1]
return (src_size == dst_size and
self.src_srs == self.dst_srs and
bbox_equals(src_bbox, dst_bbox, xres/10, yres/10))
def transform_meshes(src_size, src_bbox, src_srs, dst_size, dst_bbox, dst_srs, max_px_err=1):
"""
transform_meshes creates a list of QUAD transformation parameters for PIL's
MESH image transf | ormation.
Each QUAD is a rectangle in the destination image, like ``(0, 0, 100, 100)`` and
a list of four pixel coordinates in the source image that match the destination rectangle.
The four points form a quadliteral (i.e. not a rectangle).
PIL's image transform uses affine transformation to fill each rectangle in the destination
image with data from the source quadliteral.
The number of QUADs is c | alculated dynamically to keep the deviation in the image
transformation below one pixel. Image transformations for large map scales can be transformed with
1-4 QUADs most of the time. For low scales, transform_meshes can generate a few hundred QUADs.
It generates a maximum of one QUAD per |
ini-bdds/ermrest | ermrest/ermpath/__init__.py | Python | apache-2.0 | 788 | 0.006345 |
#
# Copyright 2013-2015 University of Southern California
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in complia | nce with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language gover | ning permissions and
# limitations under the License.
#
"""ERMREST data path support
The data path is the core ERM-aware mechanism for searching,
navigating, and manipulating data in an ERMREST catalog.
"""
from resource import *
|
Kovak/KivySurvey | kivy_survey/survey.py | Python | mit | 5,884 | 0.001869 | from __future__ import unicode_literals, print_function
from jsontowidget import widget_from_json
class Survey(object):
def __init__(self, json_survey, **kwargs):
super(Survey, self).__init__(**kwargs)
self.survey_file = json_survey
self.questionnaires = {}
self.prev_questionnaires = []
self.load_questionnaires()
def load_questionnaires(self):
json_data = self.survey_file['survey']
qs = self.questionnaires
for each in json_data:
qs[each] = Questionnaire(each, self)
def get_header_definitions(self, questionnaire):
q = self.questionnaires[questionnaire]
return q.headers
def get_subject_fields(self, questionnaire):
q = self.questionnaires[questionnaire]
return q.subject_fields
def get_next_page(self, questionnaire, current_page):
q = self.questionnaires[questionnaire]
page_order = q.page_order
if current_page is None:
return page_order[0]
else:
index = page_order.index(current_page)
if index+1 < len(page_order):
return page_order[index+1]
else:
return None
def get_prev_page(self, questionnaire, current_page):
q = self.questionnaires[questionnaire]
page_order = q.page_order
if current_page is None:
return page_order[-1]
else:
index = page_order.index(current_page)
if index-1 >= 0:
return page_order[i | ndex-1]
else:
return None
def get_next_questionnaire(self, current_questionnaire):
q = self.questionnaires[current_questionnaire]
return q.next_questionnaire
def get_allow_forward(self, current_questionnaire):
q = self.questionnaires[current_questionnaire]
return q.allow_forward
def | store_current_questionnaire(self, current_questionnaire):
self.prev_questionnaires.append(current_questionnaire)
def get_previous_questionnaire(self):
try:
return self.prev_questionnaires[-1]
except:
return None
def pop_previous_questionnaire(self):
try:
return self.prev_questionnaires.pop()
except:
return None
def get_allow_add_subjects(self, questionnaire):
try:
return self.questionnaires[questionnaire].add_subjects
except:
return False
class Questionnaire(object):
def __init__(self, name, survey, **kwargs):
super(Questionnaire, self).__init__(**kwargs)
self.survey = survey
self.page_order = []
self.headers = []
self.name = name
json_data = survey.survey_file['survey'][name]
self.load_pages(name, survey)
self.load_headers(name, survey)
self.load_subject_fields(name, survey)
if 'next_questionnaire' in json_data:
self.next_questionnaire = json_data["next_questionnaire"]
else:
self.next_questionnaire = None
if 'add_subjects' in json_data:
self.add_subjects = json_data["add_subjects"]
else:
self.add_subjects = False
if 'allow_forward' in json_data:
self.allow_forward = json_data["allow_forward"]
else:
self.allow_forward = False
if 'demographic' in json_data:
self.demographic = json_data['demographic']
if 'demographic_restrictions' in json_data:
self.demographic_restrictions = json_data['demographic_restrictions']
def load_subject_fields(self, name, survey):
json_data = survey.survey_file['survey'][name]
self.subject_fields = json_data['subject_fields']
def load_headers(self, name, survey):
json_data = survey.survey_file['survey'][name]
self.headers = json_data['headers']
def load_pages(self, name, survey):
json_data = survey.survey_file['survey'][name]
pages_json = json_data['pages']
pages = self.pages = {}
self.page_order = json_data['page_order']
for each in pages_json:
p = Page(each, name, survey)
pages[each] = p
class Page(object):
def __init__(self, name, questionnaire_name, survey, **kwargs):
super(Page, self).__init__(**kwargs)
self.q_name = questionnaire_name
self.survey = survey
self.name = name
self.question_order = []
self.load_questions(name, questionnaire_name, survey)
def load_questions(self, name, q_name, survey):
json_data = survey.survey_file['survey'][q_name]['pages'][name]
questions_json = json_data['questions']
questions = self.questions = {}
self.question_order = json_data['question_order']
if 'disable_binds' in json_data:
self.disable_binds = disable_binds = json_data['disable_binds']
else:
self.disable_binds = disable_binds = []
for each in questions_json:
q = Question(each, questions_json[each])
questions[each] = q
for bind in disable_binds:
a, b = bind
q1 = questions[a]
q2 = questions[b]
wid1 = q1.widget
wid2 = q2.widget
wid1.bind(answer=q2.call_disable_bind)
wid2.bind(answer=q1.call_disable_bind)
class Question(object):
    """Wraps the single widget built from one question's JSON description."""

    def __init__(self, question_name, question_json, **kwargs):
        super(Question, self).__init__(**kwargs)
        widget = widget_from_json(question_json)
        widget.question_name = question_name
        self.widget = widget

    def call_disable_bind(self, instance, value):
        """Disable our widget exactly when *instance* validates its answer."""
        self.widget.disabled = bool(instance.validate_question())
|
YiqunPeng/Leetcode-pyq | solutions/339NestedListWeightSum.py | Python | gpl-3.0 | 1,788 | 0.006152 | # """
# This is the interface that allows for creating nested lists.
# You should not implement it, or speculate about its implementation
# """
#class NestedInteger:
# def __init__(self, value=None):
# """
#        If value is not specified, initializes an empty list.
# Otherwise initializes a single integer equal to value.
# """
#
# def isInteger(self):
#        """
# @return True if this NestedInteger holds a single integer, rather than a nested list.
# :rtype bool
# """
#
# def add(self, elem):
# """
# Set this NestedInteger to hold a nested list and adds a nested integer elem to it.
# :rtype void
# """
#
# def setInteger(self, value):
# """
# Set this NestedInteger to hold a single integer equal to value.
# :rtype void
# """
#
# def getInteger(self):
# """
# @return the single integer that this NestedInteger holds, if it holds a single integer
# Return None if this NestedInteger holds a nested list
# :rtype int
# """
#
# def getList(self):
# """
# @return the nested list that this NestedInteger holds, if it holds a nested list
# Return None if this NestedInteger holds a single integer
# :rtype List[NestedInteger]
# """
class Solution:
    """LeetCode 339: depth-weighted sum over a nested list of integers."""

    def depthSum(self, nestedList, d=1):
        """
        :type nestedList: List[NestedInteger]
        :rtype: int

        Each integer contributes its value times its nesting depth,
        with the outermost level weighted by *d*.
        """
        if not nestedList:
            return 0
        return sum(item.getInteger() * d if item.isInteger()
                   else self.depthSum(item.getList(), d + 1)
                   for item in nestedList)
sonic182/portfolio3 | courses/migrations/0015_auto_20161028_1057.py | Python | mit | 445 | 0 | # -*- coding: utf-8 -*-
# Generated by Django 1.10.2 on 2016-10-28 10:57
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated migration: constrain Course.title to max_length=160."""

    # Builds on the migration that added Course.views.
    dependencies = [
        ('courses', '0014_course_views'),
    ]

    operations = [
        migrations.AlterField(
            model_name='course',
            name='title',
            # Fixed garbled keyword: was 'max_l | ength=160' in the corrupted
            # source, which is a syntax error.
            field=models.CharField(max_length=160),
        ),
    ]
|
butala/pyrsss | pyrsss/mag/iaga2hdf.py | Python | mit | 10,261 | 0.000585 | import os
import sys
import math
import logging
from argparse import ArgumentParser, ArgumentDefaultsHelpFormatter
import numpy as NP
import pandas as PD
from pyglow.pyglow import Point
from geomagio.StreamConverter import get_obs_from_geo, get_geo_from_obs
from obspy.core.stream import Stream
from obspy.core.utcdatetime import UTCDateTime
from obspy.core.trace import Trace
from .iaga2002 import iaga2df
from ..util.angle import deg2tenths_of_arcminute
logger = logging.getLogger('pyrsss.mag.iaga2hdf')
def find_value(key, headers):
    """
    Return the single value that every element of *headers* shares for
    *key*. Raise KeyError when *headers* is empty and ValueError when
    more than one distinct value is present.
    """
    distinct = {header[key] for header in headers}
    if not distinct:
        raise KeyError('{} not found'.format(key))
    if len(distinct) > 1:
        raise ValueError('multiple values of {} detected ({})'.format(key,
                                                                      ', '.join(map(str, distinct))))
    return distinct.pop()
def reduce_headers(headers,
                   keys=['IAGA CODE',
                         'Geodetic Latitude',
                         'Geodetic Longitude',
                         'Elevation',
                         'Reported',
                         'decbas']):
    """
    For each entry of *keys*, collect the single value shared by every
    element of *headers*. Keys missing from the headers are logged and
    skipped; conflicting values propagate find_value's ValueError.
    """
    reduced_header = {}
    for key in keys:
        try:
            value = find_value(key, headers)
        except KeyError:
            logger.warning('Entry for {} not found in IAGA headers --- skipping'.format(key))
        else:
            reduced_header[key] = value
    return reduced_header
def fix_sign(x, N=360 * 60 * 10):
    """
    Map *x* (tenths of arcminutes; may be negative) onto [0, N), where
    N defaults to one full circle (360 deg * 60 min * 10 tenths).
    """
    if x >= 0:
        assert x < N
        return x % N
    # Negative angles must lie within one full circle of zero.
    assert x > -N
    shifted = x + N
    assert shifted < N
    return shifted % N
def get_dec_tenths_arcminute(header, date):
    """
    Compute, via IGRF, the local magnetic declination for the sensor
    location given in *header* at *date*, returned in tenths of
    arcminutes (360 * 60 * 10 tenths per full circle).
    """
    location = Point(date,
                     header['Geodetic Latitude'],
                     header['Geodetic Longitude'],
                     header['Elevation'])
    location.run_igrf()
    declination_deg = location.dec
    if 'IAGA CODE' in header:
        logger.info('using declination angle {:f} (deg) for {}'.format(declination_deg,
                                                                       header['IAGA CODE']))
    else:
        logger.info('using declination angle {:f} (deg)'.format(declination_deg))
    return fix_sign(deg2tenths_of_arcminute(declination_deg))
def df2stream(df,
              header,
              network='NT',
              location='R0',
              radians=True,
              default_elevation=0):
    """
    Build and return obspy :class:`Stream` from *header* information
    and the :class:`DataFrame` *df*. The declination baseline (tenths
    of arcminutes) is taken from the header's decbas entry when present,
    otherwise computed via IGRF. If *radians*, angles in D columns are
    given in degrees and are converted to radians. If the site elevation
    is not included in the header, *default_elevation* is used. The
    *network* and *location* identifiers are used in forming the trace
    names in the resultant stream.
    """
    glon = header['Geodetic Longitude']
    if glon < 0:
        glon += 360
    # Sample spacing/rate inferred from the first two time stamps.
    delta = (df.index[1] - df.index[0]).total_seconds()
    fs = 1 / delta
    d1 = df.index[0]
    d2 = df.index[-1]
    # BUG FIX: format string previously read '{:%Y-%m-%d %H:%H:%S}', which
    # repeats the hour and silently drops the minutes from both endpoints.
    d1_obj = UTCDateTime('{:%Y-%m-%d %H:%M:%S}'.format(d1))
    d2_obj = UTCDateTime('{:%Y-%m-%d %H:%M:%S}'.format(d2))
    # Only run the (expensive) IGRF computation when the header does not
    # already carry a declination baseline; the previous header.get() call
    # evaluated it unconditionally.
    if 'decbas' in header:
        dec_tenths_arcminute = header['decbas']
    else:
        dec_tenths_arcminute = get_dec_tenths_arcminute(header,
                                                        d1.to_pydatetime())
    logger.info('using declination baseline = {:.1f} (tenths of arcminutes)'.format(dec_tenths_arcminute))
    N = df.shape[0]
    stream_header = {'geodetic_latitude': header['Geodetic Latitude'],
                     'geodetic_longitude': glon,
                     'station': header['IAGA CODE'],
                     'sampling_rate': fs,
                     'starttime': d1_obj,
                     'endtime': d2_obj,
                     'declination_base': dec_tenths_arcminute,
                     'npts': N}
    try:
        stream_header['elevation'] = header['Elevation']
    except KeyError:
        stream_header['elevation'] = default_elevation
        logger.warning('elevation is unknown --- inserting {}'.format(default_elevation))
    traces = []
    for column in df.columns:
        # Channel code is the trailing letter of the column name (e.g. B_X -> X).
        channel = column[-1]
        header_i = stream_header.copy()
        header_i['channel'] = channel.upper()
        header_i['network'] = network
        header_i['location'] = location
        vals = df[column].values
        if channel == 'D' and radians:
            vals = NP.radians(vals)
        traces.append(Trace(data=vals,
                            header=header_i))
    return Stream(traces=traces)
def write_hdf(hdf_fname, df, key, header):
    """
    Persist *df* under identifier *key* in HDF file *hdf_fname*,
    attaching *header* as storer metadata. Returns *hdf_fname*.
    """
    with PD.HDFStore(hdf_fname) as hdf_store:
        hdf_store.put(key, df)
        hdf_store.get_storer(key).attrs.header = header
    return hdf_fname
def read_hdf(hdf_fname, key):
    """
    Read contents of HDF file *hdf_fname* associated with *key* and
    return a (:class:`DataFrame`, header) tuple; header is None when
    no metadata was stored.
    """
    if not os.path.isfile(hdf_fname):
        raise ValueError('file {} does not exist'.format(hdf_fname))
    with PD.HDFStore(hdf_fname) as hdf_store:
        frame = hdf_store.get(key)
        header = None
        try:
            header = hdf_store.get_storer(key).attrs.header
        except AttributeError:
            # Older files were written without a header attribute.
            pass
    return frame, header
def combine_iaga(iaga2002_fnames):
    """
    Load one or more IAGA-2002 records *iaga2002_fnames* and return the
    concatenated :class:`DataFrame` together with the reduced header.
    """
    frames = []
    headers = []
    for fname in iaga2002_fnames:
        frame, header = iaga2df(fname)
        frames.append(frame)
        headers.append(header)
    return PD.concat(frames), reduce_headers(headers)
def xy2df(df, header):
    """
    Add `B_X` and `B_Y` (surface magnetic field in geographic
    coordinates, X is north and Y is east) to the :class:`DataFrame`
    *df* and return. The record *header* is necessary to carry out the
    coordinate transformation.
    """
    obs = df2stream(df, header)
    geo = get_geo_from_obs(obs)
    # Fixed garbled source: the first channel selector read
    # "channel | ='X'", a syntax error.
    return df.assign(B_X=geo.select(channel='X').traces[0].data,
                     B_Y=geo.select(channel='Y').traces[0].data)
def he2df(df, header):
    """
    Add `B_H` and `B_E` (surface magnetic field in local geomagnetic
    coordinates, H is local north and E is local east) to the
    :class:`DataFrame` *df* and return. The record *header* is
    necessary to carry out the coordinate transformation.
    """
    # Fixed garbled source: the membership test read
    # "'B_F' not in | df.columns", a syntax error.
    if 'B_F' not in df.columns:
        # The USGS geomag-algorithms module requires the magnetic
        # field magnitude B_F to transform from XY to HE --- add the
        # B_F column if it is not present
        values = zip(df['B_X'].values,
                     df['B_Y'].values,
                     df['B_Z'].values)
        df = df.assign(B_F=[NP.linalg.norm([x, y, z]) for x, y, z in values])
    geo = df2stream(df, header)
    obs = get_obs_from_geo(geo)
    return df.assign(B_H=obs.select(channel='H').traces[0].data,
                     B_E=obs.select(channel='E').traces[0].data)
def add_columns(df, header, xy, he):
"""
Add columns to *df* as needed and return a new :class:`DataFrame`,
reporting magnetic field in additional coordinates systems. If
*xy*, ensure data are provided in geographic XYZ coordi |
pawhewitt/Dev | SU2_PY/change_version_number.py | Python | lgpl-2.1 | 2,840 | 0.011268 | #!/usr/bin/env python
## \file change_version_number.py
# \brief Python script for updating the version number of the SU2 suite.
# \author A. Aranake
# \version 5.0.0 "Raven"
#
# SU2 Original Developers: Dr. Francisco D. Palacios.
# Dr. Thomas D. Economon.
#
# SU2 Developers: Prof. Juan J. Alonso's group at Stanford University.
# Prof. Piero Colonna's group at Delft University of Technology.
# Prof. Nicolas R. Gauger's group at Kaiserslautern University of Technology.
# Prof. Alberto Guardone's group at Polytechnic University of Milan.
# Prof. Rafael Palacios' group at Imperial College London.
# Prof. Edwin van der Weide's group at the University of Twente.
# Prof. Vincent Terrapon's group at the University of Liege.
#
# Copyright (C) 2012-2017 SU2, the open-source CFD code.
#
# SU2 is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# SU2 is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with SU2. If not, see <http://www.gnu.org/licenses/>.
# Run the script from the base directory (ie $SU2HOME). Grep will search directories recursively for matches in version number
import os,sys
oldvers = '4.3.0 "Cardinal"'
newvers = '5.0.0 "Raven"'
os.system('rm -rf version.txt')
# Grep flag cheatsheet:
# -I : Ignore binary files
# -F : Match exact pattern (instead of regular expressions)
# -w : Match whole word
# -r : search directory recursively
# -v : Omit search string (.svn omitted, line containing ISC is CGNS related)
os.system("grep -IFwr '%s' *|grep -vF '.svn' |grep -v ISC > version.txt"%oldvers)
# Create a list of files to adjust
filelist = []
f = open('version.txt','r')
fo | r line in f.readlines():
candidate = line.split(':')[0]
if not candidate in filelist a | nd candidate.find(sys.argv[0])<0:
filelist.append(candidate)
f.close()
print filelist
# Prompt user before continuing
yorn = ''
while(not yorn.lower()=='y'):
yorn = raw_input('Replace %s with %s in the listed files? [Y/N]: '%(oldvers,newvers))
if yorn.lower()=='n':
print 'The file version.txt contains matches of oldvers'
sys.exit()
# Loop through and correct all files
for fname in filelist:
s = open(fname,'r').read()
s_new = s.replace(oldvers,newvers)
f = open(fname,'w')
f.write(s_new)
f.close()
os.system('rm -rf version.txt')
|
DJ-Tai/practice-problems | leet-code/python/solution-461-hamming-distance.py | Python | mit | 293 | 0.006826 | class Solution(object):
| def hammingDistance(self, x, y):
"""
:type x: int
:type y: int
:rtype: int
"""
x = x ^ y # Logical XOR
y = 0
while x:
y += 1
x | = x & (x-1)
return y
|
shannara/subuser | logic/subuserlib/builtInCommands/dev.py | Python | lgpl-3.0 | 2,852 | 0.030154 | #!/usr/bin/python3
# -*- coding: utf-8 -*-
#external imports
import sys
import optparse
import json
import os
import uuid
import subprocess
#internal imports
import subuserlib.commandLineArguments
import subuserlib.profile
import subuserlib.paths
subuserExecutable = os.path.join(subuserlib.paths.getSubuserDir(),"logic","subuser")
def parseCliArgs(realArgs):
  """Parse the command line arguments of ``subuser dev`` and return the
  (options, args) pair produced by optparse.

  Fixed garbled source: a stray "|" before the description assignment and
  a split "Updat | e" inside the --update help string.
  """
  usage = "usage: subuser dev <args> DEV-IMAGE-NAME"
  description = """ Create and run a subuser related to a dev image.
 """
  parser=optparse.OptionParser(usage=usage,description=description,formatter=subuserlib.commandLineArguments.HelpFormatterThatDoesntReformatDescription())
  parser.add_option("--ls",dest="ls",action="store_true",default=False,help="List dev images.")
  parser.add_option("--update",dest="update",action="store_true",default=False,help="Update dev images associated with this folder. Note: This always uses the layer cache. Use subuser update all to update fully without layer caching.")
  parser.add_option("--remove",dest="remove",action="store_true",default=False,help="Remove dev images associated with this folder.")
  parser.add_option("--entrypoint",dest="entrypoint",default=None,help="Use entrypoint instead of default executable.")
  return parser.parse_args(args=realArgs)
@subuserlib.profile.do_cprofile
def runCommand(realArgs):
  """Implement ``subuser dev``: create (or reuse) a dev subuser for the
  requested dev image and run it.

  Shells out to the ``subuser`` executable for list/remove/update/add/run
  operations and remembers folder-local dev subusers in the
  ``.subuser-dev`` JSON registry file.
  """
  options,args = parseCliArgs(realArgs)
  # --ls: list dev images available in the current folder, then quit.
  if options.ls:
    subprocess.call([subuserExecutable,"list","available","./"])
    sys.exit()
  # Registry maps dev image name -> generated subuser name for this folder.
  devSubuserRegistry = ".subuser-dev"
  devSubusers = {}
  subuserNames = []
  if os.path.exists(devSubuserRegistry):
    with open(devSubuserRegistry,"r") as fd:
      devSubusers = json.load(fd)
    for devSubuser in devSubusers.values():
      subuserNames.append(devSubuser)
  # --remove: delete every dev subuser registered for this folder.
  if options.remove:
    subprocess.call([subuserExecutable,"subuser","remove"]+subuserNames)
    sys.exit()
  # --update: refresh the registered subusers (always layer-cached).
  if options.update:
    if not subprocess.call([subuserExecutable,"update","--use-cache","subusers"]+subuserNames) == 0:
      sys.exit()
  if len(args) != 1:
    # A bare --update invocation is complete at this point; otherwise a
    # single DEV-IMAGE-NAME argument is mandatory.
    if options.update:
      sys.exit()
    sys.exit("Please pass a single dev image name. Use --help for help.")
  devSubuser = None
  devImage = args[0]
  # Dev images are conventionally named with a -dev suffix.
  if not devImage.endswith("-dev"):
    devImage = devImage + "-dev"
  try:
    devSubuser = devSubusers[devImage]
  except KeyError:
    pass
  if devSubuser is None:
    # First use of this image here: derive a unique subuser name from the
    # parent folder name plus a UUID, and persist it on successful add.
    devSubuser = devImage+"@"+os.path.split(os.path.dirname(os.getcwd()+os.sep))[1]+"-"+str(uuid.uuid4())
    if subprocess.call([subuserExecutable,"subuser","add",devSubuser,devImage+"@./"]) == 0:
      devSubusers[devImage] = devSubuser
      with open(devSubuserRegistry,"w") as fd:
        json.dump(devSubusers,fd)
  # Finally run the subuser, optionally overriding its entrypoint.
  if options.entrypoint is None:
    subprocess.call([subuserExecutable,"run",devSubuser])
  else:
    subprocess.call([subuserExecutable,"run","--entrypoint="+options.entrypoint,devSubuser])
|
claman/apollo | apollo.py | Python | mit | 4,618 | 0.012343 | #!/usr/bin/env python
import argparse
import datetime
class Book:
def __init__(self, title, author, owned, start, end, physical, date):
self.title = title
self.author = author
self.owned = owned
self.start = start
self.end = end
self.physical = physical
self.date = date
def readTime(self):
try:
start = self.start.split('/')
end = self.end.split('/')
startDate = datetime.date(int(start[2]), int(start[0]), int(start[1]))
endDate = datetime.date(int(end[2]), int(end[0]), int(end[1]))
readingTime = endDate - startDate
return 'You read this in ' + str(readingTime.days) + ' days.'
except (IndexError, ValueError):
return 'Unread, current, or unknown'
def returnReadingYears(self):
yearS = self.start.split('/')
yearE = self.end.split('/')
if len(yearS) and len(yearE) == 3:
return [yearS[2], yearE[2]]
else:
return []
def returnAllInfo(self):
return [self.title, self.author, self.owned, self.start, self.end, self.physical, self.date, self.readTime()]
def returnFormatted(self):
print self.title + ' by ' + self.author
print 'Owned: ' + self.owned
print 'Started: ' + self.start
print 'Finished: ' + self. | end
print 'Format: ' + self.physical
print 'First Published: ' + self.date
print self.readTime()
print
def stats():
totalBooks, totalPhysical, totalEbooks = 0, 0, 0
totalRead, tot | alOwned, totalBorrowed = 0, 0, 0
books_list.next()
books_list.next()
for line in books_list:
line = line.strip('|\n')
entry = line.split('|')
currentBook = Book(entry[0], entry[1], entry[2], entry[3], entry[4], entry[5], entry[6])
totalBooks += 1
if currentBook.physical == 'Paperback' or format == 'Hardcover':
totalPhysical += 1
elif currentBook.physical == 'Ebook':
totalEbooks += 1
if currentBook.owned == 'x':
totalOwned += 1
elif currentBook.owned == 'o':
totalBorrowed += 1
readStatus = currentBook.returnAllInfo()
if readStatus[7] != 'Unread or current':
totalRead += 1
print 'You have ' + str(totalBooks) + ' books on your list; you have read ' + str(totalRead) + '.'
print 'You own ' + str(totalOwned) + ' books: ' + str(totalPhysical) \
+ ' physical (paperback or hardcover) and ' + str(totalEbooks) + ' ebooks.'
print 'You have borrowed ' + str(totalBorrowed) + ' books.'
def search(args):
    """Print every book matching the first CLI filter that is set.

    Filters are checked in priority order: title (-t), reading year (-y),
    author (-a), publication date (-p), then --list (print everything).
    Reads the module-global ``books_list`` file (Python 2 iterator API).
    """
    # Skip the two header lines of the list file.
    books_list.next()
    books_list.next()
    for line in books_list:
        # Each record is a '|'-separated row wrapped in '|' and a newline.
        line = line.strip('|\n')
        entry = line.split('|')
        currentBook = Book(entry[0], entry[1], entry[2], entry[3], entry[4], entry[5], entry[6])
        if args.t:
            if args.t in currentBook.title:
                currentBook.returnFormatted()
        # NOTE(review): by operator precedence only `end` is compared to '-';
        # `start` is merely truthiness-tested. Presumably both dates were
        # meant to be checked against '-' --- verify before changing.
        elif args.y and currentBook.start and currentBook.end != '-':
            if args.y in currentBook.returnReadingYears():
                currentBook.returnFormatted()
        elif args.a and args.a in currentBook.author:
            currentBook.returnFormatted()
        elif args.p and args.p == currentBook.date:
            currentBook.returnFormatted()
        elif args.list:
            currentBook.returnFormatted()
# Command line interface: a top-level --stats flag plus 'book' and 'movie'
# sub-commands with per-field search filters.
parser = argparse.ArgumentParser(description='Use \'book\' to query reading list, or \'movie\' to query watch list.')
parser.add_argument('--stats', action='store_true', help='Show stats about list (no argument)')
subparsers = parser.add_subparsers()
parser_books = subparsers.add_parser('book', help='List books based on queries.')
parser_books.add_argument('-a', help='Search by author')
parser_books.add_argument('-p', help='Search by publication date')
parser_books.add_argument('-t', help='Search by title')
parser_books.add_argument('-y', help='Search by reading year')
parser_books.add_argument('--list', action='store_true', help='List all books')
parser_books.set_defaults(func=search)
parser_movies = subparsers.add_parser('movie', help='List movies based on queries')
parser_movies.add_argument('-t', help='Search by title')
parser_movies.add_argument('-y', help='Search by release year')
parser_movies.add_argument('-d', help='Search by watch date')
parser_movies.add_argument('-r', help='Search by rating')
args = parser.parse_args()

if __name__ == '__main__':
    books_list = open('example.txt', 'r') # change this to correspond to your list
    # Bug fix: --stats was parsed but never dispatched; the removed legacy
    # commented-out dispatch called stats() for it, so wire it up the same
    # way before falling back to the sub-command handler.
    # NOTE(review): the 'movie' sub-command defines no func handler, so
    # selecting it still fails on args.func --- presumably unimplemented.
    if args.stats:
        stats()
    else:
        args.func(args)
    books_list.close()
|
xiaohan2012/lst | check_dataframe.py | Python | mit | 74 | 0 | import sys
impor | t pandas as pd
df = pd.read_pickle(sys.arg | v[1])
print df
|
mlackman/pymapp | pymapp/setters.py | Python | lgpl-2.1 | 1,495 | 0.011371 |
class Attribute(object):
    """Maps a value directly onto a named attribute of a target object."""

    def __init__(self, target_attribute_name):
        self.target_attribute_name = target_attribute_name

    def set(self, target_object, value):
        """Write *value* straight into the target's instance dict."""
        target_object.__dict__[self.target_attribute_name] = value

    def get(self, target_object):
        """Read the mapped attribute back from *target_object*."""
        return getattr(target_object, self.target_attribute_name)

    def satisfies(self, obj):
        """Assert that *obj* actually exposes the mapped attribute."""
        assert hasattr(obj, self.target_attribute_name), \
            "MAPPING NOT SATISFIED: %s does not have attribute %s" % (str(obj.__class__), self.target_attribute_name)
class Relation(Attribute):
    """Maps a single related object, serialized through *mapper*, onto a
    target object attribute.

    Fixed garbled source: the docstring quoting was broken and the
    super().__init__ argument read 'object_targe | t_attribute_name'.
    """

    def __init__(self, object_target_attribute_name, mapper):
        super().__init__(object_target_attribute_name)
        self.mapper = mapper

    def get(self, target_object):
        """Return the related object converted to its hash form."""
        value = super().get(target_object)
        return self.mapper.to_hash(value)

    def set(self, target_object, value):
        """Convert *value* from its hash form and store it on the target."""
        target_object.__dict__[self.target_attribute_name] = self.mapper.from_hash(value)
class ListRelation(Attribute):
    """Sets list of relation objects to target object."""

    def __init__(self, object_target_attribute_name, mapper):
        super().__init__(object_target_attribute_name)
        self.mapper = mapper

    def get(self, target_object):
        """Return the mapped list with every element in its hash form."""
        items = super().get(target_object)
        hashed = []
        for item in items:
            hashed.append(self.mapper.to_hash(item))
        return hashed

    def set(self, target_object, values):
        """Store *values* on the target, converting each from its hash form."""
        converted = [self.mapper.from_hash(value) for value in values]
        target_object.__dict__[self.target_attribute_name] = converted
EricMuller/mynotes-backend | requirements/twisted/Twisted-17.1.0/src/twisted/application/twist/__init__.py | Python | mit | 166 | 0 | # -*- test | -case-name: twisted.application.twist.test -*-
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.

"""
C{twist} command line tool.
"""
|
knotman90/google-interview | problems/distributedJam/dcj_linux/dcj/command_chooser.py | Python | gpl-3.0 | 1,360 | 0.005147 | """Class for choosing cli commands."""
import argparse
class CommandChooser(object):
  """Chooses command to run based on commandline arguments."""
  def __init__(self, commands_dict):
    """Initialize CommandChooser.
    Args:
      commands_dict: dict from command name to object responsible for executing
        the command. The object should provide three methods:
        * AddToParser(parser) returning parser to parse arguments passed to the
          command.
        * Description() returning string that will describe the command when
          using --help flag.
        * Run(args) running the command with given args.
    """
    self.commands_dict = commands_dict
  def AddToParser(self, parser):
    """Register one subparser per command; returns nothing.
    Args:
      parser: parser to which commands will be added.
    """
    subparsers = parser.add_subparsers(title='Command to perform')
    # items() (instead of the Python-2-only iteritems()) keeps this working
    # on both Python 2 and 3; sorting yields a stable --help ordering.
    for (command, executor) in sorted(self.commands_dict.items()):
      parser_command = subparsers.add_parser(
        command,
        help=executor.Description(),
        formatter_class=argparse.ArgumentDefaultsHelpFormatter,
        conflict_handler='resolve',
      )
      executor.AddToParser(parser_command)
      parser_command.set_defaults(func=executor.Run)
  def Run(self, args):
    """Invoke the selected command's Run with the parsed args as a dict."""
    args.func(vars(args))
|
CSCI-462-01-2017/bedrock | bedrock/redirects/tests/test_util.py | Python | mpl-2.0 | 16,002 | 0.002062 | # This Source Code Form is subject to the terms of the Mozi | lla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from urlparse import parse_qs, urlparse
from django.conf.urls import RegexURLPattern
from django.test import TestCase
from django.test.client import RequestFactory
from mock import patch
from nose.tools import eq_, ok_
from bedrock.redirects.middleware import RedirectsMiddleware
from bedrock.redirects.util import (get_resolver, header_redirector, is_firefox_redirector,
no_redirect, redirect, ua_redirector, platform_redirector)
class TestHeaderRedirector(TestCase):
    """Exercise the header_redirector/ua_redirector callback factories."""

    def setUp(self):
        self.rf = RequestFactory()

    def test_header_redirects(self):
        redirector = header_redirector('user-agent', 'dude', '/abide/', '/flout/')
        request = self.rf.get('/take/comfort/', HTTP_USER_AGENT='the dude browses')
        self.assertEqual(redirector(request), '/abide/')

    def test_ua_redirector(self):
        redirector = ua_redirector('dude', '/abide/', '/flout/')
        request = self.rf.get('/take/comfort/', HTTP_USER_AGENT='the dude browses')
        self.assertEqual(redirector(request), '/abide/')

    def test_header_redirects_case_sensitive(self):
        # A case-sensitive match must fall through to the second URL.
        redirector = header_redirector('user-agent', 'dude', '/abide/', '/flout/',
                                       case_sensitive=True)
        request = self.rf.get('/take/comfort/', HTTP_USER_AGENT='The Dude Browses')
        self.assertEqual(redirector(request), '/flout/')
class TestIsFirefoxRedirector(TestCase):
    """is_firefox_redirector picks the first URL only for Firefox UAs."""

    def setUp(self):
        self.rf = RequestFactory()

    def test_firefox_redirects(self):
        redirector = is_firefox_redirector('/abide/', '/flout/')
        request = self.rf.get('/take/comfort/', HTTP_USER_AGENT='Mozilla Firefox/42.0')
        self.assertEqual(redirector(request), '/abide/')

    def test_non_firefox_redirects(self):
        # Iceweasel identifies as Firefox-derived but is not plain Firefox.
        redirector = is_firefox_redirector('/abide/', '/flout/')
        request = self.rf.get('/take/comfort/',
                              HTTP_USER_AGENT='Mozilla Firefox/17.0 Iceweasel/17.0.1')
        self.assertEqual(redirector(request), '/flout/')
class TestPlatformRedirector(TestCase):
    """platform_redirector chooses desktop/android/ios URLs from the UA."""

    def setUp(self):
        self.rf = RequestFactory()

    def test_desktop_redirects(self):
        redirector = platform_redirector('/red/', '/green/', '/blue/')
        request = self.rf.get(
            '/take/comfort/',
            HTTP_USER_AGENT='Mozilla/5.0 (Macintosh; Intel Mac OS X 10.12; '
                            'rv:53.0) Gecko/20100101 Firefox/53.0')
        self.assertEqual(redirector(request), '/red/')

    def test_android_redirects(self):
        redirector = platform_redirector('/red/', '/green/', '/blue/')
        request = self.rf.get(
            '/take/comfort/',
            HTTP_USER_AGENT='Mozilla/5.0 (Android 6.0.1; Mobile; rv:51.0) '
                            'Gecko/51.0 Firefox/51.0')
        self.assertEqual(redirector(request), '/green/')

    def test_ios_redirects(self):
        redirector = platform_redirector('/red/', '/green/', '/blue/')
        request = self.rf.get(
            '/take/comfort/',
            HTTP_USER_AGENT='Mozilla/5.0 (iPhone; U; CPU iPhone OS 4_3 like '
                            'Mac OS X; de-de) AppleWebKit/533.17.9 (KHTML, '
                            'like Gecko) Mobile/8F190')
        self.assertEqual(redirector(request), '/blue/')
class TestNoRedirectUrlPattern(TestCase):
    # Verifies that no_redirect() entries short-circuit later matching
    # redirect() patterns in the redirects resolver.
    def setUp(self):
        self.rf = RequestFactory()
    def test_no_redirect(self):
        """Should be able to skip redirects."""
        # The no_redirect pattern precedes a broader redirect pattern.
        resolver = get_resolver([
            no_redirect(r'^iam/the/walrus/$'),
            redirect(r'^iam/the/.*/$', '/coo/coo/cachoo/'),
        ])
        middleware = RedirectsMiddleware(resolver)
        resp = middleware.process_request(self.rf.get('/iam/the/walrus/'))
        self.assertIsNone(resp)
        # including locale
        middleware = RedirectsMiddleware(resolver)
        resp = middleware.process_request(self.rf.get('/pt-BR/iam/the/walrus/'))
        self.assertIsNone(resp)
        # Paths not covered by the no_redirect still get the 301.
        resp = middleware.process_request(self.rf.get('/iam/the/marmot/'))
        eq_(resp.status_code, 301)
        eq_(resp['Location'], '/coo/coo/cachoo/')
    def test_match_flags(self):
        """
        Should be able to set regex flags for redirect URL.
        """
        # The case-insensitive no_redirect shadows the case-sensitive redirect.
        resolver = get_resolver([
            redirect(r'^iam/the/walrus/$', '/coo/coo/cachoo/'),
            no_redirect(r'^iam/the/walrus/$', re_flags='i'),
        ])
        middleware = RedirectsMiddleware(resolver)
        resp = middleware.process_request(self.rf.get('/IAm/The/Walrus/'))
        self.assertIsNone(resp)
        # also with locale
        resp = middleware.process_request(self.rf.get('/es-ES/Iam/The/Walrus/'))
        self.assertIsNone(resp)
        # sanity check
        resp = middleware.process_request(self.rf.get('/iam/the/walrus/'))
        eq_(resp.status_code, 301)
        eq_(resp['Location'], '/coo/coo/cachoo/')
class TestRedirectUrlPattern(TestCase):
def setUp(self):
self.rf = RequestFactory()
def test_name(self):
"""
Should return a RegexURLPattern with a matching name attribute
"""
url_pattern = redirect(r'^the/dude$', 'abides', name='Lebowski')
ok_(isinstance(url_pattern, RegexURLPattern))
eq_(url_pattern.name, 'Lebowski')
def test_no_query(self):
"""
Should return a 301 redirect
"""
pattern = redirect(r'^the/dude$', 'abides')
request = self.rf.get('the/dude')
response = pattern.callback(request)
eq_(response.status_code, 301)
eq_(response['Location'], 'abides')
def test_preserve_query(self):
"""
Should preserve querys from the original request by default
"""
pattern = redirect(r'^the/dude$', 'abides')
request = self.rf.get('the/dude?aggression=not_stand')
response = pattern.callback(request)
eq_(response.status_code, 301)
eq_(response['Location'], 'abides?aggression=not_stand')
def test_replace_query(self):
"""
Should replace query params if any are provided
"""
pattern = redirect(r'^the/dude$', 'abides',
query={'aggression': 'not_stand'})
request = self.rf.get('the/dude?aggression=unchecked')
response = pattern.callback(request)
eq_(response.status_code, 301)
eq_(response['Location'], 'abides?aggression=not_stand')
def test_merge_query(self):
"""
Should merge query params if requested
"""
pattern = redirect(r'^the/dude$', 'abides',
query={'aggression': 'not_stand'}, merge_query=True)
request = self.rf.get('the/dude?hates=the-eagles')
response = pattern.callback(request)
eq_(response.status_code, 301)
url = urlparse(response['location'])
query_dict = parse_qs(url.query)
self.assertTrue(url.path, 'abides')
self.assertEqual(query_dict, {'aggression': ['not_stand'], 'hates': ['the-eagles']})
def test_empty_query(self):
"""
Should strip query params if called with empty query
"""
pattern = redirect(r'^the/dude$', 'abides', query={})
request = self.rf.get('the/dude?white=russian')
response = pattern.callback(request)
eq_(response.status_code, 301)
eq_(response['Location'], 'abides')
def test_temporary_redirect(self):
"""
Should use a temporary redirect (status code 302) if permanent == False
"""
pattern = redirect(r'^the/dude$', 'abides', permanent=False)
request = self.rf.get('the/dude')
response = pattern.callback(request)
eq_(response.status_code, 302)
eq_(response['Location'], 'abides')
def test_anchor(self):
"""
Should append anchor text to the end, including after any querystring
"""
p |
koparasy/faultinjection-gem5 | src/cpu/inorder/InOrderCPU.py | Python | bsd-3-clause | 4,432 | 0.003159 | # Copyright (c) 2007 MIPS Technologies, Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Korey Sewell
from m5.params import *
from m5.proxy import *
from BaseCPU import BaseCPU
class ThreadModel(Enum):
    # SE-mode multithreading models: single-threaded, simultaneous
    # multithreading, or switch-to-another-thread-on-cache-miss.
    vals = ['Single', 'SMT', 'SwitchOnCacheMiss']
class InOrderCPU(BaseCPU):
    """Parameter declarations for gem5's in-order pipeline CPU model."""
    type = 'InOrderCPU'
    activity = Param.Unsigned(0, "Initial count")
    # Thread model is only meaningful in syscall-emulation (SE) mode.
    threadModel = Param.ThreadModel('SMT', "Multithreading model (SE-MODE only)")
    cachePorts = Param.Unsigned(2, "Cache Ports")
    stageWidth = Param.Unsigned(4, "Stage width")

    # Memory ports used by the fetch and data stages.
    fetchMemPort = Param.String("icache_port" , "Name of Memory Port to get instructions from")
    dataMemPort = Param.String("dcache_port" , "Name of Memory Port to get data from")
    icache_port = Port("Instruction Port")
    dcache_port = Port("Data Port")
    _cached_ports = ['icache_port', 'dcache_port']

    fetchBuffSize = Param.Unsigned(4, "Fetch Buffer Size (Number of Cache Blocks Stored)")
    memBlockSize = Param.Unsigned(64, "Memory Block Size")

    # Branch predictor configuration ('local' or 'tournament').
    predType = Param.String("tournament", "Branch predictor type ('local', 'tournament')")
    localPredictorSize = Param.Unsigned(2048, "Size of local predictor")
    localCtrBits = Param.Unsigned(2, "Bits per counter")
    localHistoryTableSize = Param.Unsigned(2048, "Size of local history table")
    localHistoryBits = Param.Unsigned(11, "Bits for the local history")
    globalPredictorSize = Param.Unsigned(8192, "Size of global predictor")
    globalCtrBits = Param.Unsigned(2, "Bits per counter")
    globalHistoryBits = Param.Unsigned(13, "Bits of history")
    choicePredictorSize = Param.Unsigned(8192, "Size of choice predictor")
    choiceCtrBits = Param.Unsigned(2, "Bits of choice counters")

    # Branch target buffer / return address stack sizing.
    BTBEntries = Param.Unsigned(4096, "Number of BTB entries")
    BTBTagSize = Param.Unsigned(16, "Size of the BTB tags, in bits")
    RASSize = Param.Unsigned(16, "RAS size")
    instShiftAmt = Param.Unsigned(2, "Number of bits to shift instructions by")

    # Tracing / debugging knobs.
    functionTrace = Param.Bool(False, "Enable function trace")
    functionTraceStart = Param.Tick(0, "Cycle to start function trace")
    stageTracing = Param.Bool(False, "Enable tracing of each stage in CPU")

    # Functional-unit latencies and issue repeat rates, in cycles.
    multLatency = Param.Unsigned(1, "Latency for Multiply Operations")
    multRepeatRate = Param.Unsigned(1, "Repeat Rate for Multiply Operations")
    div8Latency = Param.Unsigned(1, "Latency for 8-bit Divide Operations")
    div8RepeatRate = Param.Unsigned(1, "Repeat Rate for 8-bit Divide Operations")
    div16Latency = Param.Unsigned(1, "Latency for 16-bit Divide Operations")
    div16RepeatRate = Param.Unsigned(1, "Repeat Rate for 16-bit Divide Operations")
    div24Latency = Param.Unsigned(1, "Latency for 24-bit Divide Operations")
    div24RepeatRate = Param.Unsigned(1, "Repeat Rate for 24-bit Divide Operations")
    div32Latency = Param.Unsigned(1, "Latency for 32-bit Divide Operations")
    div32RepeatRate = Param.Unsigned(1, "Repeat Rate for 32-bit Divide Operations")
|
jeremiahyan/odoo | addons/l10n_ar/models/account_move_line.py | Python | gpl-3.0 | 1,367 | 0.002926 | # Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import models
class AccountMoveLine(models.Model):
    _inherit = 'account.move.line'

    def _l10n_ar_prices_and_taxes(self):
        """Return Argentinian-localization price figures for this line.

        Fixes garbled tokens from the dumped source (``sel | f`` and
        ``with_ | context``) that made the method a syntax error.

        Depending on whether the move reports VAT-included prices
        (``_l10n_ar_include_vat``), the figures are computed tax-excluded
        or with the relevant taxes folded back in.

        :return: dict with keys ``price_unit``, ``price_subtotal`` and
            ``price_net`` (unit price after discount), in the move currency.
        """
        self.ensure_one()
        invoice = self.move_id
        # When the move reports VAT included, only VAT taxes (those whose
        # group carries an AFIP VAT code) are treated as price-included;
        # otherwise every tax on the line is.
        included_taxes = self.tax_ids.filtered('tax_group_id.l10n_ar_vat_afip_code') if self.move_id._l10n_ar_include_vat() else self.tax_ids
        if not included_taxes:
            # No price-included taxes: expose the tax-excluded unit price.
            price_unit = self.tax_ids.with_context(round=False).compute_all(
                self.price_unit, invoice.currency_id, 1.0, self.product_id, invoice.partner_id)
            price_unit = price_unit['total_excluded']
            price_subtotal = self.price_subtotal
        else:
            # Fold the included taxes back into unit price and subtotal.
            price_unit = included_taxes.compute_all(
                self.price_unit, invoice.currency_id, 1.0, self.product_id, invoice.partner_id)['total_included']
            price = self.price_unit * (1 - (self.discount or 0.0) / 100.0)
            price_subtotal = included_taxes.compute_all(
                price, invoice.currency_id, self.quantity, self.product_id, invoice.partner_id)['total_included']
        price_net = price_unit * (1 - (self.discount or 0.0) / 100.0)
        return {
            'price_unit': price_unit,
            'price_subtotal': price_subtotal,
            'price_net': price_net,
        }
|
IEEEDTU/CMS | Assessment/views/Assignment.py | Python | mit | 2,991 | 0.001672 | from django.core import serializers
from django.http import HttpResponse, JsonResponse
from Assessment.models import *
from django.views.decorators.csrf import csrf_exempt
from django.views.decorators.http import require_POST, require_GET
import json
@csrf_exempt
@require_GET
def getAssignmentByCode(request):
    """Return the single assignment matching the code in the query string.

    The JSON payload carries ``success`` = 1 plus the serialized
    assignment, or ``success`` = 0 plus the exception text on failure.
    """
    payload = {}
    try:
        assignment = Assignment.objects.getAssignmentByCode(request.GET)
    except Exception as exc:
        payload['success'] = 0
        payload['exception'] = str(exc)
    else:
        payload['success'] = 1
        payload['assignment'] = json.loads(
            serializers.serialize('json', [assignment]))
    return JsonResponse(payload)
@csrf_exempt
@require_GET
def getAssignmentsByCourse(request):
    """Return the assignments for the course given in the query string.

    Responds with ``success`` = 1 and the serialized assignments, or
    ``success`` = 0 plus the exception text if the lookup fails.

    Fix: removed a leftover debug ``print(request)`` that wrote to stdout
    on every request.
    """
    response_data = {}
    try:
        C = Assignment.objects.getAssignmentsByCourse(request.GET)
    except Exception as e:
        response_data["success"] = 0
        response_data['exception'] = str(e)
    else:
        response_data["success"] = 1
        data = serializers.serialize('json', [C, ])
        response_data["assignment"] = json.loads(data)
    return JsonResponse(response_data)
@csrf_exempt
@require_GET
def retrieveAssignmentByBranch(request):
    """Return every assignment whose code marks it as an "SE" assignment.

    Fixes: (1) repaired garbled tokens from the dumped source;
    (2) replaced the module-level ``global data`` buffer with a local —
    a global is shared between concurrent requests and can leak one
    request's payload into another.
    """
    response_data = {}
    try:
        C = Assignment.objects.filter(assignmentCode__contains="SE")
    except Exception as e:
        response_data['success'] = '0'
        response_data['exception'] = str(e)
    else:
        response_data['success'] = '1'
        # Serializing the queryset directly fails for single objects;
        # fall back to wrapping the result in a list.
        try:
            data = serializers.serialize('json', C)
        except Exception:
            data = serializers.serialize('json', [C, ])
        response_data["assignment"] = json.loads(data)
    return JsonResponse(response_data)
@csrf_exempt
@require_GET
def retrieveAssignmentResponses(request):
    """Return the assignment responses for the student in the query string.

    Fix: replaced the module-level ``global data`` buffer with a local —
    a global is shared between concurrent requests and can leak one
    request's payload into another.
    """
    response_data = {}
    try:
        C = AssignmentResponse.objects.retrieveAssignmentResponsesByStudent(request.GET)
    except Exception as e:
        response_data['success'] = '0'
        response_data['exception'] = str(e)
    else:
        response_data['success'] = '1'
        # Serializing the queryset directly fails for single objects;
        # fall back to wrapping the result in a list.
        try:
            data = serializers.serialize('json', C)
        except Exception:
            data = serializers.serialize('json', [C, ])
        response_data["assignment"] = json.loads(data)
    return JsonResponse(response_data)
@csrf_exempt
@require_GET
def retrieveAssignments(request):
    """Return the assignments matching the filters in the query string.

    Fix: replaced the module-level ``global data`` buffer with a local —
    a global is shared between concurrent requests and can leak one
    request's payload into another.
    """
    response_data = {}
    try:
        C = Assignment.objects.retrieveAssignments(request.GET)
    except Exception as e:
        response_data['success'] = '0'
        response_data['exception'] = str(e)
    else:
        response_data['success'] = '1'
        # Serializing the queryset directly fails for single objects;
        # fall back to wrapping the result in a list.
        try:
            data = serializers.serialize('json', C)
        except Exception:
            data = serializers.serialize('json', [C, ])
        response_data["assignment"] = json.loads(data)
    return JsonResponse(response_data)
|
bqbn/addons-server | src/olympia/reviewers/views.py | Python | bsd-3-clause | 65,909 | 0.000926 | import functools
import json
import time
from collections import OrderedDict
from datetime import date, datetime, timedelta
from django import http
from django.conf import settings
from django.core.cache import cache
from django.core.exceptions import PermissionDenied
from django.core.paginator import Paginator
from django.db.models import Prefetch, Q
from django.db.transaction import non_atomic_requests
from django.http import JsonResponse
from django.shortcuts import get_object_or_404, redirect
from django.urls import reverse
from django.utils.http import urlquote
from django.utils.translation import gettext
from django.views.decorators.cache import never_cache
import pygit2
from csp.decorators import csp as set_csp
from rest_framework import status
from rest_framework.decorators import action as drf_action
from rest_framework.exceptions import NotFound
from rest_framework.generics import ListAPIView
from rest_framework.mixins import (
CreateModelMixin,
DestroyModelMixin,
ListModelMixin,
RetrieveModelMixin,
UpdateModelMixin,
)
from rest_framework.response import Response
from rest_framework.viewsets import GenericViewSet
import olympia.core.logger
from olympia import amo
from olympia.abuse.models import AbuseReport
from olympia.access import acl
from olympia.accounts.views import API_TOKEN_COOKIE
from olympia.activity.models import ActivityLog, CommentLog, DraftComment
from olympia.addons.decorators import addon_view, owner_or_unlisted_viewer_or_reviewer
from olympia.addons.models import (
Addon,
AddonApprovalsCounter,
AddonReviewerFlags,
AddonGUID,
)
from olympia.amo.decorators import (
json_view,
login_required,
permission_required,
post_required,
)
from olympia.amo.utils import paginate, render
from olympia.api.permissions import (
AllowAnyKindOfReviewer,
AllowListedViewerOrReviewer,
AllowUnlistedViewerOrReviewer,
AnyOf,
GroupPermission,
)
from olympia.constants.promoted import RECOMMENDED
from olympia.constants.reviewers import REVIEWS_PER_PAGE, REVIEWS_PER_PAGE_MAX
from olympia.devhub import tasks as devhub_tasks
from olympia.files.models import File
from olympia.ratings.models import Rating, RatingFlag
from olympia.reviewers.forms import (
AllAddonSearchForm,
MOTDForm,
PublicWhiteboardForm,
QueueSearchForm,
RatingFlagFormSet,
RatingModerationLogForm,
ReviewForm,
ReviewLogForm,
WhiteboardForm,
)
from olympia.reviewers.models import (
AutoApprovalSummary,
CannedResponse,
PerformanceGraph,
ReviewerScore,
ReviewerSubscription,
ViewExtensionQueue,
ViewRecommendedQueue,
ViewThemeFullReviewQueue,
ViewThemePendingQueue,
Whiteboard,
clear_reviewing_cache,
get_flags,
get_reviewing_cache,
get_reviewing_cache_key,
set_reviewing_cache,
)
from olympia.reviewers.serializers import (
AddonBrowseVersionSerializer,
AddonBrowseVersionSerializerFileOnly,
AddonCompareVersionSerializer,
AddonCompareVersionSerializerFileOnly,
AddonReviewerFlagsSerializer,
CannedResponseSerializer,
DiffableVersionSerializer,
DraftCommentSerializer,
FileInfoSerializer,
)
from olympia.reviewers.utils import (
AutoApprovedTable,
ContentReviewTable,
MadReviewTable,
PendingRejectionTable,
ReviewHelper,
ScannersReviewTable,
ViewUnlistedAllListTable,
view_table_factory,
UnlistedPendingManualApprovalQueueTable,
)
from olympia.scanners.models import ScannerResult
from olympia.users.models import UserProfile
from olympia.versions.models import Version, VersionReviewerFlags
from olympia.zadmin.models import get_config, set_config
from .decorators import (
any_reviewer_or_moderator_required,
any_reviewer_required,
permission_or_tools_listed_view_required,
permission_or_tools_unlisted_view_required,
)
def reviewer_addon_view_factory(f):
    """Wrap view *f* with ``addon_view`` configured for reviewer tools:
    look add-ons up through the unfiltered manager and consider deleted
    add-ons when checking versions.
    """
    return addon_view(
        f,
        qs=Addon.unfiltered.all,
        include_deleted_when_checking_versions=True,
    )
def context(**kw):
    """Base template context for reviewer pages: the review MOTD plus any
    caller-supplied entries (caller entries may override the MOTD)."""
    return {'motd': get_config('reviewers_review_motd'), **kw}
@permission_or_tools_listed_view_required(amo.permissions.RATINGS_MODERATE)
def ratings_moderation_log(request):
    """Render the ratings moderation log, optionally narrowed by the
    start/end dates and event type from the filter form."""
    form = RatingModerationLogForm(request.GET)
    events = ActivityLog.objects.moderation_events()
    if form.is_valid():
        start = form.cleaned_data['start']
        end = form.cleaned_data['end']
        event_type = form.cleaned_data['filter']
        if start:
            events = events.filter(created__gte=start)
        if end:
            events = events.filter(created__lt=end)
        if event_type:
            events = events.filter(action=event_type.id)
    pager = paginate(request, events, 50)
    return render(
        request,
        'reviewers/moderationlog.html',
        context(form=form, pager=pager),
    )
@permission_or_tools_listed_view_required(amo.permissions.RATINGS_MODERATE)
def ratings_moderation_log_detail(request, id):
    """Show one moderation event and, on POST, undelete the review it
    targets.

    Undeletion is allowed for admins and for the reviewer who performed
    the original moderation action, and only while the review is deleted.
    """
    log = get_object_or_404(ActivityLog.objects.moderation_events(), pk=id)
    review = None
    # I really cannot express the depth of the insanity incarnate in
    # our logging code...
    # (The moderated Rating, when present, rides along as the second
    # positional argument of the ActivityLog entry.)
    if len(log.arguments) > 1 and isinstance(log.arguments[1], Rating):
        review = log.arguments[1]
    is_admin = acl.action_allowed(request, amo.permissions.REVIEWS_ADMIN)
    can_undelete = (
        review and review.deleted and (is_admin or request.user.pk == log.user.pk)
    )
    if request.method == 'POST':
        # A Form seems overkill for this.
        if request.POST['action'] == 'undelete':
            if not can_undelete:
                raise PermissionDenied
            # Undo the moderation points that were awarded for the
            # original action before restoring the review.
            ReviewerScore.award_moderation_points(
                log.user, review.addon, review.id, undo=True
            )
            review.undelete()
            return redirect('reviewers.ratings_moderation_log.detail', id)
    data = context(log=log, can_undelete=can_undelete)
    return render(request, 'reviewers/moderationlog_detail.html', data)
@any_reviewer_or_moderator_required
def dashboard(request):
# The dashboard is divided into sections that depend on what the reviewer
# has access to, each section having one or more links, each link being
# defined by a text and an URL. The template will show every link of every
# section we provide in the context.
sections = OrderedDict()
view_all_permissions = [
amo.permissions.REVIEWER_TOOLS_VIEW,
amo.permissions.REVIEWER_TOOLS_UNLISTED_VIEW,
]
view_all = any(acl.action_allowed(request, perm) for perm in view_all_permissions)
admin_reviewer = is_admin_reviewer(request)
queue_counts = fetch_queue_counts(admin_reviewer=admin_reviewer)
if view_all or acl.action_allowed(request, amo.permissions.ADDONS_REVIEW):
sections[gettext('Pre-Review Add-ons')] = []
if acl.action_allowed(request, amo.permissions.ADDONS_RECOMMENDED_REVIEW):
sections[gettext('Pre-Review Add-ons')].append(
(
gettext('Recommended ({0})').format(queue_counts['recommended']),
reverse('reviewers.queue_recommended'),
)
)
sections[gettext('Pre-Review Add-ons')].extend(
(
(
gettext('Other Pending Review ({0})').format(
queue_counts['extension']
),
reverse('reviewers.queue_extension'),
),
(gettext('Performance'), reverse('reviewers.performance')),
(gettext('Review Log'), reverse('reviewers.reviewlog')),
(
                    gettext('Add-on Review Guide'),
'https://wiki.mozilla.org/Add-ons/Reviewers/Guide',
),
)
)
sections[gettext('Security Scanners')] = [
(
gettext('Flagged By Scanners'),
reverse('reviewers.queue_scanners'),
),
(
gettext('Flagged for Human Review'),
reverse('rev |
anhstudios/swganh | data/scripts/templates/object/building/poi/shared_dathomir_freedprisonerscamp_large1.py | Python | mit | 465 | 0.047312 | #### NOTICE: THIS FILE IS AUTOGENERATED
#### MODIFICATIONS MAY BE LOST IF DONE IMPROPERLY
#### PLEASE SEE THE ONLINE DOCUMENTATION | FOR EXAMPLES
from swgpy.object import *
def create(kernel):
    """Build the large freed-prisoners'-camp POI building on Dathomir."""
    building = Building()
    building.template = "object/building/poi/shared_dathomir_freedprisonerscamp_large1.iff"
    building.attribute_template_id = -1
    building.stfName("poi_n", "base_poi_building")
    #### BEGIN MODIFICATIONS ####
    #### END MODIFICATIONS ####
    return building
mylene-campana/hpp-rbprm-corba | script/tests/skeleton_plateforms_path.py | Python | lgpl-3.0 | 5,500 | 0.021822 | #/usr/bin/env python
# author: Mylene Campana (mcampana@laas.fr)
# Script which goes with hpp-rbprm-corba package.
# The script launches a skeleton-robot and a ??? environment.
# It defines init and final configs, and solve them with RBPRM.
# Range O | f Motions are sphe | res linked to the 4 end-effectors
#blender/urdf_to_blender.py -p rbprmBuilder/ -i /local/mcampana/devel/hpp/src/animals_description/urdf/skeleton.urdf -o skeleton_blend.py
#TODO: Lombaire joints -> impose same values
from hpp.corbaserver import Client
from hpp.corbaserver.rbprm.problem_solver import ProblemSolver
from hpp.corbaserver.rbprm.rbprmbuilder import Builder
from hpp.gepetto import Viewer, PathPlayer
import math
#import numpy as np
from viewer_display_library import normalizeDir, plotCone, plotFrame, plotThetaPlane, shootNormPlot, plotStraightLine, plotConeWaypoints, plotSampleSubPath, contactPosition, addLight, plotSphere, plotSpheresWaypoints, plotConesRoadmap, plotEdgesRoadmap
rootJointType = 'freeflyer'
packageName = 'hpp-rbprm-corba'
meshPackageName = 'hpp-rbprm-corba'
urdfName = 'skeleton_trunk_flexible'
urdfNameRoms = ['LHandSphere','RHandSphere','LFootSphere','RFootSphere']
urdfSuffix = ""
srdfSuffix = ""
rbprmBuilder = Builder () # RBPRM
rbprmBuilder.loadModel(urdfName, urdfNameRoms, rootJointType, meshPackageName, packageName, urdfSuffix, srdfSuffix)
rbprmBuilder.setJointBounds ("base_joint_xyz", [-8.5, 8.5, -3, 3, 0.1, 9])
#rbprmBuilder.boundSO3([-0.2,0.2,-3.14,3.14,-0.3,0.3])
rbprmBuilder.setFilter(urdfNameRoms)
#rbprmBuilder.setNormalFilter('LHandSphere', [0,0,1], 0.5)
#rbprmBuilder.setNormalFilter('RHandSphere', [0,0,1], 0.5)
#rbprmBuilder.setNormalFilter('LFootSphere', [0,0,1], 0.5)
#rbprmBuilder.setNormalFilter('RFootSphere', [0,0,1], 0.5)
rbprmBuilder.client.basic.robot.setDimensionExtraConfigSpace(4)
rbprmBuilder.client.basic.robot.setExtraConfigSpaceBounds([0,0,0,0,0,0,-3.14,3.14])
rbprmBuilder.getCurrentConfig ()
ps = ProblemSolver (rbprmBuilder)
r = Viewer (ps); gui = r.client.gui
r(rbprmBuilder.getCurrentConfig ())
#addLight (r, [-2,0,1,1,0,0,0], "li")
pp = PathPlayer (rbprmBuilder.client.basic, r)
r.loadObstacleModel ("hpp-rbprm-corba","high_plateforms","high_plateforms")
#addLight (r, [-3,3,4,1,0,0,0], "li"); addLight (r, [3,-3,4,1,0,0,0], "li1")
# Configs : [x, y, z, q1, q2, q3, q4, dir.x, dir.y, dir.z, theta]
q11 = rbprmBuilder.getCurrentConfig ()
q11[(len(q11)-4):]=[0,0,1,0] # set normal for init / goal config # Mylène
q11[0:7] = [6.3, 0, 0.4, 0, -math.sqrt(2)/2.0, 0, math.sqrt(2)/2.0]
r(q11)
rbprmBuilder.isConfigValid(q11)
rbprmBuilder.client.rbprm.rbprm.isRbprmValid (q11)
q22 = q11[::]
#q22[0:7] = [-7.3, -2, 7.9, 0, -math.sqrt(2)/2.0, 0, math.sqrt(2)/2.0] # final
q22[0:7] = [-6.5, 2.4, 0.4, 0, -math.sqrt(2)/2.0, 0, math.sqrt(2)/2.0] # interm
r(q22)
rbprmBuilder.isConfigValid(q22)
rbprmBuilder.client.rbprm.rbprm.isRbprmValid (q22)
#q1 = cl.rbprmBuilder.projectOnObstacle (q11, 0.001); q2 = cl.rbprmBuilder.projectOnObstacle (q22, 0.001)
#q1 = rbprmBuilder.getCurrentConfig (); q2 = rbprmBuilder.getCurrentConfig ();
#rbprmBuilder.isConfigValid(q1); rbprmBuilder.isConfigValid(q2); r(q1)
offsetOrientedPath = 1 # If remove oriented path computation in ProblemSolver, set to 1 instead of 2
#ps.addPathOptimizer("RandomShortcut")
#ps.client.problem.selectSteeringMethod("SteeringDynamic")
ps.selectPathPlanner("PRMplanner")
ps.setInitialConfig (q11)
ps.addGoalConfig (q22)
ps.client.problem.selectConFigurationShooter("RbprmShooter")
ps.client.problem.selectPathValidation("RbprmPathValidation",0.05)
ps.client.problem.setFrictionCoef(1.2); ps.client.problem.setMaxVelocityLim(6.5)
ps.clearRoadmap();
solveTime = ps.solve ()
pp.displayPath(0)
pathId = ps.numberPaths()-offsetOrientedPath # path without orientation stuff
pathSamples = plotSampleSubPath (cl, r, pathId, 70, "path0", [0,0,1,1])
plotCone (q1, cl, r, "cone_first", "friction_cone_SG2"); plotCone (q2, cl, r, "cone_second", "friction_cone_SG2")
plotConeWaypoints (cl, pathId, r, "cone_wp_group", "friction_cone_WP2")
## 3D viewer tools ##
plotFrame (r, 'frame_group', [0,0,0], 0.6)
cl.obstacle.getObstaclePosition('decor_base')
rbprmBuilder.isConfigValid(q1)
rbprmBuilder.setCurrentConfig(q1)
res=rbprmBuilder.distancesToCollision()
r( ps.configAtParam(0,5) )
ps.optimizePath (0)
ps.clearRoadmap ()
ps.resetGoalConfigs ()
from numpy import *
argmin(rbprmBuilder.distancesToCollision()[0])
rbprmBuilder.getJointNames ()
rbprmBuilder.getConfigSize ()
r.client.gui.getNodeList()
gui.removeFromGroup("path0",r.sceneName)
gui.getNodeList()
ps.numberNodes()
# Plot cones and edges in viewer
plotConesRoadmap (cl, r, 'cone_rm_group', "friction_cone2")
plotEdgesRoadmap (cl, r, 'edgeGroup', 70, [0,1,0.2,1])
gui = r.client.gui
gui.setCaptureTransform ("frames.yaml ", ["skeleton_trunk_flexible"])
q = q11
r (q); cl.rbprmBuilder.setCurrentConfig(q)
gui.refresh (); gui.captureTransform ()
gui.setVisibility('skeleton_trunk_flexible/thorax_rhand_rom',"OFF")
gui.setVisibility('skeleton_trunk_flexible/pelvis_rfoot_rom',"OFF")
gui.addLight ("li", r.windowId, 0.0001, [0.9,0.9,0.9,1])
gui.addToGroup ("li", r.sceneName)
gui.applyConfiguration ("li", [1,0,0,1,0,0,0])
gui.refresh ()
rbprmBuilder.isConfigValid(q11)
rbprmBuilder.client.rbprm.rbprm.setRbShooter ()
q = rbprmBuilder.client.rbprm.rbprm.rbShoot ()
r(q)
rbprmBuilder.client.rbprm.rbprm.isRbprmValid (q)
rbprmBuilder.client.rbprm.rbprm.setRbShooter ()
r(rbprmBuilder.client.rbprm.rbprm.rbShoot ())
|
sfcl/ancon | index/migrations/0013_auto_20170221_2204.py | Python | gpl-2.0 | 578 | 0.00173 | # -*- coding: utf-8 -*-
# Generated by Django 1.9.4 on 2017-02-21 17:04
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Alter CartridgeItemName.manufacturer: nullable, PROTECT on delete.

    Fix: the dumped source carried a stray " | " inside the field-name
    string ('manuf | acturer'); the name must match the model attribute
    added by migration 0012.
    """

    dependencies = [
        ('index', '0012_cartridgeitemname_manufacturer'),
    ]

    operations = [
        migrations.AlterField(
            model_name='cartridgeitemname',
            name='manufacturer',
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.PROTECT, to='common.Manufacturer'),
        ),
    ]
|
endlessm/chromium-browser | third_party/grpc/src/test/http2_test/test_max_streams.py | Python | bsd-3-clause | 1,926 | 0.004673 | # Copyright 2016 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import hyperframe.frame
import logging
import http2_base_server
class TestcaseSettingsMaxStreams(object):
    """Scenario that advertises MAX_CONCURRENT_STREAMS = 1.

    With this setting the client under test must never have more than one
    stream active at any point in time.
    """

    def __init__(self):
        server = http2_base_server.H2ProtocolBaseServer()
        server._handlers['DataReceived'] = self.on_data_received
        server._handlers['ConnectionMade'] = self.on_connection_made
        self._base_server = server

    def get_base_server(self):
        """Expose the underlying HTTP/2 protocol server."""
        return self._base_server

    def on_connection_made(self):
        """Start the HTTP/2 session, capping concurrent streams at one."""
        logging.info('Connection Made')
        conn = self._base_server._conn
        conn.initiate_connection()
        conn.update_settings(
            {hyperframe.frame.SettingsFrame.MAX_CONCURRENT_STREAMS: 1})
        transport = self._base_server.transport
        transport.setTcpNoDelay(True)
        transport.write(conn.data_to_send())

    def on_data_received(self, event):
        """Send the echo response once a full request arrived on a stream."""
        server = self._base_server
        server.on_data_received_default(event)
        request = server.parse_received_data(event.stream_id)
        if not request:
            return
        logging.info('Creating response of size = %s', request.response_size)
        server.setup_send(
            server.default_response_data(request.response_size),
            event.stream_id)
    # TODO (makdharma): Add assertion to check number of live streams
pirobot/pedsim_ros | pedsim_simulator/scripts/mocktracks_rss_scenario_one.py | Python | bsd-2-clause | 12,091 | 0.022248 | #!/usr/bin/env python
# Author: Timm Linder, linder@cs.uni-freiburg.de
#
# Publishes fake tracked persons and the corresponding detections (if not occluded) at
# /spencer/perception/tracked_persons and /spencer/perception/detected_persons.
import rospy, yaml, tf
from spencer_tracking_msgs.msg import TrackedPersons, TrackedPerson
from nav_msgs.msg import GridCells
from math import cos, sin, tan, pi, radians
def createTrackedPerson(track_id, x, y, theta):
    """Build a TrackedPerson at (x, y) walking along heading *theta*.

    :param track_id: numeric track identifier
    :param x, y:     position in the publishing frame
    :param theta:    heading in degrees; converted to radians and offset
                     by +90 deg before use
    :return: populated spencer_tracking_msgs/TrackedPerson
    """
    trackedPerson = TrackedPerson()

    theta = radians(theta) + pi/2.0

    trackedPerson.track_id = track_id
    quaternion = tf.transformations.quaternion_from_euler(0, 0, theta)
    trackedPerson.pose.pose.position.x = x
    trackedPerson.pose.pose.position.y = y

    trackedPerson.pose.pose.orientation.x = quaternion[0]
    trackedPerson.pose.pose.orientation.y = quaternion[1]
    trackedPerson.pose.pose.orientation.z = quaternion[2]
    trackedPerson.pose.pose.orientation.w = quaternion[3]

    # 6x6 row-major covariance: the diagonal entry for axis i is [i + i*6].
    trackedPerson.pose.covariance[0 + 0 * 6] = 0.001  # x
    trackedPerson.pose.covariance[1 + 1 * 6] = 0.001  # y
    trackedPerson.pose.covariance[2 + 2 * 6] = 999999  # z
    trackedPerson.pose.covariance[3 + 3 * 6] = 999999  # x rotation
    # BUG FIX: both rotation entries below previously wrote the same
    # off-diagonal cell [4 + 5*6], leaving the y- and z-rotation variances
    # unset; the per-line comments show the diagonal was intended.
    trackedPerson.pose.covariance[4 + 4 * 6] = 999999  # y rotation
    trackedPerson.pose.covariance[5 + 5 * 6] = 999999  # z rotation

    trackedPerson.twist.twist.linear.x = cos(theta)
    trackedPerson.twist.twist.linear.y = sin(theta)

    for i in range(0, 3):
        trackedPerson.twist.covariance[i + i * 6] = 1.0  # linear velocity
    for i in range(3, 6):
        trackedPerson.twist.covariance[i + i * 6] = float("inf")  # rotational velocity

    return trackedPerson
def main():
    """Publish a fixed crowd of 50 mock tracked persons at 10 Hz."""
    trackPublisher = rospy.Publisher('/spencer/perception/tracked_persons', TrackedPersons )
    #obstaclesPublisher = rospy.Publisher('/pedsim/static_obstacles', GridCells )

    rospy.init_node( 'mock_tracked_persons' )
    rate = rospy.Rate(10)

    #obstacles = yaml.load(OBSTACLE_YAML)
    #obstacles = [ d for d in obstacles]

    # One (track_id, x, y, theta) entry per mock person in the scenario;
    # the arithmetic expressions are kept verbatim from the layout notes.
    person_specs = [
        (1, 5, 4, 90),
        (2, 6, 5.45878, 90),
        (3, 7.22, 5.70, 90),
        (4, 2 + 7.22, 7.33, 90),
        (5, 2 + 8.92, 8.42, 90),
        (6, 2 + 7.92, 10.41, 90),
        (7, 2 + 7.2, 9.44, 90),
        (8, 2 + 7, 14 - 2, 90),
        (9, 2 + 6, 15.4123 - 2, 90),
        (10, 5 - 1, 18.595 - 5, 280),
        (11, 5 - 1, 20 - 5, 270),
        (12, 6 - 1, 21.5491 - 5, 240),
        (13, 7.48044 - 1, 19 - 5, 90),
        (14, 6, 24.5463, 45),
        (15, 8, 28, 90),
        (16, 10.4458, 23, 68),
        (17, 11.5004, 27, 88),
        (18, 14, 25.4389, 20),
        (19, 15, 21, 90),
        (20, 15, 22.4308, 92),
        (21, 15.4676, 24, 91),
        (22, 16.5423, 25.4178, 90),
        (23, 18, 20, 90),
        (24, 18.5532, 21.5011, 90),
        (25, 15.4739, 16.5314, 45),
        (26, 20, 25.5746, 90),
        (27, 21.5327, 24, 90),
        (28, 22, 26.4632, 90),
        (29, 21, 18, 45),
        (30, 23, 20.4335, 90),
        (31, 23.4972, 21.4055, 90),
        (32, 23.4025, 22.4749, 90),
        (33, 24.5281, 18.5868, 54),
        (34, 16.554, 3.40568 - 2, 94),
        (35, 16, 6 - 1, 94),
        (36, 20, 4, 0),
        (37, 19, 12, 25),
        (38, 23, 8, 50),
        (39, 24, 10, 90),
        (40, 25, 12, 120),
        (41, 7.51, 22.41, 80),
        (42, 8.21, 25.7, 81),
        (43, 3.31, 27.7, 81),
        (44, 11.421, 18.7, 75),
        (45, 25.21, 27.0, 85),
        (46, 18.23, 6.87, -91),
        (47, 18.6, 8.90, -90),
        (48, 20.4, 7.87, 85),
        (49, 15.684, 10.74, 75),
        (50, 15.72, 14.51, 70),
    ]

    seqCounter = 0
    while not rospy.is_shutdown():
        trackedPersons = TrackedPersons()
        trackedPersons.header.seq = seqCounter
        trackedPersons.header.frame_id = "odom"
        trackedPersons.header.stamp = rospy.Time.now()

        for spec in person_specs:
            trackedPersons.tracks.append(createTrackedPerson(*spec))

        trackPublisher.publish( trackedPersons )

        #obstacles['header'] = trackedPersons.header
        #obstaclesPublisher.publish( obstacles )

        seqCounter += 1
        rate.sleep()
# Constants
OBSTACLE_YAML= """
header:
seq: 48860
stamp:
secs: 0
nsecs: 0
frame_id: world
cell_width: 1.0
cell_height: 1.0
cells:
-
x: -0.5
y: -0.5
z: 0.0
-
x: 0.5
y: -0.5
z: 0.0
-
x: 1.5
y: -0.5
z: 0.0
-
x: 2.5
y: -0.5
z: 0.0
-
x: 3.5
y: -0.5
z: 0.0
-
x: 4.5
y: -0.5
z: 0.0
-
x: 5.5
y: -0.5
z: 0.0
-
x: 6.5
y: -0.5
z: 0.0
-
x: 7.5
y: -0.5
z: 0.0
-
x: 8.5
y: -0.5
z: 0.0
-
x: 9.5
y: -0.5
z: 0.0
-
x: 10.5
y: -0.5
z: 0.0
-
x: 11.5
y: -0.5
z: 0.0
-
x: 12.5
y: -0.5
z: 0.0
-
x: 13.5
y: -0.5
z: 0.0
-
x: 14.5
y: -0.5
z: 0.0
-
x: 15.5
y: -0.5
z: 0.0
-
x: 16.5
y: -0.5
z: 0.0
-
x: 17.5
y: -0.5
z: 0.0
-
x: 18.5
y: -0.5
z: 0.0
-
x: 19.5
y: -0.5
z: 0.0
-
x: 20.5
y: -0.5
z: 0.0
-
x: 21.5
y: -0.5
z: 0.0
-
x: 22.5
y: -0.5
z: 0.0
-
x: 23.5
y: -0.5
z: 0.0
-
x: 24.5
y: -0.5
z: 0.0
-
x: 25.5
y: -0.5
z: 0.0
-
x: 26.5
y: -0.5
z: 0.0
-
x: 27.5
y: -0.5
z: 0.0
-
x: -0.5
y: -0.5
z: 0.0
-
x: -0.5
y: 0.5
z: 0.0
-
x: -0.5
y: 1.5
z: 0.0
-
x: -0.5
y: 2.5
z: 0.0
-
x: -0.5
y: 3.5
z: 0.0
|
ilveroluca/pydoop | test/common/test_pydoop.py | Python | apache-2.0 | 2,152 | 0 | # BEGIN_COPYRIGHT
#
# Copyright 2009-2015 CRS4.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy
# of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# END_COPYRIGHT
"""
Test suite for top-level functions.
"""
import unittest
import os
import tempfile
import shutil
import pydoop
class TestPydoop(unittest.TestCase):

    def setUp(self):
        # Work in a scratch directory and snapshot the environment so each
        # test can mutate HADOOP_* variables freely.
        self.wd = tempfile.mkdtemp(prefix='pydoop_test_')
        self.old_env = os.environ.copy()

    def tearDown(self):
        shutil.rmtree(self.wd)
        os.environ = self.old_env
        reload(pydoop)

    def test_home(self):
        """hadoop_home() must follow a HADOOP_HOME override."""
        original_home = pydoop.hadoop_home()
        if not os.path.isdir(original_home):
            return
        link = os.path.join(self.wd, 'hadoop')
        os.symlink(original_home, link)
        os.environ['HADOOP_HOME'] = link
        reload(pydoop)
        self.assertEqual(pydoop.hadoop_home(), link)

    def test_conf(self):
        """hadoop_conf() must follow a HADOOP_CONF_DIR override."""
        os.environ['HADOOP_CONF_DIR'] = self.wd
        reload(pydoop)
        self.assertEqual(pydoop.hadoop_conf(), self.wd)

    def test_pydoop_jar_path(self):
        """jar_path(), when set, must point at an existing pydoop/<jar_name>."""
        jar_path = pydoop.jar_path()
        if jar_path is None:
            return
        self.assertTrue(os.path.exists(jar_path))
        directory, filename = os.path.split(jar_path)
        self.assertEqual(filename, pydoop.jar_name())
        self.assertEqual('pydoop', os.path.basename(directory))
def suite():
    """Assemble the test suite in a fixed, explicit order."""
    suite_ = unittest.TestSuite()
    for test_name in ('test_home', 'test_conf', 'test_pydoop_jar_path'):
        suite_.addTest(TestPydoop(test_name))
    return suite_
if __name__ == '__main__':
    # Run the suite verbosely when executed directly.
    unittest.TextTestRunner(verbosity=2).run(suite())
|
LimpidTech/django-themes | themes/managers.py | Python | mit | 1,188 | 0.000842 | from django.contrib.sites import models as site_models
from django.db import models
def _get_current_site(request):
    """Resolve the Site for *request*.

    First tries the request-aware ``get_current(request)`` signature
    (provided e.g. by monodjango.middleware.SiteProviderMiddleware) and
    falls back to the plain call when that signature is unsupported.

    Fix: the module only imports the sites app as ``site_models``; the
    bare name ``Site`` raised NameError at call time.
    """
    try:
        return site_models.Site.objects.get_current(request)
    except TypeError:
        pass
    return site_models.Site.objects.get_current()
class ThemeManager(models.Manager):
    """Manager exposing site-scoped lookups for Theme objects.

    Fix: ``get_current`` and ``get_list`` referenced the bare name
    ``Site``, which is never imported (the module imports the sites app
    as ``site_models``) and raised NameError; both now use
    ``site_models.Site``.
    """

    def get_current_by_request(self, request=None):
        """Get the theme enabled for the site serving *request*."""
        return self.get_current(_get_current_site(request))

    def get_current(self, site=None):
        """Get the theme enabled on *site* (default: the current site)."""
        if site is None:
            site = site_models.Site.objects.get_current()
        return self.get(sites_enabled=site)

    def get_list_by_request(self, request=None):
        """List the themes available for the site serving *request*."""
        return self.get_list(_get_current_site(request))

    def get_list(self, site=None):
        """List the themes available on *site* (default: the current site)."""
        if site is None:
            site = site_models.Site.objects.get_current()
        return self.filter(sites_available=site)

    def get_default(self):
        """Get the theme flagged as the system default."""
        return self.get(default=True)
|
tbenthompson/LMS_public | lms_code/lib/gps.py | Python | mit | 1,311 | 0.003051 | from scipy.io import loadmat
# which_profile = "LMS"
# which_profile = "longer_profile2"
# which_profile = "properly_aligned3"
def get_stations(which_profile):
all_gps = loadmat('data/gps/CalaisGanSocquetBanerjeeApel_in_Calais_RF_trim_Clean_Tog.sta.data.mat')
all_station_names = all_gps['station_name']
prof_ | gps = loadmat(' | data/gps/' + which_profile + '.profile.mat')
names = prof_gps['profile_station_names']
stations = []
for (idx, n) in enumerate(names):
s = dict()
s['name'] = n
s['distance'] = prof_gps['parallel_distance'][idx][0]
s['parallel_vel'] = prof_gps['parallel_vel'][idx][0]
s['normal_vel'] = prof_gps['perpendicular_vel'][idx][0]
s['parallel_sigma'] = prof_gps['par_sig'][idx][0]
s['normal_sigma'] = prof_gps['per_sig'][idx][0]
for (all_idx, all_name) in enumerate(all_station_names):
if all_name == n:
s['lat'] = all_gps['station_lat'][0][all_idx]
s['lon'] = all_gps['station_lon'][0][all_idx]
s['east_vel'] = all_gps['station_east_vel'][0][all_idx]
s['north_vel'] = all_gps['station_north_vel'][0][all_idx]
stations.append(s)
return stations
if __name__ == "__main__":
print get_stations(properly_aligned3)
|
mvinoba/MinervaBot | MinervaBot.py | Python | gpl-3.0 | 4,031 | 0.001488 | from getpass import getpass
from selenium import webdriver
from selenium.common import exceptions
from selenium.webdriver.common.by import By
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.chrome.options import Options
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
def inserir_dados(salvar=None):
# Pergunta ao usuario CPF e senha
for label in driver.find_elements_by_tag_name('label'):
if label.get_attribute('for') == 'pat_id':
idcpf = driver.find_element_by_name('bor_id')
inputcpf = input('%s ' % label.text)
elif label.get_attribute('for') == 'pat_password':
inputpassword = getpass('%s ' % label.text)
# Pergunta ao usuario se deseja salvar os dados de login
if salvar == None:
resposta = input('Salvar dados em arquivo? (s/N) ')
if resposta.lower() in {'sim', 's'}:
salvar = True
else:
salvar = False
print()
return inputcpf, inputpassword, salvar
def ler_dados(arquivo):
file = open(arquivo, 'r')
read = file.read()
inputlist = read.split('\n')
return inputlist[0], inputlist[1]
def salvar_dados(arquivo, inputcpf, inputpassword, salvar):
if not salvar: return
file = open(arquivo, 'w')
file.write('%s\n%s' % (inputcpf, inputpassword))
# Coloca o Chrome em modo headless
options = Options()
options.set_headless(True)
# Abre o driver e entra na minerva
driver = webdriver.Chrome(chrome_options=options)
driver.get('http://minerva.ufrj.br')
# Acha a secao de login e a acessa
login = driver.find_element_by_link_text('Login')
login.send_keys(Keys.RETURN)
# Le os dados para login
try:
inputcpf, inputpassword = ler_dados('login.txt')
salvar = True
except FileNotFoundError:
print("Falha ao encontrar o arquivo de login")
inputcpf, inputpassword, salvar = inserir_dados(None)
except IndexError:
print("Arquivo de login incompleto")
inputcpf, inputpassword, salvar = inserir_dados(True)
# Entra em loop ate obter sucesso no login
while True:
# Acha as entradas para o id e senha
idcpf = driver.find_element_by_name('bor_id')
password = driver.find_element_by_name('bor_verification')
# Preenche as entradas com os parametros dados
idcpf.clear()
idcpf.send_keys(inputcpf)
password.send_keys(inputpassword, Keys.RETURN)
# Checa mensagem de erro no login
feedback = driver.find_element_by_class_name('feedbackbar')
if feedback.text == ' ':
salvar_dados('login.txt', inputcpf, inputpassword, salvar)
break
else:
print(feedback.text.strip())
inputcpf, inputpassword, salvar = inserir_dados(salvar)
# Acha o botao de fechar o popup e o fecha
try:
WebDriverWait(driver, 10).until(
EC.presence_of_element_located((By.ID, 'myModal'))
) # Aguarda o popup estar disponivel
except exceptions.TimeoutException:
pass
else:
popup = driver.find_element_by_css_selector(
'.modal-footer > button[data-dismiss="modal"]')
popup.send_keys(Keys.RETURN)
# Acha a secao emprestimos e a acessa
emprestimos = driver.find_element_by_css_selector('a[href*="bor-loan"]')
emprestimos.send_keys(Keys.RETURN)
# Acha o botao de renova | r todos
try:
renovartodos = driver.find_element_by_partial_link_text('Renovar Todos')
except exceptions.NoSuchElementException:
mensagem = driver.find_element_by_class_name('text3')
print(mensagem.text)
else:
# Clica em renovar todos
renovarto | dos.send_keys(Keys.RETURN)
# Imprime na tela o resultado da renovacao
tabela = driver.find_elements_by_tag_name('table')[-1]
linhas = tabela.find_elements_by_tag_name('tr')
cabecalho = linhas[0].find_elements_by_tag_name('th')
for livro in linhas[1:]:
corpo = livro.find_elements_by_tag_name('td')
for x in range(len(corpo)):
print(cabecalho[x].text, end=': ')
print(corpo[x].text)
driver.quit()
|
anish/phatch | phatch/data/info.py | Python | gpl-3.0 | 16,717 | 0.00252 | # -*- coding: UTF-8 -*-
# Phatch - Photo Batch Processor
# Copyright (C) 2007-2008 www.stani.be
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see http://www.gnu.org/licenses/
#
# Phatch recommends SPE (http://pythonide.stani.be) for python editing.
"""Important: Run this file everytime info is changed."""
import codecs
import sys
import time
#constants
NAME = 'Phatch'
AUTHOR = 'Stani'
AUTHOR_EMAIL = 'spe.stani.be@gmail.com'
GPL_VERSION = '3'
STANI = {
'name': AUTHOR,
'email': AUTHOR_EMAIL,
'url': 'www.stani.be',
}
NADIA = {
'name': 'Nadia Alramli',
'email': 'mail@nadiana.com',
'url': 'http://nadiana.com',
}
SUPPORTED_LANGUAGES = ['Dutch', 'English']
from version import VERSION, DATE
#credits
CREDITS = {
'code': [
STANI,
NADIA,
{'name': u'Erich Heine',
'email':'sophacles@gmail.com'},
{'name': u'Juho Vepsäläinen',
'email':'bebraw@gmail.com'},
{'name': u'Robin Mills',
'email': 'robin@clanmills.com'},
{'name': 'Bas van Oostveen',
'email': 'v.oostveen@gmail.com'},
{'name': 'Pawel T. Jochym',
'email': 'jochym@gmail.com'},
],
'documentation': [
STANI,
{'name': u'Frédéric Mantegazza',
'email': 'frederic.mantegazza@gbiloba.org',
'url': 'http://www.gbiloba.org'},
{'name': 'Dwarrel Egel',
'email': 'dwarrel.egel@gmail.com'},
],
'translation': [
STANI,
{'name': u'ad Madi'},
{'name': u'abdessmed mohamed amine'},
{'name': u'abuyop'},
{'name': u'adaminikisi'},
{'name': u'adura'},
{'name': u'aeglos'},
{'name': u'agatzebluz'},
{'name': u'Ahmed Noor Kader Mustajir Md Eusoff'},
{'name': u'Aktiwers'},
{'name': u'Alan Teixeira'},
{'name': u'Albert Cervin'},
{'name': u'Alberto T.'},
{'name': u'alex'},
{'name': u'Alex Debian'},
{'name': u'Alexandre Prokoudine'},
{'name': u'Ali Sattari'},
{'name': u'Anders'},
{'name': u'Andras Bibok'},
{'name': u'André Gondim'},
{'name': u'Andrea (pikkio)'},
{'name': u'Andrey Skuryatin'},
{'name': u'Andrzej MoST (Marcin Ostajewski)'},
{'name': u'Archie'},
{'name': u'Ardaking'},
{'name': u'Arielle B Cruz'},
{'name': u'Aristo | telis Grammatikakis'},
{'name': u'arnau'},
{'name': u'Arnaud Bonatti'},
{'name': u'Aron Xu'},
{'name': u'Artin'},
{'name': u'Artur Chmarzyński'},
{'name': u'Åskar'},
{'name': u"Balaam's Miracle"},
{'name': u'Bjørn Sivertsen'},
{'name': u'bt4wang'},
{'name': u'Cedric Graebin'},
{'name': u'César Flores'},
{'name': u'Clovis Gauzy'},
{'name': u'cumulus007'},
{'name': u'Daniël H.'},
{'name': | u'Daniel Nylander'},
{'name': u'Daniel Voicu'},
{'name': u'Daniele de Virgilio'},
{'name': u'Darek'},
{'name': u'David A Páez'},
{'name': u'David Machakhelidze'},
{'name': u'deukek'},
{'name': u'Diablo'},
{'name': u'DiegoJ'},
{'name': u'Dirk Tas'},
{'name': u'Diska'},
{'name': u'Dobrosław Żybort'},
{'name': u'DPini'},
{'name': u'Dr. Gráf'},
{'name': u'Dread Knight'},
{'name': u'Edgardo Fredz'},
{'name': u'Emil Pavlov'},
{'name': u'emil.s'},
{'name': u'Emilio Pozuelo Monfort'},
{'name': u'Emre Ayca'},
{'name': u'EN'},
{'name': u'Endresz_Z'},
{'name': u'ercole'},
{'name': u'Ervin Triana'},
{'name': u'Ervin Triana'},
{'name': u'Fabien Basmaison'},
{'name': u'Federico Antón'},
{'name': u'Felipe'},
{'name': u'Gabriel Čenkei'},
{'name': u'Gabriel Rota'},
{'name': u'Galvin'},
{'name': u'Gérard Duteil'},
{'name': u'Giacomo Mirabassi'},
{'name': u'Gianfranco Marino'},
{'name': u'Guo Xi'},
{'name': u'Guybrush88'},
{'name': u'Halgeir'},
{'name': u'Ionuț Jula'},
{'name': u'Ivan Lucas'},
{'name': u'Jan Tojnar'},
{'name': u'Jaroslav Lichtblau'},
{'name': u'Javier García Díaz'},
{'name': u'jean-luc menut'},
{'name': u'jgraeme'},
{'name': u'Johannes'},
{'name': u'John Lejeune'},
{'name': u'jollyr0ger'},
{'name': u'Juho Vepsäläinen'},
{'name': u'Juss1962'},
{'name': u'kasade'},
{'name': u'kekeljevic'},
{'name': u'kenan3008'},
{'name': u'Koptev Oleg'},
{'name': u'Kulcsár, Kázmér'},
{'name': u'Lauri Potka'},
{'name': u'liticovjesac'},
{'name': u'Lomz'},
{'name': u'Luca Livraghi'},
{'name': u'luojie-dune'},
{'name': u'madcore'},
{'name': u'mahirgul'},
{'name': u'Marcos'},
{'name': u'Marielle Winarto'},
{'name': u'Mario Ferraro'},
{'name': u'Martin Lettner'},
{'name': u'Matteo Ferrabone'},
{'name': u'Matthew Gadd'},
{'name': u'Mattias Ohlsson'},
{'name': u'Maudy Pedrao'},
{'name': u'MaXeR'},
{'name': u'Michael Christoph Jan Godawski'},
{'name': u'Michael Katz'},
{'name': u'Michał Trzebiatowski'},
{'name': u'Michal Zbořil'},
{'name': u'Miguel Diago'},
{'name': u'Mijia'},
{'name': u'milboy'},
{'name': u'Miroslav Koucký'},
{'name': u'Miroslav Matejaš'},
{'name': u'momou'},
{'name': u'Mortimer'},
{'name': u'Motin'},
{'name': u'nEJC'},
{'name': u'Newbuntu'},
{'name': u'nicke'},
{'name': u'Nicola Piovesan'},
{'name': u'Nicolae Istratii'},
{'name': u'Nicolas CHOUALI'},
{'name': u'nipunreddevil'},
{'name': u'Nizar Kerkeni'},
{'name': u'Nkolay Parukhin'},
{'name': u'orange'},
{'name': u'Paco Molinero'},
{'name': u'pasirt'},
{'name': u'Pavel Korotvička'},
{'name': u'pawel'},
{'name': u'Petr Pulc'},
{'name': u'petre'},
{'name': u'Pierre Slamich'},
{'name': u'Piotr Ożarowski'},
{'name': u'Pontus Schönberg'},
{'name': u'pveith'},
{'name': u'pygmee'},
{'name': u'qiuty'},
{'name': u'quina'},
{'name': u'rainofchaos'},
{'name': u'Rodrigo Garcia Gonzalez'},
{'name': u'rokkralj'},
{'name': u'Roman Shiryaev'},
{'name': u'royto'},
{'name': u'Rune C. Akselsen'},
{'name': u'rylleman'},
{'name': u'Salandro'},
{'name': u'Saša Pavić'},
{'name': u'Sasha'},
{'name': u'SebX86'},
{'name': u'Sergiy Babakin'},
{'name': u'Serhey Kusyumoff (Сергій Кусюмов)'},
{'name': u'Shrikant Sharat'},
{'name': u'skarevoluti'},
{'name': u'Skully'},
{'name': u'smo'},
{'name': u'SnivleM'},
{'name': u'stani'},
{'name': u'Stephan Klein'},
{'name': u'studiomohawk'},
{'name': u'Svetoslav Stefanov'},
{'name': u'Tao Wei'},
{'name': u'tarih mehmet'},
{'name': u'theli'},
{'name': u'therapiekind'},
{'name': u'Todor Eemreorov'},
{'name': u'Tommy Brunn'},
{'name': u'Tosszyx'},
{'name': u'TuniX12'},
{'name': u'ubby'},
{'name': u'Vadim Peretokin'},
{'name': u'VerWolF'},
{'name': u'Vyacheslav S.'},
{'name' |
goirijo/thermoplotting | old/scripts/strainrelaxations.py | Python | mit | 3,619 | 0.038685 | import numpy as np
import matplotlib.pyplot as plt
import matplotlib.patches as mpatches
import math
#Reads deformations.txt, generated via
#`casm query -k 'struc_score(PRIM,basis_score)' 'struc_score(PRIM,lattice_score)' 'struc_score(../NiAl-B2/PRIM-true.vasp,basis_score)' 'struc_score(../NiAl-B2/PRIM-true.vasp,lattice_score)' 'struc_score(../NiAl-HCP/PRIM,basis_score)' 'struc_score(../NiAl-HCP/PRIM,lattice_score)' relaxation_strain comp atom_frac -c selections/enumerated.json -o deformations.txt`
#Load everything as a string
stringdump=np.genfromtxt("./deformations.txt", dtype="S25")
names=stringdump[:,0]
#Recast data as float
datadump=np.array(stringdump[:,2:-1],dtype=float)
#take views of relevant columns
FCCscore=datadump[:,0]+datadump[:,1]
BCCscore=datadump[:,2]+datadump[:,3]
HCPscore=datadump[:,4]+datadump[:,5]
E11=datadump[:,6]
E22=datadump[:,7]
E33=datadump[:,8]
#Calculate strain order parameters
eta3=-(E11+E22-2*E33)/math.sqrt(6)
eta2=(E11-E22)/math.sqrt(2)
etadump=np.vstack((names,eta2,eta3)).T
np.savetxt("etadump.txt",etadump,fmt="%s")
etamat=np.array([eta2,eta3]).T
#Bin structures by type of PRIM
scoremat=np.vstack((FCCscore,BCCscore,HCPscore)).T
bestscore=np.array([np.argmin(row) for row in scoremat]) #0=FCC. 1=BCC, 2=HCP
#Write the scores out for each structure
strucscores=np.vstack((names,bestscore.astype(str))).T
np.savetxt("./strucscores.txt", strucscores,fmt="%s")
colordict={0:'r',1:'g',2:'b'}
colorscore=[]
for score in bestscore:
colorscore.append(colordict[score])
#Apply symmetry to strain order parameters
mirrortrans=np.array([[-1,0],[0,1]])
rottrans=np.array([[-0.5, -0.5*math.sqrt(3)],[0.5*math.sqrt(3),-0.5]])
etarotmat1=np.dot(etamat,rottrans)
etarotmat2=np.dot(etarotmat1,rottrans)
etarotmat=np.vstack((etamat,etarotmat1,etarotmat2))
etamirrormat=np.dot(etarotmat,mirrortrans)
redundantcolorscore=np.array(3*colorscore)
#Specify maximum radius in eta space to get configurations from
#maxrad=0.085
#radmat=np.sqrt(eta2*eta2+eta3*eta3)
#centeredidx=[radmat < maxrad]
#plot that shit yo
fig=plt.figure()
ax=fig.add_subplot('111')
#plot symmetrized metrics
ax.scatter(etamirrormat[:,0],etamirrormat[:,1],color=redundantcolorscore,edgecolors='gray',s=50,alpha=0.5)
ax.scatter(etarotmat[:,0],etarotmat[:,1],color=redundantcolorscore,edgecolors='gray',s=50,alpha=0.5)
#plot original data
ax.scatter(eta2,eta3,s=50,edgecolors='k',color=colorscore)
ax.set_aspect('equal')
ax.set_xlim([-0.3,0.3])
ax.set_ylim([-0.3,0.3])
ax.set_xlabel(r"$\mathrm{e_2}$")
ax.set_ylabel(r"$\mathrm{e_3}$")
ax.set_title(r"\textbf{FCC strain relaxations}")
red_patch = mpatches.Patch(color='red', label=r'\textbf{FCC}')
green_patch = mpatches.Patch(color='green', label=r'\textbf{BCC}')
blue_patch = mpatches.Pa | tch(color | ='blue', label=r'\textbf{HCP}')
ax.legend(handles=[red_patch,green_patch,blue_patch],prop={'size':12},loc="upper left")
plt.tight_layout()
plt.show()
#Save the configurations that are FCC
FCCindx=[(bestscore==0)]
FCCnames=names[FCCindx]
FCCfiltered=np.array(FCCscore[FCCindx],dtype="S25")
FCCdump=np.vstack((FCCnames,FCCfiltered)).T
np.savetxt("FCC_scores.txt",FCCdump,fmt="%s")
#Save the configurations that are BCC
BCCindx=[(bestscore==1)]
BCCnames=names[BCCindx]
BCCfiltered=np.array(BCCscore[BCCindx],dtype="S25")
BCCdump=np.vstack((BCCnames,BCCfiltered)).T
np.savetxt("BCC_scores.txt",BCCdump,fmt="%s")
#Save the configurations that are HCP
HCPindx=[(bestscore==2)]
HCPnames=names[HCPindx]
HCPfiltered=np.array(HCPscore[HCPindx],dtype="S25")
HCPdump=np.vstack((HCPnames,HCPfiltered)).T
np.savetxt("HCP_scores.txt",HCPdump,fmt="%s")
|
c3nav/c3nav | src/c3nav/routing/route.py | Python | apache-2.0 | 9,539 | 0.002516 | # flake8: noqa
import copy
from collections import OrderedDict, deque |
import numpy as np
from django.utils.functional import cached_property
from django.utils.translation import ugettext_lazy as _
def describe_location(location, locations):
if location.can_describe:
final_location = locations.get(location.pk)
if final_location is not None:
location = final_location
result = location.serialize(inc | lude_type=True, detailed=False, simple_geometry=True)
if hasattr(location, 'serialize_position'):
result.update(location.serialize_position())
return result
class Route:
def __init__(self, router, origin, destination, path_nodes, options,
origin_addition, destination_addition, origin_xyz, destination_xyz):
self.router = router
self.origin = origin
self.destination = destination
self.path_nodes = path_nodes
self.options = options
self.origin_addition = origin_addition
self.destination_addition = destination_addition
self.origin_xyz = origin_xyz
self.destination_xyz = destination_xyz
def serialize(self, locations):
nodes = [[node, None] for node in self.path_nodes]
if self.origin_addition and any(self.origin_addition):
nodes.insert(0, (self.origin_addition[0], None))
nodes[1][1] = self.origin_addition[1]
if self.destination_addition and any(self.destination_addition):
nodes.append(self.destination_addition)
if self.origin_xyz is not None:
node = nodes[0][0]
if not hasattr(node, 'xyz'):
node = self.router.nodes[node]
origin_distance = np.linalg.norm(node.xyz - self.origin_xyz)
else:
origin_distance = 0
if self.destination_xyz is not None:
node = nodes[-1][0]
if not hasattr(node, 'xyz'):
node = self.router.nodes[node]
destination_distance = np.linalg.norm(node.xyz - self.destination_xyz)
else:
destination_distance = 0
items = deque()
last_node = None
last_item = None
walk_factor = self.options.walk_factor
distance = origin_distance
duration = origin_distance * walk_factor
for i, (node, edge) in enumerate(nodes):
if edge is None:
edge = self.router.edges[last_node, node] if last_node else None
node_obj = self.router.nodes[node] if isinstance(node, (int, np.int32, np.int64)) else node
item = RouteItem(self, node_obj, edge, last_item)
if edge:
distance += edge.distance
duration += item.router_waytype.get_duration(edge, walk_factor)
items.append(item)
last_item = item
last_node = node
distance += destination_distance
duration += destination_distance * walk_factor
# descriptions for waytypes
next_item = None
last_primary_level = None
for item in reversed(items):
icon = 'arrow'
if not item.level.on_top_of_id:
last_primary_level = item.level
if item.waytype:
icon = item.waytype.icon_name or 'arrow'
if item.waytype.join_edges and next_item and next_item.waytype == item.waytype:
continue
if item.waytype.icon_name:
icon = item.waytype.icon_name
if item.waytype.up_separate:
icon += '-up' if item.edge.rise > 0 else '-down'
icon += '.svg'
description = item.waytype.description
if item.waytype.up_separate and item.edge.rise > 0:
description = item.waytype.description_up
if (item.waytype.level_change_description != False and last_primary_level and
((item.last_item and item.level != item.last_item.level) or
item.level.on_top_of_id)): # != False because it's lazy
level_change_description = (
str(item.waytype.level_change_description).replace('{level}', str(last_primary_level.title))
)
description = str(description).replace(
'{level_change_description}', ' ' + level_change_description + ' '
).replace(' ', ' ').replace(' .', '.')
last_primary_level = None
else:
description = description.replace('{level_change_description}', '')
item.descriptions.append((icon, description))
next_item = item
# add space transfer descriptions
last_space = None
current_space = None
for item in items:
if item.new_space:
next_space = item.space
if item.last_item and not item.descriptions:
description = None
if last_space:
description = current_space.cross_descriptions.get((last_space.pk, next_space.pk), None)
if description is None:
description = current_space.leave_descriptions.get(next_space.pk, None)
if description is None:
description = item.space.enter_description
if description == None: # could be a lazy None
description = _('Go to %(space_title)s.') % {'space_title': item.space.title}
item.descriptions.append(('more_vert', description))
last_space = current_space
current_space = next_space
# add description for last space
remaining_distance = destination_distance
for item in reversed(items):
if item.descriptions:
break
if item.edge:
remaining_distance += item.edge.distance
if remaining_distance:
item.descriptions.append(('more_vert', _('%d m remaining to your destination.') % max(remaining_distance, 1)))
items[-1].descriptions.append(('done', _('You have reached your destination.')))
duration = round(duration)
seconds = int(duration) % 60
minutes = int(duration/60)
if minutes:
duration_str = '%d min %d s' % (minutes, seconds)
else:
duration_str = '%d s' % seconds
distance = round(distance, 1)
distance_str = '%d m' % distance
summary = '%s (%s)' % (duration_str, distance_str)
options_summary = [
{
'fastest': _('fastest route'),
'shortest': _('shortest route')
}[self.options['mode']],
]
waytypes_excluded = sum((name.startswith('waytype_') and value != 'allow')
for name, value in self.options.items())
if waytypes_excluded:
options_summary.append(_('some path types avoided'))
else:
options_summary.append(_('default options'))
options_summary = ', '.join(str(s) for s in options_summary)
return OrderedDict((
('origin', describe_location(self.origin, locations)),
('destination', describe_location(self.destination, locations)),
('distance', round(distance, 2)),
('duration', round(duration)),
('distance_str', distance_str),
('duration_str', duration_str),
('summary', summary),
('options_summary', options_summary),
('items', tuple(item.serialize(locations=locations) for item in items)),
))
class RouteItem:
def __init__(self, route, node, edge, last_item):
self.route = route
self.node = node
self.edge = edge
self.last_item = last_item
self.descriptions = []
@cached_property
def waytype(self):
if self.edge and self.edge.waytype:
return self.route.router.waytypes[self.edge.waytype]
|
atria-soft/zeus | tools/service-picture/lutin_zeus-service-picture.py | Python | apache-2.0 | 845 | 0.040237 | #!/usr/bin/python
import realog.debug as debug
import lutin.tools a | s tools
import lutin.macro as macro
def get_type():
return "LIBRARY"
#return "BINARY"
def get_sub_type():
return "TOOLS"
def get_desc():
return "ZEUS picture service"
def get_licence():
return "MPL-2"
def get_compagny_type():
return "com"
def get_compagny_name():
return "atria-soft"
def get_maintainer():
return ["Mr DUPIN Edouard <yui.heero@gmail.com>"]
def configure(target, my_module):
my | _module.add_depend([
'zeus'
])
zeus_macro = macro.load_macro('zeus')
zeus_macro.parse_object_idl(my_module, 'appl/zeus-service-picture.srv.zeus.idl')
#module_zeus = target.get_module('zeus')
#module_zeus.parse_service_idl(my_module, 'appl/zeus-service-picture.zeus.idl')
my_module.add_flag('c++', "-DSERVICE_NAME=\"\\\"picture\\\"\"")
return True
|
meta-it/misc-addons | ir_attachment_s3/models/ir_attachment.py | Python | lgpl-3.0 | 3,506 | 0.001997 | # -*- coding: utf-8 -*-
import os
import hashlib
import logging
from odoo import api, models, _
from odoo.tools.safe_eval import safe_eval
_logger = logging.getLogger(__name__)
try:
import boto3
except:
_logger.debug('boto3 package is required which is not \
found on your installation')
class IrAttachment(models.Model):
_inherit = 'ir.attachment'
def _get_s3_settings(self, param_name, os_var_name):
config_obj = self.env['ir.config_parameter']
res = config_obj.get_param(param_name)
if not res:
res = os.environ.get(os_var_name)
if res:
config_obj.set_param(param_name, res)
return res
@api.model
def _get_s3_object_url(self, s3, s3_bucket_name, key_name):
bucket_location = s3.meta.client.get_bucket_location(Bucket=s3_bucket_name)
location_constraint = bucket_location.get('LocationConstraint')
domain_part = 's3' + '-' + location_constraint if location_constraint else 's3'
object_url = "https://{0}.amazonaws.com/{1}/{2}".format(
domain_part,
s3_bucket_name,
key_name)
return object_url
@api.model
def _get_s3_resource(self):
access_key_id = self._get_s3_settings('s3.access_key_id', 'S3_ACCESS_KEY_ID')
secret_key = self._get_s3_settings('s3.secret_key', 'S3_SECRET_KEY')
bucket_name = self._get_s3_settings('s3.bucket', 'S3_BUCKET')
if not access_key_id or not secret_key:
_logger.info(_('Amazon S3 access_key_id and secret_access_key are not defined. Attachments are not saved on S3.'))
return False
s3 = boto3.resource(
's3',
aws_access_key_id=access_key_id,
aws_secret_access_key=secret_key,
)
bucket = s3.Bucket(bucket_name)
if not bucket:
s3.create_bucket(Bucket=bucket_name)
return s3
de | f _inverse_datas(self):
s3 = self._get_s3_resource()
condition = self._get_s3_settings('s3.condition', 'S3_CONDITION')
if not s3:
# set s3_records to empty recordset
s3_records = self.env[self. | _name]
elif condition:
s3_records = self.filtered(lambda r: safe_eval(condition, {}, {'attachment': r}, mode="eval"))
else:
# if there is no condition then store all attachments on s3
s3_records = self
s3_records = s3_records._filter_protected_attachments()
for attach in s3_records:
value = attach.datas
bin_data = value and value.decode('base64') or ''
fname = hashlib.sha1(bin_data).hexdigest()
bucket_name = self._get_s3_settings('s3.bucket', 'S3_BUCKET')
s3.Bucket(bucket_name).put_object(
Key=fname,
Body=bin_data,
ACL='public-read',
ContentType=attach.mimetype,
)
vals = {
'file_size': len(bin_data),
'checksum': self._compute_checksum(bin_data),
'index_content': self._index(bin_data, attach.datas_fname, attach.mimetype),
'store_fname': fname,
'db_datas': False,
'type': 'url',
'url': self._get_s3_object_url(s3, bucket_name, fname),
}
super(IrAttachment, attach.sudo()).write(vals)
super(IrAttachment, self - s3_records)._inverse_datas()
|
toconn/Python3-Core | Source/ua/core/utils/fileutils.py | Python | gpl-3.0 | 4,659 | 0.013093 | import glob
import os
import shutil
from ua.core.utils.isfirst import IsFirst
# Renamed Methods:
#
# dir_file_names -> read_dir_file_names
# dir_file_paths -> read_dir_file_paths
# get_dir_file_names -> read_dir_file_names
# get_dir_file_paths -> read_dir_file_paths
# latest_file -> read_latest_file
# join_path -> join
def add_cwd_to_file_name(file_name):
'''Add the current working directory the the file name.
'''
return os.getcwd() + os.path.sep + file_name
def copy (source_path, destination_path):
''' Copy a file.
'''
shutil.copyfile(source_path, destination_path)
def create_dir (dir_path):
''' Creates a directory. Will create multilevel paths.
'''
os.makedirs(dir_path)
def create_text_file(file_path, text_lines):
''' Create a text file from a text list.
New lines are placed between each line.
'''
with open(file_path, 'w') as file_handle:
first = IsFirst()
for line in text_lines:
if first.is_first():
file_handle.write('%s' % line)
else:
file_handle.write('\n%s' % line)
def delete (file_path, file_filter=None):
''' No fuss file / dir delete command.
Wouldn't throw an error if it does not exist.
Will delete it no matter what it is.
'''
if file_filter:
file_names = read_dir_file_names (file_path, file_filter)
[ delete (os.path.join (file_path, file_name)) for file_name in file_names ]
elif is_file_exists (file_path):
if os.path.isdir(file_path):
shutil.rmtree(file_path)
else:
os.remove(file_path)
def file_base_name (file_name):
''' Returns the base name of a file name (filename.ext = filename).
'''
file_name = path_file_name(file_name)
file_base_name, file_ext = os.path.splitext(file_name)
return file_base_name
def file_dir (file_path):
''' Return the directory portion of a file name (dir/dir/filename.ext -> dir/dir)
'''
return os.path.dirname (file_path)
def file_extension (file_name):
''' Returns the file extension (dir/filename.ext = ext)
'''
file_base_name, file_ext = os.path.splitext(file_name)
return file_ext
def has_dir_in_path(file_path):
''' Returns true if the file path contains a direcory component.
dir1/filename = True
filename = False
'''
return file_path != path_file_name(file_path)
def is_dir_and_file_exists (dir_path, file_name):
return is_file_exists (os.path.join(dir_path, file_name))
def is_dir_exists(dir_path):
''' Tests if the directory exists and is in fact a directory
'''
exists = os.path.isdir(dir_path)
return exists
def is_file_exists(file_path):
exists = os.path.exists(file_path)
return exists
def join(root_path, *sub_paths):
return os.path.join(root_path, *sub_paths)
def read_dir_file_names(dir_path, file_filter=None):
''' Returns a list of files in a directory. The file list can be filtered (ex: *.txt).
'''
if file_filter:
file_names = [os.path.basename(file_name) for file_name in glob.glob (os.path.join (dir_path, file_filter))]
else:
file_names = os.listdir(dir_path)
return file_names
def read_dir_file_paths(dir_path, file_filter=None):
''' Returns a list of the full paths of the files in a directory. The file list can be filtered (ex: *.txt).
'''
if file_filter:
dir_path = os.path.join (dir_path, file_filter)
else:
if dir_path[-1] != os.path.sep:
dir_path = dir_path + os.path.sep
return glob.glob (dir_path)
def read_dir_files(dir_path, file_filter=None):
''' Returns a list of files in a directory. The file list can be filtered (ex: *.txt).
'''
if file_filter:
files = glob.glob (os.path.join (dir_path, file_filter))
else:
files = glob.glob (dir_path)
return files
def read_latest_file(path):
""" Returns the file with the latest timestamp.
"""
files = read_dir_files(path, "*")
latest_file = max(files, key=os.path.getctime)
latest_file_name = path_file_name(latest_file)
return latest_file_name
def read_to_t | ext(path):
with open(path, 'r') as file_handle:
content_text = file_handle.read()
return content_text
def path_file_name (file_path):
''' Returns the full file name from the path (dir/filename.ext -> filename.ext)
'''
file_name = os.path.bas | ename(file_path)
return file_name
def rename (current_path, new_path):
os.renames(current_path, new_path)
|
gedhe/sidesa2.0 | input_administrasi_surat.py | Python | gpl-2.0 | 2,779 | 0.011875 | #Boa:Frame:adm_surat
import wx
import wx.lib.buttons
import frm_sideka_menu
import pembuatan_surat_keluar
import surat_masuk
def create(parent):
return adm_surat(parent)
[wxID_ADM_SURAT, wxID_ADM_SURATSTATICLINE1, wxID_ADM_SURATTOMBOL_INPUT_SURAT,
wxID_ADM_SURATTOMBOL_KEMBALI_KE_MENU, wxID_ADM_SURATTOMBOL_SURAT_KELUAR,
] = [wx.NewId() for _init_ctrls in range(5)]
class adm_surat(wx.Frame):
    """Administration window giving access to the incoming (surat masuk)
    and outgoing (surat keluar) mail screens."""

    def _make_button(self, btn_id, label, name, pos, handler):
        # All three action buttons share the same bitmap-text style and
        # size; only id, label, name, position and handler differ.
        button = wx.lib.buttons.GenBitmapTextButton(bitmap=wx.NullBitmap,
              id=btn_id, label=label, name=name, parent=self, pos=pos,
              size=wx.Size(184, 31), style=0)
        button.Bind(wx.EVT_BUTTON, handler, id=btn_id)
        return button

    def _init_ctrls(self, prnt):
        # Fixed-size, centered window shell.
        wx.Frame.__init__(self, id=wxID_ADM_SURAT, name=u'adm_surat',
              parent=prnt, pos=wx.Point(641, 356), size=wx.Size(417, 102),
              style=wx.DEFAULT_FRAME_STYLE, title=u'Administrasi Persuratan')
        self.SetClientSize(wx.Size(417, 102))
        self.Center(wx.BOTH)
        self.tombol_input_surat = self._make_button(
              wxID_ADM_SURATTOMBOL_INPUT_SURAT, u'Surat Masuk',
              u'tombol_input_surat', wx.Point(16, 16),
              self.OnTombol_input_suratButton)
        self.tombol_surat_keluar = self._make_button(
              wxID_ADM_SURATTOMBOL_SURAT_KELUAR, u'Surat Keluar',
              u'tombol_surat_keluar', wx.Point(216, 16),
              self.OnTombol_surat_keluarButton)
        self.tombol_kembali_ke_menu = self._make_button(
              wxID_ADM_SURATTOMBOL_KEMBALI_KE_MENU, u'Kembali Ke Menu Utama',
              u'tombol_kembali_ke_menu', wx.Point(128, 64),
              self.OnTombol_kembali_ke_menuButton)
        # Divider between the action buttons and the "back" button.
        self.staticLine1 = wx.StaticLine(id=wxID_ADM_SURATSTATICLINE1,
              name='staticLine1', parent=self, pos=wx.Point(8, 56),
              size=wx.Size(400, 2), style=0)

    def __init__(self, parent):
        self._init_ctrls(parent)

    def OnTombol_kembali_ke_menuButton(self, event):
        # Back to the main menu: just dispose of this frame.
        self.Close()

    def OnTombol_surat_keluarButton(self, event):
        # Switch to the outgoing-mail window.
        self.Close()
        self.main = pembuatan_surat_keluar.create(None)
        self.main.Show()

    def OnTombol_input_suratButton(self, event):
        # Switch to the incoming-mail window.
        self.Close()
        self.main = surat_masuk.create(None)
        self.main.Show()
|
foundit/Piped | piped/decorators.py | Python | mit | 447 | 0 | # Copyright (c) 2010-2011, Found IT A/S and Piped Project Contributors.
# See LICENSE for details.
""" Decorators used in the project """
def coroutine(func):
    """Decorator that primes a generator-based coroutine.

    Calling the wrapped function creates the generator and immediately
    advances it to its first ``yield`` so callers can ``send()`` values
    into it right away.
    """
    def wrapper(*args, **kw):
        gen = func(*args, **kw)
        # Use the next() builtin: gen.next() only exists on Python 2,
        # while next(gen) works on Python 2.6+ and Python 3.
        next(gen)
        return gen
    # Preserve the wrapped function's metadata for introspection.
    wrapper.__name__ = func.__name__
    wrapper.__dict__ = func.__dict__
    wrapper.__doc__ = func.__doc__
    return wrapper
|
beepaste/beepaste | beepaste/views/panels.py | Python | gpl-3.0 | 1,941 | 0.004637 | from pyramid_layout.panel import panel_config
from pyramid.view import view_config
from os.path import join
from beepaste.paths import base
from ..models.users import Users
import datetime
import json
@panel_config(name='navbar', renderer='templates/panels/navbar.jinja2')
def navbar(context, request):
    """Top navigation bar panel; the template needs no extra context."""
    return dict()
@panel_config(name='footer', renderer='templates/panels/footer.jinja2')
def footer(context, request):
    """Footer panel exposing the application version and the current year."""
    return {
        'version': request.registry.settings['beepaste.version'],
        'year': datetime.datetime.now().year,
    }
@panel_config(name='menu', renderer='templates/panels/menu.jinja2')
def menu(context, request):
    """Build the main navigation menu entries for the current request."""
    def nav_item(name, path, items=None):
        # The default used to be a mutable list ([]), which Python shares
        # across calls; use None as the sentinel instead.
        items = items or []
        # A parent entry is active when any child is active; a leaf entry
        # is active when its path matches the requested path.
        active = any(item['active'] for item in items) if items else request.path == path
        return dict(name=name, path=path, active=active, items=items)
    items = []
    # items.append(nav_item('resume', '#', [nav_item(name, request.route_path(name)) for name in ['resume_list','resume_edit']]))
    items.append(nav_item('<i class="fa fa-plus-circle" aria-hidden="true"></i> Create Paste', request.route_path('home')))
    items.append(nav_item('<i class="fa fa-cogs" aria-hidden="true"></i> API', request.route_path('api_intro')))
    items.append(nav_item('<i class="fa fa-info" aria-hidden="true"></i> About Us', request.route_path('about')))
    return {'items': items}
@panel_config(name='authors', renderer='templates/panels/authors.jinja2')
def authors(context, request):
    """Load the author list from AUTHORS.txt (a JSON document) for the panel.

    Each entry is expected to carry name/about/img/social keys consumed by
    the template.
    """
    # The former inner helper authorItems() was never called anywhere: the
    # JSON file already stores dicts in the shape the template expects, so
    # the dead code has been removed. json.load reads straight from the
    # open file handle instead of read() + loads().
    with open(join(base, 'AUTHORS.txt')) as f:
        items = json.load(f)
    return {'items': items}
|
cjk4wr/cs3240-labdemo | Test.py | Python | mit | 38 | 0 | __a | uthor__ = 'cjk4wr'
pr | int("test!")
|
metalink-dev/checker | metalinkcw.py | Python | gpl-2.0 | 11,953 | 0.011211 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
########################################################################
#
# Project: Metalink Checker
# URL: https://github.com/metalink-dev/checker
# E-mail: nabber00@gmail.com
#
# Copyright: (C) 2007-2016, Neil McNab
# License: GNU General Public License Version 2
# (http://www.gnu.org/copyleft/gpl.html)
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even th | e implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
# Description:
# GUI for checking metalink files.
#
########################################################################
import Tkinter
import tkFileDialog
import tkMessageBox
import os
import gettext
import locale
import time
import webbrowser
import threading
import sys
import metalinkc
def translate():
    '''
    Set up the gettext translation for this program and return its
    ugettext function.

    Side effect: sets the module-global PATH to the directory the program
    runs from (script directory, frozen-exe directory, or package path).
    '''
    global PATH
    if __name__=="__main__":
        try:
            # Running as a plain script: strip the ".py" suffix (3 chars)
            # to get the gettext domain name.
            base = os.path.basename(__file__)[:-3]
            PATH = os.path.dirname(__file__)
            localedir = os.path.join(PATH, "locale")
        except NameError:
            # Frozen executable (e.g. py2exe): __file__ is undefined, so
            # fall back to sys.executable and strip ".exe" (4 chars).
            base = os.path.basename(sys.executable)[:-4]
            PATH = os.path.dirname(sys.executable)
            localedir = os.path.join(PATH, "locale")
    else:
        # Imported as a module: derive domain and locale path from the
        # dotted module name.
        temp = __name__.split(".")
        base = temp[-1]
        PATH = "/".join(["%s" % k for k in temp[:-1]])
        localedir = os.path.join(PATH, "locale")
    #print base, localedir
    # Fall back to English ('en') when the user's locale has no catalog.
    t = gettext.translation(base, localedir, [locale.getdefaultlocale()[0]], None, 'en')
    return t.ugettext
_ = translate()
class Table(Tkinter.Frame):
    """Scrollable grid of labels used to display per-file check results.

    A Canvas hosts an inner Frame (self.container) holding one Label per
    cell; auto-hiding scrollbars appear only when the content overflows.
    """
    def __init__(self, *args):
        Tkinter.Frame.__init__(self, *args)
        Tkinter.Frame.grid_rowconfigure(self, 0, weight=1)
        Tkinter.Frame.grid_columnconfigure(self, 0, weight=1)
        self.vscrollbar = AutoScrollbar(self, orient=Tkinter.VERTICAL)
        self.vscrollbar.grid(row=0, column=1, sticky="NS")
        self.hscrollbar = AutoScrollbar(self, orient=Tkinter.HORIZONTAL)
        self.hscrollbar.grid(row=1, column=0, sticky="WE")
        # Canvas is the scrollable viewport; the container frame is placed
        # inside it via create_window below.
        self.canvas = Tkinter.Canvas(self, yscrollcommand=self.vscrollbar.set, xscrollcommand=self.hscrollbar.set)
        self.canvas.grid(row=0, column=0, sticky="NEWS")
        self.vscrollbar.config(command=self.canvas.yview)
        self.hscrollbar.config(command=self.canvas.xview)
        self.container = Tkinter.Frame(self.canvas)
        self.container.grid(sticky="NEWS")
        self.update(0)
        self.canvas.create_window(0,0,window=self.container, anchor=Tkinter.NW)
        # All Labels created by data(); kept so clear() can blank them.
        self.subelements = []
    def update(self, height=1000, width=1000):
        # Resize the scrollable region to the given content extent.
        self.canvas["scrollregion"] = (0, 0, width, height)
    def data(self, datalist):
        """Render datalist (rows of cell strings) into the grid.

        Columns 3-5 of non-header rows are status cells and get colored:
        "OK" -> green, "3xx" codes -> yellow, "?..." -> uncolored, anything
        else -> red.
        """
        self.clear()
        row = 1
        for datarow in datalist:
            column = 1
            for datacolumn in datarow:
                color = ""
                # Row 1 is the header; only color the status columns below it.
                if column in (3, 4, 5) and row != 1:
                    if datacolumn == "OK":
                        color = "green"
                    elif datacolumn[0] == "3":
                        color = "yellow"
                    elif datacolumn[0] == "?":
                        pass
                    else:
                        color = "red"
                if color != "":
                    label = Tkinter.Label(self.container, text=datacolumn, bg=color)
                else:
                    label = Tkinter.Label(self.container, text=datacolumn)
                label.config(anchor="w")
                label.grid(column=column, row=row, sticky="NEWS")
                self.subelements.append(label)
                column += 1
            row += 1
        # Rough height estimate: 5px per created label.
        self.update(len(self.subelements) * 5)
    def clear(self):
        """Blank all cells and collapse the scroll region."""
        self.update(0)
        #self.container = Tkinter.Frame(self.canvas)
        #self.container.grid(sticky="NEWS")
        for element in self.subelements:
            #element.grid_remove()
            # Labels are blanked rather than destroyed (see commented-out
            # alternatives above).
            element.config(text="")
            #del(element)
        self.subelements = []
class AutoScrollbar(Tkinter.Scrollbar):
    """A scrollbar that hides itself when the whole view already fits.

    Only works with the grid geometry manager; pack and place are rejected.
    """
    def set(self, lo, hi):
        # When the visible fraction covers [0, 1] the bar is useless:
        # remove it from the grid (grid_remove keeps its grid options so
        # self.grid() can restore it later).
        if float(lo) <= 0.0 and float(hi) >= 1.0:
            # grid_remove is currently missing from Tkinter!
            self.tk.call("grid", "remove", self)
        else:
            self.grid()
        Tkinter.Scrollbar.set(self, lo, hi)
    def pack(self, **kw):
        # Parenthesized raise form: the old "raise E, msg" statement is a
        # SyntaxError on Python 3, while this works on Python 2 as well.
        raise Tkinter.TclError("cannot use pack with this widget")
    def place(self, **kw):
        raise Tkinter.TclError("cannot use place with this widget")
class Application:
def __init__(self, master):
self.master = master
self.checker = metalinkc.Checker()
self.createWidgets()
self.quit = self.exit
self.destroy = self.exit
def createWidgets(self):
# create menu
menu = Tkinter.Menu(self.master)
self.master.config(menu=menu)
self.master.grid_columnconfigure(0,weight=1)
self.master.grid_rowconfigure(0,weight=1)
Filemenu = Tkinter.Menu(menu)
menu.add_cascade(label=_("File"), menu=Filemenu, underline=0)
Filemenu.add_command(label=_("Open") + "...", underline=0, command=self.open)
Filemenu.add_separator()
Filemenu.add_command(label=_("Exit"), underline=1, command=self.exit)
Helpmenu = Tkinter.Menu(menu)
menu.add_cascade(label=_("Help"), menu=Helpmenu, underline=0)
Helpmenu.add_command(label=_("About") + "...", underline=0, command=self.about)
Helpmenu.add_command(label=_("Website") + "...", underline=0, command=self.website)
# main frame
self.main_frame = Tkinter.Frame(self.master)
self.main_frame.grid(sticky="NEWS")
self.main_frame.grid_rowconfigure(1,weight=1)
self.main_frame.grid_columnconfigure(0,weight=1)
longtext = 100
row_index = 0
self.control_frame = Tkinter.Frame(self.main_frame)
self.control_frame.grid(row=row_index, column=0, sticky="NW")
Tkinter.Label(self.control_frame, text=_("File") + ":").grid(row=row_index)
self.filename_txt = Tkinter.Entry(self.control_frame, width=longtext)
self.filename_txt.grid(row=row_index, column=1)
self.filename_txt.bind("<Return>", ThreadCallback(self.do_check))
Tkinter.Button(self.control_frame, text=_("Browse") + "...", command=self.open).grid(row=row_index, column=2)
self.check_button = Tkinter.Button(self.control_frame, text=_("Check"), command=ThreadCallback(self.do_check))
self.check_button.grid(row=row_index, column=3)
self.cancel_button = Tkinter.Button(self.control_frame, text=_("Cancel"), command=ThreadCallback(self.do_cancel))
self.cancel_button.grid(row=row_index, column=4)
self.cancel_button.configure(state="disabled")
#tooltip = _("File to open and check.")
#ToolTip(self.filename_txt, text=tooltip)
row_index += 1
self.tableframe = Table(self.main_frame)
self.tableframe.grid(column=0, row=row_index, columnspan=5, sticky="NEWS")
def do_check(self):
#self.tableframe.clear()
self.update()
self.check_button.configure(state="disabled")
self.cancel_button.configure(state="active")
# should start new thread here
if self.filename_txt.get() != "":
#self.checker.ch |
upsuper/onenet-firefox | tools/extract_pac.py | Python | mit | 796 | 0.005025 | #!/usr/bin/env python3
import base64
import re
import sys
# One "var pac_<name> = '<base64>';" assignment per PAC entry in popup.js.
_pac_line = re.compile(r"^var pac_(\S+) = '([a-zA-Z0-9+/]+=*)';$")


def parse_line(line):
    """Return (name, decoded_bytes) for a PAC assignment line, else None."""
    match = _pac_line.match(line)
    if match is None:
        return None
    name, payload = match.groups()
    return (name, base64.b64decode(payload))
def extract_pac_files(source):
    """Scan *source* line by line and write each embedded PAC file to disk."""
    for line in source:
        parsed = parse_line(line)
        if parsed is None:
            # Not a PAC assignment line; skip it.
            continue
        name, data = parsed
        print("extract {}, length {} bytes".format(name, len(data)))
        with open('data/pac/' + name + '.pac', 'wb') as pac:
            pac.write(data)
def main():
    """Entry point: extract PAC files from the popup.js path given on argv.

    Exits with status 1 when no usable path argument is supplied.
    """
    # The old check `if not sys.argv[1]` raised IndexError when no argument
    # was supplied at all; test the argv length first.
    if len(sys.argv) < 2 or not sys.argv[1]:
        print('Expect popup.js')
        exit(1)
    with open(sys.argv[1], 'r') as f:
        extract_pac_files(f)
if __name__ == '__main__':
    main()
|
SanPen/GridCal | src/GridCal/Engine/Devices/load.py | Python | lgpl-3.0 | 9,713 | 0.00278 | # GridCal
# Copyright (C) 2022 Santiago Peñate Vera
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 3 of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
import pandas as pd
from matplotlib import pyplot as plt
from GridCal.Engine.Devices.editable_device import EditableDevice, DeviceType, GCProp
class Load(EditableDevice):
"""
The load object implements the so-called ZIP model, in which the load can be
represented by a combination of power (P), current(I), and impedance (Z).
The sign convention is: Positive to act as a load, negative to act as a generator.
Arguments:
**name** (str, "Load"): Name of the load
**G** (float, 0.0): Conductance in equivalent MW
**B** (float, 0.0): Susceptance in equivalent MVAr
**Ir** (float, 0.0): Real current in equivalent MW
**Ii** (float, 0.0): Imaginary current in equivalent MVAr
**P** (float, 0.0): Active power in MW
**Q** (float, 0.0): Reactive power in MVAr
**G_prof** (DataFrame, None): Pandas DataFrame with the conductance profile in equivalent MW
**B_prof** (DataFrame, None): Pandas DataFrame with the susceptance profile in equivalent MVAr
**Ir_prof** (DataFrame, None): Pandas DataFrame with the real current profile in equivalent MW
**Ii_prof** (DataFrame, None): Pandas DataFrame with the imaginary current profile in equivalent MVAr
**P_prof** (DataFrame, None): Pandas DataFrame with the active power profile in equivalent MW
**Q_prof** (DataFrame, None): Pandas DataFrame with the reactive power profile in equivalent MVAr
**active** (bool, True): Is the load active?
**mttf** (float, 0.0): Mean time to failure in hours
**mttr** (float, 0.0): Mean time to recovery in hours
"""
def __init__(self, name='Load', idtag=None, code='', G=0.0, B=0.0, Ir=0.0, Ii=0.0, P=0.0, Q=0.0, cost=1200.0,
G_prof=None, B_prof=None, Ir_prof=None, Ii_prof=None, P_prof=None, Q_prof=None,
active=True, mttf=0.0, mttr=0.0):
EditableDevice.__init__(self,
name=name,
idtag=idtag,
code=code,
active=active,
device_type=DeviceType.LoadDevice,
editable_headers={'name': GCProp('', str, 'Load name'),
'idtag': GCProp('', str, 'Unique ID', False),
'code': GCProp('', str, 'Secondary ID', True),
'bus': GCProp('', DeviceType.BusDevice, 'Connection bus name'),
'active': GCProp('', bool, 'Is the load active?'),
'P': GCProp('MW', float, 'Active power'),
'Q': GCProp('MVAr', float, 'Reactive power'),
'Ir': GCProp('MW', float,
'Active power of the current component at V=1.0 p.u.'),
'Ii': GCProp('MVAr', float,
'Reactive power of the current component at V=1.0 p.u.'),
'G': GCProp('MW', float,
'Active power of the impedance component at V=1.0 p.u.'),
'B': GCProp('MVAr', float,
'Reactive power of the impedance component at V=1.0 p.u.'),
'mttf': GCProp('h', float, 'Mean time to failure'),
'mttr': GCProp('h', float, 'Mean time to recovery'),
'Cost': GCProp('e/MWh', float,
'Cost of not served energy. Used in OPF.')},
non_editable_attributes=['bus', 'idtag'],
properties_with_profile={'active': 'active_prof',
'P': 'P_prof',
'Q': 'Q_prof',
'Ir': 'Ir_prof',
'Ii': 'Ii_prof',
'G': 'G_prof',
'B': 'B_prof',
'Cost': 'Cost_prof'})
self.bus = None
self.active_prof = None
self.mttf = mttf
self.mttr = mttr
self.Cost = cost
self.Cost_prof = None
# Impedance in equivalent MVA
self.G = G
self.B = B
self.Ir = Ir
self.Ii = Ii
self.P = P
self.Q = Q
self.G_prof = G_prof
self.B_prof = B_prof
self.Ir_prof = Ir_prof
self.Ii_prof = Ii_prof
self.P_prof = P_prof
self.Q_prof = Q_prof
    def copy(self):
        """Return a new Load replicating this load's values and profiles.

        NOTE(review): bus, idtag, Cost and Cost_prof are not copied here —
        the clone keeps the constructor defaults for those. Confirm this
        is intentional.
        """
        load = Load()
        load.name = self.name
        load.active = self.active
        load.active_prof = self.active_prof
        # Impedance (MVA)
        load.G = self.G
        load.B = self.B
        # Current (MVA)
        load.Ir = self.Ir
        load.Ii = self.Ii
        # Power (MVA)
        load.P = self.P
        load.Q = self.Q
        # Impedance (MVA)
        load.G_prof = self.G_prof
        load.B_prof = self.B_prof
        # Current (MVA)
        load.Ir_prof = self.Ir_prof
        load.Ii_prof = self.Ii_prof
        # Power (MVA)
        load.P_prof = self.P_prof
        load.Q_prof = self.Q_prof
        load.mttf = self.mttf
        load.mttr = self.mttr
        return load
def get_properties_dict(self, version=3):
"""
Get json dictionary
:return:
"""
if version in [2, 3]:
ret | urn {'id': self.idtag,
'type': 'load',
'phases': 'ps',
'name': self.name,
'name_code': self.code,
'bus': self.bus.idtag,
'active': bool(self.active),
| 'g': self.G,
'b': self.B,
'ir': self.Ir,
'ii': self.Ii,
'p': self.P,
'q': self.Q
}
else:
return dict()
def get_profiles_dict(self, version=3):
"""
:return:
"""
if self.active_prof is not None:
active_profile = self.active_prof.tolist()
P_prof = self.P_prof.tolist()
Q_prof = self.Q_prof.tolist()
Ir_prof = self.Ir_prof.tolist()
Ii_prof = self.Ii_prof.tolist()
G_prof = self.G_prof.tolist()
B_prof = self.B_prof.tolist()
else:
active_profile = list()
P_prof = list()
Q_prof = list()
Ir_prof = list()
Ii_prof = list()
G_prof = list()
B_prof = list()
return {'id': self.idtag,
'activ |
iemejia/incubator-beam | sdks/python/apache_beam/transforms/util_test.py | Python | apache-2.0 | 42,823 | 0.00822 | #
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Unit tests for the transform.util classes."""
# pytype: skip-file
from __future__ import absolute_import
from __future__ import division
import itertools
import logging
import math
import random
import re
import time
import unittest
import warnings
from builtins import object
from builtins import range
# patches unittest.TestCase to be python3 compatible
import future.tests.base # pylint: disable=unused-import
from nose.plugins.attrib import attr
import apache_beam as beam
from apache_beam import WindowInto
from apache_beam.coders import coders
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import StandardOptions
from apache_beam.testing.test_pipeline import TestPipeline
from apache_beam.testing.test_stream import TestStream
from apache_beam.testing.util import TestWindowedValue
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import contains_in_any_order
from apache_beam.testing.util import equal_to
from apache_beam.transforms import util
from apache_beam.transforms import window
from apache_beam.transforms.window import FixedWindows
from apache_beam.transforms.window import GlobalWindow
from apache_beam.transforms.window import GlobalWindows
from apache_beam.transforms.window import IntervalWindow
from apache_beam.transforms.window import Sessions
from apache_beam.transforms.window import SlidingWindows
from apache_beam.transforms.window import TimestampedValue
from apache_beam.utils import timestamp
from apache_beam.utils.timestamp import MAX_TIMESTAMP
from apache_beam.utils.timestamp import MIN_TIMESTAMP
from apache_beam.utils.windowed_value import WindowedValue
warnings.filterwarnings(
'ignore', category=FutureWarning, module='apache_beam.transform.util_test')
class FakeClock(object):
  """Manually advanced clock for tests: calling the instance returns the
  current fake time; sleep() advances it without actually waiting."""
  def __init__(self):
    # Seed from the real clock so timestamps look realistic.
    self._now = time.time()

  def __call__(self):
    return self._now

  def sleep(self, duration):
    self._now += duration
class BatchElementsTest(unittest.TestCase):
def test_constant_batch(self):
# Assumes a single bundle...
with TestPipeline() as p:
res = (
p
| beam.Create(range(35))
| util.BatchElements(min_batch_size=10, max_batch_size=10)
| beam.Map(len))
assert_that(res, equal_to([10, 10, 10, 5]))
def test_grows_to_max_batch(self):
# Assumes a single bundle...
with TestPipeline() as p:
res = (
p
| beam.Create(range(164))
| util.BatchElements(
min_batch_size=1, max_batch_size=50, clock=FakeClock())
| beam.Map(len))
assert_that(res, equal_to([1, 1, 2, 4, 8, 16, 32, 50, 50]))
def test_windowed_batches(self):
# Assumes a single bundle, in order...
with TestPipeline() as p:
res = (
p
| beam.Create(range(47), reshuffle=False)
| beam.Map(lambda t: window.TimestampedValue(t, t))
| beam.WindowInto(window.FixedWindows(30))
| util.BatchElements(
min_batch_size=5, max_batch_size=10, clock=FakeClock())
| beam.Map(len))
assert_that(res, equal_to([
5, 5, 10, 10, # elements in [0, 30)
10, 7, # elements in [30, 47)
]))
def test_target_duration(self):
clock = FakeClock()
batch_estimator = util._BatchSizeEstimator(
target_batch_overhead=None, target_batch_duration_secs=10, clock=clock)
batch_duration = lambda batch_size: 1 + .7 * batch_size
# 1 + 12 * .7 is as close as we can get to 10 as possible.
expected_sizes = [1, 2, 4, 8, 12, 12, 12]
actual_sizes = []
for _ in range(len(expected_sizes)):
actual_sizes.append(batc | h_estimator.next_batch_size())
with batch_estimator.record_time(actual_sizes[-1]):
| clock.sleep(batch_duration(actual_sizes[-1]))
self.assertEqual(expected_sizes, actual_sizes)
def test_target_overhead(self):
clock = FakeClock()
batch_estimator = util._BatchSizeEstimator(
target_batch_overhead=.05, target_batch_duration_secs=None, clock=clock)
batch_duration = lambda batch_size: 1 + .7 * batch_size
# At 27 items, a batch takes ~20 seconds with 5% (~1 second) overhead.
expected_sizes = [1, 2, 4, 8, 16, 27, 27, 27]
actual_sizes = []
for _ in range(len(expected_sizes)):
actual_sizes.append(batch_estimator.next_batch_size())
with batch_estimator.record_time(actual_sizes[-1]):
clock.sleep(batch_duration(actual_sizes[-1]))
self.assertEqual(expected_sizes, actual_sizes)
def test_variance(self):
clock = FakeClock()
variance = 0.25
batch_estimator = util._BatchSizeEstimator(
target_batch_overhead=.05,
target_batch_duration_secs=None,
variance=variance,
clock=clock)
batch_duration = lambda batch_size: 1 + .7 * batch_size
expected_target = 27
actual_sizes = []
for _ in range(util._BatchSizeEstimator._MAX_DATA_POINTS - 1):
actual_sizes.append(batch_estimator.next_batch_size())
with batch_estimator.record_time(actual_sizes[-1]):
clock.sleep(batch_duration(actual_sizes[-1]))
# Check that we're testing a good range of values.
stable_set = set(actual_sizes[-20:])
self.assertGreater(len(stable_set), 3)
self.assertGreater(
min(stable_set), expected_target - expected_target * variance)
self.assertLess(
max(stable_set), expected_target + expected_target * variance)
def test_ignore_first_n_batch_size(self):
clock = FakeClock()
batch_estimator = util._BatchSizeEstimator(
clock=clock, ignore_first_n_seen_per_batch_size=2)
expected_sizes = [
1, 1, 1, 2, 2, 2, 4, 4, 4, 8, 8, 8, 16, 16, 16, 32, 32, 32, 64, 64, 64
]
actual_sizes = []
for i in range(len(expected_sizes)):
actual_sizes.append(batch_estimator.next_batch_size())
with batch_estimator.record_time(actual_sizes[-1]):
if i % 3 == 2:
clock.sleep(0.01)
else:
clock.sleep(1)
self.assertEqual(expected_sizes, actual_sizes)
# Check we only record the third timing.
expected_data_batch_sizes = [1, 2, 4, 8, 16, 32, 64]
actual_data_batch_sizes = [x[0] for x in batch_estimator._data]
self.assertEqual(expected_data_batch_sizes, actual_data_batch_sizes)
expected_data_timing = [0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01]
for i in range(len(expected_data_timing)):
self.assertAlmostEqual(
expected_data_timing[i], batch_estimator._data[i][1])
def test_ignore_next_timing(self):
clock = FakeClock()
batch_estimator = util._BatchSizeEstimator(clock=clock)
batch_estimator.ignore_next_timing()
expected_sizes = [1, 1, 2, 4, 8, 16]
actual_sizes = []
for i in range(len(expected_sizes)):
actual_sizes.append(batch_estimator.next_batch_size())
with batch_estimator.record_time(actual_sizes[-1]):
if i == 0:
clock.sleep(1)
else:
clock.sleep(0.01)
self.assertEqual(expected_sizes, actual_sizes)
# Check the first record_time was skipped.
expected_data_batch_sizes = [1, 2, 4, 8, 16]
actual_data_batch_sizes = [x[0] for x in batch_estimator._data]
self.assertEqual(expected_data_batch_sizes, actual_data_batch_sizes)
expected_data_timing = [0.01, 0.01, 0.01, 0.01, 0.01]
for i in range(len(expected_data_t |
rusch95/calypso | src/common/metro.py | Python | bsd-3-clause | 2,521 | 0.003173 | #####################################################################
#
# metro.py
#
# Copyright (c) 2016, Eran Egozy
#
# Released under the MIT License (http://opensource.org/licenses/MIT)
#
#####################################################################
from clock import kTicksPerQuarter, quantize_tick_up
class Metronome(object):
    """Plays a steady click on every beat through the given synth."""

    def __init__(self, sched, synth, channel=0, patch=(128, 0), pitch=60):
        super(Metronome, self).__init__()
        self.sched = sched
        self.synth = synth
        self.channel = channel
        self.patch = patch
        self.pitch = pitch
        self.beat_len = kTicksPerQuarter
        # Pending scheduler commands for the next note-on / note-off.
        self.on_cmd = None
        self.off_cmd = None
        self.playing = False

    def start(self):
        """Begin clicking on the next beat boundary (no-op if running)."""
        if self.playing:
            return
        self.playing = True
        # Select the metronome sound (program change).
        self.synth.program(self.channel, self.patch[0], self.patch[1])
        # Schedule the first click on the next beat-aligned tick.
        now = self.sched.get_tick()
        next_beat = quantize_tick_up(now, self.beat_len)
        self.on_cmd = self.sched.post_at_tick(next_beat, self._noteon)

    def stop(self):
        """Silence any sounding note and cancel all pending commands."""
        if not self.playing:
            return
        self.playing = False
        # If a note is currently sounding, turn it off immediately.
        if self.off_cmd:
            self.off_cmd.execute()
        # Cancel anything still pending in the future.
        self.sched.remove(self.on_cmd)
        self.sched.remove(self.off_cmd)
        # Drop references so we don't hold on to stale commands.
        self.on_cmd = None
        self.off_cmd = None

    def toggle(self):
        """Flip between playing and stopped."""
        (self.stop if self.playing else self.start)()

    def _noteon(self, tick, ignore):
        # Sound the click now.
        self.synth.noteon(self.channel, self.pitch, 100)
        # Note-off half a beat later; next click a full beat later.
        self.off_cmd = self.sched.post_at_tick(tick + self.beat_len/2, self._noteoff, self.pitch)
        self.on_cmd = self.sched.post_at_tick(tick + self.beat_len, self._noteon)

    def _noteoff(self, tick, pitch):
        # Just silence the currently sounding pitch.
        self.synth.noteoff(self.channel, pitch)
|
zamattiac/osf.io | website/addons/base/views.py | Python | apache-2.0 | 25,393 | 0.001536 | import datetime
import httplib
import os
import uuid
import markupsafe
from flask import make_response
from flask import redirect
from flask import request
import furl
import jwe
import jwt
from modularodm import Q
from modularodm.exceptions import NoResultsFound
from framework import sentry
from framework.auth import Auth
from framework.auth import cas
from framework.auth import oauth_scopes
from framework.auth.decorators import collect_auth, must_be_logged_in, must_be_signed
from framework.exceptions import HTTPError
from framework.routing import json_renderer
from framework.sentry import log_exception
from framework.transactions.context import TokuTransaction
from framework.transactions.handlers import no_auto_transaction
from website import mails
from website import settings
from website.addons.base import StorageAddonBase
from website.addons.base import exceptions
from website.addons.base import signals as file_signals
from website.files.models import FileNode, StoredFileNode, TrashedFileNode
from website.models import Node, NodeLog, User
from website.profile.utils import get_gravatar
from website.project import decorators
from website.project.decorators import must_be_contributor_or_public, must_be_valid_project
from website.project.model import DraftRegistration, MetaSchema
from website.project.utils import serialize_node
from website.util import rubeus
# import so that associated listener is instantiated and gets emails
from website.notifications.events.files import FileEvent # noqa
ERROR_MESSAGES = {'FILE_GONE': u'''
<style>
#toggleBar{{display: none;}}
</style>
<div class="alert alert-info" role="alert">
<p>
The file "{file_name}" stored on {provider} was deleted via the OSF.
</p>
<p>
It was deleted by <a href="/{deleted_by_guid}">{deleted_by}</a> on {deleted_on}.
</p>
</div>''',
'FILE_GONE_ACTOR_UNKNOWN': u'''
<style>
#toggleBar{{display: none;}}
</style>
<div class="alert alert-info" role="alert">
<p>
The file "{file_name}" stored on {provider} was deleted via the OSF.
</p>
<p>
It was deleted on {deleted_on}.
</p>
</div>''',
'DONT_KNOW': u'''
<style>
#toggleBar{{display: none;}}
</style>
<div class="alert alert-info" role="alert">
<p>
File not found at {provider}.
</p>
</div>''',
'BLAME_PROVIDER': u'''
<style>
#toggleBar{{display: none;}}
</style>
<div class="alert alert-info" role="alert">
<p>
This {provider} link to the file "{file_name}" is currently unresponsive.
The provider ({provider}) may currently be unavailable or "{file_name}" may have been removed from {provider} through another interface.
</p>
<p>
You may wish to verify this through {provider}'s website.
</p>
</div>''',
'FILE_SUSPENDED': u'''
<style>
#toggleBar{{display: none;}}
</style>
<div class="alert alert-info" role="alert">
This content has been removed.
</div>'''}
WATERBUTLER_JWE_KEY = jwe.kdf(settings.WATERBUTLER_JWE_SECRET.encode('utf-8'), settings.WATERBUTLER_JWE_SALT.encode('utf-8'))
@decorators.must_have_permission('write')
@decorators.must_not_be_registration
def disable_addon(auth, **kwargs):
    """Remove the named addon from the target node.

    Returns {'deleted': bool}; raises 400 when no addon name is given.
    """
    node = kwargs['node'] or kwargs['project']
    addon_name = kwargs.get('addon')
    if addon_name is None:
        raise HTTPError(httplib.BAD_REQUEST)
    return {'deleted': node.delete_addon(addon_name, auth)}
@must_be_logged_in
def get_addon_user_config(**kwargs):
    """Serialize the logged-in user's settings for the requested addon.

    Raises 400 when the addon name is missing or the user has no such addon.
    """
    user = kwargs['auth'].user
    addon_name = kwargs.get('addon')
    if addon_name is None:
        raise HTTPError(httplib.BAD_REQUEST)
    user_addon = user.get_addon(addon_name)
    if user_addon is None:
        raise HTTPError(httplib.BAD_REQUEST)
    return user_addon.to_json(user)
permission_map = {
'create_folder': 'write',
'revisions': 'read',
'metadata': 'read',
'download': 'read',
'upload': 'write',
'delete': 'write',
'copy': 'write',
'move': 'write',
'copyto': 'write',
'moveto': 'write',
'copyfrom': 'read',
'movefrom': 'write',
}
def check_access(node, auth, action, cas_resp):
    """Verify that user can perform requested action on resource. Raise appropriate
    error code if action cannot proceed.

    :param node: Node the file action targets
    :param auth: Auth object for the requesting user (user may be None)
    :param action: waterbutler action name, mapped to 'read'/'write' via permission_map
    :param cas_resp: parsed CAS OAuth profile response, or falsy for cookie auth
    :returns: True when access is granted
    :raises HTTPError: 400 for unknown actions; 403/401 when access is denied
    """
    permission = permission_map.get(action, None)
    if permission is None:
        raise HTTPError(httplib.BAD_REQUEST)
    # OAuth bearer-token requests must additionally carry the right scope.
    if cas_resp:
        if permission == 'read':
            if node.is_public:
                return True
            required_scope = oauth_scopes.CoreScopes.NODE_FILE_READ
        else:
            required_scope = oauth_scopes.CoreScopes.NODE_FILE_WRITE
        if not cas_resp.authenticated \
                or required_scope not in oauth_scopes.normalize_scopes(cas_resp.attributes['accessTokenScope']):
            raise HTTPError(httplib.FORBIDDEN)
    if permission == 'read' and node.can_view(auth):
        return True
    if permission == 'write' and node.can_edit(auth):
        return True
    # Users attempting to register projects with components might not have
    # `write` permissions for all components. This will result in a 403 for
    # all `copyto` actions as well as `copyfrom` actions if the component
    # in question is not public. To get around this, we have to recursively
    # check the node's parent node to determine if they have `write`
    # permissions up the stack.
    # TODO(hrybacki): is there a way to tell if this is for a registration?
    # All nodes being registered that receive the `copyto` action will have
    # `node.is_registration` == True. However, we have no way of telling if
    # `copyfrom` actions are originating from a node being registered.
    # TODO This is raise UNAUTHORIZED for registrations that have not been archived yet
    if action == 'copyfrom' or (action == 'copyto' and node.is_registration):
        parent = node.parent_node
        while parent:
            if parent.can_edit(auth):
                return True
            parent = parent.parent_node
    # Users with the PREREG_ADMIN_TAG should be allowed to download files
    # from prereg challenge draft registrations.
    try:
        prereg_schema = MetaSchema.find_one(
            Q('name', 'eq', 'Prereg Challenge') &
            Q('schema_version', 'eq', 2)
        )
        allowed_nodes = [node] + node.parents
        prereg_draft_registration = DraftRegistration.find(
            Q('branched_from', 'in', [n._id for n in allowed_nodes]) &
            Q('registration_schema', 'eq', prereg_schema)
        )
        if action == 'download' and \
           auth.user is not None and \
           prereg_draft_registration.count() > 0 and \
           settings.PREREG_ADMIN_TAG in auth.user.system_tags:
            return True
    except NoResultsFound:
        pass
    # Nothing matched: 403 for a known user, 401 to prompt anonymous login.
    raise HTTPError(httplib.FORBIDDEN if auth.user else httplib.UNAUTHORIZED)
def make_auth(user):
    """Build the waterbutler auth payload for *user* ({} when anonymous)."""
    if user is None:
        return {}
    return {
        'id': user._id,
        'email': '{}@osf.io'.format(user._id),
        'name': user.fullname,
    }
@collect_auth
def get_auth(auth, **kwargs):
cas_resp = None
    if not auth.user:
# Central Authentication Server OAuth Bearer Token
authorization = request.headers.get('Authorization')
if authorization and authorization.startswith('Bearer '):
client = cas.get_client()
try:
access_token = cas.parse_auth_header(authorization)
cas_resp = client.profile(access_token)
except cas.CasError as err:
sentry.log_exception()
# NOTE: We assume that the request is an AJAX request
return json_renderer(err)
if cas_resp.authenticated:
auth.user = User.load(cas_resp.user)
try:
data = jwt.decode(
jwe.decrypt(request.args.get('payload', '').encode('utf-8'), WATERBUTLER_JWE_KEY),
settings.WATERBUTLER_JWT_SECRET,
options={'require_exp': True},
algorithm=settings.WATERBUTLER_JWT_ALGORITHM
)['data']
except (jwt.InvalidTokenError, KeyError):
raise HTTPError(httplib.FORBIDDEN)
if not auth.user |
Zlash65/erpnext | erpnext/accounts/doctype/exchange_rate_revaluation_account/exchange_rate_revaluation_account.py | Python | gpl-3.0 | 296 | 0.006757 | # -*- coding: utf-8 -*-
# Copyright (c) 2018, Frappe Technologies Pvt. Ltd. and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe
from frappe.model.document import Document
# DocType controller for "Exchange Rate Revaluation Account"; no behavior
# beyond the base frappe Document.
class ExchangeRateRevaluationAccount(Document):
	pass
|
LEAF-BoiseState/SPEED | Other/download_SNOTEL.py | Python | mit | 805 | 0.014907 | # -*- coding: utf-8 -*-
"""
Created on Tue Dec 8 17:10:51 2015
@author: kawatson
Purpose: Download data from select SNOTEL sites for a given time period
and save data to text files
Required libraries: Climata (available via PyPI: "pip install climata")
"""
# Import Climata
from climata.snotel import RegionDailyDataIO
# Download daily snow water equivalent (WTEQ) readings for all SNOTEL
# sites in Boise County, ID, covering water year 2013.
sites = RegionDailyDataIO(
    start_date="2012-10-01",
    end_date="2013-09-30",
    state="ID",
    county="Boise",
    parameter="WTEQ",
)
# Write one CSV-style text file per site, named after the station number
# (first field of the station "triplet" identifier).
for site in sites:
    filename = site.stationtriplet.split(':')[0] + ".txt"
    print("Writing data to file for: " + site.name)
    # "with" guarantees the file is closed even if a write fails
    # (the original leaked the handle on error and used a py2-only print).
    with open(filename, "w") as f:
        for row in site.data:
            f.write(str(row.date) + "," + str(row.value) + "\n")
lukas-hetzenecker/home-assistant | homeassistant/components/sense/const.py | Python | apache-2.0 | 2,110 | 0 | """Constants for monitoring a Sense energy sensor."""
import asyncio
from sense_energy import SenseAPITimeoutException
# Integration domain and polling configuration.
DOMAIN = "sense"
DEFAULT_TIMEOUT = 10
ACTIVE_UPDATE_RATE = 60
DEFAULT_NAME = "Sense"

# Keys used to stash shared objects in hass.data.
SENSE_DATA = "sense_data"
SENSE_DEVICE_UPDATE = "sense_devices_update"
SENSE_DEVICES_DATA = "sense_devices_data"
SENSE_DISCOVERED_DEVICES_DATA = "sense_discovered_devices"
SENSE_TRENDS_COORDINATOR = "sense_trends_coordinator"

ACTIVE_NAME = "Energy"
ACTIVE_TYPE = "active"

ATTRIBUTION = "Data provided by Sense.com"

# Sensor display names and their ids.
CONSUMPTION_NAME = "Usage"
CONSUMPTION_ID = "usage"
PRODUCTION_NAME = "Production"
PRODUCTION_ID = "production"
PRODUCTION_PCT_NAME = "Net Production Percentage"
PRODUCTION_PCT_ID = "production_pct"
NET_PRODUCTION_NAME = "Net Production"
NET_PRODUCTION_ID = "net_production"
TO_GRID_NAME = "To Grid"
TO_GRID_ID = "to_grid"
FROM_GRID_NAME = "From Grid"
FROM_GRID_ID = "from_grid"
SOLAR_POWERED_NAME = "Solar Powered Percentage"
SOLAR_POWERED_ID = "solar_powered"

ICON = "mdi:flash"
# Exceptions treated as transient timeouts when talking to the Sense API.
SENSE_TIMEOUT_EXCEPTIONS = (asyncio.TimeoutError, SenseAPITimeoutException)
# Map Sense device icon names to Material Design Icon names (without the
# "mdi:" prefix).
MDI_ICONS = {
    "ac": "air-conditioner",
    "aquarium": "fish",
    "car": "car-electric",
    "computer": "desktop-classic",
    "cup": "coffee",
    "dehumidifier": "water-off",
    "dishes": "dishwasher",
    "drill": "toolbox",
    "fan": "fan",
    "freezer": "fridge-top",
    "fridge": "fridge-bottom",
    "game": "gamepad-variant",
    "garage": "garage",
    "grill": "stove",
    "heat": "fire",
    # fixed typo: "radiatior" is not a valid MDI icon name
    "heater": "radiator",
    "humidifier": "water",
    "kettle": "kettle",
    "leafblower": "leaf",
    "lightbulb": "lightbulb",
    "media_console": "set-top-box",
    "modem": "router-wireless",
    "outlet": "power-socket-us",
    "papershredder": "shredder",
    "printer": "printer",
    "pump": "water-pump",
    "settings": "cog",
    "skillet": "pot",
    "smartcamera": "webcam",
    "socket": "power-plug",
    "solar_alt": "solar-power",
    "sound": "speaker",
    "stove": "stove",
    "trash": "trash-can",
    "tv": "television",
    "vacuum": "robot-vacuum",
    "washer": "washing-machine",
}
|
rartino/ENVISIoN | envisionpy/utils/exceptions.py | Python | bsd-2-clause | 2,304 | 0.00651 | # ENVISIoN
#
# Copyright (c) 2019 Jesper Ericsson
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
##############################################################################################
def format_error(error):
    """Return ``[exception_class_name, message]`` for the given exception."""
    name = type(error).__name__
    message = str(error)
    return [name, message]
class EnvisionError(Exception):
    ''' Base class for all ENVISIoN-specific errors.'''
    pass
class HandlerNotFoundError(EnvisionError):
    ''' Raised for non-critical unhandled requests; the user is not
    notified when this is raised.'''
    pass
class HandlerAlreadyExistError(EnvisionError):
    ''' Raised when a new visualisation tries to initialize with an
    already-existing handler id.'''
    pass
class InvalidRequestError(EnvisionError):
    ''' Raised for invalid requests, such as invalid parameters.
    Should generate an alert so the user knows something did not work.
    '''
    pass
class ProcessorNotFoundError(EnvisionError):
    ''' Raised when a requested processor cannot be found.'''
    pass
class BadHDF5Error(EnvisionError):
    ''' Raised when an HDF5 file cannot be used as expected.'''
    pass
class ProcessorNetworkError(EnvisionError):
    ''' Raised for errors in the processor network.'''
    pass
# TODO: Custom parse errors
|
zamanashiq3/code-DNN | dense_v1.py | Python | mit | 2,055 | 0.019951 | """
locally connected implementation on the lip movement data.
Akm Ashiquzzaman
13101002@uap-bd.edu
Fall 2016
after 1 epoch , val_acc: 0.0926
"""
from __future__ import print_function, division
# random seed fixing for reproducibility
import numpy as np
np.random.seed(1337)
import time

# Data loading
X_train = np.load('videopart43.npy')
Y_train = np.load('audiopart43.npy')

# Reshape to flat vectors for the dense network: video frames are 53x53
# images, audio targets are 4702-dim vectors; cast to float32.
X_train = X_train.reshape((X_train.shape[0], 53*53)).astype('float32')
Y_train = Y_train.reshape((Y_train.shape[0], 4702)).astype('float32')

# training hyper-parameters
batchSize = 20
tt_epoch = 1

from keras.models import Sequential
from keras.layers import Dense, Dropout, Activation

# time to measure the experiment.
tt = time.time()

# model building starts here
seq = Sequential()
# first dense layer
seq.add(Dense(2048, input_dim=(53*53)))
seq.add(Activation('relu'))
seq.add(Dropout(0.25))
# second dense layer
seq.add(Dense(4096))
seq.add(Activation('relu'))
seq.add(Dropout(0.5))
# 3rd dense layers
seq.add(Dense(1024))
seq.add(Activation('relu'))
seq.add(Dropout(0.5))
seq.add(Dense(1024))
seq.add(Activation('relu'))
seq.add(Dropout(0.5))
# output layer sized to the audio target vector
seq.add(Dense(4702))
seq.add(Activation('softmax'))
seq.compile(loss='binary_crossentropy', optimizer='sgd', metrics=['accuracy'])

# checkpoint import
from keras.callbacks import ModelCheckpoint
from os.path import isfile, join
# weight file name
weight_file = 'lc_weights.h5'
# loading previous weight file for resuming training
if isfile(weight_file):
    seq.load_weights(weight_file)
# Accuracy must be maximised, so monitor it with mode='max'; the original
# mode='min' with save_best_only=True only ever saved the *worst* epoch.
checkpoint = ModelCheckpoint(weight_file, monitor='acc', verbose=1, save_best_only=True, mode='max')
callbacks_list = [checkpoint]
print('total time: ',time.time()-tt)
seq.fit(X_train,Y_train,batch_size=batchSize, nb_epoch=tt_epoch
        ,validation_split=0.2,callbacks=callbacks_list)
# generating prediction for testing
pred = seq.predict(X_train,batch_size=batchSize,verbose=1)
print('pred shape',pred.shape)
print('pred dtype',pred.dtype)
np.save('pred-lc.npy',pred)
|
jmartinm/inspire-next | inspire/modules/predicter/tasks.py | Python | gpl-2.0 | 4,080 | 0.00049 | # -*- coding: utf-8 -*-
#
# This file is part of INSPIRE.
# Copyright (C) 2015 CERN.
#
# INSPIRE is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# INSPIRE is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with INSPIRE; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""Tasks for classifier."""
from __future__ import print_function
import json
import os
import cPickle as pickle
from functools import wraps
from invenio.celery import celery
from inspire.utils.helpers import (
get_record_from_model,
)
from .utils import (
prepare_prediction_record,
load_model
)
def guess_coreness(model_path="arxiv_guessing.pickle", top_words=0):
    """Using a prediction model, predict if record is CORE.

    Returns a workflow task (closure) that runs the prediction for a
    workflow object and stores the outcome via ``obj.update_task_results``.

    :param model_path: pickled model; a bare filename is resolved against
        the ``CLASSIFIER_MODEL_PATH`` configured directory.
    :param top_words: when non-zero, also record the top-scoring words
        returned by the predictor.
    """
    @wraps(guess_coreness)
    def _guess_coreness(obj, eng):
        from invenio_base.globals import cfg
        from .arxiv import predict
        if os.path.basename(model_path) == model_path:
            # Just the name is given, so we fill in the rest
            full_model_path = os.path.join(
                cfg.get("CLASSIFIER_MODEL_PATH"),
                model_path
            )
        else:
            # The new variable is needed due to how parameters in closures work
            full_model_path = model_path
        if not os.path.isfile(full_model_path):
            # Missing model is non-fatal: log and leave the record unjudged.
            obj.log.error(
                "Model file {0} not found! Skipping prediction...".format(
                    full_model_path
                )
            )
            return
        model = eng.workflow_definition.model(obj)
        record = get_record_from_model(model)
        prepared_record = prepare_prediction_record(record)
        pipeline = load_model(full_model_path)
        result = {}
        if not top_words:
            decision, scores = predict(pipeline, prepared_record, top_words)
        else:
            decision, scores, top_core, top_noncore, top_rejected = \
                predict(pipeline, prepared_record, top_words)
            result["top_core"] = top_core
            result["top_noncore"] = top_noncore
            result["top_rejected"] = top_rejected
        obj.log.info("Successfully predicted as {0} with {1}".format(decision, max(scores)))
        result["decision"] = decision
        result["max_score"] = max(scores)
        result["all_scores"] = scores
        task_result = {
            "name": "arxiv_guessing",
            "result": result,
            "template": "workflows/results/arxiv_guessing.html"
        }
        obj.update_task_results(
            task_result.get("name"),
            [task_result]
        )
    return _guess_coreness
@celery.task()
def train(records, output, skip_categories=True, skip_astro=True):
    """Train a set of records and save model to file.

    :param records: path to a JSON file holding a list (or dict) of records.
    :param output: path the trained pipeline is pickled to.
    :param skip_categories: when True, do not use category features.
    :param skip_astro: when True, drop old astro-ph records from training.
    """
    from .arxiv import train as core_train
    # Close the input file deterministically instead of leaking the handle.
    with open(records, "r") as records_file:
        records = json.load(records_file)
    if isinstance(records, dict):
        records = records.values()
    print("Records found: {0}".format(len(records)))
    if skip_astro:
        astro_categories = {
            'astro-ph.SR',
            'astro-ph',
            'astro-ph.EP',
            'astro-ph.IM',
            'astro-ph.GA'
        }
        # Keep astro-ph records only when their ids start with "14" or "15"
        # (i.e. recent submissions).
        records = [r for r in records
                   if not (astro_categories & set(r["categories"]) and not
                           (r["id"].startswith("14") or r["id"].startswith("15")))]
        print("Records after filtering: {0}".format(len(records)))
    pipeline = core_train(records, not skip_categories)
    # Pickle in binary mode and close the file before reporting success
    # (the original leaked the handle and wrote in text mode).
    with open(output, "wb") as output_file:
        pickle.dump(pipeline, output_file)
    print("Dumped trained model to {0}".format(output))
|
mgraupe/acq4 | acq4/util/flowchart/Analysis.py | Python | mit | 48,966 | 0.014051 | # -*- coding: utf-8 -*-
from acq4.pyqtgraph.flowchart.library.common import *
import acq4.util.functions as functions
import numpy as np
import scipy
#from acq4.pyqtgraph import graphicsItems
import acq4.pyqtgraph as pg
import acq4.util.metaarray as metaarray
#import acq4.pyqtgraph.CheckTable as CheckTable
from collections import OrderedDict
from acq4.analysis.tools.Fitting import Fitting
class EventFitter(CtrlNode):
    """Takes a waveform and event list as input, returns extra information about each event.
    Optionally performs an exponential reconvolution before measuring each event.
    Plots fits of reconstructed events if the plot output is connected."""
    nodeName = "EventFitter"
    uiTemplate = [
        ('multiFit', 'check', {'value': False}),
        #('parallel', 'check', {'value': False}),
        #('nProcesses', 'spin', {'value': 1, 'min': 1, 'int': True}),
        ('plotFits', 'check', {'value': True}),
        ('plotGuess', 'check', {'value': False}),
        ('plotEvents', 'check', {'value': False}),
    ]
    def __init__(self, name):
        CtrlNode.__init__(self, name, terminals={
            'waveform': {'io': 'in'},
            'events': {'io': 'in'},
            'output': {'io': 'out'},
            'plot': {'io': 'out'}
        })
        # Plot items created for the most recent process() call.
        self.plotItems = []
        self.selectedFit = None
        # Event indexes the user has marked as deleted (excluded from output).
        self.deletedFits = []
        self.pool = None ## multiprocessing pool
        self.poolSize = 0
        #self.ctrls['parallel'].toggled.connect(self.setupPool)
        #self.ctrls['nProcesses'].valueChanged.connect(self.setupPool)
    #def setupPool(self):
        #import multiprocessing as mp
        #if self.ctrls['parallel'].isChecked():
            #nProc = self.ctrls['nProcesses'].value()
            #if self.pool is not None and self.poolSize != nProc:
                #self.pool.terminate()
                #self.pool = None
            #if self.pool is None:
                #self.pool = mp.Pool(processes=nProc)
                #self.poolSize = nProc
        #else:
            #if self.pool is not None:
                #self.pool.terminate()
                #self.pool = None
    def process(self, waveform, events, display=True):
        """Fit every event in *events* against *waveform* and build plot items."""
        self.deletedFits = []
        # Disconnect click handlers from the previous run's plot items.
        # NOTE(review): bare except silently hides disconnect failures.
        for item in self.plotItems:
            try:
                item.sigClicked.disconnect(self.fitClicked)
            except:
                pass
        self.plotItems = []
        tau = waveform.infoCopy(-1).get('expDeconvolveTau', None)
        dt = waveform.xvals(0)[1] - waveform.xvals(0)[0]
        opts = {
            'dt': dt, 'tau': tau, 'multiFit': self.ctrls['multiFit'].isChecked(),
            'waveform': waveform.view(np.ndarray),
            'tvals': waveform.xvals('Time'),
        }
        #if not self.ctrls['parallel'].isChecked():
        output = processEventFits(events, startEvent=0, stopEvent=len(events), opts=opts)
        guesses = output['guesses']
        eventData = output['eventData']
        indexes = output['indexes']
        xVals = output['xVals']
        yVals = output['yVals']
        output = output['output']
        #else:
            #print "parallel:", self.pool, self.poolSize
            #results = []
            #nProcesses = self.ctrls['nProcesses'].value()
            #evPerProcess = int(len(events) / nProcesses)
            #start = 0
            #for i in range(nProcesses):
                #stop = start + evPerProcess
                #if stop > len(events):
                    #stop = len(events)
                #args = (events, start, stop, opts)
                #results.append(self.pool.apply_async(processEventFits, args))
                #print "started process", start, stop
                #start = stop
            #data = []
            #guesses = []
            #eventData = []
            #indexes = []
            #xVals = []
            #yVals = []
            #for res in results: ## reconstruct results here
                #print "getting result", res
                #output = res.get(10)
                #data.append(output['output'])
                #guesses.extend(output['guesses'])
                #eventData.extend(output['eventData'])
                #indexes.extend(output['indexes'])
                #xVals.extend(output['xVals'])
                #yVals.extend(output['yVals'])
            #output = np.concatenate(data)
        # Build the requested overlay curves (fit / initial guess / raw event).
        for i in range(len(indexes)):
            if display and self['plot'].isConnected():
                if self.ctrls['plotFits'].isChecked():
                    item = pg.PlotDataItem(x=xVals[i], y=yVals[i], pen=(0, 0, 255), clickable=True)
                    item.setZValue(100)
                    self.plotItems.append(item)
                    item.eventIndex = indexes[i]
                    item.sigClicked.connect(self.fitClicked)
                    item.deleted = False
                if self.ctrls['plotGuess'].isChecked():
                    item2 = pg.PlotDataItem(x=xVals[i], y=functions.pspFunc(guesses[i], xVals[i]), pen=(255, 0, 0))
                    item2.setZValue(100)
                    self.plotItems.append(item2)
                if self.ctrls['plotEvents'].isChecked():
                    item2 = pg.PlotDataItem(x=xVals[i], y=eventData[i], pen=(0, 255, 0))
                    item2.setZValue(100)
                    self.plotItems.append(item2)
            #plot = self.plot.connections().keys()[0].node().getPlot()
            #plot.addItem(item)
        self.outputData = output
        return {'output': output, 'plot': self.plotItems}
    def deleteSelected(self):
        """Toggle the deleted state of the selected fit and re-emit output."""
        item = self.selectedFit
        d = not item.deleted
        if d:
            self.deletedFits.append(item.eventIndex)
            self.selectedFit.setPen((100, 0, 0))
        else:
            self.deletedFits.remove(item.eventIndex)
            self.selectedFit.setPen((0, 0, 255))
        item.deleted = d
        # Mask out deleted events when publishing the output array.
        inds = np.ones(len(self.outputData), dtype=bool)
        inds[self.deletedFits] = False
        self.setOutput(output=self.outputData[inds], plot=self.plotItems)
    ## Intercept keypresses on any plot that is connected.
    def connected(self, local, remote):
        if local is self['plot']:
            self.filterPlot(remote.node())
            remote.node().sigPlotChanged.connect(self.filterPlot)
        CtrlNode.connected(self, local, remote)
    def disconnected(self, local, remote):
        if local is self['plot']:
            self.filterPlot(remote.node(), install=False)
            try:
                remote.node().sigPlotChanged.disconnect(self.filterPlot)
            except:
                pass
        CtrlNode.disconnected(self, local, remote)
    ## install event filter on remote plot (for detecting del key press)
    def filterPlot(self, node, install=True):
        plot = node.getPlot()
        if plot is None:
            return
        if install:
            plot.installEventFilter(self)
        else:
            plot.removeEventFilter(self)
    def fitClicked(self, curve):
        # Restore the previous selection's pen, then highlight the new one.
        if self.selectedFit is not None:
            if self.selectedFit.deleted:
                self.selectedFit.setPen((100,0,0))
            else:
                self.selectedFit.setPen((0,0,255))
        self.selectedFit = curve
        curve.setPen((255,255,255))
    def eventFilter(self, obj, event):
        # Delete key toggles deletion of the currently selected fit.
        if self.selectedFit is None:
            return False
        if event.type() == QtCore.QEvent.KeyPress and event.key() == QtCore.Qt.Key_Delete:
            self.deleteSelected()
            return True
        return False
def processEventFits(events, startEvent, stopEvent, opts):
## This function does all the processing work for EventFitter.
dt = opts['dt']
origTau = opts['tau']
multiFit = opts['multiFit']
waveform = opts['waveform']
tvals = opts['tvals']
nFields = len(events.dtype.fields)
dtype = [(n, events[n].dtype) for n in events.dtype.names]
output = np.empty(len(events), dtype=dtype + [
('fitAmplitude', float),
('fitTime', float),
|
janezkranjc/clowdflows | workflows/perfeval/visualization_views.py | Python | gpl-3.0 | 446 | 0.024664 | from django.shortcuts import render
def perfeval_display_summation(request, input_dict, output_dict, widget):
    """Render the summation-check visualization.

    Compares the sum of the widget's integer-list input against the
    reported 'sum' value and displays whether the calculation matches.
    """
    if sum(input_dict['intList']) == input_dict['sum']:
        check = 'The calculation appears correct.'
    else:
        check = 'The calculation appears incorrect!'
    return render(request, 'visualizations/perfeval_display_integers.html',
                  {'widget': widget, 'input_dict': input_dict,
                   'output_dict': output_dict, 'check': check})
|
fbergmann/libSEDML | examples/python/create_nested_task.py | Python | bsd-2-clause | 1,555 | 0.001286 | #!/bin/env python
import libsedml
def create_nested_task(file_name):
    """Build a SED-ML document with a repeated task wrapping a steady-state
    sub-task, and write it to *file_name*."""
    doc = libsedml.SedDocument(1, 4)
    # create simulation
    sim = doc.createSteadyState()
    sim.setId("steady1")
    # need to set the correct KISAO Term
    alg = sim.createAlgorithm()
    alg.setKisaoID("KISAO:0000282")
    # create model
    model = doc.createModel()
    model.setId("model1")
    model.setLanguage("urn:sedml:language:sbml")
    model.setSource("oscli.xml")
    # create tasks
    task = doc.createTask()
    task.setId("task0")
    task.setModelReference("model1")
    task.setSimulationReference("steady1")
    task = doc.createRepeatedTask()
    assert(isinstance(task, libsedml.SedRepeatedTask))
    task.setId("task1")
    task.setResetModel(True)
    task.setRangeId("current")
    # "rng" avoids shadowing the builtin `range`
    rng = task.createUniformRange()
    assert(isinstance(rng, libsedml.SedUniformRange))
    rng.setId("current")
    rng.setStart(0)
    rng.setEnd(0)
    rng.setNumberOfSteps(100)
    rng.setType("linear")
    change = task.createTaskChange()
    assert(isinstance(change, libsedml.SedSetValue))
    change.setModelReference("model1")
    # The XPath target contains double quotes, so use a single-quoted
    # literal (the original nested double quotes were a syntax error).
    change.setTarget('/sbml:sbml/sbml:model/sbml:listOfParameters/sbml:parameter[@id="J0_v0"]')
    change.setRange("current")
    change.setMath(libsedml.parseL3Formula("current"))
    subtask = task.createSubTask()
    subtask.setOrder(1)
    subtask.setTask("task0")
    # write doc
    libsedml.writeSedML(doc, file_name)
if __name__ == "__main__":
    # Write the example document next to the script.
    create_nested_task('nested_task.xml')
|
bmaggard/luigi | test/worker_parallel_scheduling_test.py | Python | apache-2.0 | 5,302 | 0.000377 | # -*- coding: utf-8 -*-
#
# Copyright 2012-2015 Spotify AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import contextlib
import gc
import os
import pickle
import time
from helpers import unittest
import luigi
import mock
import psutil
from luigi.worker import Worker
def running_children():
    """Return the pids of this process's currently running child processes."""
    me = psutil.Process(os.getpid())
    return {child.pid for child in me.children() if child.is_running()}
@contextlib.contextmanager
def pause_gc():
    """Context manager that disables garbage collection for its duration.

    If gc is already disabled, do nothing and leave it disabled on exit.
    """
    if not gc.isenabled():
        # gc is already off; yield once and return so we neither yield a
        # second time (which would make @contextmanager raise RuntimeError)
        # nor re-enable gc that the caller had deliberately turned off.
        yield
        return
    try:
        gc.disable()
        yield
    finally:
        gc.enable()
class SlowCompleteWrapper(luigi.WrapperTask):
    # Wrapper requiring four tasks whose complete() checks are slow.
    def requires(self):
        return [SlowCompleteTask(i) for i in range(4)]
class SlowCompleteTask(luigi.Task):
    n = luigi.IntParameter()
    def complete(self):
        # Simulate an expensive completeness check.
        time.sleep(0.1)
        return True
class OverlappingSelfDependenciesTask(luigi.Task):
    n = luigi.IntParameter()
    k = luigi.IntParameter()
    def complete(self):
        return self.n < self.k or self.k == 0
    def requires(self):
        # Dependencies overlap heavily between instances, exercising the
        # scheduler's handling of shared dependency subtrees.
        return [OverlappingSelfDependenciesTask(self.n - 1, k) for k in range(self.k + 1)]
class ExceptionCompleteTask(luigi.Task):
    # complete() always raises, to exercise worker error handling.
    def complete(self):
        assert False
class ExceptionRequiresTask(luigi.Task):
    # requires() always raises, to exercise worker error handling.
    def requires(self):
        assert False
class UnpicklableExceptionTask(luigi.Task):
    def complete(self):
        # Locally-defined exception classes cannot be pickled, so this
        # exercises error reporting across process boundaries.
        class UnpicklableException(Exception):
            pass
        raise UnpicklableException()
class ParallelSchedulingTest(unittest.TestCase):
    """Tests for multiprocess (parallel) task scheduling in the Worker."""
    def setUp(self):
        self.sch = mock.Mock()
        self.w = Worker(scheduler=self.sch, worker_id='x')
    def added_tasks(self, status):
        # Task ids the worker registered with the given scheduler status.
        return [kw['task_id'] for args, kw in self.sch.add_task.call_args_list if kw['status'] == status]
    def test_children_terminated(self):
        before_children = running_children()
        with pause_gc():
            self.w.add(
                OverlappingSelfDependenciesTask(5, 2),
                multiprocess=True,
            )
        self.assertLessEqual(running_children(), before_children)
    def test_multiprocess_scheduling_with_overlapping_dependencies(self):
        self.w.add(OverlappingSelfDependenciesTask(5, 2), True)
        self.assertEqual(15, self.sch.add_task.call_count)
        self.assertEqual(set((
            OverlappingSelfDependenciesTask(n=1, k=1).task_id,
            OverlappingSelfDependenciesTask(n=2, k=1).task_id,
            OverlappingSelfDependenciesTask(n=2, k=2).task_id,
            OverlappingSelfDependenciesTask(n=3, k=1).task_id,
            OverlappingSelfDependenciesTask(n=3, k=2).task_id,
            OverlappingSelfDependenciesTask(n=4, k=1).task_id,
            OverlappingSelfDependenciesTask(n=4, k=2).task_id,
            OverlappingSelfDependenciesTask(n=5, k=2).task_id,
        )), set(self.added_tasks('PENDING')))
        self.assertEqual(set((
            OverlappingSelfDependenciesTask(n=0, k=0).task_id,
            OverlappingSelfDependenciesTask(n=0, k=1).task_id,
            OverlappingSelfDependenciesTask(n=1, k=0).task_id,
            OverlappingSelfDependenciesTask(n=1, k=2).task_id,
            OverlappingSelfDependenciesTask(n=2, k=0).task_id,
            OverlappingSelfDependenciesTask(n=3, k=0).task_id,
            OverlappingSelfDependenciesTask(n=4, k=0).task_id,
        )), set(self.added_tasks('DONE')))
    @mock.patch('luigi.notifications.send_error_email')
    def test_raise_exception_in_complete(self, send):
        self.w.add(ExceptionCompleteTask(), multiprocess=True)
        # NOTE(review): `send.check_called_once()` is not a real Mock
        # assertion (Mock auto-creates any attribute); the verifying call
        # would be `send.assert_called_once()`.
        send.check_called_once()
        self.assertEqual(0, self.sch.add_task.call_count)
        self.assertTrue('assert False' in send.call_args[0][1])
    @mock.patch('luigi.notifications.send_error_email')
    def test_raise_unpicklable_exception_in_complete(self, send):
        # verify exception can't be pickled
        self.assertRaises(Exception, UnpicklableExceptionTask().complete)
        try:
            UnpicklableExceptionTask().complete()
        except Exception as e:
            ex = e
        self.assertRaises(pickle.PicklingError, pickle.dumps, ex)
        # verify this can run async
        self.w.add(UnpicklableExceptionTask(), multiprocess=True)
        send.check_called_once()
        self.assertEqual(0, self.sch.add_task.call_count)
        self.assertTrue('raise UnpicklableException()' in send.call_args[0][1])
    @mock.patch('luigi.notifications.send_error_email')
    def test_raise_exception_in_requires(self, send):
        self.w.add(ExceptionRequiresTask(), multiprocess=True)
        send.check_called_once()
        self.assertEqual(0, self.sch.add_task.call_count)
if __name__ == '__main__':
    # Allow running this test module directly.
    unittest.main()
|
oshadmon/StreamingSQL | tests/test_db.py | Python | mit | 4,584 | 0.006981 | """
The following tests that db connections works properly.
Make sure the default configurations match your connection to the database
"""
import pymysql
import warnings
warnings.filterwarnings("ignore")
from StreamingSQL.db import create_connection, execute_command
from StreamingSQL.fonts import Colors, Formats
"""Default configuration to connect to the DB"""
host = 'localhost'
port = 3306
usr = 'root'
paswd = ''
db = 'test'
def test_default_create_connection():
    """
    Test the connection to database
    Assert:
        Connection occurs
    """
    cur = create_connection(host=host, port=port, user=usr, password=paswd, db=db)
    # A successful connection returns a live cursor object.
    assert type(cur) == pymysql.cursors.Cursor
def test_wrong_host_fail_create_connection():
    """
    Test that error is properly returned when there is an incorrect host
    Assert:
        Proper error is returned/formatted
    """
    error = "2003: Cant connect to MySQL server on '%s' [Errno 61] Connection refused"
    error = (Formats.BOLD + Colors.RED + "Connection Error - " + error + Formats.END + Colors.END) % host[:-3]
    cur = create_connection(host=host[:-3], port=port, user=usr, password=paswd, db=db)
    # NOTE(review): swallowing AssertionError means this test can never
    # fail; tighten once the exact expected error text is confirmed.
    try:
        assert cur == error
    except AssertionError:
        pass
def test_wrong_port_fail_create_connection():
    """
    Test that error is properly returned when there is an incorrect port number
    Assert:
        Proper error is returned/formatted
    """
    error = "2003: Cant connect to MySQL server on '%s' [Errno 61] Connection refused"
    error = (Formats.BOLD + Colors.RED + "Connection Error - " + error + Formats.END + Colors.END) % host
    cur = create_connection(host=host, port=port + 13, user=usr, password=paswd, db=db)
    # NOTE(review): swallowing AssertionError means this test can never
    # fail; tighten once the exact expected error text is confirmed.
    try:
        assert cur == error
    except AssertionError:
        pass
def test_wrong_user_fail_create_connection():
    """
    Test that error is properly returned when there is an incorrect user
    Assert:
        Proper error is returned/formatted
    """
    error = "2003: Cant connect to MySQL server on '%s' [Errno 61] Connection refused"
    error = (Formats.BOLD + Colors.RED + "Connection Error - " + error + Formats.END + Colors.END) % host
    cur = create_connection(host=host, port=port, user='', password=paswd, db=db)
    # NOTE(review): swallowing AssertionError means this test can never
    # fail; tighten once the exact expected error text is confirmed.
    try:
        assert cur == error
    except AssertionError:
        pass
def test_wrong_passwd_fail_create_connection():
    """
    Test that error is properly returned when there is an incorrect password
    Assert:
        Proper error is returned/formatted
    """
    error = "2003: Cant connect to MySQL server on '%s' [Errno 61] Connection refused"
    error = (Formats.BOLD + Colors.RED + "Connection Error - " + error + Formats.END + Colors.END) % host
    cur = create_connection(host=host, port=port, user=usr, password=usr, db=db)
    # NOTE(review): swallowing AssertionError means this test can never
    # fail; tighten once the exact expected error text is confirmed.
    try:
        assert cur == error
    except AssertionError:
        pass
def test_execute_command():
    """
    Execute "SELECT 1;"
    Assert:
        A result of 1 is returned
    """
    cur = create_connection(host=host, port=port, user=usr, password=paswd, db=db)
    assert type(cur) == pymysql.cursors.Cursor
    stmt = "SELECT 1"
    result = execute_command(cur, stmt)
    # Result rows come back as tuples: ((1,),)
    assert result[0][0] == 1
def test_syntax_fail_execute_command():
    """
    Execute "SLCT 1;"
    Assert:
        An error message is returned
    """
    stmt = "SLCT 1"
    error = ("1064: You have an error in your SQL syntax; check the manual that corresponds to your MariaDB server "+
             "version for the right syntax to use near '%s' at line 1")
    error = Formats.BOLD + Colors.RED + "Connection Error - " + error % stmt + Formats.END + Colors.END
    cur = create_connection(host=host, port=port, user=usr, password=paswd, db=db)
    assert type(cur) == pymysql.cursors.Cursor
    result = execute_command(cur, stmt)
    # NOTE(review): swallowing AssertionError means this check can never
    # fail; tighten once the exact expected error text is confirmed.
    try:
        assert result == error
    except AssertionError:
        pass
def test_new_db_create_connection():
    """
    Create a connection to a new database
    Assert:
        New database is created/removed
    """
    # Local `db` deliberately shadows the module-level default schema name.
    db="db2"
    cur = create_connection(host=host, port=port, user=usr, password=paswd, db=db)
    assert type(cur) == pymysql.cursors.Cursor
    stmt = "SELECT `SCHEMA_NAME` from `INFORMATION_SCHEMA`.`SCHEMATA` WHERE `SCHEMA_NAME` LIKE '%s';" % db
    result = execute_command(cur, stmt)
    assert result[0][0] == db
    # Clean up: drop the database created by the connection.
    stmt = "FLUSH TABLES; DROP DATABASE IF EXISTS %s;" % db
    result = execute_command(cur, stmt)
    assert result == ()
|
leighpauls/k2cro4 | third_party/WebKit/Tools/Scripts/webkitpy/tool/commands/abstractsequencedcommand.py | Python | bsd-3-clause | 2,379 | 0.001261 | # Copyright (C) 2010 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from webkitpy.common.system.executive import ScriptError
from webkitpy.common.system.deprecated_logging import log
from webkitpy.tool.commands.stepsequence import StepSequence
from webkitpy.tool.multicommandtool import AbstractDeclarativeCommand
class AbstractSequencedCommand(AbstractDeclarativeCommand):
    # Subclasses set `steps` to the list of Step classes executed in order.
    steps = None
    def __init__(self):
        self._sequence = StepSequence(self.steps)
        AbstractDeclarativeCommand.__init__(self, self._sequence.options())
    def _prepare_state(self, options, args, tool):
        # Hook for subclasses to build the state object passed to the steps.
        return None
    def execute(self, options, args, tool):
        # If preparing state fails with a ScriptError, log and exit with
        # the error's exit code (defaulting to 2).
        try:
            state = self._prepare_state(options, args, tool)
        except ScriptError, e:
            log(e.message_with_output())
            self._exit(e.exit_code or 2)
        self._sequence.run_and_handle_errors(tool, options, state)
|
weechat/weechat.org | weechat/common/views.py | Python | gpl-3.0 | 1,083 | 0 | #
# Copyright (C) 2003-2022 Sébastien Helleu <flashcode@flashtux.org>
#
# This file is part of WeeChat.org.
#
# WeeChat.org is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# WeeChat.org is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with WeeChat.org. If not, see <https://www.gnu.org/licenses/>.
#
"""Some useful views."""
from django.views.generic import TemplateView
class TextTemplateView(TemplateView):
    """View that renders its template as ``text/plain`` instead of HTML."""

    def render_to_response(self, context, **response_kwargs):
        # Fixed garbled "conte | xt" token; force the plain-text content type,
        # everything else is standard TemplateView behaviour.
        response_kwargs['content_type'] = 'text/plain'
        return super().render_to_response(context, **response_kwargs)
|
pylayers/pylayers | pylayers/simul/tests/test_espoo.py | Python | mit | 2,820 | 0.040426 | from pylayers.simul.link import *
from pylayers.antprop.channel import *
from pylayers.antprop.antenna import *
from pylayers.util.geomutil import *
# ---- Simulation parameters (mmWave sweep, 31.8-33.4 GHz) ----------------
fmin = 31.8  # GHz
fmax = 33.4  # GHz
bw = fmax - fmin  # bandwidth in GHz
nf = 10001  # sweep frequency points
fc = (fmin + fmax)/2.
hms = 1.65  # height of the MS (m)
hbs = 6  # height of the BS (m)
fGHz = np.linspace(fmin, fmax, nf)
fonts = 20  # fontsize
# Initialization of the Layout and the link.
L = Layout('espoo.lay', bbuild=1)
DL = DLink(L=L)
DL.fGHz = fGHz
# Coordinates of the MS positions and the BS in the scene.
ms10 = np.array([188.2, 199.5, hms])
ms11 = np.array([170.6, 192, hms])
ms12 = np.array([208, 208.3, hms])
ms13 = np.array([224, 170.4, hms])
ms1 = np.array([197, 189.5, hms])
ms2 = np.array([201.7, 179.7, hms])
ms3 = np.array([185.4, 171.1, hms])
ms4 = np.array([195.1, 159.9, hms])  # np.array([198,161,hms])
ms5 = np.array([232.5, 148.5, hms])
ms6 = np.array([176.5, 179, hms])
ms7 = np.array([163.2, 165.8, hms])
ms8 = np.array([148.4, 154.9, hms])
ms9 = np.array([191.9, 209.6, hms])
ms = np.vstack((ms1, ms2, ms3, ms4, ms5, ms6, ms7, ms8, ms9, ms10, ms11, ms12, ms13))
bs = np.array([220, 185, hbs])
# Endpoints of the link: MS antenna (a) and BS antenna (b).
DL.a = ms12
DL.b = bs
# MS antenna
DL.Aa = Antenna('Omni')
# BS antenna
DL.Ab = Antenna('aperture', fGHz=fGHz)
# Orientation of the Horn antenna:
#   alpha = 0.37 rad (z axis)
#   beta = 0 (y axis)
#   gamma = -(40 deg + pi/2) (x axis); 40 deg = 0.698 rad
DL.Tb = MEulerAngle(0.37, 0, -(0.698+np.pi/2.))
plot_pos = False
if plot_pos:
    DL.L.show(L=DL.L)
    plt.axis('on')
    plt.plot(bs[0], bs[1], 'or')
    for k in range(13):
        plt.plot(ms[k][0], ms[k][1], 'ob')
        plt.annotate(xy=(ms[k][0], ms[k][1]), s='ms'+str(k+1), fontsize=fonts)
    plt.title('Layout of the Umi environment', fontsize=fonts)
    plt.show()
# DL.L._show3() # to see the scene of the layout
# DL._show3() # to see the scene + antennas in the layout
# L.showG('s',labels=1) # enable to see the no of segments.
# cutoff: how deep the interaction graph may be explored
# threshold:
#   close to 1 = look for paths similar to the LOS path
#   close to 0 = look for NLOS-like paths as well (takes longer)
# ra_ceil_H=0 keeps only the ground reflection (fixed garbled "ra_cei l_H")
DL.eval(force=1, cutoff=3, threshold=0.4, nD=1, ra_ceil_H=0)
# DL.C.cut(threshold_dB=90)
# angular range
phimin = np.pi/4.  # 45 deg
phimax = 3*np.pi/2.  # 270 deg
phistep = 5*np.pi/180.  # 5 deg
phi = np.arange(phimin, phimax, phistep)
# angular frequency profile
afp = DL.afp(phi)
# angular delay profile (fixed garbled "a | fp.toadp()": `a` is undefined here)
adp = afp.toadp()
# adp.imshow(cmap=cm.jet)
# plt.figure()
# adp.imshow(cmap=cm.jet)
# polarplot
# plt.figure()
adp.polarplot(vmin=-130, title='PADP of BS-MS1')  # +str())
plt.show()
# DL.R.show(L=DL.L,rlist=DL.C.selected)
|
borisroman/vdsm | lib/vdsm/ipwrapper.py | Python | gpl-2.0 | 18,145 | 0 | # Copyright 2013-2014 Red Hat, Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#
# Refer to the README and COPYING files for full details of the license
#
from __future__ import absolute_import
from contextlib import closing
from glob import iglob
import array
import errno
import fcntl
import os
import socket
import struct
from netaddr.core import AddrFormatError
from netaddr import IPAddress
from netaddr import IPNetwork
from .config import config
from .utils import anyFnmatch
from .utils import CommandPath
from .utils import execCmd
from .netlink import link
# Path of the iproute2 `ip` binary used for link/route manipulation.
_IP_BINARY = CommandPath('ip', '/sbin/ip')

# sysfs root directory exposing per-device network attributes.
NET_SYSFS = '/sys/class/net'
# Name of vdsm's internal dummy bridge (hidden from reports).
DUMMY_BRIDGE = ';vdsmdummy;'
# Route type keywords that may appear in `ip route` output.
_ROUTE_FLAGS = frozenset((
    # copied from iproute2's rtnl_rtntype_n2a()
    'unicast',
    'local',
    'broadcast',
    'anycast',
    'multicast',
    'blackhole',
    'unreachable',
    'prohibit',
    'throw',
    'nat',
    'xresolve',
    'Deleted',  # copied from iproute.c
))
def _isValid(ip, verifier):
try:
verifier(ip)
except (AddrFormatError, ValueError):
return False
return True
def equals(cls):
    """Class decorator installing an __eq__ based on exact type and __dict__."""
    def _same(self, other):
        # Equal only when the other object is of exactly this class and
        # carries an identical attribute dictionary.
        return type(other) == cls and self.__dict__ == other.__dict__
    cls.__eq__ = _same
    return cls
class LinkType(object):
    """Representation of the different link types"""
    NIC = 'nic'
    VLAN = 'vlan'
    BOND = 'bond'
    BRIDGE = 'bridge'
    LOOPBACK = 'loopback'
    MACVLAN = 'macvlan'
    DUMMY = 'dummy'
    TUN = 'tun'
    OVS = 'openvswitch'
    TEAM = 'team'
    VETH = 'veth'
    VF = 'vf'  # SR-IOV virtual function
@equals
class Link(object):
    """Represents link information obtained from iproute2"""
    # Patterns from vdsm config controlling which devices are faked/hidden.
    _fakeNics = config.get('vars', 'fake_nics').split(',')
    _hiddenBonds = config.get('vars', 'hidden_bonds').split(',')
    _hiddenNics = config.get('vars', 'hidden_nics').split(',')
    _hiddenVlans = config.get('vars', 'hidden_vlans').split(',')
    # Interface flag bits (see <linux/if.h>).
    IFF_RUNNING = 1 << 6
    IFF_PROMISC = 1 << 8
    def __init__(self, address, index, linkType, mtu, name, qdisc, state,
                 vlanid=None, vlanprotocol=None, master=None, device=None,
                 **kwargs):
        self.address = address
        self.index = index
        self.type = linkType
        self.mtu = mtu
        self.name = name
        self.qdisc = qdisc
        self.state = state
        self.master = master
        # Optional attributes are only set when present in the source data.
        if vlanid is not None:
            self.vlanid = vlanid
        if vlanprotocol is not None:
            self.vlanprotocol = vlanprotocol
        if device is not None:
            self.device = device
        # Any extra iproute2 fields are attached verbatim as attributes.
        for key, value in kwargs.items():
            setattr(self, key, value)
    def __repr__(self):
        return '%s: %s(%s) %s' % (self.index, self.name, self.type,
                                  self.address)
    @classmethod
    def fromDict(cls, data):
        """Build a Link from a dict, detecting the link type if absent."""
        data['linkType'] = (data['type'] if 'type' in data else
                            cls._detectType(data['name']))
        return cls(**data)
    @staticmethod
    def _detectType(name):
        """Returns the LinkType for the specified device."""
        # TODO: Add support for virtual functions
        detectedType = None
        try:
            driver = drv_name(name)
        except IOError as ioe:
            if ioe.errno == errno.EOPNOTSUPP:
                # Devices without ethtool support: loopback or dummy.
                if name == 'lo':
                    detectedType = LinkType.LOOPBACK
                else:
                    detectedType = LinkType.DUMMY
                return detectedType
            else:
                raise  # Reraise other errors like ENODEV
        if driver in (LinkType.BRIDGE, LinkType.MACVLAN, LinkType.TUN,
                      LinkType.OVS, LinkType.TEAM, LinkType.VETH):
            detectedType = driver
        elif driver == 'bonding':
            detectedType = LinkType.BOND
        elif 'VLAN' in driver or 'vlan' in driver:
            detectedType = LinkType.VLAN
        elif os.path.exists('/sys/class/net/%s/device/physfn/' % name):
            # A physfn entry means this device is an SR-IOV virtual function.
            detectedType = LinkType.VF
        else:
            detectedType = LinkType.NIC
        return detectedType
    def isBOND(self):
        return self.type == LinkType.BOND
    def isBRIDGE(self):
        return self.type == LinkType.BRIDGE
    def isDUMMY(self):
        return self.type == LinkType.DUMMY
    def isNIC(self):
        return self.type == LinkType.NIC
    def isVETH(self):
        return self.type == LinkType.VETH
    def isVF(self):
        return self.type == LinkType.VF
    def isVLAN(self):
        return self.type == LinkType.VLAN
    def isMACVLAN(self):
        return self.type == LinkType.MACVLAN
    def isFakeNIC(self):
        """
        Returns True iff vdsm config marks the DUMMY or VETH dev to be reported
        as NIC.
        """
        if self.isDUMMY() or self.isVETH() or self.isMACVLAN():
            return anyFnmatch(self.name, self._fakeNics)
        return False
    def isNICLike(self):
        # A real NIC, a virtual function, or a device configured to look
        # like a NIC.
        return self.isNIC() or self.isVF() or self.isFakeNIC()
    def isHidden(self):
        """Returns True iff vdsm config hides the device."""
        if self.isVLAN():
            return anyFnmatch(self.name, self._hiddenVlans)
        elif self.isNICLike():
            # NOTE(review): _bondExists is defined elsewhere in this module
            # (outside this view) -- presumably checks /sys for the bond.
            return (anyFnmatch(self.name, self._hiddenNics) or
                    (self.master and _bondExists(self.master) and
                     anyFnmatch(self.master, self._hiddenBonds)) or
                    (self.isVF() and self._isVFhidden()))
        elif self.isBOND():
            return anyFnmatch(self.name, self._hiddenBonds)
        elif self.isBRIDGE():
            return self.name == DUMMY_BRIDGE
        return False
    def _isVFhidden(self):
        # An all-zero MAC means the VF is not configured for the host.
        if self.address == '00:00:00:00:00:00':
            return True
        # We hide a VF if there exists a macvtap device with the same address.
        # We assume that such VFs are used by a VM and should not be reported
        # as host nics
        for path in iglob('/sys/class/net/*/address'):
            dev = os.path.basename(os.path.dirname(path))
            if (dev != self.name and _read_stripped(path) == self.address and
                    self._detectType(dev) == LinkType.MACVLAN):
                return True
        return False
    @property
    def oper_up(self):
        # True when the interface RUNNING flag is set.
        return bool(self.flags & self.IFF_RUNNING)
    def get_promisc(self):
        return bool(link.get_link(self.name)['flags'] & self.IFF_PROMISC)
    def set_promisc(self, value):
        """Takes a boolean to enable/disable Link promiscuity"""
        promisc = 'on' if value else 'off'
        linkSet(self.name, ['promisc', promisc])
    promisc = property(get_promisc, set_promisc, None, 'Link promiscuity flag')
def drv_name(devName):
"""Returns the driver used by a device.
Throws IOError ENODEV for non existing devices.
Throws IOError EOPNOTSUPP for non supported devices, i.g., loopback."""
encoded_name = devName.encode('utf-8')
ETHTOOL_GDRVINFO = 0x00000003 # ETHTOOL Get driver info command
SIOCETHTOOL = 0x8946 # Ethtool interface
DRVINFO_FORMAT = '= I 32s 32s 32s 32s 32s 12s 5I'
IFREQ_FORMAT = '16sPi' # device_name, buffer_pointer, buffer_len
buff = array.array('c', b'\0' * struct.calcsize(DRVINFO_FORMAT))
cmd = struct.pack('= I', ETHTOOL_GDRVINFO)
buff[0:len(cmd)] = array.array('c', cmd)
data = struct.pack(IFREQ_FORMAT, encoded_name, *buff.buffer_info())
with closing(socket.socket(socket.AF_INET, socket.SOCK_DGRAM)) as sock:
fcntl.ioctl(sock, SIOCETHTOOL |
tensorflow/tensorboard | tensorboard/data/server/pip_package/build_test.py | Python | apache-2.0 | 2,101 | 0 | # Copyright 2020 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for the `tensorboard_data_server` build script."""
import os
import subprocess
import sys
from tensorboard import test as tb_test
class BuildPipPackageTest(tb_test.TestCase):
    """Smoke test for the data-server pip package ``build`` script."""

    def test(self):
        # Stand-in server binary: an empty, executable file.
        fake_server = os.path.join(self.get_temp_dir(), "server")
        with open(fake_server, "wb"):
            pass
        os.chmod(fake_server, 0o755)

        out_dir = os.path.join(self.get_temp_dir(), "out")
        os.mkdir(out_dir)

        script = os.path.join(os.path.dirname(__file__), "build")
        try:
            proc = subprocess.run(
                [
                    script,
                    "--server-binary=%s" % (fake_server,),
                    "--out-dir=%s" % (out_dir,),
                ],
                check=True,
                capture_output=True,
            )
        except subprocess.CalledProcessError as e:
            # Surface the subprocess output before failing the test.
            sys.stdout.buffer.write(e.stdout)
            sys.stdout.flush()
            sys.stderr.buffer.write(e.stderr)
            sys.stderr.flush()
            raise

        wheel_paths = proc.stdout.decode("utf-8").splitlines()
        self.assertLen(wheel_paths, 1)
        (wheel,) = wheel_paths
        self.assertEqual(os.path.dirname(wheel), out_dir)
        self.assertTrue(os.path.isfile(wheel))
        os.unlink(wheel)
        os.rmdir(out_dir)  # fails if the build left extraneous files behind


if __name__ == "__main__":
    tb_test.main()
|
mtulio/kb | dev/satellite6_api/running_queries.py | Python | apache-2.0 | 2,617 | 0.006123 | #!/usr/bin/python
# Red Hat Satellite - API Guide - 4.2.2. Running Queries Using Python
# Link: https://access.redhat.com/documentation/en-US/Red_Hat_Satellite/6.1/html-single/API_Guide/index.html#idp10992432
#
# You can create and run a Python script to achieve the same results as those described in Section 4.3, “API Examples
# Using Curl”(https://access.redhat.com/documentation/en-US/Red_Hat_Satellite/6.1/html-single/API_Guide/index.html#curl-examples).
# The following example script describes this approach. First, create an executable file called sat6api.py and then add the
# following content:
#
import json
import sys

try:
    import requests
except ImportError:
    # print() with a single argument behaves identically on Python 2 and 3;
    # the original Python-2-only print statement broke Python 3 parsing.
    print("Please install the python-requests module.")
    sys.exit(-1)

# Satellite 6 API endpoint and credentials used by all requests below.
SAT_API = 'https://satellite6.example.com/api/v2/'
USERNAME = "admin"
PASSWORD = "password"
SSL_VERIFY = False  # Ignore SSL for now
def get_json(url):
    """Perform a GET on *url* with basic auth and return the decoded JSON."""
    response = requests.get(url, auth=(USERNAME, PASSWORD), verify=SSL_VERIFY)
    return response.json()
def get_results(url):
    """Return the 'results' payload from *url*, or None on error / no results."""
    jsn = get_json(url)
    if jsn.get('error'):
        print("Error: " + jsn['error']['message'])
    else:
        if jsn.get('results'):
            return jsn['results']
        elif 'results' not in jsn:
            # Some endpoints return the object directly without a wrapper.
            return jsn
        else:
            print("No results found")
    return None
def display_all_results(url):
    """Pretty-print every result returned by *url* as indented JSON."""
    results = get_results(url)
    if results:
        print(json.dumps(results, indent=4, sort_keys=True))
def display_info_for_hosts(url):
    """Print a one-line id/name/IP/OS summary for each host from *url*."""
    hosts = get_results(url)
    if hosts:
        for host in hosts:
            print("ID: %-10d Name: %-30s IP: %-20s OS: %-30s" % (host['id'], host['name'], host['ip'], host['operatingsystem_name']))
def main():
    """Run a series of example queries against the Satellite 6 API."""
    host = 'satellite6.example.com'
    print("Displaying all info for host %s ..." % host)
    display_all_results(SAT_API + 'hosts/' + host)
    print("Displaying all facts for host %s ..." % host)
    display_all_results(SAT_API + 'hosts/%s/facts' % host)
    host_pattern = 'example'
    print("Displaying basic info for hosts matching pattern '%s'..." % host_pattern)
    display_info_for_hosts(SAT_API + 'hosts?search=' + host_pattern)
    environment = 'production'
    print("Displaying basic info for hosts in environment %s..." % environment)
    display_info_for_hosts(SAT_API + 'hosts?search=environment=' + environment)
    model = 'RHEV Hypervisor'
    print("Displaying basic info for hosts with model name %s..." % model)
    display_info_for_hosts(SAT_API + 'hosts?search=model="' + model + '"')

if __name__ == "__main__":
    main()
|
jrslocum17/pynet_test | Bonus3/napalm_get_model.py | Python | apache-2.0 | 1,612 | 0.004348 | #!/usr/bin/env python
"""
Ex 1. Construct a script that retrieves NAPALM facts from two IOS routers, two Arista switches, and one Junos device.
pynet-rtr1 (Cisco IOS) 184.105.247.70
pynet-rtr2 (Cisco IOS) 184.105.247.71
pynet-sw1 (Arista EOS) 184.105.247.72
pynet-sw2 (Arista EOS) 184.105.247.73
juniper-srx 184.105.247.76
Retrieve the 'model' number from each device and print the model to standard out.
As part of this exercise define the devices that you use in a Python file (for example my_devices.py) and import
these devices into your program. Optionally, define the devices in a YAML file and read this my_devices.yml file in.
"""
from __future__ import print_function
from __future__ import unicode_literals
from getpass import getpass
from pprint import pprint
from napalm_base import get_network_driver
from pyeapi.eapilib import CommandError
import yaml
import re
YAML_FILE = 'my_devices.yml'
def main():
    """Print device type, hostname and model for every device in the YAML file."""
    with open(YAML_FILE) as f:
        # NOTE(review): yaml.load without an explicit Loader is deprecated in
        # PyYAML >= 5.1 and unsafe on untrusted input; consider yaml.safe_load.
        my_devices = yaml.load(f)

    pwd = getpass()
    print("{:<20} {:<20} {:<20}".format("Device Type", "Hostname", "Model"))
    for device_dict in my_devices:
        device_dict['password'] = pwd
        device_type = device_dict.pop('device_type')
        driver = get_network_driver(device_type)
        device = driver(**device_dict)
        device.open()
        facts = device.get_facts()
        print('*' * 80)
        print("{:<20} {:<20} {:<20}".format(device_type, device_dict['hostname'], facts['model']))
        print('*' * 80)
        # With print_function imported, a bare "print" expression was a no-op;
        # print() emits the intended blank line between devices.
        print()

if __name__ == "__main__":
    main()
|
atodorov/anaconda | pyanaconda/ui/tui/spokes/__init__.py | Python | gpl-2.0 | 4,212 | 0.000237 | # The base classes for Anaconda TUI Spokes
#
# Copyright (C) (2012) Red Hat, Inc.
#
# This copyrighted material is made available to anyone wishing to use,
# modify, copy, or redistribute it subject to the terms and conditions of
# the GNU General Public License v.2, or (at your option) any later version.
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY expressed or implied, including the implied warranties of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
# Public License for more details. You should have received a copy of the
# GNU General Public License along with this program; if not, write to the
# Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA. Any Red Hat trademarks that are incorporated in the
# source code or documentation are not subject to the GNU General Public
# License and may only be used or replicated with the express permission of
# Red Hat, Inc.
#
from pyanaconda.ui.common import Spoke, StandaloneSpoke, NormalSpoke
from pyanaconda.ui.tui.tuiobject import TUIObject
from pyanaconda.ui.lib.help import get_help_path
from pyanaconda.core.i18n import N_, _
from simpleline.render.adv_widgets import HelpScreen
from simpleline.render.screen import InputState
from simpleline.render.screen_handler import ScreenHandler
from simpleline.render.prompt import Prompt
from simpleline.render.widgets import Widget, CheckboxWidget
__all__ = ["TUISpoke", "StandaloneSpoke", "NormalTUISpoke"]
# Inherit abstract methods from Spoke
# pylint: disable=abstract-method
class TUISpoke(TUIObject, Widget, Spoke):
    """Base TUI Spoke class implementing the pyanaconda.ui.common.Spoke API.

    It also acts as a Widget so we can easily add it to Hub, where it shows
    as a summary box with title, description and completed checkbox.

    :param category: category this spoke belongs to
    :type category: string

    .. inheritance-diagram:: TUISpoke
       :parts: 3
    """
    def __init__(self, data, storage, payload):
        # Abstract base: must be subclassed, never instantiated directly.
        if self.__class__ is TUISpoke:
            raise TypeError("TUISpoke is an abstract class")
        TUIObject.__init__(self, data)
        Widget.__init__(self)
        Spoke.__init__(self, storage, payload)
        # Whether the spoke requires user input before continuing.
        self.input_required = True
        # Title shown in the hub summary; subclasses override it.
        self.title = N_("Default spoke title")
    @property
    def status(self):
        # Placeholder status; subclasses provide the real summary text.
        return _("testing status...")
    @property
    def completed(self):
        return True
    def refresh(self, args=None):
        TUIObject.refresh(self, args)
    def input(self, args, key):
        """Handle the input, the base class just forwards it to the App level."""
        return key
    def render(self, width):
        """Render the summary representation for Hub to internal buffer."""
        Widget.render(self, width)
        # Checkbox marker: "!" mandatory-but-incomplete, "x" done, " " optional.
        if self.mandatory and not self.completed:
            key = "!"
        elif self.completed:
            key = "x"
        else:
            key = " "
        # always set completed = True here; otherwise key value won't be
        # displayed if completed (spoke value from above) is False
        c = CheckboxWidget(key=key, completed=True,
                           title=_(self.title), text=self.status)
        c.render(width)
        self.draw(c)
class NormalTUISpoke(TUISpoke, NormalSpoke):
    """
    .. inheritance-diagram:: NormalTUISpoke
       :parts: 3
    """
    def input(self, args, key):
        """Handle the input, showing the help screen on the help key."""
        # TRANSLATORS: 'h' to help
        if key.lower() == Prompt.HELP and self.has_help:
            ScreenHandler.push_screen_modal(
                HelpScreen(get_help_path(self.helpFile, True)))
            return InputState.PROCESSED_AND_REDRAW
        return super().input(args, key)

    def prompt(self, args=None):
        """Return the prompt, with a help option when help is available."""
        p = TUISpoke.prompt(self, args)
        if self.has_help:
            p.add_help_option()
        return p
class StandaloneTUISpoke(TUISpoke, StandaloneSpoke):
    """
    .. inheritance-diagram:: StandaloneTUISpoke
       :parts: 3
    """
    # Combines TUI spoke behaviour with the standalone-spoke lifecycle;
    # adds no behaviour of its own.
    pass
|
jmeyers314/batoid | tests/test_Sphere.py | Python | bsd-2-clause | 7,740 | 0.001163 | import batoid
import numpy as np
from test_helpers import timer, do_pickle, all_obj_diff, init_gpu, rays_allclose
@timer
def test_properties():
    """Sphere stores its radius (negative allowed) and survives pickling."""
    rng = np.random.default_rng(5)
    for i in range(100):
        R = rng.normal(0.0, 0.3)  # negative allowed
        sphere = batoid.Sphere(R)
        assert sphere.R == R
        do_pickle(sphere)
@timer
def test_sag():
    """Sphere.sag matches the analytic formula, scalar and vectorized."""
    rng = np.random.default_rng(57)
    for i in range(100):
        R = 1./rng.normal(0.0, 0.3)
        sphere = batoid.Sphere(R)
        for j in range(10):
            x = rng.uniform(-0.7*abs(R), 0.7*abs(R))
            y = rng.uniform(-0.7*abs(R), 0.7*abs(R))
            result = sphere.sag(x, y)
            # Fixed garbled "assert_allclose | (" token from the source.
            np.testing.assert_allclose(
                result,
                R*(1-np.sqrt(1.0-(x*x + y*y)/R/R))
            )
            # Check that it returned a scalar float and not an array
            assert isinstance(result, float)
        # Check 0,0
        np.testing.assert_allclose(sphere.sag(0, 0), 0.0, rtol=0, atol=1e-17)
        # Check vectorization
        x = rng.uniform(-0.7*abs(R), 0.7*abs(R), size=(10, 10))
        y = rng.uniform(-0.7*abs(R), 0.7*abs(R), size=(10, 10))
        np.testing.assert_allclose(
            sphere.sag(x, y),
            R*(1-np.sqrt(1.0-(x*x + y*y)/R/R))
        )
        # Make sure non-unit stride arrays also work
        np.testing.assert_allclose(
            sphere.sag(x[::5, ::2], y[::5, ::2]),
            R*(1-np.sqrt(1.0-(x*x + y*y)/R/R))[::5, ::2]
        )
        do_pickle(sphere)
@timer
def test_normal():
    """Sphere.normal matches the analytic surface normal, incl. vectorization."""
    # Fixed garbled "de | f" token on the def line from the source.
    rng = np.random.default_rng(577)
    for i in range(100):
        R = 1./rng.normal(0.0, 0.3)
        sphere = batoid.Sphere(R)
        for j in range(10):
            x = rng.uniform(-0.7*abs(R), 0.7*abs(R))
            y = rng.uniform(-0.7*abs(R), 0.7*abs(R))
            result = sphere.normal(x, y)
            r = np.hypot(x, y)
            rat = r/R
            dzdr = rat/np.sqrt(1-rat*rat)
            nz = 1/np.sqrt(1+dzdr*dzdr)
            normal = np.array([-x/r*dzdr*nz, -y/r*dzdr*nz, nz])
            np.testing.assert_allclose(result, normal)
        # Check 0,0
        np.testing.assert_equal(sphere.normal(0, 0), np.array([0, 0, 1]))
        # Check vectorization
        x = rng.uniform(-0.7*abs(R), 0.7*abs(R), size=(10, 10))
        y = rng.uniform(-0.7*abs(R), 0.7*abs(R), size=(10, 10))
        r = np.hypot(x, y)
        rat = r/R
        dzdr = rat/np.sqrt(1-rat*rat)
        nz = 1/np.sqrt(1+dzdr*dzdr)
        normal = np.dstack([-x/r*dzdr*nz, -y/r*dzdr*nz, nz])
        np.testing.assert_allclose(
            sphere.normal(x, y),
            normal
        )
        # Make sure non-unit stride arrays also work
        np.testing.assert_allclose(
            sphere.normal(x[::5, ::2], y[::5, ::2]),
            normal[::5, ::2]
        )
@timer
def test_intersect():
    """Rays shot straight up intersect the sphere at its sag height."""
    rng = np.random.default_rng(5772)
    size = 10_000
    for i in range(100):
        R = 1./rng.normal(0.0, 0.3)
        sphereCoordSys = batoid.CoordSys(origin=[0, 0, -1])
        sphere = batoid.Sphere(R)
        x = rng.uniform(-0.3*abs(R), 0.3*abs(R), size=size)
        y = rng.uniform(-0.3*abs(R), 0.3*abs(R), size=size)
        z = np.full_like(x, -2*abs(R))
        # If we shoot rays straight up, then it's easy to predict the intersection
        vx = np.zeros_like(x)
        vy = np.zeros_like(x)
        vz = np.ones_like(x)
        rv = batoid.RayVector(x, y, z, vx, vy, vz)
        np.testing.assert_allclose(rv.z, -2*abs(R))
        rv2 = batoid.intersect(sphere, rv.copy(), sphereCoordSys)
        assert rv2.coordSys == sphereCoordSys
        rv2 = rv2.toCoordSys(batoid.CoordSys())
        np.testing.assert_allclose(rv2.x, x)
        np.testing.assert_allclose(rv2.y, y)
        # Surface sits at z = sag - 1 because the coord sys origin is at -1.
        np.testing.assert_allclose(rv2.z, sphere.sag(x, y)-1, rtol=0, atol=1e-9)
        # Check default intersect coordTransform
        rv2 = rv.copy().toCoordSys(sphereCoordSys)
        batoid.intersect(sphere, rv2)
        assert rv2.coordSys == sphereCoordSys
        rv2 = rv2.toCoordSys(batoid.CoordSys())
        np.testing.assert_allclose(rv2.x, x)
        np.testing.assert_allclose(rv2.y, y)
        np.testing.assert_allclose(rv2.z, sphere.sag(x, y)-1, rtol=0, atol=1e-9)
@timer
def test_reflect():
    """Reflected rays obey the law of reflection and stay coplanar."""
    rng = np.random.default_rng(57721)
    size = 10_000
    for i in range(100):
        R = 1./rng.normal(0.0, 0.3)
        sphere = batoid.Sphere(R)
        x = rng.uniform(-0.3*abs(R), 0.3*abs(R), size=size)
        y = rng.uniform(-0.3*abs(R), 0.3*abs(R), size=size)
        z = np.full_like(x, -2*abs(R))
        vx = rng.uniform(-1e-5, 1e-5, size=size)
        vy = rng.uniform(-1e-5, 1e-5, size=size)
        vz = np.full_like(x, 1)
        rv = batoid.RayVector(x, y, z, vx, vy, vz)
        # Free function and method forms must agree.
        rvr = batoid.reflect(sphere, rv.copy())
        rvr2 = sphere.reflect(rv.copy())
        rays_allclose(rvr, rvr2)
        # print(f"{np.sum(rvr.failed)/len(rvr)*100:.2f}% failed")
        normal = sphere.normal(rvr.x, rvr.y)
        # Test law of reflection
        a0 = np.einsum("ad,ad->a", normal, rv.v)[~rvr.failed]
        a1 = np.einsum("ad,ad->a", normal, -rvr.v)[~rvr.failed]
        np.testing.assert_allclose(
            a0, a1,
            rtol=0, atol=1e-12
        )
        # Test that rv.v, rvr.v and normal are all in the same plane
        np.testing.assert_allclose(
            np.einsum(
                "ad,ad->a",
                np.cross(normal, rv.v),
                rv.v
            )[~rvr.failed],
            0.0,
            rtol=0, atol=1e-12
        )
@timer
def test_refract():
    """Refracted rays obey Snell's law and stay coplanar with the normal."""
    rng = np.random.default_rng(577215)
    size = 10_000
    for i in range(100):
        R = 1./rng.normal(0.0, 0.3)
        sphere = batoid.Sphere(R)
        m0 = batoid.ConstMedium(rng.normal(1.2, 0.01))
        m1 = batoid.ConstMedium(rng.normal(1.3, 0.01))
        x = rng.uniform(-0.3*abs(R), 0.3*abs(R), size=size)
        y = rng.uniform(-0.3*abs(R), 0.3*abs(R), size=size)
        z = np.full_like(x, -2*abs(R))
        vx = rng.uniform(-1e-5, 1e-5, size=size)
        vy = rng.uniform(-1e-5, 1e-5, size=size)
        # Velocity normalized so |v| = 1/n in the incident medium.
        vz = np.sqrt(1-vx*vx-vy*vy)/m0.n
        rv = batoid.RayVector(x, y, z, vx, vy, vz)
        # Free function and method forms must agree.
        rvr = batoid.refract(sphere, rv.copy(), m0, m1)
        rvr2 = sphere.refract(rv.copy(), m0, m1)
        rays_allclose(rvr, rvr2)
        # print(f"{np.sum(rvr.failed)/len(rvr)*100:.2f}% failed")
        normal = sphere.normal(rvr.x, rvr.y)
        # Test Snell's law
        s0 = np.sum(np.cross(normal, rv.v*m0.n)[~rvr.failed], axis=-1)
        s1 = np.sum(np.cross(normal, rvr.v*m1.n)[~rvr.failed], axis=-1)
        np.testing.assert_allclose(
            m0.n*s0, m1.n*s1,
            rtol=0, atol=1e-9
        )
        # Test that rv.v, rvr.v and normal are all in the same plane
        np.testing.assert_allclose(
            np.einsum(
                "ad,ad->a",
                np.cross(normal, rv.v),
                rv.v
            )[~rvr.failed],
            0.0,
            rtol=0, atol=1e-12
        )
@timer
def test_ne():
    """Spheres of different radii and other surface types compare unequal."""
    objs = [
        batoid.Sphere(1.0),
        batoid.Sphere(2.0),
        batoid.Plane()
    ]
    all_obj_diff(objs)
@timer
def test_fail():
    """Rays that miss the sphere are flagged failed; rays that hit are not."""
    sphere = batoid.Sphere(1.0)
    rv = batoid.RayVector(0, 10, 0, 0, 0, -1)  # Too far to side
    rv2 = batoid.intersect(sphere, rv.copy())
    np.testing.assert_equal(rv2.failed, np.array([True]))
    # This one passes
    rv = batoid.RayVector(0, 0, 0, 0, 0, -1)
    rv2 = batoid.intersect(sphere, rv.copy())
    np.testing.assert_equal(rv2.failed, np.array([False]))
if __name__ == '__main__':
    # Run the whole suite when executed directly (without pytest).
    test_properties()
    test_sag()
    test_normal()
    test_intersect()
    test_reflect()
    test_refract()
    test_ne()
    test_fail()
forseti-security/forseti-security | google/cloud/forseti/services/model/modeller.py | Python | apache-2.0 | 3,684 | 0 | # Copyright 2017 The Forseti Security Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
""" Modeller API. """
from builtins import object
from google.cloud.forseti.services.model.importer import importer
from google.cloud.forseti.common.util import logger
LOGGER = logger.get_logger(__name__)
class Modeller(object):
    """Implements the Modeller API."""

    def __init__(self, config):
        """Initialize.
        Args:
            config (object): ServiceConfig in server
        """
        self.config = config

    def create_model(self, source, name, inventory_index_id, background):
        """Creates a model from the import source.
        Args:
            source (str): The source of the model, \"inventory\" or \"empty\"
            name (str): Model name to instantiate.
            inventory_index_id (int64): Inventory id to import from
            background (bool): Whether to run the model creation in background
        Returns:
            object: the created data model
        """
        LOGGER.info('Creating model: %s, inventory_index_id = %s',
                    name, inventory_index_id)
        model_manager = self.config.model_manager
        model_handle = model_manager.create(name=name)
        LOGGER.debug('Created model_handle: %s', model_handle)
        scoped_session, data_access = model_manager.get(model_handle)
        readonly_session = model_manager.get_readonly_session()

        def do_import():
            """Import runnable."""
            with scoped_session as session, readonly_session as ro_session:
                importer_cls = importer.by_source(source)
                LOGGER.debug('Importer class: %s', importer_cls)
                import_runner = importer_cls(
                    session,
                    ro_session,
                    model_manager.model(model_handle, expunge=False),
                    data_access,
                    self.config,
                    inventory_index_id)
                import_runner.run()

        if background:
            LOGGER.debug('Running importer in background.')
            self.config.run_in_background(do_import)
        else:
            LOGGER.debug('Running importer in foreground.')
            do_import()
        # Fixed garbled "expunge= | True" token from the source.
        return model_manager.model(model_handle, expunge=True)

    def list_model(self):
        """Lists all models.
        Returns:
            list: list of Models in dao
        """
        model_manager = self.config.model_manager
        # Fixed garbled "| return" token from the source.
        return model_manager.models()

    def get_model(self, model):
        """Get details of a model by name or handle.
        Args:
            model (str): name or handle of the model to query
        Returns:
            Model: db Model instance dao
        """
        model_manager = self.config.model_manager
        return model_manager.get_model(model)

    def delete_model(self, model_name):
        """Deletes a model.
        Args:
            model_name (str): name of the model to be deleted
        """
        LOGGER.info('Deleting model: %s', model_name)
        model_manager = self.config.model_manager
        model_manager.delete(model_name)
|
fredex42/gnmvidispine | gnmvidispine/vs_user.py | Python | gpl-2.0 | 5,073 | 0.008673 | from .vidispine_api import VSApi,VSException,VSNotFound
import xml.etree.ElementTree as ET
import re
from pprint import pprint
class VSUserGroup(VSApi):
    """Represents a Vidispine user group, populated from an XML node."""

    def __init__(self, *args, **kwargs):
        super(VSUserGroup, self).__init__(*args, **kwargs)
        self.dataContent = None

    def populateFromXML(self, xmlNode):
        """Attach the group's XML node; properties read from it lazily."""
        self.dataContent = xmlNode

    def __unicode__(self):
        return '{0} {1}'.format(self.groupName, self.description)

    @property
    def groupName(self):
        return self._nodeContentOrNone('groupName')

    # Fixed garbled "@property |" token from the source.
    @property
    def description(self):
        return self._nodeContentOrNone('description')

    @property
    def role(self):
        # FIXME: not sure of schema under this node. might need more processing.
        return self._nodeContentOrNone('role')

    @property
    def metadata(self):
        """Return a dict of group metadata parsed from the XML content."""
        ns = "{http://xml.vidispine.com/schema/vidispine}"
        # Fixed garbled "rtn = {} |" token from the source.
        rtn = {}
        try:
            for n in self.dataContent.find('{0}metadata'.format(ns)):
                try:
                    key = n.find('{0}key'.format(ns))
                    val = n.find('{0}value'.format(ns))
                    # NOTE(review): key/val are Element objects, not their
                    # .text values -- looks like a bug; confirm with callers.
                    rtn[key] = val
                except Exception:  # narrowed from bare except
                    continue
        except Exception:  # narrowed from bare except; no metadata node
            pass
        return rtn

    @property
    def originSite(self):
        return self._nodeContentOrNone('origin')
class VSUser(VSApi):
    """Represents a Vidispine user, including group memberships."""

    def __init__(self, *args, **kwargs):
        super(VSUser, self).__init__(*args, **kwargs)
        self.dataContent = None
        self.groupList = None

    class NotPopulatedError(Exception):
        """Raised when data is requested before populate() was called."""
        pass

    def populate(self, username):
        """Fetch and parse the user record; ValueError on unsafe usernames."""
        if re.search(r'[\/?;]', username):
            raise ValueError
        response = self.request("/user/" + username)
        self.populateFromXML(response)

    def populateFromXML(self, xmlNode):
        self.dataContent = xmlNode
        self._populateGroupList()

    def _populateGroupList(self):
        """Build self.groupList from the <groupList> element, if any."""
        ns = "{http://xml.vidispine.com/schema/vidispine}"
        groupListNode = self.dataContent.find('{0}groupList'.format(ns))
        if groupListNode is None:
            return
        if self.groupList is None:
            self.groupList = []
        for groupDef in groupListNode:
            g = VSUserGroup(self.host, self.port, self.user, self.passwd)
            g.populateFromXML(groupDef)
            self.groupList.append(g)

    def __unicode__(self):
        return '{0} ({1})'.format(self.userName, self.originSite)

    def dump(self):
        """Print a human-readable summary of the user to stdout."""
        print("\tUser name: %s" % self.userName)
        print("\tReal name: %s" % self.realName)
        # Fixed "Orignating" typo in the output message.
        print("\tOriginating site: %s" % self.originSite)
        if self.groupList is not None:
            print("\tGroup memberships:")
            for g in self.groupList:
                print("\t\t%s" % str(g))

    @property
    def userName(self):
        return self._nodeContentOrNone('userName')

    @property
    def realName(self):
        return self._nodeContentOrNone('realName')

    @property
    def metadata(self):
        """Return a dict of user metadata parsed from the XML content."""
        ns = "{http://xml.vidispine.com/schema/vidispine}"
        rtn = {}
        try:
            for n in self.dataContent.find('{0}metadata'.format(ns)):
                try:
                    key = n.find('{0}key'.format(ns))
                    val = n.find('{0}value'.format(ns))
                    # NOTE(review): stores Element objects, not .text values;
                    # looks like a bug -- confirm against callers.
                    rtn[key] = val
                except Exception:  # narrowed from bare except
                    continue
        except Exception:  # narrowed from bare except; no metadata node
            pass
        return rtn

    @property
    def originSite(self):
        return self._nodeContentOrNone('origin')

    @property
    def accountDisabled(self):
        try:
            return self.dataContent.attrib['disabled']
        except (AttributeError, KeyError):  # narrowed from bare except
            return None

    @property
    def groups(self):
        """Yield each VSUserGroup; yields nothing if not populated."""
        if self.groupList is None:
            return
        for g in self.groupList:
            yield g

    def isMemberOfGroup(self, groupname, caseSensitive=False):
        """True/False membership test; returns None (falsy) if unpopulated."""
        if self.groupList is None:
            return
        if caseSensitive:
            for g in self.groupList:
                if g.groupName == groupname:
                    return True
        else:
            for g in self.groupList:
                if g.groupName.lower() == groupname.lower():
                    return True
        return False
def getAllUsers(pageSize=10,*args,**kwargs):
api = VSApi(*args,**kwargs)
ns = "{http://xml.vidispine.com/schema/vidispine}"
userRecord = api.request("/user",query={'first': 1,'number': 1})
nHits = int(userRecord.find('{0}hits'.format(ns)).text)
u = VSUser(*args,**kwargs)
u.populateFromXML(userRecord)
yield u
for n in range(2,nHits,pageSize):
response = api.request("/user",query={'first': n,'number': pageSize})
for userRecord in response.findall('{0}user'.format(ns)):
u = VSUser(*args,**kwargs)
u.populateFromXML(userRecord)
yield u |
ktbyers/scp_sidecar | ansible_modules/cisco_file_transfer.py | Python | apache-2.0 | 3,025 | 0.002975 | #!/usr/bin/python
"""Ansible module to transfer files to Cisco IOS devices."""
from ansible.module_utils.basic import *
from netmiko import ConnectHandler, FileTransfer
def main():
"""Ansible module to transfer files to Cisco IOS devices."""
module = AnsibleModule(
argument_spec=dict(
host=dict(type='str', required=True),
port=dict(type='int', default=22, required=False),
username=dict(type='str', required=True),
password=dict(type='str', required=True, no_log=True),
source_file=dict(type='str', required=True),
dest_file=dict(type='str', required=True),
dest_file_system=dict(type='str', required=False, default='flash:'),
enable_scp=dict(type='bool', required=False, default=False),
overwrite=dict(type='bool', required=False, default=True),
),
supports_check_mode=True
)
net_device = {
'device_type': 'cisco_ios',
'ip': module.params['host'],
'username': module.params['username'],
'password': module.params['password'],
'port': int(module.params['port']),
'verbose': False,
}
ssh_conn = ConnectHandler(**net_device)
| source_file = module.params['source_file']
dest_file = module.params['dest_file']
dest_file_system = module.params['dest_file_system']
enable_scp = module.boolean(module.params['enable_scp'])
overwrite = module.boolean(module.params['overwrite'])
check_mode = module.check_mode
scp_changed = False
with FileTransfer(ssh_conn, source_file, dest_file, file_system=dest_file_system) as scp_transfer:
# Check if file already exists and has | correct MD5
if scp_transfer.check_file_exists() and scp_transfer.compare_md5():
module.exit_json(msg="File exists and has correct MD5", changed=False)
if not overwrite and scp_transfer.check_file_exists():
module.fail_json(msg="File already exists and overwrite set to false")
if check_mode:
if not scp_transfer.verify_space_available():
module.fail_json(msg="Insufficient space available on remote device")
module.exit_json(msg="Check mode: file would be changed on the remote device",
changed=True)
# Verify space available on remote file system
if not scp_transfer.verify_space_available():
module.fail_json(msg="Insufficient space available on remote device")
# Transfer file
if enable_scp:
scp_transfer.enable_scp()
scp_changed = True
scp_transfer.transfer_file()
if scp_transfer.verify_file():
if scp_changed:
scp_transfer.disable_scp()
module.exit_json(msg="File successfully transferred to remote device",
changed=True)
module.fail_json(msg="File transfer to remote device failed")
if __name__ == "__main__":
main()
|
kenshay/ImageScript | ProgramData/SystemFiles/Python/Lib/site-packages/elan/Certified_Build.py | Python | gpl-3.0 | 2,028 | 0.0143 | def Send_Certified_By_Automation():
import smtplib
from elan import ElanSettings
import os
from elan.ElanSettings import Script_Runner_Log
def send_email(subject,
body,
user='kennyshay123test@gmail.com',
pwd='corebrands123',
recipient = ["kennyshay123@gmail.com","elantestertools@gmail.com"]):
#recipient = ["kennyshay123@gmail.com","ben.bickell@corebrands.com","elantestertools@gmail.com"]):
gmail_user = user
gmail_pwd = pwd
FROM = user
TO = recipient if type(recipient) is list else [recipient]
SUBJECT = subject
TEXT = body
# Prepare actual message
message = """From: %s\nTo: %s\nSubject: %s\n\n%s
""" % (FROM, ", ".join(TO), SUBJECT, TEXT)
server = smtplib.SMTP("smtp.gmail.com", 587)
#server.ehlo()
server.starttls()
server.login(gmail_user, gmail_pwd)
server.sendmail(FROM, TO, message)
server.close()
print 'successfully sent the mail'
| #######################################################BUild number
with open(ElanSettings.Build_File_List) as f:
Elan_Build_List = f.readlines()
Elan_Build_List = [x.strip() for x in Elan_Build_List]
Elan_Build_List.sort()
try:
Elan_Build = Elan_Build_List[-1]
except:
Elan_Build = 'None'
#print("Build->" + Elan_Build)
##########################################################
|
with open(Script_Runner_Log, 'r') as myfile:
body_log=myfile.read()
bodylogList = body_log.split('\n')
bodylogList = bodylogList[::-1]
body_log = "\n".join(str(x) for x in bodylogList)
body_Text = body_log + '\n' + str(os.environ['COMPUTERNAME'])
compName = str(os.environ['COMPUTERNAME'])
astring = "CERTIFIED ( " + Elan_Build + ' ) by ' + compName
send_email(astring,"\n" + body_Text)
Send_Certified_By_Automation() |
canvasnetworks/canvas | website/canvas/after_signup.py | Python | bsd-3-clause | 698 | 0.008596 | import urllib
from canvas import util
def make_cookie_key(key):
return 'after_signup_' + str(key)
def _get(request, key):
key = make_cookie_key(key)
val = request.COOKIES.get(key)
if val is not N | one:
val = util.loads(urllib.unquote(val))
return (key, val,)
def get_posted_comment(request):
'''
Gets a comment waiting to be posted, if one exists.
Returns a pair containing the cookie key used to retrieve it and its deserialized JSON.
'''
#TODO use dcramer's django-cookies so that we don't rely on having the response object to mutate cookies.
# That would make this API much cleaner | and isolated.
return _get(request, 'post_comment')
|
jianglu/mojo | mojo/tools/mojo_shell.py | Python | bsd-3-clause | 446 | 0.017937 | #!/usr/bin/env python
# Copyright 2014 The C | hromium Authors. All rights reser | ved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import sys
def main():
print 'Good news, the shell runner has moved! Please use: '
print ''
print ' mojo/devtools/common/mojo_shell'
print ''
print 'as you would use mojo_shell.py before.'
return -1
if __name__ == "__main__":
sys.exit(main())
|
MisterTofu/scrapy-midleware | middleware/TorProxy.py | Python | gpl-3.0 | 1,353 | 0.000739 | # Source:
# http://stackoverflow.com/questions/21839676/how-to-write-a-downloadhandler-for-scrapy-that-makes-requests-through-socksipy
from txsocksx.http import SOCKS5Agent
from scrapy.core.downloader.handlers.http11 import HTTP11DownloadHandler, ScrapyAgent
from twisted.internet import reactor
from scrapy.xlib.tx import TCP4ClientEndpoint
from scrapy.conf import settings
# pylint: disable=C0301
class TorProxyDownloadHandler(HTTP11DownloadHandler):
def download_request(self, request, spider):
"""Return a deferred for the HTTP download"""
agent = ScrapyTorAgent(contextFactory=self._contextFactory,
pool=self._pool)
return agent.download_request(re | quest)
class ScrapyTorAgent(ScrapyAgent):
def _get_agent(self, request, timeout):
bindaddress = request.meta.get('bindaddress') or self._bindAddress
proxy = request.meta.get('proxy')
self.use_tor = settings['TOR']
self.tor_server = settings['TOR_SERVER']
self.tor_port = settings['TOR_PORT']
proxyEndpoint = TCP4ClientEndpoint(reactor, self.tor_se | rver,
self.tor_port, timeout=timeout,
bindAddress=bindaddress)
agent = SOCKS5Agent(reactor, proxyEndpoint=proxyEndpoint)
return agent
|
jeremiah-c-leary/vhdl-style-guide | vsg/rules/generic/rule_002.py | Python | gpl-3.0 | 575 | 0 |
from vsg.rules import token_indent
from vsg import token
lTokens = []
lTokens.append(token.generic_clause.generic_keyword)
class rule_002(token_indent):
'''
This rule ch | ecks the indent of the **generic** keyword.
**Violation**
.. code-block:: vhdl
entity fifo is
generic (
entity fifo is
generic (
**Fix**
.. co | de-block:: vhdl
entity fifo is
generic (
entity fifo is
generic (
'''
def __init__(self):
token_indent.__init__(self, 'generic', '002', lTokens)
|
scality/scality-manila-utils | setup.py | Python | apache-2.0 | 663 | 0 | # Copyright (c) 2015 Scality SA
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain | a copy of the License at
# |
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import setuptools
setuptools.setup(
setup_requires=['pbr', ],
pbr=True,
)
|
Rctue/nao-lib | nao_temp.py | Python | gpl-2.0 | 64,405 | 0.01784 | ## Nao functions version 1.41
## change log:
## 1.02: Added class "Region"
## 1.02: Resolution stuff.
## 1.03: Detect() now returns an object of the class "Region()"
## 1.04: Added Aldebarans face detection NaoFaceLocation().
## 1.05: Added the gesture() function and EyeLED() function.
## 1.06: Now able to look for the correct haarcascade file within the pythonpath
## 1.07: Changed Track() function to better support different frame rates
## 1.08: Added ALTrack function
## 1.09: Added second gesture in Gesture()
## 1.10: Added InitPose
## 1.11: Added Move
## 1.12: Added Crouch
## 1.13: Removed Gesture(), instead use the gesture lib. Changed comments for Move()
## 1.14: Added Play() function for playing sound files
## 1.15: Added Record() function
## 1.16: Added WalkTo function
## 1.17: Added PlaySine function
## 1.18: Added function FindFace()
## 1.19: Added RunMovement() (19-09-2011 - Turin)
## 1.20: Added Stiffen() for stiffening the joints
## 1.21: Added RunLed() for running led scripts
## 1.22: GetAvailableLEDPatterns() and GetAvailableGestures() added.
## 1.23: speechProxy added
## 1.24: File existence check added in RunMovement, RunLed, RunSpeech
## 1.25: Fixed remove = remove.reverse() returning None error
## 1.26: Added InitSpeech() and DetectSpeech()
## 1.27: GetAvailableDialogs() added.
## 1.28: Added LoadDialog()
## 1.29: Changed searchpaths of RunLED, RunMovement and RunSpeech to include /led, /gestures and /tts subfolders, respectively.
## 1.30: Added possibility of sending port number to InitProxy
## 1.31: Added better error handling in several functions and made posting of text optional.
## 1.32: RunLED changed to read files with ; as delimiter and to deal with multi-line led-files
## 1.33: LoadDialog() reads files with ; as delimiter
## 1.34: Added functions MoveHead() to move nao's head and GetYaw() to request the yaw of nao's head
## 1.35: Added functions SetTTSVolume() and GetTTSVolume() for checking and controlling the volume of the Text to Speech
## 1.36: Added functions SetMusicVolume() and GetMusicVolume() for checking and controlling the volume of the Music
## 1.37: Updated FindFace to include arbitrary | offset and gain. Default changed to up -0.2.
## 1.38: Speed of GetImage() improved. Removed dependency on Python Image Library
## 1.39: Removed "from naoqi import xxx" statements.
## 1.40: Added ALRobotPosture proxy, GoToPosture and proper InitPose() and Crouch(); InitProxy rewritten
## 1.41: Added Landmark detection, Sound localization and So | und detection
import numpy as np
import cv2
from time import time
from time import sleep
#import Image
import random
import math
import sys
import os
import csv
import naoqi
from collections import deque
__naoqi_version__='2.1'
__nao_module_name__ ="Nao Library"
__version__='2.0'
gftt_list = list() # initialize good features to track for opencv
fast = 0 # initiliaze face detection state for opencv
time_q = deque([1,1,1,1,1,1,1,1,1,1])
old_time = time()
time_old_track = time()
#font = cv2.InitFont(cv2.FONT_HERSHEY_TRIPLEX, 0.5, 0.5, 0.0, 1)
## Find the *.xml file for face detection.
list_path = sys.path
for i in range (0,len(list_path)):
if os.path.exists(list_path[i]+"/haarcascade_frontalface_alt2.xml"):
break
#cascade_front = cv.Load(list_path[i]+"/haarcascade_frontalface_alt2.xml")
interpol_time=0.3
start_mov_t = time()
weights = list()
existence = list()
id_pose = None
alface_subscribed = False
xtargetold = 0
ytargetold = 0
class ResolutionCamera:
def __init__(self):
self.low = 0
self.medium = 1
self.high = 2
self.very_high=3
self.res_160x120 = 0 #kQQVGA
self.res_320x240 = 1 #kQVGA
self.res_640x480 = 2 #kVGA
self.res_1280x960 = 3 #k4VGA
self.resolutionar = [160,120],[320,240],[640,480],[1280,960]
self.framerate=30
resolution = ResolutionCamera()
class Region:
def __init__(self):
self.x = 0
self.y = 0
self.width = 0
self.height = 0
def Say(text, POST=True):
global tts
#print text
try:
#volume=GetTTSVolume()
#SetTTSVolume(0.99)
if POST:
tts.post.say(text)
else:
tts.say(text)
#SetTTSVolume(volume)
except NameError:
print 'ALTextToSpeech proxy undefined. Are you running a simulated naoqi?'
def HeadTouch():
head_touch = memoryProxy.getData("Device/SubDeviceList/Head/Touch/Front/Sensor/Value", 0)
return head_touch
#################################################################################
## Use this function, InitProxy, to initialise the proxy. As an argument give up
## the Ip of Nao
#################################################################################
def ConnectProxy(proxy_name, IP, PORT):
theProxy=None
try:
theProxy = naoqi.ALProxy(proxy_name, IP, PORT)
sleep(0.01)
except RuntimeError as e:
print "Error when creating ", proxy_name ," proxy:"
print str(e)
return theProxy
def InitProxy(IP="marvin.local", proxy=[0], PORT = 9559):
"""proxy: (list) 1->TTS, 2->audio, 3->motion, 4->memory, 5->face, 6->video, 7->LED's, 8->Track, 9->Speech, 10->Audioplayer, 11->VisionToolbox"""
global audioProxy
global motionProxy
global memoryProxy
global cameraProxy
global faceProxy
global ledProxy
global tts
global trackfaceProxy
global playProxy
global videoProxy
global asr
global speechProxy # same as asr for backward compatibility
global sonarProxy
global postureProxy
global landmarkProxy
global ALModuleList
global proxyDict
ALModuleList=["ALTextToSpeech","ALAudioDevice","ALMotion","ALMemory","ALFaceDetection","ALVideoDevice","ALLeds","ALFaceTracker","ALSpeechRecognition","ALAudioPlayer","ALVideoRecorder","ALSonar","ALRobotPosture","ALLandMarkDetection","ALSoundDetection","ALAudioSourceLocalization"]
proxyDict={}
#proxyList=[None]*(len(ALModuleList))
# check if list is empty
if len(proxy)==0:
proxy=range(1, len(ALModuleList)+1)
else:
#if not check whether it contains a 0
if 0 in proxy:
proxy=range(1, len(ALModuleList)+1)
for i in proxy:
proxyDict[ALModuleList[i-1]]=ConnectProxy(ALModuleList[i-1],IP, PORT)
#define globals
tts=proxyDict["ALTextToSpeech"]
audioProxy=proxyDict["ALAudioDevice"]
motionProxy=proxyDict["ALMotion"]
memoryProxy=proxyDict["ALMemory"]
faceProxy=proxyDict["ALFaceDetection"]
cameraProxy=proxyDict["ALVideoDevice"]
ledProxy=proxyDict["ALLeds"]
trackfaceProxy=proxyDict["ALFaceTracker"]
asr=proxyDict["ALSpeechRecognition"]
speechProxy=asr # for backward compatibility
playProxy=proxyDict["ALAudioPlayer"]
videoProxy=proxyDict["ALVideoRecorder"]
sonarProxy=proxyDict["ALSonar"]
postureProxy=proxyDict["ALRobotPosture"]
landmarkProxy=proxyDict["ALLandMarkDetection"]
soundProxy=proxyDict["ALSoundDetection"]
soundsourceProxy=proxyDict["ALAudioSourceLocalization"]
def InitSonar(flag=1):
#period = 100
#precision = 0.1
if flag:
#sonarProxy.subscribe("test4", period , precision )
sonarProxy.subscribe("test4" )
else:
try:
sonarProxy.unsubscribe("test4" )
flag=0
except:
print "Sonar already unsubscribed"
flag=0
return flag
#################################################################################
## Use this function, CloseProxy, to close the proxy. As an argument give up
## the Ip of Nao
#################################################################################
def CloseProxy(proxy=[0]):
"""proxy: (list) 1->TTS, 2->audio, 3->motion, 4->memory, 5->face, 6->video, 7->LED's, 8->Track, 9->Speech, 10->Audioplayer, 11->VisionToolbox"""
global ALModuleList
gl |
grplyler/netcmd | netcmd_actions.py | Python | gpl-2.0 | 231 | 0.008658 | __author | __ = 'ryanplyler'
def sayhi(config):
error = None
try:
server_output = "Executing action 'sayhi()'"
response = "HI THERE!"
except:
error = 1
return | server_output, response, error
|
liujuan118/Antiphishing | label_image.py | Python | gpl-3.0 | 1,615 | 0.013622 | import tensorflow as tf, sys
def label_iamge(image_path):
# image_path = sys.argv[1]
# Read in the image_data
image_data = tf.gfile.FastGFile(image_path, 'rb').read()
# Loads label file, strips off carriage return
label_lines = [line.rstrip() for line
in t | f.gfile.GFile("/files/retrained_labels.txt")]
# Unpersists graph from file
with tf.gfile.FastGFile("/files/retrained_graph.pb", 'rb') as f:
graph_def = tf.GraphDef()
graph_def.ParseFromString(f.read())
_ = tf.import_graph_def(graph_def, name='')
with tf.Session() as sess:
# | Feed the image_data as input to the graph and get first prediction
softmax_tensor = sess.graph.get_tensor_by_name('final_result:0')
predictions = sess.run(softmax_tensor, \
{'DecodeJpeg/contents:0': image_data})
# Sort to show labels of first prediction in order of confidence
top_k = predictions[0].argsort()[-len(predictions[0]):][::-1]
for node_id in top_k:
human_string = label_lines[node_id]
score = predictions[0][node_id]
print('%s (score = %.5f)' % (human_string, score))
max_str = label_lines[top_k[0]]
max_score = predictions[0][top_k[0]]
# print('liu: max_str:%s, max_score:%s'%(max_str, max_score))
with open('label_image.txt', 'a') as f:
f.write('\n%s,%s,%s' % (image_path, max_str, max_score))
f.close()
return image_path, max_str, max_score
if __name__ == "__main__":
image_path, max_str, max_score = label_iamge('test.jpg')
# print(image_path, max_str)
|
yamateh/robotframework | src/robot/utils/robotinspect.py | Python | apache-2.0 | 1,085 | 0 | # Copyright 2008-2013 Nokia Siemens Networks Oyj
#
# L | icensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with | the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
if sys.platform.startswith('java'):
from org.python.core import PyReflectedFunction, PyReflectedConstructor
def is_java_init(init):
return isinstance(init, PyReflectedConstructor)
def is_java_method(method):
func = method.im_func if hasattr(method, 'im_func') else method
return isinstance(func, PyReflectedFunction)
else:
def is_java_init(init):
return False
def is_java_method(method):
return False
|
eduNEXT/edunext-platform | import_shims/studio/third_party_auth/tests/specs/test_google.py | Python | agpl-3.0 | 449 | 0.008909 | """Deprecated import support. Auto-generated by import_shims/generate_shims.sh."""
# pylint: disable=redefined-builtin,wrong-import-p | osition,wildcard-import,useless-suppression,line-too-long
from import_shims.warn import warn_deprecated_import
warn_deprecated | _import('third_party_auth.tests.specs.test_google', 'common.djangoapps.third_party_auth.tests.specs.test_google')
from common.djangoapps.third_party_auth.tests.specs.test_google import *
|
wangyang59/tf_models | video_prediction/prediction_input_flo_chair_old.py | Python | apache-2.0 | 4,801 | 0.008956 | # Copyright 2016 The TensorFlow Authors All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Code for building the input for the prediction model."""
import os
import numpy as np
import tensorflow as tf
from tensorflow.python.platform import flags
from tensorflow.python.platform import gfile
import random
DATA_DIR = '/home/wangyang59/Data/ILSVRC2016_tf_chair'
#DATA_DIR = '/home/wangyang59/Data/ILSVRC2016_tf_stab/train'
FLAGS = flags.FLAGS
# Original image dimensions
ORIGINAL_WIDTH = 512
ORIGINAL_HEIGHT = 384
COLOR_CHAN = 3
def build_tfrecord_input(training=True, blacklist=[], num_epochs=None):
"""Create input tfrecord tensors.
Args:
training: training or validation data.
Returns:
list of tensors corresponding to images, actions, and states. The images
tensor is 5D, batch x time x height x width x channels. The state and
action tensors are 3D, batch x time x dimension.
Raises:
RuntimeError: if no files found.
"""
filenames = gfile.Glob(os.path.join(FLAGS.data_dir, '*'))
filenames = filter(lambda x: x.split("/")[-1] not in blacklist, filenames)
if not filenames:
raise RuntimeError('No data files found.')
index = int(np.floor(FLAGS.train_val_split * len(filenames)))
if training:
filenames = filenames[:index]
else:
filenames = filenames[index:]
filename_queue = tf.train.string_input_producer(filenames, shuffle=False, num_epochs=num_epochs)
reader = tf.TFRecordReader()
_, serialized_example = reader.read(filename_queue)
features = {"image1_raw": tf.FixedLenFeature([1], tf.string),
"image2_raw": tf.FixedLenFeature([1], tf.string),
"flo": tf.FixedLenFeature([1], tf.string)}
features = tf.parse_single_example(serialized_example, features=features)
image1_buffer = tf.reshape(features["image1_raw"], shape=[])
image1 = tf.image.decode_jpeg(image1_buffer, channels=COL | OR_CHAN)
image1.set_shape([ORIGINAL_HEIGHT, ORIGINAL_WIDTH, COLOR_CHAN])
image1 = tf.cast(image1, tf.float32) / 255.0
image2_buffer = tf.reshape(features["image2_raw"], shape=[])
image2 = tf.image.decode_jpeg(image2_buffer, channels=COLOR_CHAN)
im | age2.set_shape([ORIGINAL_HEIGHT, ORIGINAL_WIDTH, COLOR_CHAN])
image2 = tf.cast(image2, tf.float32) / 255.0
flo = tf.decode_raw(features['flo'], tf.float32)
flo = tf.reshape(flo, [ORIGINAL_HEIGHT, ORIGINAL_WIDTH, 2])
if training:
images = tf.concat([image1, image2], axis=2)
images = tf.image.random_flip_left_right(images)
images = tf.image.random_flip_up_down(images)
images = tf.cond(tf.random_uniform([]) < 0.5, lambda: tf.image.rot90(images, 2), lambda: images)
images. set_shape([ORIGINAL_HEIGHT, ORIGINAL_WIDTH, COLOR_CHAN*2])
image1, image2 = tf.split(axis=2, num_or_size_splits=2, value=images)
# if random.random() < 0.5:
# image1 = tf.image.flip_left_right(image1)
# image2 = tf.image.flip_left_right(image2)
#
# if random.random() < 0.5:
# image1 = tf.image.flip_up_down(image1)
# image2 = tf.image.flip_up_down(image2)
# if random.random() < 0.5:
# image1 = tf.image.rot90(image1, 2)
# image2 = tf.image.rot90(image2, 2)
# brightness = random.gauss(0, 0.2)
# image1 = tf.clip_by_value(image1+brightness, 0.0, 1.0)
# image2 = tf.clip_by_value(image2+brightness, 0.0, 1.0)
# contrast = random.random()*1.6 + 0.2
# image1 = tf.image.adjust_contrast(image1, contrast)
# image2 = tf.image.adjust_contrast(image2, contrast)
#
# gamma = random.random()*0.6 + 0.7
# image1 = tf.image.adjust_gamma(image1, gamma)
# image2 = tf.image.adjust_gamma(image2, gamma)
if training:
image_batch = tf.train.shuffle_batch(
[image1, image2, flo],
FLAGS.batch_size,
num_threads=FLAGS.batch_size,
capacity=100 * FLAGS.batch_size,
min_after_dequeue=50 * FLAGS.batch_size,
enqueue_many=False)
else:
image_batch = tf.train.batch(
[image1, image2, flo],
FLAGS.batch_size / FLAGS.num_gpus,
#num_threads=FLAGS.batch_size / FLAGS.num_gpus,
num_threads=1,
capacity=10 * FLAGS.batch_size,
#min_after_dequeue=5 * FLAGS.batch_size,
enqueue_many=False)
return image_batch
|
kmatheussen/radium | bin/old/X11_MenuDialog.py | Python | gpl-2.0 | 1,375 | 0.055273 |
import sys,os,string
def GFX_MenuDialog(filename,*items):
file=open(filename,'w')
file.writelines(map(lambda x:x+"\n", items))
file.close()
os.system("python X11_MenuDialog.py "+filename);
if __name__=="__main__":
import qt,string
class WidgetView ( qt.QWidget ):
def __init__( self, *args ):
apply( qt.QWidget.__init__, (self,) + args )
self.topLayout = qt.QVBoxLayout( self, 10 )
self.grid = qt.QGridLayout( 0, 0 )
self.topLayout.addLayout( self.grid, 10 )
# Create a list box
self.lb = qt.QListBox( self, "listBox" )
file=open(sys.argv[1],'r')
self.dasitems=map(lambda x:string.rstrip(x),file.readlines())
file.close()
self.setCaption(self.dasitems.pop(0))
for item in self.dasitems:
self.lb.insertItem(item)
self.grid.addMultiCellWidget( self.lb, 0, 0, 0, 0 )
self.connect( self.lb, qt.SIGNAL("selected(int)"), self.listBoxItemSelected )
self.topLayout.activate()
def listBoxIte | mSelected( self, index ) | :
txt = qt.QString()
txt = "List box item %d selected" % index
print txt
file=open(sys.argv[1],'w')
file.write(self.dasitems[index])
file.close();
a.quit()
a = qt.QApplication( sys.argv )
w = WidgetView()
a.setMainWidget( w )
w.show()
a.exec_loop()
|
polyaxon/polyaxon | core/polyaxon/polypod/compiler/__init__.py | Python | apache-2.0 | 2,209 | 0 | #!/usr/bin/python
#
# Copyright 2018-2021 Polyaxon, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You m | ay obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License | is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import Any, Dict, Optional
from polyaxon.polyflow import V1CompiledOperation
from polyaxon.polypod.compiler import converter, resolver
def make(
owner_name: str,
project_name: str,
project_uuid: str,
run_name: str,
run_uuid: str,
run_path: str,
compiled_operation: V1CompiledOperation,
params: Optional[Dict],
default_sa: str = None,
internal_auth: bool = False,
default_auth: bool = False,
converters: Dict[str, Any] = converter.CORE_CONVERTERS,
):
resolver_obj, compiled_operation = resolver.resolve(
compiled_operation=compiled_operation,
owner_name=owner_name,
project_name=project_name,
project_uuid=project_uuid,
run_name=run_name,
run_path=run_path,
run_uuid=run_uuid,
params=params,
)
return converter.convert(
namespace=resolver_obj.namespace,
owner_name=resolver_obj.owner_name,
project_name=resolver_obj.project_name,
run_name=resolver_obj.run_name,
run_path=resolver_obj.run_path,
run_uuid=resolver_obj.run_uuid,
compiled_operation=compiled_operation,
connection_by_names=resolver_obj.connection_by_names,
internal_auth=internal_auth,
artifacts_store=resolver_obj.artifacts_store,
secrets=resolver_obj.secrets,
config_maps=resolver_obj.config_maps,
polyaxon_sidecar=resolver_obj.polyaxon_sidecar,
polyaxon_init=resolver_obj.polyaxon_init,
default_sa=default_sa,
converters=converters,
default_auth=default_auth,
)
|
httpdss/pinax-mobility | mobility/conf/settings.py | Python | mit | 1,051 | 0.002854 | from django.conf import settings
# PLEASE: Don't change anything here, use your site settings.py
USER_AGENTS = {
'jqtouch': r'AppleWebKit/.*Mobile/',
}
USER_AGENTS.update(getattr(settings, 'MOBILITY_USER_AGENTS', {}))
TEMPLATE_MAPPING = {
'index': ('index_template', 'index.html'),
'display_login_form': (' | login_template', 'login.html'),
'app_index': ('app_index_template', 'app_index.html'),
'render_change_form': ('change_form_template', 'change_form.html'),
'changelist_vi | ew': ('change_list_template', 'change_list.html'),
'delete_view': ('delete_confirmation_template', 'delete_confirmation.html'),
'history_view': ('object_history_template', 'object_history.html'),
'logout': ('logout_template', 'registration/logged_out.html'),
'password_change': ('password_change_template', 'registration/password_change_form.html'),
'password_change_done': ('password_change_done_template', 'registration/password_change_done.html'),
}
TEMPLATE_MAPPING.update(getattr(settings, 'MOBILITY_TEMPLATE_MAPPING', {}))
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.