| repo_name (string, lengths 5-100) | path (string, lengths 4-231) | language (1 class) | license (15 classes) | size (int64, 6-947k) | score (float64, 0-0.34) | prefix (string, lengths 0-8.16k) | middle (string, lengths 3-512) | suffix (string, lengths 0-8.17k) |
|---|---|---|---|---|---|---|---|---|
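Each row below is one fill-in-the-middle sample: a source file split into prefix, middle, and suffix cells, which is why several snippets appear to break mid-token where the cell boundaries fall. A minimal sketch of how such a record could be stitched back into plain file text, assuming the row is exposed as a Python dict keyed by the column names above (the literal values are hypothetical, not an actual row):

# Reassemble a fill-in-the-middle record into the original file text.
# Field names follow the column header above; the values are made up for illustration.
record = {
    "repo_name": "example/repo",
    "path": "example.py",
    "prefix": "def add(a, b):\n    return ",
    "middle": "a + b",
    "suffix": "\n",
}

def reassemble(row):
    """Concatenate prefix, middle and suffix back into the source text."""
    return row["prefix"] + row["middle"] + row["suffix"]

print(reassemble(record))  # prints the reconstructed two-line function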
patrickbeeson/diy-trainer
|
diytrainer/guides/urls.py
|
Python
|
mit
| 651
| 0.003072
|
from django.conf.urls import patterns
from django.conf.urls import url
from django.views.generic import TemplateView
from . import views
urlpatterns = patterns('',
url(
regex=r'^(?P<guide_version>\d+)/$',
view=views.EmailSignUpCreateView.as_view(),
name='email_signup'
),
url(
regex=r'^(?P<guide_version>\d+)/feedback/$',
view=views.FeedbackCreateView.as_view(),
name='guide_feedback'
),
url(
regex=r'^(?P<guide_version>\d+)/feedback/thanks/$',
view=TemplateView.as_view(template_name='guides/feedback_submitted.html'),
name='guide_feedback_thanks'
)
)
|
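A brief usage sketch for the named patterns above, assuming this URLconf is included without a namespace; the guide_version value is hypothetical. reverse() lives in django.core.urlresolvers for Django of the patterns() era (django.urls in later releases):

# Hypothetical reverse lookups for the URL names defined above.
from django.core.urlresolvers import reverse

signup_url = reverse('email_signup', kwargs={'guide_version': '3'})           # -> /3/
feedback_url = reverse('guide_feedback', kwargs={'guide_version': '3'})       # -> /3/feedback/
thanks_url = reverse('guide_feedback_thanks', kwargs={'guide_version': '3'})  # -> /3/feedback/thanks/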
Fisiu/calendar-oswiecim
|
webapp/calendars/migrations/0005_auto_20150718_1537.py
|
Python
|
agpl-3.0
| 451
| 0.002217
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('calendars', '0004_auto_20150718_1529'),
]
operations = [
migrations.AlterField(
model_name='event',
name='end_time',
field=models.DateTimeField(blank=True, verbose_name='Koniec wydarzenia', null=True),
),
]
|
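The AlterField above implies a model field along these lines; the Event model sketched here is a hypothetical reconstruction from the migration, not the repo's actual models.py ('Koniec wydarzenia' is Polish for 'end of the event'):

from django.db import models

class Event(models.Model):
    # Matches the field described by the AlterField operation above.
    end_time = models.DateTimeField(blank=True, null=True,
                                    verbose_name='Koniec wydarzenia')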
SCOAP3/scoap3
|
rawtext_search.py
|
Python
|
gpl-2.0
| 9,093
| 0.00176
|
# -*- coding: utf-8 -*-
##
## This file is part of Invenio.
## Copyright (C) 2014, 2015 CERN.
##
## Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
import re
class RawTextSearch:
def __init__(self, searchstring, normal_search_delimiter="'",
regex_search_delimiter="/"):
self.searchstring = searchstring
self.normal_search_delimiter = normal_search_delimiter
self.regex_search_delimiter = regex_search_delimiter
self.operators = []
self.operators.append((" ", "OR"))
self.operators.append(("OR", "OR"))
self.operators.append(("-", "NEG"))
self.operators.append((self.normal_search_delimiter, "none"))
self.operators.append((self.regex_search_delimiter, "none"))
self.operators.append(("(", "LBR"))
self.operators.append((")", "RBR"))
self.rules = [
("NEGT", ("NEG", "ST")),
("ST", ("NEGT",), ("LBR", "OR", "RBR"), ("LBR", "ST", "RBR")),
("OR", ("ST", "OR", "ST"), ("OR", "OR", "ST"), ("ST", "OR", "OR"),
("OR", "OR", "OR"))
]
self.pick_rules = {
("NEGT",): (0,),
("NEG", "ST"): (1,),
("LBR", "OR", "RBR"): (1,),
("LBR", "ST", "RBR"): (1,),
("ST", "OR", "ST"): (0, 2),
("OR", "OR", "ST"): (0, 2),
("ST", "OR", "OR"): (0, 2),
("OR", "OR", "OR"): (0, 2)
}
self.operator_actions = {
"ST": self._st_action,
"OR": self._or_action,
"NEGT": self._neg_action
}
self.raw_splits = self._split()
self.assigned_splits = self._assign_meanings(self.raw_splits)
self.searchtree = self._build_search_tree(self.assigned_splits)
def _st_action(self, results):
return results[0]
def _or_action(self, results):
val = False
for result in results:
val = val or result
return val
def _neg_action(self, results):
return not results[0]
# SPLIT THE SEARCH INTO ATOMIC PARTS
def _get_operator_length(self, operator, index):
if operator[0] == self.normal_search_delimiter:
next_pos = self.searchstring.find(self.normal_search_delimiter,
index+1)
return next_pos-index+len(operator[0])
elif operator[0] == self.regex_search_delimiter:
next_pos = self.searchstring.find(self.regex_search_delimiter,
index+1)
return next_pos-index+len(operator[0])
else:
return len(operator[0])
def _operator_length(self, index):
operator_length = 0
for operator in self.operators:
if operator[0] == self.searchstring[index:index+len(operator[0])]:
return self._get_operator_length(operator, index)
return operator_length
def _split(self):
splits = []
i = 0
while i < len(self.searchstring):
operator_length = self._operator_length(i)
if operator_length == 0:
raise Exception(('Infinite loop while splitting the search '
'string. Check your search_delimiters '
'or the search string.'))
splits.append(self.searchstring[i:i+operator_length])
i += operator_length
for i in range(len(splits)):
if splits[i] == " ":
splits[i] = "OR"
return splits
# ASSIGN MEANING TO THE ATOMIC PARTS
def _assign_meanings(self, splits):
new_splits = []
for i in range(len(splits)):
searchterm = True
for operator in self.operators:
if splits[i] == operator[0]:
searchterm = False
new_splits.append((operator[1], splits[i]))
if searchterm:
new_splits.append(("ST", splits[i]))
return new_splits
#BUILD THE SEARCH TREE
def _fits_rule(self, subrule, index, new_splits):
fits = True
for i in range(len(subrule)):
try:
if subrule[i] != new_splits[index+i][0]:
fits = False
break
except Exception:
fits = False
break
return fits
def _combine(self, rule, subrule, index, new_splits):
tmp = new_splits
new_tuple_list = [rule[0]]
picks = self.pick_rules[subrule]
for i in picks:
new_tuple_list.append(new_splits[index+i])
for i in range(len(subrule)):
del(tmp[index])
tmp.insert(index, tuple(new_tuple_list))
return tmp
def _build_search_tree(self, splits):
new_splits = splits
start_over = False
while len(new_splits) != 1:
start_over = False
for rule in self.rules:
for subrule in rule[1:]:
for i in range(len(new_splits)):
if self._fits_rule(subrule, i, new_splits):
new_splits = self._combine(rule,
subrule,
i,
new_splits)
start_over = True
break
if start_over:
break
if start_over:
break
if not start_over:
raise Exception
return new_splits[0]
#PRINT TREE
def _print_tree(self, new_split, indentation_level=0):
indentation = ''
for i in range(indentation_level):
indentation += "\t"
print indentation + new_split[0]
for split in new_split[1:]:
if isinstance(split, (list, tuple)):
self._print_tree(split, indentation_level+1)
else:
print indentation + '\t' + split
def print_tree(self, indentation_level=0):
indentation = ''
for i in range(indentation_level):
indentation += "\t"
print indentation + self.searchtree[0]
for split in self.searchtree[1:]:
if isinstance(split, (list, tuple)):
self._print_tree(split, indentation_level+1)
else:
print indentation + '\t' + split
#SEARCH STUFF
def _is_regex(self, searchterm):
if searchterm.startswith(self.regex_search_delimiter):
return True
return False
def _get_active_operator(self, operator_string):
return self.operator_actions[operator_string]
def _clean_searchterm(self, searchterm):
if searchterm.startswith(self.normal_search_delimiter):
return searchterm.replace(self.normal_search_delimiter, '')
if searchterm.startswith(self.regex_search_delimiter):
return searchterm.replace(self.regex_search_delimiter, '')
return searchterm
def _perform_search(self, raw_text, searchterm):
if not self._is_regex(searchterm):
searchterm = self._clean_searchterm(searchterm)
return searchterm in raw_text
else:
searchterm = self._clean_searchterm(searchterm)
pattern = re.compile(searchterm, re.IGNORECASE)
return pattern.sea
|
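A small usage sketch for RawTextSearch as defined above; the snippet's search method is cut off, so only construction and tree printing are shown, and the query string is hypothetical. Like the snippet itself, this is Python 2 (print statement):

# Two quoted terms; a bare space between terms is treated as OR.
rts = RawTextSearch("'neutrino' 'muon'")
rts.print_tree()
# Expected output (tab-indented):
# OR
#     ST
#         'neutrino'
#     ST
#         'muon'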
Kazade/NeHe-Website
|
public/post_to_twitter.py
|
Python
|
bsd-3-clause
| 2,865
| 0.001396
|
# -*- coding: utf-8 -*-
#
# Copyright (c) 2009 Arthur Furlan <arthur.furlan@gmail.com>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# On Debian systems, you can find the full text of the license in
# /usr/share/common-licenses/GPL-2
import os
import twitter
import urllib, urllib2
from django.conf import settings
from django.contrib.sites.models import Site
TWITTER_MAXLENGTH = getattr(settings, 'TWITTER_MAXLENGTH', 140)
def post_to_twitter(sender, instance, *args, **kwargs):
"""
Post new saved objects to Twitter.
Example:
from django.db import models
class MyModel(models.Model):
text = models.CharField(max_length=255)
link = models.CharField(max_length=255)
def __unicode__(self):
return u'%s' % self.text
def get_absolute_url(self):
return self.link
# the following method is optional
def get_twitter_message(self):
return u'my-custom-twitter-message: %s - %s' \
% (self.text, self.link)
models.signals.post_save.connect(post_to_twitter, sender=MyModel)
"""
# avoid posting the same object twice
if not kwargs.get('created'):
return False
# check if there's a twitter account configured
try:
username = settings.TWITTER_USERNAME
password = settings.TWITTER_PASSWORD
except AttributeError:
print 'WARNING: Twitter account not configured.'
return False
# if the absolute url isn't a real absolute url and doesn't contain the
# protocol and domain defined, then append this relative url to the domain
# of the current site, emulating the browser's behaviour
url = instance.get_absolute_url()
if not url.startswith('http://') and not url.startswith('https://'):
domain = Site.objects.get_current().domain
url = u'http://%s%s' % (domain, url)
# tinyurl'ze the object's link
create_api = 'http://tinyurl.com/api-create.php'
data = urllib.urlencode(dict(url=url))
link = urllib2.urlopen(create_api, data=data).read().strip()
# create the twitter message
try:
text = instance.get_twitter_message()
except AttributeError:
text = unicode(instance)
mesg = u'%s - %s' % (text, link)
if len(mesg) > TWITTER_MAXLENGTH:
size = len(mesg + '...') - TWITTER_MAXLENGTH
mesg = u'%s... - %s' % (text[:-size], link)
try:
twitter_api = twitter.Api(username, password)
twitter_api.PostUpdate(mesg)
except urllib2.HTTPError, ex:
print 'ERROR:', str(ex)
return False
|
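The length handling above is easy to misread: when the combined message exceeds TWITTER_MAXLENGTH, the slice drops exactly enough characters from the text so that the final string, '...' included, lands back on the limit. The same arithmetic as a standalone sketch with hypothetical lengths:

# Reproduce the truncation arithmetic from post_to_twitter with toy values.
TWITTER_MAXLENGTH = 140
text = u'x' * 150                      # hypothetical long message body
link = u'http://tinyurl.com/abcdef'    # 25 characters

mesg = u'%s - %s' % (text, link)       # 150 + 3 + 25 = 178 characters
if len(mesg) > TWITTER_MAXLENGTH:
    size = len(mesg + '...') - TWITTER_MAXLENGTH  # 181 - 140 = 41 chars to drop
    mesg = u'%s... - %s' % (text[:-size], link)   # 109 + 6 + 25 = 140 characters

assert len(mesg) == TWITTER_MAXLENGTH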
oduwsdl/ipwb
|
setup.py
|
Python
|
mit
| 2,128
| 0
|
#!/usr/bin/env python
from setuptools import setup
from ipwb import __version__
with open('README.md') as f:
long_description = f.read()
desc = """InterPlanetary Wayback (ipwb): Web Archive integration with IPFS"""
setup(
name='ipwb',
version=__version__,
url='https://github.com/oduwsdl/ipwb',
download_url="https://github.com/oduwsdl/ipwb",
author='Mat Kelly',
author_email='me@matkelly.com',
description=desc,
packages=['ipwb'],
license='MIT',
long_description=long_description,
long_description_content_type="text/markdown",
provides=[
'ipwb'
],
install_requires=[
'warcio>=1.5.3',
'ipfshttpclient>=0.8.0a',
'Flask==2.0.3',
'pycryptodome>=3.4.11',
'requests>=2.19.1',
'beautifulsoup4>=4.6.3',
'six==1.11.0',
'surt>=0.3.0'
],
tests_require=[
'flake8>=3.4',
'pytest>=3.6',
'pytest-cov',
'pytest-flake8'
],
entry_points="""
[console_scripts]
ipwb = ipwb.__main__:main
""",
package_data={
'ipwb': [
'assets/*.*',
'assets/favicons/*.*',
'templates/*.*'
]
},
zip_safe=False,
keywords='http web archives ipfs distributed odu wayback memento',
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: 3.9',
'Programming Language :: Python :: 3.10',
'License :: OSI Approved :: MIT License',
'Intended Audience :: Developers',
'Intended Audience :: Information Technology',
'Intended Audience :: Science/Research',
'Topic :: Internet :: WWW/HTTP',
'Topic :: System :: Archiving',
'Topic :: System :: Archiving :: Backup',
'Topic :: System :: Archiving :: Mirroring',
'Topic :: Utilities',
]
)
# Publish to pypi:
# rm -rf dist; python setup.py sdist bdist_wheel; twine upload dist/*
|
WISDEM/pyFrame3DD
|
setup.py
|
Python
|
gpl-3.0
| 1,499
| 0.00934
|
# setup.py
# only if building in place: ``python setup.py build_ext --inplace``
import os
import sys
import platform
import glob
from setuptools import setup, find_packages
from numpy.distutils.core import setup, Extension
os.environ['NPY_DISTUTILS_APPEND_FLAGS'] = '1'
#if os.name == 'nt': # Windows.
# extra_compile_args = ['/TC', '/D', 'ANSI'] # for msvs
# # TODO: Not with Anaconda MINGW
#else:
#extra_compile_args = ''
froot = 'pyframe3dd' + os.sep + 'src' + os.sep
pyframeExt = Extension('pyframe3dd._pyframe3dd', sources=[froot+'py_HPGmatrix.c',
froot+'HPGutil.c',
froot+'NRutil.c',
froot+'coordtrans.c',
froot+'preframe.c',
froot+'py_eig.c',
froot+'py_frame3dd.c',
froot+'py_io.c',
froot+'py_main.c'])
setup(
name='pyFrame3DD',
version='1.1.1',
description='Python bindings to Frame3DD',
author='NREL WISDEM Team',
author_email='systems.engineering@nrel.gov',
#package_dir={'': 'src'},
#py_modules=['pyframe3dd'],
package_data={'pyframe3dd': []},
packages=['pyframe3dd'],
license='Apache License, Version 2.0',
ext_modules=[pyframeExt],
zip_safe=False
)
|
Smart-Torvy/torvy-home-assistant
|
homeassistant/components/alarm_control_panel/demo.py
|
Python
|
mit
| 462
| 0
|
"""
Demo platform that has two fake alarm control panels.
For more details about this platform, please refer to the documentation
https://home-assistant.io/components/demo/
"""
import homeassistant.components.alarm_control_panel.manual as manual
def setup_platform(hass, config, add_devices, discovery_info=None):
"""Setup the Demo alarm control panel platform."""
add_devices([
manual.ManualAlarm(hass, 'Alarm', '1234', 5, 10, False),
])
|
googlecreativelab/beat-blender
|
main.py
|
Python
|
apache-2.0
| 859
| 0.004657
|
#!/usr/bin/env python
#
# Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import os
import webapp2
import urllib2
class Redirect( webapp2.RequestHandler ):
def get(self):
self.redirect('/ai/beat-blender/view/')
app = webapp2.WSGIApplication([
('/ai/beat-blender/view', Redirect),
('/', Redirect),
], debug=True)
|
teitei-tk/ice-pick
|
tests/test_recorder.py
|
Python
|
mit
| 2,731
| 0
|
import unittest
from nose.tools import ok_, eq_
import datetime
from pymongo import MongoClient
from tests.config import DB_HOST, DB_PORT, DB_NAME
from icePick.recorder import get_database, Structure, Recorder
db = get_database(DB_NAME, DB_HOST, DB_PORT)
class TestStructureModel(Structure):
pass
class TestRecorderModel(Recorder):
struct = TestStructureModel(
string=str(),
intger=int(),
boolean=bool(),
list_=list(),
dictionary=dict(),
dt=datetime.datetime.now()
)
class Meta:
database = db
class TestRecorder(unittest.TestCase):
def setUp(self):
self.record = TestRecorderModel.new()
def tearDown(self):
m = MongoClient(DB_HOST, DB_PORT)
m.drop_database(DB_NAME)
def test_colname(self):
eq_('test_recorder_model', self.record.colname())
def test_attrs(self):
new_str = "test_setattr"
self.record.string = new_str
eq_(new_str, self.record.string)
def test_new(self):
eq_(None, self.record.key())
new_record = TestRecorderModel.new({
"string": "new_string"
})
eq_("new_string", new_record.string)
def test_insert(self):
eq_(None, self.record.key())
self.record.string = "new_str"
self.record.insert()
eq_("new_str", self.record.string)
self.assertNotEqual(None, self.record.key())
def test_update(self):
self.record.insert()
self.assertNotEqual(None, self.record.key())
self.record.string = "new_str"
self.record.update()
eq_("new_str", self.record.string)
def test_save(self):
self.record.string = "new_str"
self.record.save()
self.assertNotEqual(None, self.record.key())
eq_("new_str", self.
|
record.string)
self.record.string = "update"
self.record.save()
eq_("update", self.record.string)
def test_get(self):
self.record.string = "new_str"
self.record.save()
exist_record = TestRecorderModel.get(self.record.key())
eq_(exist_record.key(), self.record.key())
eq_(exist_record.string, self.record.string)
def test_find(self):
result = TestRecorderModel.find()
eq_(0, result.__len__())
self.record.save()
result = TestRecorderModel.find()
eq_(1, result.__len__())
entity = result[0]
eq_(entity.key(), self.record.key())
def test_delete(self):
self.record.save()
result = TestRecorderModel.find()
eq_(1, result.__len__())
self.record.delete()
result = TestRecorderModel.find()
eq_(0, result.__len__())
|
jodal/comics
|
comics/comics/rutetid.py
|
Python
|
agpl-3.0
| 442
| 0
|
from comics.aggregator.crawler import DagbladetCrawlerBase
from comics.core.comic_data import ComicDataBase
class ComicData(ComicDataBase):
name = "Rutetid"
language = "no"
url = "http://www.dagbladet.no/tegneserie/rutetid/"
rights = "Frode Øverli"
active = False
class Crawler(DagbladetCrawlerBase):
time_zone = "Europe/Oslo"
def crawl(self, pub_date):
return self.crawl_helper("rutetid", pub_date)
|
arcticio/ice-bloc-hdr
|
handlers/gablog/contact.py
|
Python
|
mit
| 4,409
| 0.016103
|
# The MIT License
#
# Copyright (c) 2008 William T. Katz
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to
# deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
"""
contact.py
This module provides a simple form for entering a message and the
handlers for receiving the message through a HTTP POST.
"""
__author__ = 'William T. Katz'
import aio
import config
import logging
import string
import time
import view
import os
from google.appengine.api import users
from handlers import restful
RANDOM_TOKEN = '08yzek30krn4l' + config.APP['base_url']
def getReferer(req):
hkeys = req.headers.keys()
if 'Referer' in hkeys:
return req.headers['Referer']
else :
if 'HTTP_REFERER' in os.environ :
return os.environ['HTTP_REFERER']
return ""
class ContactHandler(restful.Controller):
def get(self):
user = users.get_current_user()
referer = getReferer(self.request)
refererOK = "localhost" in referer or "arctic.io" in referer or "ice-bloc" in referer
aio.debug("contact: referer.check '%s', %s", referer, refererOK)
## Fraud prevention
## if not refererOK :
## self.redirect("403.html")
## return
# Don't use cache since we want to get current time for each post.
view.ViewPage(cache_time=0).render(self, {
'email': user.email() if user else 'Required',
'nickname': user.nickname() if user else '',
'token': RANDOM_TOKEN,
'curtime': time.time(),
"title": config.APP["title"] + " - Contact & Feedback",
'warning': self.request.get('info'),
'referer': getReferer(self.request)
})
def post(self):
from google.appengine.api import mail
## validation
# if self.request.get('token') != RANDOM_TOKEN or \
# time.time() - string.atof(self.request.get('curtime')) < 2.0 :
# logging.warn("Aborted contact mailing because form submission was less than 2 seconds.")
# self.error(403)
referer = getReferer(self.request)
refererOK = "localhost" in referer or "arctic.io" in referer or "ice-bloc" in referer
aio.debug("contact: referer.check %s, %s", referer, refererOK)
if not refererOK :
aio.debug("ContactHandler.post: referer failed: %s", referer)
self.redirect("/contact/?info=no tricks, please")
elif self.request.get('email') == "" :
aio.debug("ContactHandler.post: no email")
self.redirect("/contact/?info=no email given")
elif self.request.get('subject') == "" :
aio.debug("ContactHandler.post: no subject")
self.redirect("/contact/?info=no subject given")
elif self.request.get('message') == "" :
aio.debug("ContactHandler.post: no message")
self.redirect("/contact/?info=no message given")
else :
user = users.get_current_user()
sender = user.email() if user else config.APP['email']
reply_to = self.request.get('email') or (user.email() if user else 'anonymous@unknown.com')
mail.send_mail(
sender = sender,
reply_to = self.request.get('author') + '<' + reply_to + '>',
to = config.APP['email'],
subject = "[a.io.contact] " + self.reque
|
st.get('subject')
|
or 'No Subject Given',
body = (reply_to + " wrote:\n\n" + self.request.get('message')) or 'No Message Given'
)
logging.info("MAIL: %s, ref: %s", reply_to, referer)
view.ViewPage(cache_time=36000).render(self)
|
adamnew123456/jqueue
|
test_all.py
|
Python
|
bsd-2-clause
| 1,476
| 0.003388
|
"""
This tests both the server and the client, by uppercasing the content of
different files.
"""
import os
import tempfile
import threading
import time
from jqueue import quickstart, server
# These are the names of the jobs, as well as their content
IN_FILES = {
'A': 'a lowercase test',
'B': 'miXeD-caSe',
'C': 'UPPER CASE'
}
OUT_FILES = {
'A.result': 'A LOWERCASE TEST',
'B.result': 'MIXED-CASE',
'C.result': 'UPPER CASE'
}
svr = server.Server()
def server_thread_runner():
svr.run([fname.encode('ascii') for fname in IN_FILES])
def client_thread_runner():
def handler(data):
return data.upper()
quickstart.process_jobs('localhost', handler, ttl=5)
with tempfile.TemporaryDirectory() as tmpdir:
os.chdir(tmpdir)
for fname in IN_FILES:
with open(fname, 'w') as fstream:
fstream.write(IN_FILES[fname])
server_thread = threading.Thread(target=server_thread_runner, name='Server')
client_threads = [
threading.Thread(target=client_thread_runner, name='Client')
for _ in range(2)]
server_thread.start()
for thread in client_threads:
thread.start()
server_thread.join()
for thread in client_threads:
thread.join()
for fname in OUT_FILES:
with open(fname) as fstream:
content = fstream.read()
print('[{}]'.format(fname), repr(content), '==', repr(OUT_FILES[fname]))
input('Press Enter to continue')
|
chetan51/nupic.research
|
projects/rsm/util.py
|
Python
|
gpl-3.0
| 11,475
| 0.00061
|
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2019, Numenta, Inc. Unless you have an agreement
# with Numenta, Inc., for a separate license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU Affero Public License for more details.
#
# You should have received a copy of the GNU Affero Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
import matplotlib.pyplot as plt
import numpy as np
import torch
from matplotlib.backends.backend_agg import FigureCanvasAgg as FigureCanvas
from matplotlib.figure import Figure
from matplotlib.lines import Line2D
from sklearn.metrics import confusion_matrix
from torch.nn.functional import cosine_similarity
def square_size(n):
side = int(np.sqrt(n))
if side ** 2 < n:
side += 1
return side
def activity_square(vector):
n = len(vector)
side = square_size(n)
square = torch.zeros(side ** 2)
square[:n] = vector
return square.view(side, side)
def fig2img(fig):
canvas = FigureCanvas(fig)
canvas.draw()
width, height = fig.get_size_inches() * fig.get_dpi()
img = np.fromstring(canvas.tostring_rgb(), dtype="uint8").reshape(
int(height), int(width), 3
)
return img
def plot_confusion_matrix(
y_true, y_pred, classes, normalize=False, title=None, cmap=plt.cm.Blues
):
"""
This function plots the confusion matrix.
Normalization can be applied by setting `normalize=True`.
"""
# Compute confusion matrix
cm = confusion_matrix(y_true.cpu(), y_pred.cpu())
if normalize:
cm = cm.astype("float") / cm.sum(axis=1)[:, np.newaxis]
fig = Figure()
ax = fig.gca()
im = ax.imshow(cm, interpolation="nearest", cmap=cmap)
ax.figure.colorbar(im, ax=ax)
# We want to show all ticks...
ax.set(
xticks=np.arange(cm.shape[1]),
yticks=np.arange(cm.shape[0]),
# ... and label them with the respective list entries
xticklabels=classes,
yticklabels=classes,
title=title,
ylabel="True label",
xlabel="Predicted label",
)
# Rotate the tick labels and set their alignment.
plt.setp(ax.get_xticklabels(), rotation=45, ha="right", rotation_mode="anchor")
# Loop over data dimensions and create text annotations.
fmt = ".2f" if normalize else "d"
thresh = cm.max() / 2.0
for i in range(cm.shape[0]):
for j in range(cm.shape[1]):
ax.text(
j,
i,
format(cm[i, j], fmt),
ha="center",
va="center",
color="white" if cm[i, j] > thresh else "black",
)
return ax, fig
def plot_activity_grid(distrs, n_labels=10):
"""
For flattened models, plot cell activations for each combination of
input and actual next input
"""
fig, axs = plt.subplots(
n_labels,
n_labels,
dpi=300,
gridspec_kw={"hspace": 0.7, "wspace": 0.7},
sharex=True,
sharey=True,
)
for i in range(n_labels):
for j in range(n_labels):
key = "%d-%d" % (i, j)
if key in distrs:
activity_arr = distrs[key]
dist = torch.stack(activity_arr)
ax = axs[i][j]
mean_act = activity_square(dist.mean(dim=0).cpu())
side = mean_act.size(0)
ax.imshow(mean_act, origin="bottom", extent=(0, side, 0, side))
else:
ax.set_visible(False)
ax.axis("off")
ax.set_title(key, fontsize=5)
return fig
def plot_activity(distrs, n_labels=10, level="column"):
"""
Plot column activations for each combination of input and actual next input
Should show mini-column union activity (subsets of column-level activity
which predict next input) in the RSM model.
"""
n_plots = len(distrs.keys())
fig, axs = plt.subplots(n_plots, 1, dpi=300, gridspec_kw={"hspace": 0.7})
pi = 0
for i in range(n_labels):
for j in range(n_labels):
key = "%d-%d" % (i, j)
if key in distrs:
activity_arr = distrs[key]
dist = torch.stack(activity_arr)
ax = axs[pi]
pi += 1
bsz, m, n = dist.size()
no_columns = n == 1
col_act = dist.max(dim=2).values
if level == "column" or no_columns:
act = col_act
elif level == "cell":
col = col_act.view(bsz, m, 1)
act = torch.cat((dist, col), 2).view(bsz, m, n + 1)
mean_act = act.mean(dim=0).cpu()
if no_columns:
mean_act = activity_square(mean_act)
side = mean_act.size(0)
ax.imshow(mean_act, origin="bottom", extent=(0, side, 0, side))
else:
ax.imshow(
mean_act.t(), origin="bottom", extent=(0, m - 1, 0, n + 1)
)
ax.plot([0, m - 1], [n, n], linewidth=0.4)
ax.axis("off")
ax.set_title(key, fontsize=5)
return fig
def _repr_similarity_grid(
ax,
activity_arr,
cmap=plt.cm.Blues,
normalize=False,
labels=None,
title=None,
tick_fontsize=2,
fontsize=1.2,
):
n_labels = len(labels)
grid = torch.zeros(n_labels, n_labels)
# Compute grid (cosine similarity)
for i, act1 in enumerate(activity_arr):
for j, act2 in enumerate(activity_arr):
if j > i:
break
if act1 is not None and act2 is not None:
sim = cosine_similarity(act1, act2, dim=0)
grid[i, j] = grid[j, i] = sim
ax.imshow(grid, interpolation="nearest", cmap=cmap, vmin=0, vmax=1)
# ax.figure.colorbar(im, ax=ax)
# We want to show all ticks...
ax.set(
xticks=np.arange(grid.shape[1]),
yticks=np.arange(grid.shape[0]),
# ... and label them with the respective list entries
xticklabels=labels,
yticklabels=labels,
title=title,
)
ax.tick_params(labelsize=tick_fontsize)
# Rotate the tick labels and set their alignment.
plt.setp(ax.get_xticklabels(), rotation=45, ha="right", rotation_mode="anchor")
# Loop over data dimensions and create text annotations.
thresh = grid.max() / 2.0
for i in range(grid.shape[0]):
for j in range(grid.shape[1]):
ax.text(
j,
i,
format(grid[i, j], ".2f"),
ha="center",
va="center",
fontsize=fontsize,
color="white" if grid[i, j] > thresh else "black",
)
def plot_representation_similarity(
distrs, n_labels=10, title=None, save=None, fontsize=1.6
):
"""
Plot grid showing representation similarity between distributions passed
into distrs dict.
"""
fig, axs = plt.subplots(1, 2, dpi=300)
ax_id = 0
col_activities = []
cell_activities = []
labels = []
for i in range(n_labels):
for j in range(n_labels):
key = "%d-%d" % (i, j)
col_act = cell_act = None
if key in distrs:
activity_arr = distrs[key]
dist = torch.stack(activity_arr)
ax_id += 1
size = dist.size()
if len(size) == 3:
bsz, m, n = size
tc = m * n
else:
bsz, m = size
tc = m
if m != tc:
c
|
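A quick usage sketch for the two helpers at the top of the module above, with a hypothetical input vector: square_size rounds the side length up so the vector fits, and activity_square zero-pads the vector into that square:

import torch

side = square_size(10)                        # 4, since 3**2 < 10 <= 4**2
vec = torch.arange(10, dtype=torch.float32)   # hypothetical activation vector
grid = activity_square(vec)                   # 4x4 tensor, last 6 cells stay zero
print(side, grid.shape)                       # 4 torch.Size([4, 4])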
ACRMGroup/canonicals
|
extras/src/python/create_pdb_file_list.py
|
Python
|
gpl-3.0
| 2,213
| 0.009038
|
#!/usr/bin/env python
#************************************************************#
# #
# Author: jacob Hurst #
# File name: create_pdb_file_list.py #
# Date: Wednesday 18 Mar 2009 #
# Description: creates a list from saxs of the PDB #
# antibodies. #
# #
#************************************************************#
import sys
#************************************************************#
class CreateList(object):
def __init__(self, saxsfilename, pdblocation, outfilename):
self.pdb_ids = []
self.location = pdblocation
self.parse_saxs_file(saxsfilename)
self.write_file(outfilename)
###******************************************************#
def parse_saxs_file(self, filename):
""" parses the sax file to extract pdb ids. """
fIn = open(filename)
lines = fIn.readlines()
for line in lines:
line = line.rstrip("\n")
# search for the tag <antibody pdb=
pos = line.find("<antibody pdb=")
if pos == 0:
pdb_file_name = line[15:-2].lower()
pdb_file_name = "pdb" + pdb_file_name + ".ent"
self.pdb_ids.append(pdb_file_name)
fIn.close()
###******************************************************#
def write_file(self, outfilename):
""" files are written """
fOut = open(outfilename, "w")
for pdb_id in self.pdb_ids:
fOut.write("%s/%s\n" %(self.location, pdb_id))
fOut.close()
#************************************************************#
def usage():
print "./create_pdb_file_list.py <saxsfile> <pdblocation> <outfilename>"
sys.exit(1)
#************************************************************#
if __name__ == "__main__":
if len(sys.argv)!=4:
usage()
cL = CreateList(sys.argv[1], sys.argv[2], sys.argv[3])
#************************************************************#
|
utmi-2014/utmi-ros-enshu
|
enshu20141204/scripts/group15/test_detect_face.py
|
Python
|
mit
| 2,428
| 0.00557
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# test_detect_face.py
import rospy
from std_msgs.msg import Bool
from sensor_msgs.msg import CompressedImage
import sys
import cv2
import numpy as np
class DetectFace:
def __init__(self):
rospy.init_node('detect_face')
self.pub = rospy.Publisher('/enshu/detect_face', Bool)
rospy.Subscriber('/image_raw/compressed', CompressedImage, self.callback)
self.detected = False
self.times = 0
def callback(self, data):
self.times += 1
if self.times % 7 != 0:
return
jpeg = data.data
byte_array = bytearray(jpeg)
file_bytes = np.array(byte_array)
image = cv2.imdecode(file_bytes, cv2.CV_LOAD_IMAGE_UNCHANGED)
# print image
cascade_path = "/opt/ros/hydro/share/OpenCV/haarcascades/haarcascade_frontalface_alt.xml"
# cascade_path = "/opt/ros/hydro/share/OpenCV/haarcascades/haarcascade_frontalface_alt2.xml"
# cascade_path = "/opt/ros/hydro/share/OpenCV/haarcascades/haarcascade_frontalface_default.xml"
# color = (255, 255, 255)  # white
# convert the image to grayscale
image_gray = cv2.cvtColor(image, cv2.cv.CV_BGR2GRAY)
# load the cascade classifier's feature data
cascade = cv2.CascadeClassifier(cascade_path)
# print cascade
#facerect = cascade.detectMultiScale(image, scaleFactor=1.1, minNeighbors=1, minSize=(1, 1))
facerect = cascade.detectMultiScale(image_gray, scaleFactor=1.1, minNeighbors=3,
minSize=(10, 10), flags = cv2.cv.CV_HAAR_SCALE_IMAGE)
# print facerect
# save the recognition result for inspection
cv2.imwrite("/tmp/test_detect_face.jpeg", image_gray)
if len(facerect) <= 0:
self.detected = False
print "callback: ", False
return
# draw a rectangle around each detected face
# for rect in facerect:
# cv2.rectangle(image, tuple(rect[0:2]),tuple(rect[0:2]+rect[2:4]), color, thickness=2)
# print "face detected!"
self.detected = True
print "callback: ", True
def main():
detect_face = DetectFace()
while not rospy.is_shutdown():
detect_face.pub.publish(Bool(detect_face.detected))
print "main: ", detect_face.detected
rospy.sleep(0.5)
if __name__ == '__main__':
main()
|
mwhit74/moving_loads
|
ml/mlob.py
|
Python
|
mit
| 30,601
| 0.005784
|
# -*- coding: utf-8 -*-
"""The mlob module calculates the maximum effects of a vehicle on a simply
supported span including the pier reaction for two adjacent simply supported
spans of differing lengths.
"""
import pdb
def analyze_vehicle(axle_spacing, axle_wt, span_length1, span_length2,
num_user_nodes, space_to_trailing_load, distributed_load,
point_load_spacing=0.5):
"""Calculates the max shear and moment at each analysis node in 1 or 2 spans.
This function calculates the maximum shear and moment at each analysis node
in one or two spans of equal or unequal lengths. This is accomplished by
iterating through each analysis node, at each analysis node each axle of
vehicle is placed on the analysis node and the location of all the other
axles is determined, the moment and shear are calculated for this instance of
axle locations. This operation is repeated for each axle of the vehicle and
for each analysis node. The axles are incremented left to right and right to
left to cover all possible axle locations in either direction.
Args:
axle_spacing (list of floats): the spacing between each axle
axle_wt (list of floats): weight of each axle
span_length1 (float): length of span 1
span_length2 (float): length of span 2
num_user_nodes (int): number of analysis nodes input by the user
space_to_trailing_load (float): distance from last discrete axle to
beginning of distributed load
distributed_load (float): uniformly distributed trailing load magnitude
point_load_spacing (float, optional): spacing of approximate discretely
spaced point loads,
defaults to 0.5
Returns:
node_loc_ltr (list of floats): coordinate location of analysis nodes in
order ltr
V_max1 (list of floats): maximum shear at each analysis node in span 1
M_corr1 (list of floats): corresponding moment to maximum shear at each
analysis node in span 1
M_max1 (list of floats): maximum moment at each analysis node in span 1
V_corr1 (list of floats): corresponding shear to maximum moment at each
analysis node in span 1
V_max2 (list of floats): maximum shear at each analysis node in span 2
M_corr2 (list of floats): corresponding moment to maximum shear at each
analysis node in span 2
M_max2 (list of floats): maximum moment at each analysis node in span 2
V_corr2 (list of floats): corresponding shear to maximum moment at each
analysis node in span 2
Rmax_pier (float): maximum pier reaction, returns None if span length 2
is not entered by user
span1_begin (float): coordinate location of beginning of span 1
span2_begin (float): coordinate location of beginning of span 2
Notes:
Placing each axle directly at the analysis node ensures that the maximum
shear and moment is calculated for each axle and corresponding axle
locations. While the maximum shear and moment will be calculated for that
specific analysis node location, the overall maximum shear and moment of the
span may not be calculated if there is not enough discretization of analysis
nodes, i.e. not enough analysis nodes in the span to accurately describe the
shear and moment behavior.
"""
#calculates for a full track (2 rails)
V_max1 = []
M_corr1 = []
V_max1_axle = []
M_max1 = []
V_corr1 = []
M_max1_axle = []
V_max2 = []
M_corr2 = []
V_max2_axle = []
M_max2 = []
V_corr2 = []
M_max2_axle = []
Rmax_pier = 0.0
Rmax_pier_axle = [None, None]
(span1_begin,
span1_end,
span2_begin,
span2_end) = span_begin_end_coords(span_length1, span_length2)
node_loc_ltr = node_location(span1_begin, span1_end, span2_begin,
span2_end, num_user_nodes)
node_loc_rtl = list(reversed(node_loc_ltr))
mod_axle_spacing, mod_axle_wt = add_trailing_load(axle_spacing,
axle_wt,
space_to_trailing_load,
distributed_load,
span1_begin,
span2_end,
point_load_spacing)
mod_axle_spacing.insert(0, 0.0) #insert a dummy spacing for the first axle
num_axles = len(mod_axle_wt) #number of axles in the pattern
axle_num = number_axles(num_axles) #numbered axles
#pdb.set_trace()
for node_loc,direction in zip([node_loc_ltr,
node_loc_rtl],
["ltr","rtl"]):
#pdb.set_trace()
num_analysis_nodes = len(node_loc)
#initialize span index id value
if direction == "ltr":
span1_index_id = -1
span2_index_id = -1
elif direction == "rtl":
span1_index_id = num_user_nodes
span2_index_id = num_user_nodes
#loop thru analysis node locations
for x,i in zip(node_loc, range(num_analysis_nodes)):
Ve1 = 0.0
M1 = 0.0
Ve2 = 0.0
M2 = 0.0
#calculate span index id value
if x >= span1_begin and x <= span1_end:
if direction == "ltr":
span1_index_id = span1_index_id + 1
elif direction == "rtl":
span1_index_id = span1_index_id - 1
if span_length2 != 0.0 and x >= span2_begin and x <= span2_end:
if direction == "ltr":
span2_index_id = span2_index_id + 1
elif direction == "rtl":
span2_index_id = span2_index_id - 1
#loop thru each axle in the axle configuration placing each axle at
#current analysis node
for axle_id in axle_num:
#calculate location of each axle based on the axle currently
#over the analysis node
if axle_id == 1:
cur_axle_loc = get_abs_axle_location(mod_axle_spacing, x,
direction)
else:
prev_axle_loc = cur_axle_loc
cur_axle_loc = move_axle_loc(mod_axle_spacing, axle_id,
prev_axle_loc, num_axles,
direction)
Pt1, xt1, Pl1, xl1, Pr1, xr1 = calc_load_and_loc(cur_axle_loc,
mod_axle_wt, x, span1_begin, span1_end, num_axles)
Pt2, xt2, Pl2, xl2, Pr2, xr2 = calc_load_and_loc(cur_axle_loc,
mod_axle_wt, x, span2_begin, span2_end, num_axles)
Rpier, R1, R2 = calc_pier_reaction(cur_axle_loc, mod_axle_wt, span1_begin,
span1_end, span2_begin, span2_end,
num_axles)
Rmax_pier, Rmax_pier_axle = envelope_pier_reaction(Rmax_pier,
Rpier,
Rmax_pier_axle,
axle_id,
direction, R1, R2)
if x >= span1_begin and x <= span1_end:
Rb1, Re1 = calc_reactions(Pt1, xt1, span1_begin, span1_end, direction)
Ve1 = calc_shear(Rb1, Pr1, Pl1, direction)
M1 = calc_moment(x,
|
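A hypothetical call sketch for the analyze_vehicle signature documented above; the axle spacings, weights, and span data are illustrative only, and the shape of the return value is taken from the docstring since the function body is truncated in the snippet:

# All values hypothetical: a four-axle vehicle, two spans, and a trailing uniform load.
axle_spacing = [8.0, 5.0, 5.0]        # distance between consecutive axles
axle_wt = [40.0, 80.0, 80.0, 80.0]    # weight of each axle
results = analyze_vehicle(axle_spacing, axle_wt,
                          span_length1=60.0, span_length2=40.0,
                          num_user_nodes=21,
                          space_to_trailing_load=5.0,
                          distributed_load=8.0)
# Per the docstring, `results` bundles the node coordinates, the max/corresponding
# shear and moment lists for each span, the max pier reaction, and the span start
# coordinates.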
openstack/manila
|
manila/tests/network/linux/test_ovs_lib.py
|
Python
|
apache-2.0
| 2,893
| 0
|
# Copyright 2014 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from unittest import mock
from manila.network.linux import ovs_lib
from manila import test
class OVS_Lib_Test(test.TestCase):
"""A test suite to exercise the OVS libraries."""
def setUp(self):
super(OVS_Lib_Test, self).setUp()
self.BR_NAME = "br-int"
self.TO = "--timeout=2"
self.br = ovs_lib.OVSBridge(self.BR_NAME)
self.execute_p = mock.patch('manila.utils.execute')
self.execute = self.execute_p.start()
def tearDown(self):
self.execute_p.stop()
super(OVS_Lib_Test, self).tearDown()
def test_reset_bridge(self):
self.br.reset_bridge()
self.execute.assert_has_calls([mock.call("ovs-vsctl", self.TO, "--",
"--if-exists", "del-br",
self.BR_NAME,
run_as_root=True),
mock.call("ovs-vsctl", self.TO,
"add-br",
self.BR_NAME,
run_as_root=True)])
def test_delete_port(self):
pname = "tap5"
self.br.delete_port(pname)
self.execute.assert_called_once_with("ovs-vsctl", self.TO, "--",
"--if-exists", "del-port",
self.BR_NAME, pname,
run_as_root=True)
def test_port_id_regex(self):
result = ('external_ids : {attached-mac="fa:16:3e:23:5b:f2",'
' iface-id="5c1321a7-c73f-4a77-95e6-9f86402e5c8f",'
' iface-status=active}\nname :'
' "dhc5c1321a7-c7"\nofport : 2\n')
match = self.br.re_id.search(result)
vif_mac = match.group('vif_mac')
vif_id = match.group('vif_id')
port_name = match.group('port_name')
ofport = int(match.group('ofport'))
self.assertEqual('fa:16:3e:23:5b:f2', vif_mac)
self.assertEqual('5c1321a7-c73f-4a77-95e6-9f86402e5c8f', vif_id)
self.assertEqual('dhc5c1321a7-c7', port_name)
self.assertEqual(2, ofport)
|
timj/scons
|
test/update-release-info/update-release-info.py
|
Python
|
mit
| 7,263
| 0.014457
|
#!/usr/bin/env python
#
# __COPYRIGHT__
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""
Test bin/update-release-info.py. Also verify that the original files
have the appropriate triggers to cause the modifications.
"""
__revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__"
import os, sys, time
import TestRuntest
# Needed to ensure we're using the correct year
this_year=time.localtime()[0]
TestSCons = 'QMTest/TestSCons.py' .split('/')
README = 'README.rst' .split('/')
ReleaseConfig = 'ReleaseConfig' .split('/')
SConstruct = 'SConstruct' .split('/')
Announce = 'src/Announce.txt' .split('/')
CHANGES = 'src/CHANGES.txt' .split('/')
RELEASE = 'src/RELEASE.txt' .split('/')
Main = 'src/engine/SCons/Script/Main.py' .split('/')
main_in = 'doc/user/main.in' .split('/')
main_xml = 'doc/user/main.xml' .split('/')
test = TestRuntest.TestRuntest(
program = os.path.join('bin', 'update-release-info.py'),
things_to_copy = ['bin']
)
if not os.path.exists(test.program):
test.skip_test("update-release-info.py is not distributed in this package\n")
test.run(arguments = 'bad', status = 1)
# Strings to go in ReleaseConfig
combo_strings = [
# Index 0: version tuple with bad release level
"""version_tuple = (2, 0, 0, 'bad', 0)
""",
# Index 1: Python version tuple
"""unsupported_python_version = (2, 6)
""",
# Index 2: Python version tuple
"""deprecated_python_version = (2, 7)
""",
# Index 3: alpha version tuple
"""version_tuple = (2, 0, 0, 'alpha', 0)
""",
# Index 4: final version tuple
"""version_tuple = (2, 0, 0, 'final', 0)
""",
# Index 5: bad release date
"""release_date = (%d, 12)
"""%this_year,
# Index 6: release date (hhhh, mm, dd)
"""release_date = (%d, 12, 21)
"""%this_year,
# Index 7: release date (hhhh, mm, dd, hh, mm, ss)
"""release_date = (%d, 12, 21, 12, 21, 12)
"""%this_year,
]
combo_error = \
"""ERROR: Config file must contain at least version_tuple,
\tunsupported_python_version, and deprecated_python_version.
"""
def combo_fail(*args, **kw):
kw.setdefault('status', 1)
combo_run(*args, **kw)
def combo_run(*args, **kw):
t = '\n'
for a in args:
t += combo_strings[a]
test.write(ReleaseConfig, t)
kw.setdefault('stdout', combo_error)
test.run(**kw)
combo_fail()
combo_fail(0)
combo_fail(1)
combo_fail(2)
combo_fail(0, 1)
combo_fail(0, 2)
combo_fail(1, 2)
combo_fail(0, 1, 2, stdout =
"""ERROR: `bad' is not a valid release type in version tuple;
\tit must be one of alpha, beta, candidate, or final
""")
# We won't need this entry again, so put in a default
combo_strings[0] = combo_strings[1] + combo_strings[2] + combo_strings[3]
combo_fail(0, 5, stdout =
"""ERROR: Invalid release date (%d, 12)
"""%this_year )
def pave(path):
path = path[:-1]
if not path or os.path.isdir(os.path.join(*path)):
return
pave(path)
test.subdir(path)
def pave_write(file, contents):
pave(file)
test.write(file, contents)
pave_write(CHANGES, """
RELEASE It doesn't matter what goes here...
""")
pave_write(RELEASE, """
This file has a 3.2.1.beta.20121221 version string in it
""")
pave_write(Announce, """
RELEASE It doesn't matter what goes here...
""")
pave_write(SConstruct, """
month_year = 'March 1945'
copyright_years = '2001, 2002, 2003, 2004, 2005, 2006, 2007'
default_version = '0.98.97'
""")
pave_write(README, """
These files are a part of 33.22.11:
scons-33.22.11.tar.gz
scons-33.22.11.win32.exe
scons-33.22.11.zip
scons-33.22.11.rpm
scons-33.22.11.deb
scons-33.22.11.beta.20012122112.suffix
""")
pave_write(TestSCons, """
copyright_years = Some junk to be overwritten
default_version = More junk
python_version_unsupported = Yep, more junk
python_version_deprecated = And still more
""")
pave_write(Main, """
unsupported_python_version = Not done with junk
deprecated_python_version = It goes on forever
""")
pave_write(main_in, """
TODO
""")
pave_write(main_xml, """
TODO
""")
def updating_run(*args):
stdout = ''
for file in args:
stdout += 'Updating %s...\n' % os.path.join(*file)
combo_run(0, 7, stdout = stdout)
updating_run(CHANGES, RELEASE, Announce, SConstruct, README, TestSCons, Main)
test.must_match(CHANGES, """
RELEASE 2.0.0.alpha.yyyymmdd - NEW DATE WILL BE INSERTED HERE
""", mode = 'r')
test.must_match(RELEASE, """
This file has a 2.0.0.alpha.yyyymmdd version string in it
""", mode = 'r')
test.must_match(Announce, """
RELEASE 2.0.0.alpha.yyyymmdd - NEW DATE WILL BE INSERTED HERE
""", mode = 'r')
years = '2001 - %d'%(this_year + 1)
test.must_match(SConstruct, """
month_year = 'MONTH YEAR'
copyright_years = %s
default_version = '2.0.0.alpha.yyyymmdd'
""" % repr(years), mode = 'r')
test.must_match(README, """
These files are a part of 33.22.11:
scons-2.0.0.alpha.yyyymmdd.tar.gz
scons-2.0.0.alpha.yyyymmdd.win32.exe
scons-2.0.0.alpha.yyyymmdd.zip
scons-2.0.0.alpha.yyyymmdd.rpm
scons-2.0.0.alpha.yyyymmdd.deb
scons-2.0.0.alpha.yyyymmdd.suffix
""", mode = 'r')
# should get Python floors from TestSCons module.
test.must_match(TestSCons, """
copyright_years = '%s'
default_version = '2.0.0.alpha.yyyymmdd'
python_version_unsupported = (2, 6)
python_version_deprecated = (2, 7)
"""%years, mode = 'r')
# should get Python floors from TestSCons module.
test.must_match(Main, """
unsupported_python_version = (2, 6)
deprecated_python_version = (2, 7)
""", mode = 'r')
#TODO: Release option
#TODO: ==============
#TODO:
#TODO: Dates in beta/candidate flow
#TODO:
#TODO: Dates in final flow
#TODO:
#TODO: Post option
#TODO: ===========
#TODO:
#TODO: Dates in post flow
#TODO:
#TODO: Update minor or micro version
#TODO:
#TODO: ReleaseConfig - new version tuple
#TODO:
#TODO: CHANGES - new section
#TODO:
#TODO: RELEASE - new template
#TODO:
#TODO: Announce - new section
test.pass_test()
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
|
MLnick/spark
|
python/pyspark/sql/tests.py
|
Python
|
apache-2.0
| 110,593
| 0.002297
|
# -*- encoding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
Unit tests for pyspark.sql; additional tests are implemented as doctests in
individual modules.
"""
import os
import sys
import subprocess
import pydoc
import shutil
import tempfile
import pickle
import functools
import time
import datetime
import py4j
try:
import xmlrunner
except ImportError:
xmlrunner = None
if sys.version_info[:2] <= (2, 6):
try:
import unittest2 as unittest
except ImportError:
sys.stderr.write('Please install unittest2 to test with Python 2.6 or earlier')
sys.exit(1)
else:
import unittest
from pyspark import SparkContext
from pyspark.sql import SparkSession, SQLContext, HiveContext, Column, Row
from pyspark.sql.types import *
from pyspark.sql.types import UserDefinedType, _infer_type
from pyspark.tests import ReusedPySparkTestCase, SparkSubmitTests
from pyspark.sql.functions import UserDefinedFunction, sha2, lit
from pyspark.sql.window import Window
from pyspark.sql.utils import AnalysisException, ParseException, IllegalArgumentException
class UTCOffsetTimezone(datetime.tzinfo):
"""
Specifies timezone in UTC offset
"""
def __init__(self, offset=0):
self.ZERO = datetime.timedelta(hours=offset)
def utcoffset(self, dt):
return self.ZERO
def dst(self, dt):
return self.ZERO
class ExamplePointUDT(UserDefinedType):
"""
User-defined type (UDT) for ExamplePoint.
"""
@classmethod
def sqlType(self):
return ArrayType(DoubleType(), False)
@classmethod
def module(cls):
return 'pyspark.sql.tests'
@classmethod
def scalaUDT(cls):
return 'org.apache.spark.sql.test.ExamplePointUDT'
def serialize(self, obj):
return [obj.x, obj.y]
def deserialize(self, datum):
return ExamplePoint(datum[0], datum[1])
class ExamplePoint:
"""
An example class to demonstrate UDT in Scala, Java, and Python.
"""
__UDT__ = ExamplePointUDT()
def __init__(self, x, y):
self.x = x
self.y = y
def __repr__(self):
return "ExamplePoint(%s,%s)" % (self.x, self.y)
def __str__(self):
return "(%s,%s)" % (self.x, self.y)
def __eq__(self, other):
return isinstance(other, self.__class__) and \
other.x == self.x and other.y == self.y
class PythonOnlyUDT(UserDefinedType):
"""
User-defined type (UDT) for ExamplePoint.
"""
@classmethod
def sqlType(self):
return ArrayType(DoubleType(), False)
@classmethod
def module(cls):
return '__main__'
def serialize(self, obj):
return [obj.x, obj.y]
def deserialize(self, datum):
return PythonOnlyPoint(datum[0], datum[1])
@staticmethod
def foo():
pass
@property
def props(self):
return {}
class PythonOnlyPoint(ExamplePoint):
"""
An example class to demonstrate UDT in only Python
"""
__UDT__ = PythonOnlyUDT()
class MyObject(object):
def __init__(self, key, value):
self.key = key
self.value = value
class DataTypeTests(unittest.TestCase):
# regression test for SPARK-6055
def test_data_type_eq(self):
lt = LongType()
lt2 = pickle.loads(pickle.dumps(LongType()))
self.assertEqual(lt, lt2)
# regression test for SPARK-7978
def test_decimal_type(self):
t1 = DecimalType()
t2 = DecimalType(10, 2)
self.assertTrue(t2 is not t1)
self.assertNotEqual(t1, t2)
t3 = DecimalType(8)
self.assertNotEqual(t2, t3)
# regression test for SPARK-10392
def test_datetype_equal_zero(self):
dt = DateType()
self.assertEqual(dt.fromInternal(0), datetime.date(1970, 1, 1))
# regression test for SPARK-17035
def test_timestamp_microsecond(self):
tst = TimestampType()
self.assertEqual(tst.toInternal(datetime.datetime.max) % 1000000, 999999)
def test_empty_row(self):
row = Row()
self.assertEqual(len(row), 0)
class SQLTests(ReusedPySparkTestCase):
@classmethod
def setUpClass(cls):
ReusedPySparkTestCase.setUpClass()
cls.tempdir = tempfile.NamedTemporaryFile(delete=False)
os.unlink(cls.tempdir.name)
cls.spark = SparkSession(cls.sc)
cls.testData = [Row(key=i, value=str(i)) for i in range(100)]
cls.df = cls.spark.createDataFrame(cls.testData)
@classmethod
def tearDownClass(cls):
ReusedPySparkTestCase.tearDownClass()
cls.spark.stop()
shutil.rmtree(cls.tempdir.name, ignore_errors=True)
def test_sqlcontext_reuses_sparksession(self):
sqlContext1 = SQLContext(self.sc)
sqlContext2 = SQLContext(self.sc)
self.assertTrue(sqlContext1.sparkSession is sqlContext2.sparkSession)
def tearDown(self):
super(SQLTests, self).tearDown()
# tear down test_bucketed_write state
self.spark.sql("DROP TABLE IF EXISTS pyspark_bucket")
def test_row_should_be_read_only(self):
row = Row(a=1, b=2)
self.assertEqual(1, row.a)
def foo():
row.a = 3
self.assertRaises(Exception, foo)
row2 = self.spark.range(10).first()
self.assertEqual(0, row2.id)
def foo2():
row2.id = 2
self.assertRaises(Exception, foo2)
def test_range(self):
self.assertEqual(self.spark.range(1, 1).count(), 0)
self.assertEqual(self.spark.range(1, 0, -1).count(), 1)
self.assertEqual(self.spark.range(0, 1 << 40, 1 << 39).count(), 2)
self.assertEqual(self.spark.range(-2).count(), 0)
self.assertEqual(self.spark.range(3).count(), 3)
def test_duplicated_column_names(self):
df = self.spark.createDataFrame([(1, 2)], ["c", "c"])
row = df.select('*').first()
self.assertEqual(1, row[0])
self.assertEqual(2, row[1])
self.assertEqual("Row(c=1, c=2)", str(row))
# Cannot access columns
self.assertRaises(AnalysisException, lambda: df.select(df[0]).first())
self.assertRaises(AnalysisException, lambda: df.select(df.c).first())
self.assertRaises(AnalysisException, lambda: df.select(df["c"]).first())
def test_column_name_encoding(self):
"""Ensure that created columns has `str` type consistently."""
columns = self.spark.createDataFrame([('Alice', 1)], ['name', u'age']).columns
self.assertEqual(columns, ['name', 'age'])
self.assertTrue(isinstance(columns[0], str))
self.assertTrue(isinstance(columns[1], str))
def test_explode(self):
from pyspark.sql.functions import explode
d = [Row(a=1, intlist=[1, 2, 3], mapfield={"a": "b"})]
rdd = self.sc.parallelize(d)
data = self.spark.createDataFrame(rdd)
result = data.select(explode(data.intlist).alias("a")).select("a").collect()
self.assertEqual(result[0][0], 1)
self.assertEqual(result[1][0], 2)
self.assertEqual(result[2][0], 3)
result = data.select(explode(data.mapfield).alias("a", "b")).select("a", "b").collect()
self.assertEqual(result[0][0], "a")
self.assertEqual(result[0][1], "b")
def test_and_in_expression(self):
self.assertEqual(4, self.df.filter((self.df.key <= 10) & (self.df.value <= "2")).count())
self.assertRaises(ValueE
|
Xarthisius/girder
|
girder/api/v1/user.py
|
Python
|
apache-2.0
| 19,521
| 0.000922
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
###############################################################################
# Copyright 2013 Kitware Inc.
#
# Licensed under the Apache License, Version 2.0 ( the "License" );
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
###############################################################################
import base64
import cherrypy
import datetime
from ..describe import Description, autoDescribeRoute
from girder.api import access
from girder.api.rest import Resource, filtermodel, setCurrentUser
from girder.constants import AccessType, SettingKey, TokenScope
from girder.exceptions import RestException, AccessException
from girder.models.password import Password
from girder.models.setting import Setting
from girder.models.token import Token
from girder.models.user import User as UserModel
from girder.utility import mail_utils
class User(Resource):
"""API Endpoint for users in the system."""
def __init__(self):
super(User, self).__init__()
self.resourceName = 'user'
self._model = UserModel()
self.route('DELETE', ('authentication',), self.logout)
self.route('DELETE', (':id',), self.deleteUser)
self.route('GET', (), self.find)
self.route('GET', ('me',), self.getMe)
self.route('GET', ('authentication',), self.login)
self.route('GET', (':id',), self.getUser)
self.route('GET', (':id', 'details'), self.getUserDetails)
self.route('GET', ('details',), self.getUsersDetails)
self.route('POST', (), self.createUser)
self.route('PUT', (':id',), self.updateUser)
self.route('PUT', ('password',), self.changePassword)
self.route('PUT', (':id', 'password'), self.changeUserPassword)
self.route('GET', ('password', 'temporary', ':id'),
self.checkTemporaryPassword)
self.route('PUT', ('password', 'temporary'),
self.generateTemporaryPassword)
self.route('POST', (':id', 'otp'), self.initializeOtp)
self.route('PUT', (':id', 'otp'), self.finalizeOtp)
self.route('DELETE', (':id', 'otp'), self.removeOtp)
self.route('PUT', (':id', 'verification'), self.verifyEmail)
self.route('POST', ('verification',), self.sendVerificationEmail)
@access.public
@filtermodel(model=UserModel)
@autoDescribeRoute(
Description('List or search for users.')
.responseClass('User', array=True)
.param('text', "Pass this to perform a full text search for items.", required=False)
.pagingParams(defaultSort='lastName')
)
def find(self, text, limit, offset, sort):
return list(self._model.search(
text=text, user=self.getCurrentUser(), offset=offset, limit=limit, sort=sort))
@access.public(scope=TokenScope.USER_INFO_READ)
@filtermodel(model=UserModel)
@autoDescribeRoute(
Description('Get a user by ID.')
.responseClass('User')
.modelParam('id', model=UserModel, level=AccessType.READ)
.errorResponse('ID was invalid.')
.errorResponse('You do not have permission to see this user.', 403)
)
def getUser(self, user):
return user
@access.public(scope=TokenScope.USER_INFO_READ)
@filtermodel(model=UserModel)
@autoDescribeRoute(
Description('Retrieve the currently logged-in user information.')
.responseClass('User')
)
def getMe(self):
return self.getCurrentUser()
@access.public
@autoDescribeRoute(
Description('Log in to the system.')
.notes('Pass your username and password using HTTP Basic Auth. Sends'
' a cookie that should be passed back in future requests.')
.param('Girder-OTP', 'A one-time password for this user', paramType='header',
required=False)
.errorResponse('Missing Authorization header.', 401)
.errorResponse('Invalid login or password.', 403)
)
def login(self):
if not Setting().get(SettingKey.ENABLE_PASSWORD_LOGIN):
raise RestException('Password login is disabled on this instance.')
user, token = self.getCurrentUser(returnToken=True)
# Only create and send new cookie if user isn't already s
|
ending a valid one.
if not user:
authHeader = cherrypy.request.headers.ge
|
t('Girder-Authorization')
if not authHeader:
authHeader = cherrypy.request.headers.get('Authorization')
if not authHeader or not authHeader[0:6] == 'Basic ':
raise RestException('Use HTTP Basic Authentication', 401)
try:
credentials = base64.b64decode(authHeader[6:]).decode('utf8')
if ':' not in credentials:
raise TypeError
except Exception:
raise RestException('Invalid HTTP Authorization header', 401)
login, password = credentials.split(':', 1)
otpToken = cherrypy.request.headers.get('Girder-OTP')
user = self._model.authenticate(login, password, otpToken)
setCurrentUser(user)
token = self.sendAuthTokenCookie(user)
return {
'user': self._model.filter(user, user),
'authToken': {
'token': token['_id'],
'expires': token['expires'],
'scope': token['scope']
},
'message': 'Login succeeded.'
}
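    # Illustrative client-side sketch (not part of the upstream endpoint): the
    # host below is a hypothetical placeholder and the OTP header is optional.
    #
    #   import requests
    #   resp = requests.get('https://girder.example.com/api/v1/user/authentication',
    #                       auth=('alice', 'secret'),
    #                       headers={'Girder-OTP': '123456'})
    #   token = resp.json()['authToken']['token']
    #   # Girder conventionally accepts the token on later calls via this header:
    #   requests.get('https://girder.example.com/api/v1/user/me',
    #                headers={'Girder-Token': token})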
@access.user
@autoDescribeRoute(
Description('Log out of the system.')
.responseClass('Token')
.notes('Attempts to delete your authentication cookie.')
)
def logout(self):
token = self.getCurrentToken()
if token:
Token().remove(token)
self.deleteAuthTokenCookie()
return {'message': 'Logged out.'}
@access.public
@filtermodel(model=UserModel, addFields={'authToken'})
@autoDescribeRoute(
Description('Create a new user.')
.responseClass('User')
.param('login', "The user's requested login.")
.param('email', "The user's email address.")
.param('firstName', "The user's first name.")
.param('lastName', "The user's last name.")
.param('password', "The user's requested password")
.param('admin', 'Whether this user should be a site administrator.',
required=False, dataType='boolean', default=False)
.errorResponse('A parameter was invalid, or the specified login or'
' email already exists in the system.')
)
def createUser(self, login, email, firstName, lastName, password, admin):
currentUser = self.getCurrentUser()
regPolicy = Setting().get(SettingKey.REGISTRATION_POLICY)
if not currentUser or not currentUser['admin']:
admin = False
if regPolicy == 'closed':
raise RestException(
'Registration on this instance is closed. Contact an '
'administrator to create an account for you.')
user = self._model.createUser(
login=login, password=password, email=email, firstName=firstName,
lastName=lastName, admin=admin)
if not currentUser and self._model.canLogin(user):
setCurrentUser(user)
token = self.sendAuthTokenCookie(user)
user['authToken'] = {
'token': token['_id'],
'expires': token['expires']
}
return user
@access.user
@autoDescribeRoute(
Description('Delete a user by ID.')
.modelParam('id', model=UserModel, level=AccessType.ADMIN)
.errorResponse
|
pcmoritz/ray-1
|
rllib/examples/partial_gpus.py
|
Python
|
apache-2.0
| 152
| 0
|
# File
|
has been renamed.
raise DeprecationWarning("This file has been renamed to `fractional_gpus.py` "
|
"in the same folder!")
|
eadgarchen/tensorflow
|
tensorflow/contrib/cluster_resolver/python/training/tpu_cluster_resolver_test.py
|
Python
|
apache-2.0
| 5,570
| 0.003232
|
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for TPUClusterResolver."""
from __future__ import absolute_import
from __future__ import division
from __fu
|
ture__ import print_function
from tensorflow.contrib.cluster_resolver.python.training.tpu_cluster_resolver import TPUClusterResolver
from tensorflow.python.platform import tes
|
t
from tensorflow.python.training import server_lib
mock = test.mock
class MockRequestClass(object):
def __init__(self, name, tpu_map):
self._name = name
self._tpu_map = tpu_map
def execute(self):
if self._name in self._tpu_map:
return self._tpu_map[self._name]
else:
raise KeyError('Resource %s was not found' % self._name)
class MockNodeClass(object):
def __init__(self, tpu_map):
self._tpu_map = tpu_map
def get(self, name):
return MockRequestClass(name, self._tpu_map)
class TPUClusterResolverTest(test.TestCase):
def _verifyClusterSpecEquality(self, cluster_spec, expected_proto):
"""Verifies that the ClusterSpec generates the correct proto.
We are testing this four different ways to ensure that the ClusterSpec
returned by the TPUClusterResolver behaves identically to a normal
ClusterSpec when passed into the generic ClusterSpec libraries.
Args:
cluster_spec: ClusterSpec returned by the TPUClusterResolver
expected_proto: Expected protobuf
"""
self.assertProtoEquals(expected_proto, cluster_spec.as_cluster_def())
self.assertProtoEquals(
expected_proto, server_lib.ClusterSpec(cluster_spec).as_cluster_def())
self.assertProtoEquals(
expected_proto,
server_lib.ClusterSpec(cluster_spec.as_cluster_def()).as_cluster_def())
self.assertProtoEquals(
expected_proto,
server_lib.ClusterSpec(cluster_spec.as_dict()).as_cluster_def())
def mock_service_client(
self,
tpu_map=None):
if tpu_map is None:
tpu_map = {}
mock_locations = mock.MagicMock()
mock_locations.nodes.return_value = MockNodeClass(tpu_map)
mock_project = mock.MagicMock()
mock_project.locations.return_value = mock_locations
mock_client = mock.MagicMock()
mock_client.projects.return_value = mock_project
return mock_client
def testSimpleSuccessfulRetrieval(self):
tpu_map = {
'projects/test-project/locations/us-central1-c/nodes/test-tpu-1': {
'ipAddress': '10.1.2.3',
'port': '8470'
}
}
tpu_cluster_resolver = TPUClusterResolver(
project='test-project',
zone='us-central1-c',
tpu_names=['test-tpu-1'],
credentials=None,
service=self.mock_service_client(tpu_map=tpu_map))
actual_cluster_spec = tpu_cluster_resolver.cluster_spec()
expected_proto = """
job { name: 'tpu_worker' tasks { key: 0 value: '10.1.2.3:8470' } }
"""
self._verifyClusterSpecEquality(actual_cluster_spec, expected_proto)
def testMultipleSuccessfulRetrieval(self):
tpu_map = {
'projects/test-project/locations/us-central1-c/nodes/test-tpu-1': {
'ipAddress': '10.1.2.3',
'port': '8470'
},
'projects/test-project/locations/us-central1-c/nodes/test-tpu-2': {
'ipAddress': '10.4.5.6',
'port': '8470'
}
}
tpu_cluster_resolver = TPUClusterResolver(
project='test-project',
zone='us-central1-c',
tpu_names=['test-tpu-2', 'test-tpu-1'],
credentials=None,
service=self.mock_service_client(tpu_map=tpu_map))
actual_cluster_spec = tpu_cluster_resolver.cluster_spec()
expected_proto = """
job { name: 'tpu_worker' tasks { key: 0 value: '10.4.5.6:8470' }
tasks { key: 1 value: '10.1.2.3:8470' } }
"""
self._verifyClusterSpecEquality(actual_cluster_spec, expected_proto)
def testGetMasterMultipleEntries(self):
tpu_map = {
'projects/test-project/locations/us-central1-c/nodes/test-tpu-1': {
'ipAddress': '10.1.2.3',
'port': '8470'
},
'projects/test-project/locations/us-central1-c/nodes/test-tpu-2': {
'ipAddress': '10.4.5.6',
'port': '8470'
}
}
tpu_cluster_resolver = TPUClusterResolver(
project='test-project',
zone='us-central1-c',
tpu_names=['test-tpu-2', 'test-tpu-1'],
credentials=None,
service=self.mock_service_client(tpu_map=tpu_map))
self.assertEqual('grpc://10.4.5.6:8470', tpu_cluster_resolver.get_master())
def testGetMasterNoEntries(self):
tpu_map = {}
tpu_cluster_resolver = TPUClusterResolver(
project='test-project',
zone='us-central1-c',
tpu_names=[],
credentials=None,
service=self.mock_service_client(tpu_map=tpu_map))
with self.assertRaises(ValueError):
tpu_cluster_resolver.get_master()
if __name__ == '__main__':
test.main()
|
mfem/PyMFEM
|
mfem/_par/geom.py
|
Python
|
bsd-3-clause
| 22,071
| 0.004621
|
# This file was automatically generated by SWIG (http://www.swig.org).
# Version 4.0.2
#
# Do not make changes to this file unless you know what you are doing--modify
# the SWIG interface file instead.
from sys import version_info as _swig_python_version_info
if _swig_python_version_info < (2, 7, 0):
raise RuntimeError("Python 2.7 or later required")
# Import the low-level C/C++ module
if __package__ or "." in __name__:
from . import _geom
else:
import _geom
try:
import builtins as __builtin__
except ImportError:
import __builtin__
_swig_new_instance_method = _geom.SWIG_PyInstanceMethod_New
_swig_new_static_method = _geom.SWIG_PyStaticMethod_New
def _swig_repr(self):
try:
strthis = "proxy of " + self.this.__repr__()
except __builtin__.Exception:
strthis = ""
return "<%s.%s; %s >" % (self.__class__.__module__, self.__class__.__name__, strthis,)
def _swig_setattr_nondynamic_instance_variable(set):
def set_instance_attr(self, name, value):
if name == "thisown":
self.this.own(value)
elif name == "this":
set(self, name, value)
elif hasattr(self, name) and isinstance(getattr(type(self), name), property):
set(self, name, value)
else:
raise AttributeError("You cannot add instance attributes to %s" % self)
return set_instance_attr
def _swig_setattr_nondynamic_class_variable(set):
def set_class_attr(cls, name, value):
if hasattr(cls, name) and not isinstance(getattr(cls, name), property):
set(cls, name, value)
else:
raise AttributeError("You cannot add class attributes to %s" % cls)
return set_class_attr
def _swig_add_metaclass(metaclass):
"""Class decorator for adding a metaclass to a SWIG wrapped class - a slimmed down version of six.add_metaclass"""
def wrapper(cls):
return metaclass(cls.__name__, cls.__bases__, cls.__dict__.copy())
return wrapper
class _SwigNonDynamicMeta(type):
"""Meta class to enforce nondynamic attributes (no new attributes) for a class"""
__setattr__ = _swig_setattr_nondynamic_class_variable(type.__setattr__)
import weakref
import mfem._par.intrules
import mfem._par.array
import mfem._par.mem_manager
import mfem._par.densemat
import mfem._par.vector
import mfem._par.operators
import mfem._par.matrix
class Geometry(object):
r"""Proxy of C++ mfem::Geometry class."""
thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag")
__repr__ = _swig_repr
INVALID = _geom.Geometry_INVALID
|
POINT = _geom.Geometry_POINT
SEGMENT = _geom.Geometry_SEGMENT
TRIANGLE = _geom.Geometry_TRIANGLE
SQUARE = _geom.Geometry_SQUARE
TETRAHEDRON = _geom.Geometry_TETRAHEDRON
CUBE = _geom.Geometry_CUBE
PRISM = _geom.Geometry_PRISM
PYRAMID = _geom.Geometry_PYRAMID
NUM_GEOMETRIES = _geo
|
m.Geometry_NUM_GEOMETRIES
NumGeom = _geom.Geometry_NumGeom
MaxDim = _geom.Geometry_MaxDim
Name = property(_geom.Geometry_Name_get, _geom.Geometry_Name_set, doc=r"""Name : a(mfem::Geometry::NumGeom).p.q(const).char""")
def __init__(self):
r"""__init__(Geometry self) -> Geometry"""
_geom.Geometry_swiginit(self, _geom.new_Geometry())
__swig_destroy__ = _geom.delete_Geometry
def GetVertices(self, GeomType):
r"""GetVertices(Geometry self, int GeomType) -> IntegrationRule"""
return _geom.Geometry_GetVertices(self, GeomType)
GetVertices = _swig_new_instance_method(_geom.Geometry_GetVertices)
def GetCenter(self, GeomType):
r"""GetCenter(Geometry self, int GeomType) -> IntegrationPoint"""
return _geom.Geometry_GetCenter(self, GeomType)
GetCenter = _swig_new_instance_method(_geom.Geometry_GetCenter)
@staticmethod
def GetRandomPoint(GeomType, ip):
r"""GetRandomPoint(int GeomType, IntegrationPoint ip)"""
return _geom.Geometry_GetRandomPoint(GeomType, ip)
GetRandomPoint = _swig_new_static_method(_geom.Geometry_GetRandomPoint)
@staticmethod
def CheckPoint(*args):
r"""
CheckPoint(int GeomType, IntegrationPoint ip) -> bool
CheckPoint(int GeomType, IntegrationPoint ip, double eps) -> bool
"""
return _geom.Geometry_CheckPoint(*args)
CheckPoint = _swig_new_static_method(_geom.Geometry_CheckPoint)
@staticmethod
def ProjectPoint(*args):
r"""
ProjectPoint(int GeomType, IntegrationPoint beg, IntegrationPoint end) -> bool
ProjectPoint(int GeomType, IntegrationPoint ip) -> bool
"""
return _geom.Geometry_ProjectPoint(*args)
ProjectPoint = _swig_new_static_method(_geom.Geometry_ProjectPoint)
def GetGeomToPerfGeomJac(self, GeomType):
r"""GetGeomToPerfGeomJac(Geometry self, int GeomType) -> DenseMatrix"""
return _geom.Geometry_GetGeomToPerfGeomJac(self, GeomType)
GetGeomToPerfGeomJac = _swig_new_instance_method(_geom.Geometry_GetGeomToPerfGeomJac)
def GetPerfGeomToGeomJac(self, GeomType):
r"""GetPerfGeomToGeomJac(Geometry self, int GeomType) -> DenseMatrix"""
return _geom.Geometry_GetPerfGeomToGeomJac(self, GeomType)
GetPerfGeomToGeomJac = _swig_new_instance_method(_geom.Geometry_GetPerfGeomToGeomJac)
def GetPerfPointMat(self, GeomType, pm):
r"""GetPerfPointMat(Geometry self, int GeomType, DenseMatrix pm)"""
return _geom.Geometry_GetPerfPointMat(self, GeomType, pm)
GetPerfPointMat = _swig_new_instance_method(_geom.Geometry_GetPerfPointMat)
def JacToPerfJac(self, GeomType, J, PJ):
r"""JacToPerfJac(Geometry self, int GeomType, DenseMatrix J, DenseMatrix PJ)"""
return _geom.Geometry_JacToPerfJac(self, GeomType, J, PJ)
JacToPerfJac = _swig_new_instance_method(_geom.Geometry_JacToPerfJac)
@staticmethod
def IsTensorProduct(geom):
r"""IsTensorProduct(mfem::Geometry::Type geom) -> bool"""
return _geom.Geometry_IsTensorProduct(geom)
IsTensorProduct = _swig_new_static_method(_geom.Geometry_IsTensorProduct)
@staticmethod
def TensorProductGeometry(dim):
r"""TensorProductGeometry(int dim) -> mfem::Geometry::Type"""
return _geom.Geometry_TensorProductGeometry(dim)
TensorProductGeometry = _swig_new_static_method(_geom.Geometry_TensorProductGeometry)
def NumBdr(self, GeomType):
r"""NumBdr(Geometry self, int GeomType) -> int"""
return _geom.Geometry_NumBdr(self, GeomType)
NumBdr = _swig_new_instance_method(_geom.Geometry_NumBdr)
# Register Geometry in _geom:
_geom.Geometry_swigregister(Geometry)
cvar = _geom.cvar
Geometry.NumBdrArray = _geom.cvar.Geometry_NumBdrArray
Geometry.Volume = _geom.cvar.Geometry_Volume
Geometry.Dimension = _geom.cvar.Geometry_Dimension
Geometry.DimStart = _geom.cvar.Geometry_DimStart
Geometry.NumVerts = _geom.cvar.Geometry_NumVerts
Geometry.NumEdges = _geom.cvar.Geometry_NumEdges
Geometry.NumFaces = _geom.cvar.Geometry_NumFaces
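# Illustrative usage sketch (not produced by SWIG); the import path below assumes
# the parallel wrappers are exposed as mfem.par, which may differ per install.
#
#   import mfem.par as mfem
#   geom = mfem.Geometry()
#   geom.NumBdr(mfem.Geometry.TRIANGLE)       # -> 3 boundary entities (edges)
#   c = geom.GetCenter(mfem.Geometry.SQUARE)  # IntegrationPoint at (0.5, 0.5)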
def Geometry_GetRandomPoint(GeomType, ip):
r"""Geometry_GetRandomPoint(int GeomType, IntegrationPoint ip)"""
return _geom.Geometry_GetRandomPoint(GeomType, ip)
Geometry_GetRandomPoint = _geom.Geometry_GetRandomPoint
def Geometry_CheckPoint(*args):
r"""
Geometry_CheckPoint(int GeomType, IntegrationPoint ip) -> bool
Geometry_CheckPoint(int GeomType, IntegrationPoint ip, double eps) -> bool
"""
return _geom.Geometry_CheckPoint(*args)
Geometry_CheckPoint = _geom.Geometry_CheckPoint
def Geometry_ProjectPoint(*args):
r"""
Geometry_ProjectPoint(int GeomType, IntegrationPoint beg, IntegrationPoint end) -> bool
Geometry_ProjectPoint(int GeomType, IntegrationPoint ip) -> bool
"""
return _geom.Geometry_ProjectPoint(*args)
Geometry_ProjectPoint = _geom.Geometry_ProjectPoint
def Geometry_IsTensorProduct(geom):
r"""Geometry_IsTensorProduct(mfem::Geometry::Type geom) -> bool"""
return _geom.Geometry_IsTensorProduct(geom)
Geometry_IsTensorProduct = _geom.Geometry_IsTensorProduct
def Geometry_TensorProductGeometry(dim):
r"""Geometry_TensorProd
|
jburel/openmicroscopy
|
examples/Training/python/Json_Api/Login.py
|
Python
|
gpl-2.0
| 3,163
| 0
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (C) 2016-2017 University of Dundee & Open Microscopy Environment.
# All Rights Reserved.
# Use is subject to license terms supplied in LICENSE.txt
#
import requests
from Parse_OMERO_Properties import USERNAME, PASSWORD, OMERO_WEB_HOST, \
SERVER_NAME
session = requests.Session()
# Start by getting supported versions from the base url...
api_url = '%s/api/' % OMERO_WEB_HOST
print "Starting at:", api_url
r = session.get(api_url)
# we get a list of versions
v
|
ersions = r.json()['data']
# use most recent version...
version = versions[-1]
# get the 'base' url
base_ur
|
l = version['url:base']
r = session.get(base_url)
# which lists a bunch of urls as starting points
urls = r.json()
servers_url = urls['url:servers']
login_url = urls['url:login']
projects_url = urls['url:projects']
save_url = urls['url:save']
schema_url = urls['url:schema']
# To login we need to get CSRF token
token_url = urls['url:token']
token = session.get(token_url).json()['data']
print 'CSRF token', token
# We add this to our session header
# Needed for all POST, PUT, DELETE requests
session.headers.update({'X-CSRFToken': token,
'Referer': login_url})
# List the servers available to connect to
servers = session.get(servers_url).json()['data']
print 'Servers:'
for s in servers:
print '-id:', s['id']
print ' name:', s['server']
print ' host:', s['host']
print ' port:', s['port']
# find one called SERVER_NAME
servers = [s for s in servers if s['server'] == SERVER_NAME]
if len(servers) < 1:
raise Exception("Found no server called '%s'" % SERVER_NAME)
server = servers[0]
# Login with username, password and token
payload = {'username': USERNAME,
'password': PASSWORD,
# 'csrfmiddlewaretoken': token, # Using CSRFToken in header instead
'server': server['id']}
r = session.post(login_url, data=payload)
login_rsp = r.json()
assert r.status_code == 200
assert login_rsp['success']
eventContext = login_rsp['eventContext']
print 'eventContext', eventContext
# Can get our 'default' group
groupId = eventContext['groupId']
# With successful login, request.session will contain
# OMERO session details and reconnect to OMERO on
# each subsequent call...
# List projects:
# Limit number of projects per page
payload = {'limit': 2}
data = session.get(projects_url, params=payload).json()
assert len(data['data']) < 3
print "Projects:"
for p in data['data']:
print ' ', p['@id'], p['Name']
# Create a project:
projType = schema_url + '#Project'
# Need to specify target group
url = save_url + '?group=' + str(groupId)
r = session.post(url, json={'Name': 'API TEST foo', '@type': projType})
assert r.status_code == 201
project = r.json()['data']
project_id = project['@id']
print 'Created Project:', project_id, project['Name']
# Get project by ID
project_url = projects_url + str(project_id) + '/'
r = session.get(project_url)
project = r.json()
print project
# Update a project
project['Name'] = 'API test updated'
r = session.put(save_url, json=project)
# Delete a project:
r = session.delete(project_url)
|
Staffjoy/client_python
|
staffjoy/resources/role.py
|
Python
|
mit
| 1,528
| 0.000654
|
from staffjoy.resource import Resource
from staffjoy.resources.worker import Worker
from staffjoy.resources.schedule import Schedule
from staffjoy.resources.shift import Shift
from staffjoy.resources.shift_query import ShiftQuery
from staffjoy.resources.recurring_shift import RecurringShift
class Role(Resource):
PATH = "organizations/{organization_id}/locat
|
ions/{location_id}/roles/{role_id}"
ID_NAME = "role_id"
def get_workers(self, **kwargs):
return Worker.get_all(parent=self, **kwargs)
def get_worker(self, id=id):
return Worker.get(parent=self, id=id)
def create_worker(self, **kwargs):
return Worker.create(parent=self, **kwargs)
def get_schedules(self, **kwargs):
return Schedule.get_all(parent=self, **kwargs)
def get_schedule(self, id):
return Sc
|
hedule.get(parent=self, id=id)
def get_shifts(self, **kwargs):
return Shift.get_all(parent=self, **kwargs)
def get_shift(self, id):
return Shift.get(parent=self, id=id)
def create_shift(self, **kwargs):
return Shift.create(parent=self, **kwargs)
def get_shift_query(self, **kwargs):
return ShiftQuery.get_all(parent=self, **kwargs)
def get_recurring_shifts(self, **kwargs):
return RecurringShift.get_all(parent=self, **kwargs)
def get_recurring_shift(self, id):
return RecurringShift.get(parent=self, id=id)
def create_recurring_shift(self, **kwargs):
return RecurringShift.create(parent=self, **kwargs)
|
code-for-india/sahana_shelter_worldbank
|
controllers/org.py
|
Python
|
mit
| 10,621
| 0.005932
|
# -*- coding: utf-8 -*-
"""
Organization Registry - Controllers
"""
module = request.controller
resourcename = request.function
if not settings.has_module(module):
raise HTTP(404, body="Module disabled: %s" % module)
# -----------------------------------------------------------------------------
def index():
""" Module's Home Page """
return s3db.cms_index(module, alt_function="index_alt")
# -----------------------------------------------------------------------------
def index_alt():
"""
Module homepage for non-Admin users when no CMS content found
"""
# @ToDo: Move this to the Template (separate deployment_setting or else a customise for non-REST controllers)
template = settings.get_template()
if template == "SandyRelief":
# Just redirect to the Facilities
redirect(URL(f="facility"))
else:
# Just redirect to the list of Organisations
redirect(URL(f="organisation"))
# -----------------------------------------------------------------------------
def group():
""" RESTful CRUD controller """
return s3_rest_controller(rheader = s3db.org_rheader)
# -----------------------------------------------------------------------------
def region():
""" RESTful CRUD controller """
return s3_rest_controller()
# -----------------------------------------------------------------------------
def sector():
""" RESTful CRUD controller """
# Pre-processor
def prep(r):
# Location Filter
s3db.gis_location_filter(r)
return True
s3.prep = prep
return s3_rest_controller()
# -----------------------------------------------------------------------------
def subsector():
""" RESTful CRUD controller """
return s3_rest_controller()
# -----------------------------------------------------------------------------
def site():
"""
RESTful CRUD controller
- used by S3SiteAutocompleteWidget
which doesn't yet support filtering to just updateable sites
- used by site_contact_person()
- used by S3OptionsFilter (e.g. Asset Log)
"""
# Pre-processor
def prep(r):
if r.representation != "json" and \
r.method not in ("search_ac", "search_address_ac", "site_contact_person"):
return False
# Location Filter
s3db.gis_location_filter(r)
return True
s3.prep = prep
return s3_rest_controller()
# -----------------------------------------------------------------------------
def sites_for_org():
"""
Used to provide the list of Sites for an Organisation
- used in User Registration
"""
try:
org = request.args[0]
except:
result = current.xml.json_message(False, 400, "No Org provided!")
else:
stable = s3db.org_site
if settings.get_org_branches():
# Find all branches for this Organisation
btable = s3db.org_organisation_branch
query = (btable.organisation_id == org) & \
(btable.deleted != True)
rows = db(query).select(btable.branch_id)
org_ids = [row.branch_id for row in rows] + [org]
query = (stable.organisation_id.belongs(org_ids)) & \
(stable.deleted != True)
else:
query = (stable.organisation_id == org) & \
(stable.deleted != True)
rows = db(query).select(stable.site_id,
stable.name,
orderby=stable.name)
result = rows.json()
finally:
response.headers["Content-Type"] = "application/json"
return result
# -----------------------------------------------------------------------------
def facility():
""" RESTful CRUD controller """
return s3db.org_facility_controller()
# -----------------------------------------------------------------------------
def facility_type():
""" RESTful CRUD controller """
return s3_rest_controller()
# -----------------------------------------------------------------------------
def office_type():
""" RESTful CRUD controller """
return s3_rest_controller()
# -----------------------------------------------------------------------------
def organisation_type():
""" RESTful CRUD controller """
return s3_rest_controller()
# -----------------------------------------------------------------------------
def organisation():
""" RESTful CRUD controller """
# Defined in the Model for use from M
|
ultiple Controllers
|
for unified menus
return s3db.org_organisation_controller()
# -----------------------------------------------------------------------------
def org_search():
"""
Organisation REST controller
- limited to just search_ac for use in Autocompletes
- allows differential access permissions
"""
s3.prep = lambda r: r.method == "search_ac"
return s3_rest_controller(module, "organisation")
# -----------------------------------------------------------------------------
def organisation_list_represent(l):
organisation_represent = s3db.org_organisation_represent
if l:
max_length = 4
if len(l) > max_length:
return "%s, etc" % \
organisation_represent.multiple(l[:max_length])
else:
return organisation_represent.multiple(l)
else:
return NONE
# -----------------------------------------------------------------------------
def office():
""" RESTful CRUD controller """
# Defined in the Model for use from Multiple Controllers for unified menus
return s3db.org_office_controller()
# -----------------------------------------------------------------------------
def person():
""" Person controller for AddPersonWidget """
def prep(r):
if r.representation != "s3json":
# Do not serve other representations here
return False
else:
current.xml.show_ids = True
return True
s3.prep = prep
return s3_rest_controller("pr", "person")
# -----------------------------------------------------------------------------
def room():
""" RESTful CRUD controller """
return s3_rest_controller()
# -----------------------------------------------------------------------------
def mailing_list():
""" RESTful CRUD controller """
tablename = "pr_group"
table = s3db[tablename]
# Only groups with a group_type of 5
s3.filter = (table.group_type == 5)
table.group_type.writable = False
table.group_type.readable = False
table.name.label = T("Mailing List Name")
s3.crud_strings[tablename] = s3.pr_mailing_list_crud_strings
# define the list_fields
list_fields = s3db.configure(tablename,
list_fields = ["id",
"name",
"description",
])
# Components
_rheader = s3db.pr_rheader
_tabs = [(T("Organization"), "organisation/"),
(T("Mailing List Details"), None),
]
if len(request.args) > 0:
_tabs.append((T("Members"), "group_membership"))
if "viewing" in request.vars:
tablename, record_id = request.vars.viewing.rsplit(".", 1)
if tablename == "org_organisation":
table = s3db[tablename]
_rheader = s3db.org_rheader
_tabs = []
s3db.add_components("pr_group", pr_group_membership="group_id")
rheader = lambda r: _rheader(r, tabs = _tabs)
return s3_rest_controller("pr",
"group",
rheader=rheader)
# -----------------------------------------------------------------------------
def donor():
""" RESTful CRUD controller """
tablename = "org_donor"
table = s3db[tablename]
tablename = "org_donor"
s3.crud_strings[tablename] = Storage(
label_create = ADD_DONOR,
title_display = T("Donor Details"),
title_list = T("Donors Report"),
|
rfreiberger/Automate-the-Boring-Stuff
|
ch15/countdown.py
|
Python
|
bsd-2-clause
| 317
| 0.0347
|
#! python3
# countdown.py - A simple countdown script.
import time, subprocess
timeLeft = 60
while
|
timeLeft > 0:
print(timeLeft, end='')
time.sleep(1)
timeLeft = t
|
imeLeft - 1
# TODO: At the end of the countdown, play a sound file.
subprocess.Popen(['start', 'alarm.wav'], shell=True)
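# The 'start' command above is Windows-only; a rough, untested sketch of
# platform-specific alternatives (assuming the players are installed):
#   subprocess.Popen(['afplay', 'alarm.wav'])   # macOS
#   subprocess.Popen(['aplay', 'alarm.wav'])    # many Linux distributions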
|
hufeiya/leetcode
|
python/73_Set_Matrix_Zeroes.py
|
Python
|
gpl-2.0
| 1,004
| 0.002988
|
class Sol
|
ution(object):
def setZeroes(self, matrix):
"""
:type matrix: List[List[int]]
:rtype: void Do not return anything, modify matrix in-place instead.
"""
width,height = len(matrix[0]),len(matrix)
for i in xrange(height):
foundzero = False
for j in xrange(width):
|
if matrix[i][j] == 0:
foundzero = True
matrix[i][j] = float("inf")
if not foundzero:
continue
for j in xrange(width):
if matrix[i][j] != float("inf"):
matrix[i][j] = 0
for i in xrange(width):
foundtarget = False
for j in xrange(height):
if matrix[j][i] == float("inf"):
foundtarget = True
break
if not foundtarget:
continue
for j in xrange(height):
matrix[j][i] = 0
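# Illustrative driver (not part of the original submission); Python 2, since the
# solution above relies on xrange.
if __name__ == '__main__':
    m = [[1, 1, 1],
         [1, 0, 1],
         [1, 1, 1]]
    Solution().setZeroes(m)
    print m  # expected: [[1, 0, 1], [0, 0, 0], [1, 0, 1]]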
|
syoamakase/Re-ROS
|
re_environments/src/soccer_PK/soccer_PK_reward.py
|
Python
|
apache-2.0
| 1,763
| 0.003971
|
#!/usr/bin/env python
import rospy
from std_msgs.msg import Float32
import numpy as np
import soccer_PK.utils
rospy.init_node("reward")
pub = rospy.Publisher("reward", Float32, queue_size=10)
rate = rospy.Rate(3)
rospy.wait_for_service('/gazebo/get_model_state')
soccer_PK.utils.reset_world()
# initial position
ball_prev = 3.25
episode = 1
while not rospy.is_shutdown():
tic = rospy.get_time()
toc = tic
prev_reward = None
while toc - tic < 10:
done = False
# pub.publish(reward)
ball_locationx ,ball_locationy = soccer_PK.utils.get_ball_location()
# Goal
if ball_locationx > 4.5:
rospy.lo
|
ginfo("GOAL!!!")
#
|
save log file ($HOME/.ros/)
f = open('episode_result.log', 'a')
f.write('episode'+str(episode)+': 4.5\n')
f.close()
# reset
episode += 1
reward = 10
done = True
rospy.set_param("reward_value",[reward, done])
tic = rospy.get_time()
soccer_PK.utils.reset_world()
rospy.sleep(1)
        # if the ball doesn't reach the goal
reward = (ball_prev - ball_locationx) / ball_prev
if prev_reward != reward:
rospy.set_param("reward_value",[reward, done])
prev_reward = reward
toc = rospy.get_time()
reward = -10
done = True
prev_reward = reward
# pub.publish(reward)
rospy.set_param("reward_value",[reward, done])
ball_locationx ,ball_locationy = soccer_PK.utils.get_ball_location()
f = open('episode_result.log', 'a')
f.write('episode'+str(episode)+': '+str(ball_locationx)+'\n')
f.close()
episode += 1
soccer_PK.utils.reset_world()
rospy.sleep(1)
rate.sleep()
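# Illustrative consumer sketch (not part of this node): an agent node could read
# the reward written to the parameter server above, e.g.
#   reward, done = rospy.get_param("reward_value")
# which mirrors the two-element list set by rospy.set_param in this script.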
|
wyrdmeister/OnlineAnalysis
|
OAGui/src/Control/Ui/Ui_OAMultiplot.py
|
Python
|
gpl-3.0
| 6,385
| 0.002819
|
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file '/media/Home/Documents/SoftProjects/LDM/OAGui/src/Control/Ui/OAMultiplot.ui'
#
# Created: Tue Apr 16 14:32:36 2013
# by: PyQt4 UI code generator 4.9.1
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore, QtGui
try:
_fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
_fromUtf8 = lambda s: s
class Ui_OAMultiplot(object):
def setupUi(self, OAMultiplot):
OAMultiplot.setObjectName(_fromUtf8("OAMultiplot"))
OAMultiplot.resize(773, 489)
OAMultiplot.setMinimumSize(QtCore.QSize(731, 489))
icon = QtGui.QIcon()
icon.addPixmap(QtGui.QPixmap(_fromUtf8(":/icons/Images/oa_editor.png")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
OAMultiplot.setWindowIcon(icon)
self.plot_area = QtGui.QWidget(OAMultiplot)
self.plot_area.setGeometry(QtCore.QRect(10, 10, 591, 431))
self.plot_area.setObjectName(_fromUtf8("plot_area"))
self.close_button = QtGui.QPushButton(OAMultiplot)
self.close_button.setGeometry(QtCore.QRect(660, 450, 98, 27))
self.close_button.setObjectName(_fromUtf8("close_button"))
self.control_frame = QtGui.QFrame(OAMultiplot)
self.control_frame.setGeometry(QtCore.QRect(610, 10, 151, 381))
self.control_frame.setFrameShape(QtGui.QFrame.StyledPanel)
self.control_frame.setFrameShadow(QtGui.QFrame.Raised)
self.control_frame.setObjectName(_fromUtf8("control_frame"))
self.plot_selector = QtGui.QComboBox(self.control_frame)
self.plot_selector.setGeometry(QtCore.QRect(10, 10, 131, 27))
self.plot_selector.setObjectName(_fromUtf8("plot_selector"))
self.plot_scatter = QtGui.QPushButton(self.control_frame)
self.plot_scatter.setGeometry(QtCore.QRect(10, 90, 131, 27))
self.plot_scatter.setObjectName(_fromUtf8("plot_scatter"))
self.plot_hist2d = QtGui.QPushButton(self.control_frame)
self.plot_hist2d.setGeomet
|
ry(QtCore.QRect(10, 130, 131, 27))
self.plot_hist2d.setObjectName(_fromUtf8("plot_hist2d"))
self.zmin_slider = QtGui.QSlider(self.control_frame)
self.zmin_slider.setGeometry(QtCore.QRect(10, 270, 131, 29))
self.zmin_slider.setOrientation(QtCore.Qt.Horizontal)
self.zmin_slider.setObjectName(_fromU
|
tf8("zmin_slider"))
self.zmax_slider = QtGui.QSlider(self.control_frame)
self.zmax_slider.setGeometry(QtCore.QRect(10, 320, 131, 29))
self.zmax_slider.setProperty("value", 99)
self.zmax_slider.setOrientation(QtCore.Qt.Horizontal)
self.zmax_slider.setObjectName(_fromUtf8("zmax_slider"))
self.en_xautoscale = QtGui.QCheckBox(self.control_frame)
self.en_xautoscale.setGeometry(QtCore.QRect(10, 200, 121, 22))
self.en_xautoscale.setObjectName(_fromUtf8("en_xautoscale"))
self.label = QtGui.QLabel(self.control_frame)
self.label.setGeometry(QtCore.QRect(10, 300, 41, 17))
self.label.setObjectName(_fromUtf8("label"))
self.label_2 = QtGui.QLabel(self.control_frame)
self.label_2.setGeometry(QtCore.QRect(10, 350, 41, 17))
self.label_2.setObjectName(_fromUtf8("label_2"))
self.zmin_value = QtGui.QLabel(self.control_frame)
self.zmin_value.setGeometry(QtCore.QRect(70, 300, 66, 17))
self.zmin_value.setObjectName(_fromUtf8("zmin_value"))
self.zmax_value = QtGui.QLabel(self.control_frame)
self.zmax_value.setGeometry(QtCore.QRect(70, 350, 66, 17))
self.zmax_value.setObjectName(_fromUtf8("zmax_value"))
self.en_yautoscale = QtGui.QCheckBox(self.control_frame)
self.en_yautoscale.setGeometry(QtCore.QRect(10, 220, 121, 22))
self.en_yautoscale.setObjectName(_fromUtf8("en_yautoscale"))
self.en_zautoscale = QtGui.QCheckBox(self.control_frame)
self.en_zautoscale.setGeometry(QtCore.QRect(10, 240, 121, 22))
self.en_zautoscale.setObjectName(_fromUtf8("en_zautoscale"))
self.plot_reset = QtGui.QPushButton(self.control_frame)
self.plot_reset.setGeometry(QtCore.QRect(10, 50, 131, 27))
self.plot_reset.setObjectName(_fromUtf8("plot_reset"))
self.hist_xbin = QtGui.QSpinBox(self.control_frame)
self.hist_xbin.setGeometry(QtCore.QRect(10, 160, 60, 27))
self.hist_xbin.setMaximum(1000)
self.hist_xbin.setObjectName(_fromUtf8("hist_xbin"))
self.hist_ybin = QtGui.QSpinBox(self.control_frame)
self.hist_ybin.setGeometry(QtCore.QRect(80, 160, 60, 27))
self.hist_ybin.setMaximum(1000)
self.hist_ybin.setObjectName(_fromUtf8("hist_ybin"))
self.retranslateUi(OAMultiplot)
QtCore.QMetaObject.connectSlotsByName(OAMultiplot)
def retranslateUi(self, OAMultiplot):
OAMultiplot.setWindowTitle(QtGui.QApplication.translate("OAMultiplot", "OA Data Viewer", None, QtGui.QApplication.UnicodeUTF8))
self.close_button.setText(QtGui.QApplication.translate("OAMultiplot", "Close", None, QtGui.QApplication.UnicodeUTF8))
self.plot_scatter.setText(QtGui.QApplication.translate("OAMultiplot", "Scatter", None, QtGui.QApplication.UnicodeUTF8))
self.plot_hist2d.setText(QtGui.QApplication.translate("OAMultiplot", "2D histogram", None, QtGui.QApplication.UnicodeUTF8))
self.en_xautoscale.setText(QtGui.QApplication.translate("OAMultiplot", "X Autoscale", None, QtGui.QApplication.UnicodeUTF8))
self.label.setText(QtGui.QApplication.translate("OAMultiplot", "Zmin", None, QtGui.QApplication.UnicodeUTF8))
self.label_2.setText(QtGui.QApplication.translate("OAMultiplot", "Zmax", None, QtGui.QApplication.UnicodeUTF8))
self.zmin_value.setText(QtGui.QApplication.translate("OAMultiplot", "0.0", None, QtGui.QApplication.UnicodeUTF8))
self.zmax_value.setText(QtGui.QApplication.translate("OAMultiplot", "0.0", None, QtGui.QApplication.UnicodeUTF8))
self.en_yautoscale.setText(QtGui.QApplication.translate("OAMultiplot", "Y Autoscale", None, QtGui.QApplication.UnicodeUTF8))
self.en_zautoscale.setText(QtGui.QApplication.translate("OAMultiplot", "Z Autoscale", None, QtGui.QApplication.UnicodeUTF8))
self.plot_reset.setText(QtGui.QApplication.translate("OAMultiplot", "Reset plot", None, QtGui.QApplication.UnicodeUTF8))
import OAControl_rc
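# Illustrative usage sketch (not generated by pyuic4): a host widget typically
# instantiates the generated class and calls setupUi on itself, e.g.
#   class OAMultiplotWidget(QtGui.QWidget):
#       def __init__(self, parent=None):
#           QtGui.QWidget.__init__(self, parent)
#           self.ui = Ui_OAMultiplot()
#           self.ui.setupUi(self)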
|
ckc6cz/osf.io
|
tests/test_sanitize.py
|
Python
|
apache-2.0
| 2,081
| 0.002883
|
import unittest
from nose.tools import * # flake8: noqa
from website.util import sanitize
class TestSanitize(unittest.TestCase):
def test_escape_html(self):
assert_equal(
sanitize.clean_tag('<script> evil code </script>'),
'<script> evil code </script>',
)
assert_equal(
sanitize.clean_tag('<img src=javascript:moreevil><img>'),
'<img src="javascript:moreevil"><img>',
)
assert_equal(
sanitize.clean_tag('<iframe src=evilsite>'),
'<iframe src="evilsite">',
)
assert_equal(
sanitize.clean_tag(');</span><script></script><span>'),
');</span><script></script><span>',
)
def test_clean_tag(self):
assert_equal(
sanitize.clean_tag('\'\'\'\'\'"""""""<script></script>'),
''''''"""""""<script></script>',
)
def test_strip_html(self):
assert_equal(
sanitize.strip_html('<foo>bar</foo>'),
'bar'
)
def test_unescape_html(self):
assert_equal(
sanitize.unescape_entities('<> diamonds & diamonds <>'),
'<> diamonds & diamonds <>'
)
assert_equal(
sanitize.unescape_entities(['<>&'])[0],
'<>&'
)
assert_equal(
sanitize.unescape_entities(('<>&', ))[0],
'<>&'
)
assert_equal(
sanitize.unescape_entities({'key': '<>&'})['key'],
'<>&'
|
)
def test_safe_json(self):
"""Add escaping of forward slashes, but only where string literal contains closing markup"""
assert_equal(
sanitize.safe_json("I'm a string with / containing </closingtags>"),
|
'"I\'m a string with / containing <\\/closingtags>"'
)
|
SuperV1234/scelta
|
conanfile.py
|
Python
|
mit
| 777
| 0.003861
|
from conans import ConanFile, tools, CMake
import os
class SceltaConan(ConanFile):
name = "scelta"
version = "0.1"
|
url = "https://github.com/SuperV1234/scelta.git"
build_policy = "missing"
settings = "os", "compiler", "build_type", "arch"
def source(self):
self.run("git clone https://github.com/SuperV1234/scelta.git")
self.run("cd scelta && git checkout v0.1 && git submodule update --init")
def build(self):
cmake = CMake(self)
self.run('cmake %s/scelta %s' % (self.source_folder, cmake.command_line))
self.run("cmake --b
|
uild . %s" % cmake.build_config)
def package(self):
self.copy("*.hpp", dst="include", src="scelta/include")
def package_info(self):
self.info.header_only()
|
Ever-Never/smalisca
|
smalisca/controller/controller_parser.py
|
Python
|
mit
| 10,504
| 0.000476
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# -----------------------------------------------------------------------------
# File: controller/controller_parser.py
# Created: 2015-01-17
# Purpose: Controll commandline arguments for parsing files
#
# Copyright
# -----------------------------------------------------------------------------
# The MIT License (MIT)
#
# Copyright (c) 2015 Victor Dorneanu <info AAET dornea DOT nu>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
"""CLI controller for parsing files"""
import smalisca.core.smalisca_config as config
from smalisca.core.smalisca_app import App
from smalisca.core.smalisca_logging import log
from smalisca.modules.module_sql_models import AppSQLModel
from smalisca.modules.module_smali_parser import SmaliParser
import multiprocessing
import os
from cement.core import controller
from cement.core.controller import CementBaseController
class SmaliParserProcess(multiprocessing.Process):
"""Implements a multiprocessing.Process
Attributes:
dirs (list): List of directory paths
files (list): List of file paths
"""
def __init__(self, dirs, suffix, result_queue):
multiprocessing.Process.__init__(self)
self.result_queue = result_queue
self.dirs = dirs
self.suffix = suffix
def run(self):
"""Runs the process"""
c = 0
for d in self.dirs:
log.info("%s %d/%d Parsing %s ... " % (self.name, c, len(self.dirs), d))
# Parse directory
parser = SmaliParser(d, self.suffix)
parser.run()
# Get and save results
res = parser.get_results()
self.result_queue.put(res)
c += 1
class ConcurrentParser():
"""Implements concurrency features
Attributes:
processes (list): List of processes/workers
location (str): Path location
suffix (str): File suffix
jobs (int): Number of max allowed workers
depth (int): Recursion level for directories depth
result_queue (Queue): Proxy to some thread-safe queue
"""
# Use a manager to proxy access to the real queue
multimanager = multiprocessing.Manager()
result_queue = multimanager.Queue()
processes = []
def __init__(self, location, suffix, jobs, depth=3):
self.location = location
self.suffix = suffix
self.jobs = jobs
self.depth = depth - 1
def walk_location(self):
"""Walk through location and return lists of files and directories
Args:
location (str): Location path where to lookup for files and dirs
Returns:
tuple: (<list of dirs>, <list of files>)
"""
file_list = []
dirs_list = []
startinglevel = self.location.count(os.sep)
# "Walk" through location
for root, dirs, files in os.walk(self.location):
depth = root.count(os.sep) - startinglevel
# Collect dirs
for d in dirs:
dirpath = os.path.join(root, d)
if (os.path.isd
|
ir(dirpath)) and (depth == self.depth):
|
log.info("Adding %s to list" % dirpath)
dirs_list.append(dirpath)
# Collect files
for filename in files:
filepath = os.path.join(root, filename)
if os.path.isfile(filepath):
file_list.append(filepath)
# Save results
self.dirs = dirs_list
self.files = file_list
def run(self):
"""Parallelize parsing
Split input list into sublists according to the number of
specified jobs. Create new processes/workers and let them
do the parsing job.
"""
# Create sub-lists
for i in range(0, self.jobs):
sub_list = [self.dirs[j] for j in range(0, len(self.dirs))
if j % self.jobs == i]
# Create new process
if len(sub_list) > 0:
p = SmaliParserProcess(sub_list, self.suffix, self.result_queue)
self.processes.append(p)
# Start processes
for p in self.processes:
p.start()
# Exit the completed processes
for p in self.processes:
p.join()
# Get results
def get_results(self):
"""Merges results"""
results = []
queue_elements = [self.result_queue.get() for p in self.processes]
for e in queue_elements:
for r in e:
results.append(r)
return results
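# Illustrative sketch (not part of the module): ConcurrentParser can be driven
# directly, outside the cement controller below; the location and suffix are
# hypothetical.
#
#   parser = ConcurrentParser('/tmp/app_smali', 'smali', jobs=4, depth=3)
#   parser.walk_location()
#   parser.run()
#   results = parser.get_results()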
class ParserController(CementBaseController):
"""CLI Controller for parsing Smali files
Iterate through files and extract data from files:
* classes (name, type)
* class properties (name, type)
* methods (name, type, arguments, return value)
* calls (source, destination, arguments)
After extracting the information, the controller will
save the results either as **JSON** or **SQLite DB**.
Attributes:
location (str): Path where to lookup for files and dirs
suffix (str): File name suffix to lookup
jobs (int): Number of jobs to be created (default: 1)
"""
class Meta:
label = 'parser'
stacked_on = 'base'
stacked_type = 'nested'
description = config.HelpMessage.PARSER_HELP
arguments = config.COMMON_ARGS + [
(['-j', '--jobs'],
dict(help="Number of jobs/processes to be used", type=int)),
(['-l', '--location'],
dict(help="Set location (required)", required=True)),
(['-d', '--depth'],
dict(help="Path location depth", type=int)),
(['-s', '--suffix'],
dict(help="Set file suffix (required)", required=True)),
(['-f', '--format'],
dict(dest="fileformat", help="Files format",
choices=config.PARSER_OUTPUT_CHOICES)),
(['-o', '--output'],
dict(help="Specify output file")),
]
@controller.expose(hide=True, aliases=['run'])
def default(self):
"""Default command"""
if self.app.pargs.location and self.app.pargs.suffix:
self.location = self.app.pargs.location
self.suffix = self.app.pargs.suffix
# How many jobs (workers)?
if self.app.pargs.jobs and self.app.pargs.jobs > 0:
self.jobs = self.app.pargs.jobs
else:
self.jobs = multiprocessing.cpu_count()
# Walk location to which depth?
if self.app.pargs.depth and self.app.pargs.depth > 0:
self.depth = self.app.pargs.depth
else:
self.depth = 1
# Create new concurrent parser instance
concurrent_parser = ConcurrentParser(
self.location, self.suffix,
self.jobs, self.depth)
concurrent_parser.walk_location()
concurrent_parser.run()
# Output results
if (self.ap
|
Dlyma/keras-plus
|
keras/datasets/stock.py
|
Python
|
mit
| 1,315
| 0.008365
|
# -*- coding: utf-8 -*-
import cPickle
import sys, os
import numpy as np
# written by zhaowuxia @ 2015/5/22
# used to generate datasets for the adding problem
def generate_data(pkl_path, T, norm):
dataset = []
for f in os.listdir(pkl_path):
data = cPickle.load(open(os.path.join(pkl_path, f), 'rb'))
if len(data) > T:
data = data[:T+1]
data.reverse()
dataset.append(dat
|
a)
dataset = np.array(dataset) #[sz, T, nfea]
mins = []
maxs = []
if norm == 'minmax':
mins = dataset.min(axis=1, keepdims=True)
maxs = dataset.max(axis=1, keepdims=True) + 1e-4
dataset = (dataset-mins.repeat(T+1, 1)) / np.repeat(maxs - mins, T+1, 1)
dataset[dataset>1] = 0
# add noise [0, 0.01]
dataset
|
+= np.random.random(dataset.shape)/100
X = dataset[:, :T, :]
Y = dataset[:, -1, :]
return (X, Y, mins, maxs)
def load_data(pkl_path, T, path='stock.pkl', norm='minmax'):
data = []
if not os.path.exists(path):
print(path, 'not exists', T)
data = generate_data(pkl_path, T, norm)
cPickle.dump(data, open(path, 'wb'))
else:
print(path, 'exists', T)
data = cPickle.load(open(path, 'rb'))
assert(data[0].shape[1] == T)
return data #(X, Y)
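# Illustrative usage sketch (not part of the module); the pickle directory and
# window length are hypothetical.
#
#   X, Y, mins, maxs = load_data('data/stock_pkls', T=100)
#   # X: (num_series, T, num_features) windows; Y: the series value one step
#   # past each window, both min-max scaled per series when norm='minmax'
#   # (mins/maxs hold the per-series scaling arrays, empty lists otherwise).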
|
robin900/sqlalchemy
|
test/orm/inheritance/test_basic.py
|
Python
|
mit
| 94,636
| 0.005653
|
import warnings
from sqlalchemy.testing import eq_, is_, assert_raises, assert_raises_message
from sqlalchemy import *
from sqlalchemy import exc as sa_exc, util, event
from sqlalchemy.orm import *
from sqlalchemy.orm.util import instance_str
from sqlalchemy.orm import exc as orm_exc, attributes
from sqlalchemy.testing.assertsql import AllOf, CompiledSQL, RegexSQL, Or
from sqlalchemy.sql import table, column
from sqlalchemy import testing
from sqlalchemy.testing import engines
from sqlalchemy.testing import fixtures
from test.orm import _fixtures
from sqlalchemy.testing.schema import Table, Column
from sqlalchemy import inspect
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.testing.util import gc_collect
class O2MTest(fixtures.MappedTest)
|
:
"""deals with inheritance and one-to-many relationships"""
@classmethod
def define_tables(cls, metadata):
global foo, bar, blub
foo = Table('foo', metadata,
Column('id', Integer, primary_key=True,
test_needs_autoincr
|
ement=True),
Column('data', String(20)))
bar = Table('bar', metadata,
Column('id', Integer, ForeignKey('foo.id'), primary_key=True),
Column('bar_data', String(20)))
blub = Table('blub', metadata,
Column('id', Integer, ForeignKey('bar.id'), primary_key=True),
Column('foo_id', Integer, ForeignKey('foo.id'), nullable=False),
Column('blub_data', String(20)))
def test_basic(self):
class Foo(object):
def __init__(self, data=None):
self.data = data
def __repr__(self):
return "Foo id %d, data %s" % (self.id, self.data)
mapper(Foo, foo)
class Bar(Foo):
def __repr__(self):
return "Bar id %d, data %s" % (self.id, self.data)
mapper(Bar, bar, inherits=Foo)
class Blub(Bar):
def __repr__(self):
return "Blub id %d, data %s" % (self.id, self.data)
mapper(Blub, blub, inherits=Bar, properties={
'parent_foo':relationship(Foo)
})
sess = create_session()
b1 = Blub("blub #1")
b2 = Blub("blub #2")
f = Foo("foo #1")
sess.add(b1)
sess.add(b2)
sess.add(f)
b1.parent_foo = f
b2.parent_foo = f
sess.flush()
compare = ','.join([repr(b1), repr(b2), repr(b1.parent_foo),
repr(b2.parent_foo)])
sess.expunge_all()
l = sess.query(Blub).all()
result = ','.join([repr(l[0]), repr(l[1]),
repr(l[0].parent_foo), repr(l[1].parent_foo)])
eq_(compare, result)
eq_(l[0].parent_foo.data, 'foo #1')
eq_(l[1].parent_foo.data, 'foo #1')
class PolyExpressionEagerLoad(fixtures.DeclarativeMappedTest):
run_setup_mappers = 'once'
__dialect__ = 'default'
@classmethod
def setup_classes(cls):
Base = cls.DeclarativeBasic
class A(fixtures.ComparableEntity, Base):
__tablename__ = 'a'
id = Column(Integer, primary_key=True,
test_needs_autoincrement=True)
discriminator = Column(String(50), nullable=False)
child_id = Column(Integer, ForeignKey('a.id'))
child = relationship('A')
p_a = case([
(discriminator == "a", "a"),
], else_="b")
__mapper_args__ = {
'polymorphic_identity': 'a',
"polymorphic_on": p_a,
}
class B(A):
__mapper_args__ = {
'polymorphic_identity': 'b'
}
@classmethod
def insert_data(cls):
A = cls.classes.A
session = Session(testing.db)
session.add_all([
A(id=1, discriminator='a'),
A(id=2, discriminator='b', child_id=1),
A(id=3, discriminator='c', child_id=1),
])
session.commit()
def test_joinedload(self):
A = self.classes.A
B = self.classes.B
session = Session(testing.db)
result = session.query(A).filter_by(child_id=None).\
options(joinedload('child')).one()
eq_(
result,
A(id=1, discriminator='a', child=[B(id=2), B(id=3)]),
)
class PolymorphicResolutionMultiLevel(fixtures.DeclarativeMappedTest,
testing.AssertsCompiledSQL):
run_setup_mappers = 'once'
__dialect__ = 'default'
@classmethod
def setup_classes(cls):
Base = cls.DeclarativeBasic
class A(Base):
__tablename__ = 'a'
id = Column(Integer, primary_key=True)
class B(A):
__tablename__ = 'b'
id = Column(Integer, ForeignKey('a.id'), primary_key=True)
class C(A):
__tablename__ = 'c'
id = Column(Integer, ForeignKey('a.id'), primary_key=True)
class D(B):
__tablename__ = 'd'
id = Column(Integer, ForeignKey('b.id'), primary_key=True)
def test_ordered_b_d(self):
a_mapper = inspect(self.classes.A)
eq_(
a_mapper._mappers_from_spec(
[self.classes.B, self.classes.D], None),
[a_mapper, inspect(self.classes.B), inspect(self.classes.D)]
)
def test_a(self):
a_mapper = inspect(self.classes.A)
eq_(
a_mapper._mappers_from_spec(
[self.classes.A], None),
[a_mapper]
)
def test_b_d_selectable(self):
a_mapper = inspect(self.classes.A)
spec = [self.classes.D, self.classes.B]
eq_(
a_mapper._mappers_from_spec(
spec,
self.classes.B.__table__.join(self.classes.D.__table__)
),
[inspect(self.classes.B), inspect(self.classes.D)]
)
def test_d_selectable(self):
a_mapper = inspect(self.classes.A)
spec = [self.classes.D]
eq_(
a_mapper._mappers_from_spec(
spec,
self.classes.B.__table__.join(self.classes.D.__table__)
),
[inspect(self.classes.D)]
)
def test_reverse_d_b(self):
a_mapper = inspect(self.classes.A)
spec = [self.classes.D, self.classes.B]
eq_(
a_mapper._mappers_from_spec(
spec, None),
[a_mapper, inspect(self.classes.B), inspect(self.classes.D)]
)
mappers, selectable = a_mapper._with_polymorphic_args(spec=spec)
self.assert_compile(selectable,
"a LEFT OUTER JOIN b ON a.id = b.id "
"LEFT OUTER JOIN d ON b.id = d.id")
def test_d_b_missing(self):
a_mapper = inspect(self.classes.A)
spec = [self.classes.D]
eq_(
a_mapper._mappers_from_spec(
spec, None),
[a_mapper, inspect(self.classes.B), inspect(self.classes.D)]
)
mappers, selectable = a_mapper._with_polymorphic_args(spec=spec)
self.assert_compile(selectable,
"a LEFT OUTER JOIN b ON a.id = b.id "
"LEFT OUTER JOIN d ON b.id = d.id")
def test_d_c_b(self):
a_mapper = inspect(self.classes.A)
spec = [self.classes.D, self.classes.C, self.classes.B]
ms = a_mapper._mappers_from_spec(spec, None)
eq_(
ms[-1], inspect(self.classes.D)
)
eq_(ms[0], a_mapper)
eq_(
set(ms[1:3]), set(a_mapper._inheriting_mappers)
)
class PolymorphicOnNotLocalTest(fixtures.MappedTest):
@classmethod
def define_tables(cls, metadata):
t1 = Table('t1', metadata,
Column('id', Integer, primary_key=True,
test_needs_autoincrement=True),
Column('x', String(10)),
Column('q', String(10)))
t2 = Table('t2', metadata,
Column('t2id', Integer, primary_key=True,
test_needs_autoincrement=True),
|
public-ink/public-ink
|
server/appengine/lib/graphql_relay/connection/arrayconnection.py
|
Python
|
gpl-3.0
| 4,642
| 0.00237
|
from promise import Promise
from ..utils import base64, unbase64, is_str
from .connectiontypes import Connection, PageInfo, Edge
def connection_from_list(data, args=None, **kwargs):
'''
A simple function that accepts an array and connection arguments, and returns
a connection object for use in GraphQL. It uses array offsets as pagination,
so pagination will only work if the array is static.
'''
_len = len(data)
return connection_from_list_slice(
data,
args,
slice_start=0,
list_length=_len,
list_slice_length=_len,
**kwargs
)
def connection_from_promised_list(data_promise, args=None, **kwargs):
'''
A version of `connectionFromArray` that takes a promised array, and returns a
promised connection.
'''
return data_promise.then(lambda data: connection_from_list(data, args, **kwargs))
def connection_from_list_slice(list_slice, args=None, connection_type=None,
edge_type=None, pageinfo_type=None,
slice_start=0, list_length=0, list_slice_length=None):
'''
Given a slice (subset) of an array, returns a connection object for use in
GraphQL.
This function is similar to `connectionFromArray`, but is intended for use
cases where you know the cardinality of the connection, consider it too large
to materialize the entire array, and instead wish pass in a slice of the
total result large enough to cover the range specified in `args`.
'''
connection_type = connection_type or Connection
edge_type = edge_type or Edge
pageinfo_type = pageinfo_type or PageInfo
args = args or {}
before = args.get('before')
after = args.get('after')
first = args.get('first')
last = args.get('last
|
')
if list_slice_length is None:
list_slice_length = len(list_slice)
slice_end = slice_start + list_slice_length
before_offset =
|
get_offset_with_default(before, list_length)
after_offset = get_offset_with_default(after, -1)
start_offset = max(
slice_start - 1,
after_offset,
-1
) + 1
end_offset = min(
slice_end,
before_offset,
list_length
)
if isinstance(first, int):
end_offset = min(
end_offset,
start_offset + first
)
if isinstance(last, int):
start_offset = max(
start_offset,
end_offset - last
)
# If supplied slice is too large, trim it down before mapping over it.
_slice = list_slice[
max(start_offset - slice_start, 0):
list_slice_length - (slice_end - end_offset)
]
edges = [
edge_type(
node=node,
cursor=offset_to_cursor(start_offset + i)
)
for i, node in enumerate(_slice)
]
first_edge_cursor = edges[0].cursor if edges else None
last_edge_cursor = edges[-1].cursor if edges else None
lower_bound = after_offset + 1 if after else 0
upper_bound = before_offset if before else list_length
return connection_type(
edges=edges,
page_info=pageinfo_type(
start_cursor=first_edge_cursor,
end_cursor=last_edge_cursor,
has_previous_page=isinstance(last, int) and start_offset > lower_bound,
has_next_page=isinstance(first, int) and end_offset < upper_bound
)
)
PREFIX = 'arrayconnection:'
def connection_from_promised_list_slice(data_promise, args=None, **kwargs):
return data_promise.then(lambda data: connection_from_list_slice(data, args, **kwargs))
def offset_to_cursor(offset):
'''
Creates the cursor string from an offset.
'''
return base64(PREFIX + str(offset))
def cursor_to_offset(cursor):
'''
Rederives the offset from the cursor string.
'''
try:
return int(unbase64(cursor)[len(PREFIX):])
except:
return None
def cursor_for_object_in_connection(data, _object):
'''
Return the cursor associated with an object in an array.
'''
if _object not in data:
return None
offset = data.index(_object)
return offset_to_cursor(offset)
def get_offset_with_default(cursor=None, default_offset=0):
'''
Given an optional cursor and a default offset, returns the offset
to use; if the cursor contains a valid offset, that will be used,
otherwise it will be the default.
'''
if not is_str(cursor):
return default_offset
offset = cursor_to_offset(cursor)
try:
return int(offset)
except:
return default_offset
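# Hedged usage sketch (not part of the original module): paginating a 5-item
# slice of a hypothetical 100-item list, asking for the first 3 items after
# the cursor for offset 9. All names and numbers here are illustrative.
def _pagination_demo():
    data_slice = ['item-%d' % i for i in range(10, 15)]
    connection = connection_from_list_slice(
        data_slice,
        args={'first': 3, 'after': offset_to_cursor(9)},
        slice_start=10,                  # offset of data_slice[0] in the full list
        list_length=100,                 # total number of items server-side
        list_slice_length=len(data_slice),
    )
    # connection.edges covers offsets 10, 11 and 12; has_next_page is True.
    return connection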
|
shiftcontrol/UnityOpenCV
|
opencv/tests/swig_python/highgui/size_bmp32.py
|
Python
|
gpl-3.0
| 739
| 0.016238
|
#! /usr/bin/env python
"""
This script checks HighGUI's cvGetCaptureProperty functionality for correct return
of the frame width and height of an .avi file containing uncompressed 32bit Bitmap frames.
|
"""
# name if this test and it's requirements
TESTNAME = "size_bmp32"
REQUIRED = []
# needed for sys.exit(int), .works file handling and check routine
import sys
import works
import size_test
# check requirements and delete old flag file, if it exists
if not works.check_files(REQUIRED,TESTNAME):
sys.exit(77)
# name of file we check here
FILENAME='bmp32.avi'
# run check routine
result=size_test.size_ok(FILENAME)
# create flag file for following tests
works.set_file(TESTNAME)
# return result of test routine
sys.exit(result)
|
mancoast/CPythonPyc_test
|
fail/301_test_decimal.py
|
Python
|
gpl-3.0
| 53,155
| 0.006095
|
# Copyright (c) 2004 Python Software Foundation.
# All rights reserved.
# Written by Eric Price <eprice at tjhsst.edu>
# and Facundo Batista <facundo at taniquetil.com.ar>
# and Raymond Hettinger <python at rcn.com>
# and Aahz (aahz at pobox.com)
# and Tim Peters
"""
These are the test cases for the Decimal module.
There are two groups of tests, Arithmetic and Behaviour. The former test
the Decimal arithmetic using the tests provided by Mike Cowlishaw. The latter
test the pythonic behaviour according to PEP 327.
Cowlishaw's tests can be downloaded from:
www2.hursley.ibm.com/decimal/dectest.zip
This test module can be called from command line with one parameter (Arithmetic
or Behaviour) to test each part, or without parameter to test both parts. If
you're working through IDLE, you can import this test module and call test_main()
with the corresponding argument.
"""
import glob
import math
import os, sys
import pickle, copy
import unittest
from decimal import *
import numbers
from test.support import (TestSkipped, run_unittest, run_doctest,
is_resource_enabled)
import random
try:
import threading
except ImportError:
threading = None
# Useful Test Constant
Signals = tuple(getcontext().flags.keys())
# Tests are built around these assumed context defaults.
# test_main() restores the original context.
def init():
global ORIGINAL_CONTEXT
ORIGINAL_CONTEXT = getcontext().copy()
DefaultTestContext = Context(
prec = 9,
rounding = ROUND_HALF_EVEN,
traps = dict.fromkeys(Signals, 0)
)
setcontext(DefaultTestContext)
TESTDATADIR = 'decimaltestdata'
if __name__ == '__main__':
file = sys.argv[0]
else:
file = __file__
testdir = os.path.dirname(file) or os.curdir
directory = testdir + os.sep + TESTDATADIR + os.sep
skip_expected = not os.path.isdir(directory)
# Make sure it actually raises errors when not expected and caught in flags
# Slower, since it runs some things several times.
EXTENDEDERRORTEST = False
#Map the test cases' error names to the actual errors
ErrorNames = {'clamped' : Clamped,
'conversion_syntax' : InvalidOperation,
'division_by_zero' : DivisionByZero,
'division_impossible' : InvalidOperation,
'division_undefined' : InvalidOperation,
'inexact' : Inexact,
'invalid_context' : InvalidOperation,
'invalid_operation' : InvalidOperation,
'overflow' : Overflow,
'rounded' : Rounded,
'subnormal' : Subnormal,
'underflow' : Underflow}
def Nonfunction(*args):
"""Doesn't do anything."""
return None
RoundingDict = {'ceiling' : ROUND_CEILING, #Maps test-case names to roundings.
'down' : ROUND_DOWN,
'floor' : ROUND_FLOOR,
'half_down' : ROUND_HALF_DOWN,
'half_even' : ROUND_HALF_EVEN,
'half_up' : ROUND_HALF_UP,
'up' : ROUND_UP,
'05up' : ROUND_05UP}
# Name adapter to be able to change the Decimal and Context
# interface without changing the test files from Cowlishaw
nameAdapter = {'and':'logical_and',
'apply':'_apply',
'class':'number_class',
'comparesig':'compare_signal',
'comparetotal':'compare_total',
'comparetotmag':'compare_total_mag',
'copy':'copy_decimal',
'copyabs':'copy_abs',
'copynegate':'copy_negate',
'copysign':'copy_sign',
'divideint':'divide_int',
'invert':'logical_invert',
'iscanonical':'is_canonical',
'isfinite':'is_finite',
'isinfinite':'is_infinite',
'isnan':'is_nan',
'isnormal':'is_normal',
'isqnan':'is_qnan',
'issigned':'is_signed',
'issnan':'is_snan',
'issubnormal':'is_subnormal',
'iszero':'is_zero',
'maxmag':'max_mag',
'minmag':'min_mag',
'nextminus':'next_minus',
'nextplus':'next_plus',
'nexttoward':'next_toward',
'or':'logical_or',
'reduce':'normalize',
'remaindernear':'remainder_near',
'samequantum':'same_quantum',
'squareroot':'sqrt',
'toeng':'to_eng_string',
'tointegral':'to_integral_value',
'tointegralx':'to_integral_exact',
'tosci':'to_sci_string',
'xor':'logical_xor',
}
# The following functions return True/False rather than a Decimal instance
LOGICAL_FUNCTIONS = (
'is_canonical',
'is_finite',
'is_infinite',
'is_nan',
'is_normal',
'is_qnan',
'is_signed',
'is_snan',
'is_subnormal',
'is_zero',
'same_quantum',
)
# For some operations (currently exp, ln, log10, power), the decNumber
# reference implementation imposes additional restrictions on the
# context and operands. These restrictions are not part of the
# specification; however, the effect of these restrictions does show
# up in some of the testcases. We skip testcases that violate these
# restrictions, since Decimal behaves differently from decNumber for
# these testcases so these testcases would otherwise fail.
decNumberRestricted = ('power', 'ln', 'log10', 'exp')
DEC_MAX_MATH = 999999
def outside_decNumber_bounds(v, context):
    if (context.prec > DEC_MAX_MATH or
context.Emax > DEC_MAX_MATH or
-context.Emin > DEC_MAX_MATH):
return True
    if not v._is_special and v and (
len(v._int) > DEC_MAX_MATH or
v.adjusted() > DEC_MAX_MATH or
v.adjusted() < 1-2*DEC_MAX_MATH):
return True
return False
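# Hedged sketch (not part of the original harness): a test loop could use the
# check above to skip operands of the restricted operations, for example:
def _skip_for_decNumber(funct_name, operands, context):
    """Return True when a decNumber-restricted testcase should be skipped."""
    if funct_name not in decNumberRestricted:
        return False
    return any(isinstance(op, Decimal) and outside_decNumber_bounds(op, context)
               for op in operands)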
class DecimalTest(unittest.TestCase):
"""Class which tests the Decimal class against the test cases.
Changed for unittest.
"""
def setUp(self):
self.context = Context()
self.ignore_list = ['#']
# Basically, a # means return NaN InvalidOperation.
# Different from a sNaN in trim
self.ChangeDict = {'precision' : self.change_precision,
'rounding' : self.change_rounding_method,
'maxexponent' : self.change_max_exponent,
'minexponent' : self.change_min_exponent,
'clamp' : self.change_clamp}
def eval_file(self, file):
global skip_expected
if skip_expected:
raise TestSkipped
return
for line in open(file):
line = line.replace('\r\n', '').replace('\n', '')
#print line
try:
t = self.eval_line(line)
except DecimalException as exception:
                #Exception raised where there shouldn't have been one.
self.fail('Exception "'+exception.__class__.__name__ + '" raised on line '+line)
return
def eval_line(self, s):
if s.find(' -> ') >= 0 and s[:2] != '--' and not s.startswith(' --'):
s = (s.split('->')[0] + '->' +
s.split('->')[1].split('--')[0]).strip()
else:
s = s.split('--')[0].strip()
for ignore in self.ignore_list:
if s.find(ignore) >= 0:
#print s.split()[0], 'NotImplemented--', ignore
return
if not s:
return
elif ':' in s:
return self.eval_directive(s)
else:
return self.eval_equation(s)
def eval_directive(self, s):
funct, value = (x.strip().lower() for x in s.split(':'))
if funct == 'rounding':
value = RoundingDict[value]
else:
try:
value = int(value)
except ValueError:
pass
funct = self.ChangeDict.get(funct, Nonfunction)
funct(value)
def eval_equation(self, s):
#global DEFAULT_PRECISION
#print DEFAULT_PRECISION
if not TEST_ALL and rand
|
jpbarrette/moman
|
finenight/python/iadfaTest.py
|
Python
|
mit
| 146
| 0.013699
|
from iadfa import IncrementalAdfa
f = ["append", "appendice", "bappend"]
fsa = IncrementalAdfa(f, sorted = True)
fsa.graphVizExport("test.dot")
|
johnnyliu27/openmc
|
openmc/plotter.py
|
Python
|
mit
| 38,526
| 0.000182
|
from numbers import Integral, Real
from itertools import chain
import string
import numpy as np
import openmc.checkvalue as cv
import openmc.data
# Supported keywords for continuous-energy cross section plotting
PLOT_TYPES = ['total', 'scatter', 'elastic', 'inelastic', 'fission',
'absorption', 'capture', 'nu-fission', 'nu-scatter', 'unity',
'slowing-down power', 'damage']
# Supported keywoards for multi-group cross section plotting
PLOT_TYPES_MGXS = ['total', 'absorption', 'scatter', 'fission',
'kappa-fission', 'nu-fission', 'prompt-nu-fission',
                   'delayed-nu-fission', 'chi', 'chi-prompt', 'chi-delayed',
'inverse-velocity', 'beta', 'decay rate', 'unity']
# Create a dictionary which can be used to convert PLOT_TYPES_MGXS to the
# openmc.XSdata attribute name needed to access the data
_PLOT_MGXS_ATTR = {line: line.replace(' ', '_').replace('-', '_')
for line in PLOT_TYPES_MGXS}
_PLOT_MGXS_ATTR['scatter'] = 'scatter_matrix'
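# Quick illustration (added for clarity, not in the original module) of the
# mapping built above:
assert _PLOT_MGXS_ATTR['nu-fission'] == 'nu_fission'
assert _PLOT_MGXS_ATTR['decay rate'] == 'decay_rate'
assert _PLOT_MGXS_ATTR['scatter'] == 'scatter_matrix'  # special case set above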
# Special MT values
UNITY_MT = -1
XI_MT = -2
# MTs to combine to generate associated plot_types
_INELASTIC = [mt for mt in openmc.data.SUM_RULES[3] if mt != 27]
PLOT_TYPES_MT = {'total': openmc.data.SUM_RULES[1],
'scatter': [2] + _INELASTIC,
'elastic': [2],
'inelastic': _INELASTIC,
'fission': [18],
'absorption': [27], 'capture': [101],
'nu-fission': [18],
'nu-scatter': [2] + _INELASTIC,
                 'unity': [UNITY_MT],
'slowing-down power': [2] + _INELASTIC + [XI_MT],
'damage': [444]}
# Operations to use when combining MTs the first np.add is used in reference
# to zero
PLOT_TYPES_OP = {'total': (np.add,),
'scatter': (np.add,) * (len(PLOT_TYPES_MT['scatter']) - 1),
'elastic': (),
'inelastic': (np.add,) * (len(PLOT_TYPES_MT['inelastic']) - 1),
'fission': (),
|
'absorption': (),
'capture': (), 'nu-fission': (),
'nu-scatter': (np.add,) * (len(PLOT_TYPES_MT['nu-scatter']) - 1),
'unity': (),
'slowing-down power':
(np.add,) * (len(PLOT_TYPES_MT['slowing-down power']) - 2) + (np.multiply,),
'damage': ()}
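# Hedged illustration (not from the original module): each operator tuple above
# is one entry shorter than its MT list, so per-MT data can be folded pairwise.
# One plausible reading, assuming `mt_xs` maps MT number -> array of cross sections:
def _combine_mts(plot_type, mt_xs):
    mts = PLOT_TYPES_MT[plot_type]
    ops = PLOT_TYPES_OP[plot_type]
    result = mt_xs[mts[0]]
    for op, mt in zip(ops, mts[1:]):
        result = op(result, mt_xs[mt])
    return result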
# Types of plots to plot linearly in y
PLOT_TYPES_LINEAR = {'nu-fission / fission', 'nu-scatter / scatter',
'nu-fission / absorption', 'fission / absorption'}
# Minimum and maximum energies for plotting (units of eV)
_MIN_E = 1.e-5
_MAX_E = 20.e6
def plot_xs(this, types, divisor_types=None, temperature=294., data_type=None,
axis=None, sab_name=None, ce_cross_sections=None,
mg_cross_sections=None, enrichment=None, plot_CE=True, orders=None,
divisor_orders=None, **kwargs):
"""Creates a figure of continuous-energy cross sections for this item.
Parameters
----------
this : str or openmc.Material
Object to source data from
types : Iterable of values of PLOT_TYPES
The type of cross sections to include in the plot.
divisor_types : Iterable of values of PLOT_TYPES, optional
Cross section types which will divide those produced by types
before plotting. A type of 'unity' can be used to effectively not
divide some types.
temperature : float, optional
Temperature in Kelvin to plot. If not specified, a default
temperature of 294K will be plotted. Note that the nearest
temperature in the library for each nuclide will be used as opposed
to using any interpolation.
data_type : {'nuclide', 'element', 'material', 'macroscopic'}, optional
Type of object to plot. If not specified, a guess is made based on the
`this` argument.
axis : matplotlib.axes, optional
A previously generated axis to use for plotting. If not specified,
a new axis and figure will be generated.
sab_name : str, optional
Name of S(a,b) library to apply to MT=2 data when applicable; only used
for items which are instances of openmc.Element or openmc.Nuclide
ce_cross_sections : str, optional
Location of cross_sections.xml file. Default is None.
mg_cross_sections : str, optional
Location of MGXS HDF5 Library file. Default is None.
enrichment : float, optional
Enrichment for U235 in weight percent. For example, input 4.95 for
4.95 weight percent enriched U. Default is None. This is only used for
items which are instances of openmc.Element
plot_CE : bool, optional
Denotes whether or not continuous-energy will be plotted. Defaults to
plotting the continuous-energy data.
orders : Iterable of Integral, optional
The scattering order or delayed group index to use for the
corresponding entry in types. Defaults to the 0th order for scattering
and the total delayed neutron data. This only applies to plots of
multi-group data.
divisor_orders : Iterable of Integral, optional
Same as orders, but for divisor_types
**kwargs
All keyword arguments are passed to
:func:`matplotlib.pyplot.figure`.
Returns
-------
fig : matplotlib.figure.Figure
If axis is None, then a Matplotlib Figure of the generated
cross section will be returned. Otherwise, a value of
None will be returned as the figure and axes have already been
generated.
"""
import matplotlib.pyplot as plt
cv.check_type("plot_CE", plot_CE, bool)
if data_type is None:
if isinstance(this, openmc.Nuclide):
data_type = 'nuclide'
elif isinstance(this, openmc.Element):
data_type = 'element'
elif isinstance(this, openmc.Material):
data_type = 'material'
elif isinstance(this, openmc.Macroscopic):
data_type = 'macroscopic'
elif isinstance(this, str):
if this[-1] in string.digits:
data_type = 'nuclide'
else:
data_type = 'element'
else:
raise TypeError("Invalid type for plotting")
if plot_CE:
# Calculate for the CE cross sections
E, data = calculate_cexs(this, data_type, types, temperature, sab_name,
ce_cross_sections, enrichment)
if divisor_types:
cv.check_length('divisor types', divisor_types, len(types))
            Ediv, data_div = calculate_cexs(this, data_type, divisor_types,
                                            temperature, sab_name,
                                            ce_cross_sections, enrichment)
# Create a new union grid, interpolate data and data_div on to that
# grid, and then do the actual division
Enum = E[:]
E = np.union1d(Enum, Ediv)
data_new = np.zeros((len(types), len(E)))
for line in range(len(types)):
data_new[line, :] = \
np.divide(np.interp(E, Enum, data[line, :]),
np.interp(E, Ediv, data_div[line, :]))
if divisor_types[line] != 'unity':
types[line] = types[line] + ' / ' + divisor_types[line]
data = data_new
else:
# Calculate for MG cross sections
E, data = calculate_mgxs(this, data_type, types, orders, temperature,
mg_cross_sections, ce_cross_sections,
enrichment)
if divisor_types:
cv.check_length('divisor types', divisor_types, len(types))
Ediv, data_div = calculate_mgxs(this, data_type, divisor_types,
divisor_orders, temperature,
mg_cross_sections,
ce_cross_sections, enrichment)
# Perform the division
for line in range(len(types)):
data[line, :] /= data_div[line, :
|
KiChjang/servo
|
tests/wpt/web-platform-tests/tools/manifest/manifest.py
|
Python
|
mpl-2.0
| 18,236
| 0.001206
|
import io
import os
import sys
from atomicwrites import atomic_write
from copy import deepcopy
from multiprocessing import Pool, cpu_count
from six import ensure_text
from . import jsonlib
from . import vcs
from .item import (ConformanceCheckerTest,
CrashTest,
ManifestItem,
ManualTest,
PrintRefTest,
RefTest,
SupportFile,
TestharnessTest,
VisualTest,
WebDriverSpecTest)
from .log import get_logger
from .sourcefile import SourceFile
from .typedata import TypeData
MYPY = False
if MYPY:
# MYPY is set to True when run under Mypy.
from logging import Logger
from typing import Any
from typing import Container
from typing import Dict
from typing import IO
from typing import Iterator
from typing import Iterable
from typing import Optional
from typing import Set
from typing import Text
from typing import Tuple
from typing import Type
from typing import Union
CURRENT_VERSION = 8 # type: int
class ManifestError(Exception):
pass
class ManifestVersionMismatch(ManifestError):
pass
class InvalidCacheError(Exception):
pass
item_classes = {u"testharness": TestharnessTest,
u"reftest": RefTest,
u"print-reftest": PrintRefTest,
u"crashtest": CrashTest,
u"manual": ManualTest,
u"wdspec": WebDriverSpecTest,
u"conformancechecker": ConformanceCheckerTest,
u"visual": VisualTest,
u"support": SupportFile} # type: Dict[Text, Type[ManifestItem]]
def compute_manifest_items(source_file):
# type: (SourceFile) -> Tuple[Tuple[Text, ...], Text, Set[ManifestItem], Text]
rel_path_parts = source_file.rel_path_parts
new_type, manifest_items = source_file.manifest_items()
file_hash = source_file.hash
return rel_path_parts, new_type, set(manifest_items), file_hash
if MYPY:
ManifestDataType = Dict[Any, TypeData]
else:
ManifestDataType = dict
class ManifestData(ManifestDataType):
def __init__(self, manifest):
# type: (Manifest) -> None
"""Dictionary subclass containing a TypeData instance for each test type,
keyed by type name"""
self.initialized = False # type: bool
for key, value in item_classes.items():
self[key] = TypeData(manifest, value)
self.initialized = True
self.json_obj = None # type: None
def __setitem__(self, key, value):
# type: (Text, TypeData) -> None
if self.initialized:
raise AttributeError
dict.__setitem__(self, key, value)
def paths(self):
# type: () -> Set[Text]
"""Get a list of all paths containing test items
without actually constructing all the items"""
rv = set() # type: Set[Text]
for item_data in self.values():
for item in item_data:
rv.add(os.path.sep.join(item))
return rv
def type_by_path(self):
# type: () -> Dict[Tuple[Text, ...], Text]
rv = {}
for item_type, item_data in self.items():
for item in item_data:
rv[item] = item_type
return rv
class Manifest(object):
def __init__(self, tests_root, url_base="/"):
# type: (Text, Text) -> None
assert url_base is not None
self._data = ManifestData(self) # type: ManifestData
self.tests_root = tests_root # type: Text
self.url_base = url_base # type: Text
def __iter__(self):
# type: () -> Iterator[Tuple[Text, Text, Set[ManifestItem]]]
return self.itertypes()
def itertypes(self, *types):
# type: (*Text) -> Iterator[Tuple[Text, Text, Set[ManifestItem]]]
for item_type in (types or sorted(self._data.keys())):
for path in self._data[item_type]:
rel_path = os.sep.join(path)
tests = self._data[item_type][path]
yield item_type, rel_path, tests
def iterpath(self, path):
# type: (Text) -> Iterable[ManifestItem]
tpath = tuple(path.split(os.path.sep))
for type_tests in self._data.values():
i = type_tests.get(tpath, set())
assert i is not None
for test in i:
yield test
def iterdir(self, dir_name):
# type: (Text) -> Iterable[ManifestItem]
tpath = tuple(dir_name.split(os.path.sep))
tpath_len = len(tpath)
for type_tests in self._data.values():
for path, tests in type_tests.items():
if path[:tpath_len] == tpath:
for test in tests:
yield test
def update(self, tree, parallel=True):
# type: (Iterable[Tuple[Text, Optional[Text], bool]], bool) -> bool
"""Update the manifest given an iterable of items that make up the updated manifest.
The iterable must either generate tuples of the form (SourceFile, True) for paths
that are to be updated, or (path, False) for items that are not to be updated. This
        unusual API is designed as an optimisation, meaning that SourceFile items need not be
constructed in the case we are not updating a path, but the absence of an item from
the iterator may be used to remove defunct entries from the manifest."""
logger = get_logger()
changed = False
# Create local variable references to these dicts so we avoid the
# attribute access in the hot loop below
data = self._data
types = data.type_by_path()
remaining_manifest_paths = set(types)
to_update = []
for path, file_hash, updated in tree:
path_parts = tuple(path.split(os.path.sep))
is_new = path_parts not in remaining_manifest_paths
if not updated and is_new:
# This is kind of a bandaid; if we ended up here the cache
# was invalid but we've been using it anyway. That's obviously
# bad; we should fix the underlying issue that we sometimes
# use an invalid cache. But at least this fixes the immediate
# problem
raise InvalidCacheError
if not updated:
remaining_manifest_paths.remove(path_parts)
else:
|
assert self.tests_root is not None
source_file = SourceFile(self.tests_root,
path,
self.url_base,
file_hash)
hash_changed = False # type: bool
if not is_new:
                    if file_hash is None:
file_hash = source_file.hash
remaining_manifest_paths.remove(path_parts)
old_type = types[path_parts]
old_hash = data[old_type].hashes[path_parts]
if old_hash != file_hash:
hash_changed = True
del data[old_type][path_parts]
if is_new or hash_changed:
to_update.append(source_file)
if to_update:
logger.debug("Computing manifest update for %s items" % len(to_update))
changed = True
# 25 items was derived experimentally (2020-01) to be approximately the
# point at which it is quicker to create a Pool and parallelize update.
pool = None
if parallel and len(to_update) > 25 and cpu_count() > 1:
# On Python 3 on Windows, using >= MAXIMUM_WAIT_OBJECTS processes
# causes a crash in the multiprocessing module. Whilst this enum
# can technically have any value, it is usually 64. For safety,
# restrict manifest regeneration to 48 processes on Windows.
#
# See https://bugs.python.org/issue26903 and https://bugs.python.org/issue40263
processes = cpu_co
|
joansalasoler/auale
|
src/auale/gui/actors/label.py
|
Python
|
gpl-3.0
| 3,526
| 0
|
# -*- coding: utf-8 -*-
# Aualé oware graphic user interface.
# Copyright (C) 2014-2020 Joan Sala Soler <contact@joansala.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from gi.repository import cairo as Cairo
from gi.repository import Clutter
from gi.repository import GObject
from gi.repository import Pango
from gi.repository import PangoCairo
class Label(Clutter.Canvas):
"""Draws a canvas containing some text"""
__gtype_name__ = 'Label'
def __init__(self):
super(Label, self).__init__()
self._markup = ''
self._font = Pango.font_description_from_string('Ubuntu Bold 14')
self._stroke_color = (0.20, 0.20, 0.20, 1.0)
        self._shadow_color = (0.10, 0.10, 0.10, 1.0)
self._text_color = (1.00, 1.00, 1.00, 1.0)
self.connect('draw', self.on_draw_request)
def get_markup(self):
"""Current text to display"""
return self._markup
def set_color(self, red, green, blue, alpha=1.0):
"""Color for the label's text"""
self._text_color = (red, green, blue, alpha)
self.invalidate()
def set_markup(self, markup):
"""Text to display"""
self._markup = markup
self.invalidate()
def set_text_size(self, size):
"""Font size in fractional points"""
self._font.set_size(size * Pango.SCALE)
self.invalidate()
def on_draw_request(self, canvas, context, width, height):
"""Draws the loaded glyph on the canvas"""
line_width = 0.125 * self._font.get_size() / Pango.SCALE
layout = PangoCairo.create_layout(context)
layout.set_alignment(Pango.Alignment.CENTER)
layout.set_ellipsize(Pango.EllipsizeMode.END)
layout.set_font_description(self._font)
layout.set_height(height * Pango.SCALE)
layout.set_width(width * Pango.SCALE)
layout.set_markup(self._markup, -1)
self._setup_context(context)
self._clear_context(context)
context.save()
context.translate(0.0, 2.0)
PangoCairo.layout_path(context, layout)
context.set_source_rgba(*self._shadow_color)
context.fill()
context.restore()
PangoCairo.layout_path(context, layout)
context.set_source_rgba(*self._stroke_color)
context.set_line_width(line_width)
context.stroke_preserve()
context.set_source_rgba(*self._text_color)
PangoCairo.show_layout(context, layout)
def _clear_context(self, context):
"""Clears a drawing context"""
context.save()
context.set_operator(Cairo.Operator.CLEAR)
context.paint()
context.restore()
def _setup_context(self, context):
"""Configure the drawing context"""
context.set_antialias(Cairo.Antialias.BEST)
markup = GObject.Property(setter=set_markup, type=str)
text_size = GObject.Property(setter=set_text_size, type=int)
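# Hedged usage sketch (not part of the original module): a Clutter canvas is
# normally attached to an actor as its content; the actor and sizes below are
# illustrative assumptions, not taken from the application code.
def make_score_actor():
    label = Label()
    label.set_size(200, 40)            # Clutter.Canvas drawing surface size
    label.set_markup('<b>Score</b>: 48')
    actor = Clutter.Actor()
    actor.set_size(200, 40)
    actor.set_content(label)           # the 'draw' handler above renders the text
    return actor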
|
plotly/python-api
|
packages/python/plotly/plotly/validators/parcoords/dimension/_tickvals.py
|
Python
|
mit
| 473
| 0.002114
|
import _plotly_utils.basevalidators
class TickvalsValidator(_plotly_utils.basevalidators.DataArrayValidator):
def __init__(
        self, plotly_name="tickvals", parent_name="parcoords.dimension", **kwargs
):
super(TickvalsValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
edit_type=kwargs.pop("edit_type", "plot"),
role=kwargs.pop("role", "data"),
            **kwargs
)
|
karlnapf/kameleon-mcmc
|
kameleon_mcmc/experiments/scripts/glass_ard/glass_ard_ground_truth.py
|
Python
|
bsd-2-clause
| 4,033
| 0.011654
|
"""
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3 of the License, or
(at your option) any later version.
Written (W) 2013 Heiko Strathmann
"""
from kameleon_mcmc.distribution.Gaussian import Gaussian
from kameleon_mcmc.experiments.SingleChainExperiment import SingleChainExperiment
from kameleon_mcmc.gp.GPData import GPData
from kameleon_mcmc.gp.mcmc.PseudoMarginalHyperparameterDistribution import PseudoMarginalHyperparameterDistribution
from kameleon_mcmc.kernel.GaussianKernel import GaussianKernel
from kameleon_mcmc.mcmc.MCMCChain import MCMCChain
from kameleon_mcmc.mcmc.MCMCParams import MCMCParams
from kameleon_mcmc.mcmc.output.PlottingOutput import PlottingOutput
from kameleon_mcmc.mcmc.output.StatisticsOutput import StatisticsOutput
from kameleon_mcmc.mcmc.samplers.AdaptiveMetropolisLearnScale import AdaptiveMetropolisLearnScale
from kameleon_mcmc.mcmc.samplers.KameleonWindowLearnScale import KameleonWindowLearnScale
from kameleon_mcmc.mcmc.samplers.StandardMetropolis import StandardMetropolis
from numpy.lib.twodim_base import eye
from numpy.linalg.linalg import cholesky
from numpy.ma.core import mean, ones, shape, asarray, zeros
from numpy.ma.extras import cov
from numpy.random import permutation, seed
from scipy.linalg.basic import solve_triangular
from kameleon_mcmc.experiments.ClusterTools import ClusterTools
import os
import sys
if __name__ == '__main__':
if len(sys.argv) != 3:
print "usage:", str(sys.argv[0]).split(os.sep)[-1], "<experiment_dir_base> <number_of_experiments>"
|
print "example:"
        print "python " + str(sys.argv[0]).split(os.sep)[-1] + " /nfs/nhome/live/ucabhst/kameleon_experiments/ 3"
exit()
experiment_dir_base = str(sys.argv[1])
n = int(str(sys.argv[2]))
# loop over parameters here
experiment_dir = experiment_dir_base + str(os.path.abspath(sys.argv[0])).split(os.sep)[-1].split(".")[0] + os.sep
print "running experiments", n, "times at base", experiment_dir
# load data
data,labels=GPData.get_glass_data()
# normalise and whiten dataset
data-=mean(data, 0)
L=cholesky(cov(data.T))
data=solve_triangular(L, data.T, lower=True).T
dim=shape(data)[1]
# prior on theta and posterior target estimate
theta_prior=Gaussian(mu=0*ones(dim), Sigma=eye(dim)*5)
distribution=PseudoMarginalHyperparameterDistribution(data, labels, \
n_importance=100, prior=theta_prior, \
ridge=1e-3)
sigma = 23.0
print "using sigma", sigma
kernel = GaussianKernel(sigma=sigma)
for i in range(n):
mcmc_samplers = []
burnin=50000
num_iterations=500000
#mcmc_samplers.append(KameleonWindowLearnScale(distribution, kernel, stop_adapt=burnin))
#mean_est = zeros(distribution.dimension, dtype="float64")
#cov_est = 1.0 * eye(distribution.dimension)
#cov_est[0, 0] = distribution.V
#mcmc_samplers.append(AdaptiveMetropolisLearnScale(distribution, mean_est=mean_est, cov_est=cov_est))
#mcmc_samplers.append(AdaptiveMetropolis(distribution, mean_est=mean_est, cov_est=cov_est))
mcmc_samplers.append(StandardMetropolis(distribution))
start = zeros(distribution.dimension, dtype="float64")
mcmc_params = MCMCParams(start=start, num_iterations=num_iterations, burnin=burnin)
mcmc_chains = [MCMCChain(mcmc_sampler, mcmc_params) for mcmc_sampler in mcmc_samplers]
for mcmc_chain in mcmc_chains:
mcmc_chain.append_mcmc_output(StatisticsOutput())
experiments = [SingleChainExperiment(mcmc_chain, experiment_dir) for mcmc_chain in mcmc_chains]
for experiment in experiments:
ClusterTools.submit_experiment(experiment)
|
nkgilley/home-assistant
|
homeassistant/components/homekit/type_thermostats.py
|
Python
|
apache-2.0
| 25,247
| 0.00103
|
"""Class to hold all thermostat accessories."""
import logging
from pyhap.const import CATEGORY_THERMOSTAT
from homeassistant.components.climate.const import (
ATTR_CURRENT_HUMIDITY,
ATTR_CURRENT_TEMPERATURE,
ATTR_HUMIDITY,
ATTR_HVAC_ACTION,
ATTR_HVAC_MODE,
ATTR_HVAC_MODES,
ATTR_MAX_TEMP,
ATTR_MIN_HUMIDITY,
ATTR_MIN_TEMP,
ATTR_TARGET_TEMP_HIGH,
ATTR_TARGET_TEMP_LOW,
CURRENT_HVAC_COOL,
CURRENT_HVAC_DRY,
CURRENT_HVAC_FAN,
CURRENT_HVAC_HEAT,
CURRENT_HVAC_IDLE,
CURRENT_HVAC_OFF,
DEFAULT_MAX_TEMP,
DEFAULT_MIN_HUMIDITY,
DEFAULT_MIN_TEMP,
DOMAIN as DOMAIN_CLIMATE,
HVAC_MODE_AUTO,
HVAC_MODE_COOL,
HVAC_MODE_DRY,
HVAC_MODE_FAN_ONLY,
HVAC_MODE_HEAT,
HVAC_MODE_HEAT_COOL,
HVAC_MODE_OFF,
SERVICE_SET_HUMIDITY,
SERVICE_SET_HVAC_MODE as SERVICE_SET_HVAC_MODE_THERMOSTAT,
SERVICE_SET_TEMPERATURE as SERVICE_SET_TEMPERATURE_THERMOSTAT,
SUPPORT_TARGET_HUMIDITY,
SUPPORT_TARGET_TEMPERATURE,
SUPPORT_TARGET_TEMPERATURE_RANGE,
)
from homeassistant.components.water_heater import (
DOMAIN as DOMAIN_WATER_HEATER,
SERVICE_SET_TEMPERATURE as SERVICE_SET_TEMPERATURE_WATER_HEATER,
)
from homeassistant.const import (
ATTR_ENTITY_ID,
ATTR_SUPPORTED_FEATURES,
ATTR_TEMPERATURE,
TEMP_CELSIUS,
TEMP_FAHRENHEIT,
UNIT_PERCENTAGE,
)
from homeassistant.core import callback
from .accessories import TYPES, HomeAccessory
from .const import (
CHAR_COOLING_THRESHOLD_TEMPERATURE,
CHAR_CURRENT_HEATING_COOLING,
CHAR_CURRENT_HUMIDITY,
CHAR_CURRENT_TEMPERATURE,
CHAR_HEATING_THRESHOLD_TEMPERATURE,
CHAR_TARGET_HEATING_COOLING,
CHAR_TARGET_HUMIDITY,
    CHAR_TARGET_TEMPERATURE,
CHAR_TEMP_DISPLAY_UNITS,
    DEFAULT_MAX_TEMP_WATER_HEATER,
DEFAULT_MIN_TEMP_WATER_HEATER,
PROP_MAX_VALUE,
PROP_MIN_VALUE,
SERV_THERMOSTAT,
)
from .util import temperature_to_homekit, temperature_to_states
_LOGGER = logging.getLogger(__name__)
HC_HOMEKIT_VALID_MODES_WATER_HEATER = {"Heat": 1}
UNIT_HASS_TO_HOMEKIT = {TEMP_CELSIUS: 0, TEMP_FAHRENHEIT: 1}
HC_HEAT_COOL_OFF = 0
HC_HEAT_COOL_HEAT = 1
HC_HEAT_COOL_COOL = 2
HC_HEAT_COOL_AUTO = 3
HC_MIN_TEMP = 10
HC_MAX_TEMP = 38
UNIT_HOMEKIT_TO_HASS = {c: s for s, c in UNIT_HASS_TO_HOMEKIT.items()}
HC_HASS_TO_HOMEKIT = {
HVAC_MODE_OFF: HC_HEAT_COOL_OFF,
HVAC_MODE_HEAT: HC_HEAT_COOL_HEAT,
HVAC_MODE_COOL: HC_HEAT_COOL_COOL,
HVAC_MODE_AUTO: HC_HEAT_COOL_AUTO,
HVAC_MODE_HEAT_COOL: HC_HEAT_COOL_AUTO,
HVAC_MODE_DRY: HC_HEAT_COOL_COOL,
HVAC_MODE_FAN_ONLY: HC_HEAT_COOL_COOL,
}
HC_HOMEKIT_TO_HASS = {c: s for s, c in HC_HASS_TO_HOMEKIT.items()}
HC_HASS_TO_HOMEKIT_ACTION = {
CURRENT_HVAC_OFF: HC_HEAT_COOL_OFF,
CURRENT_HVAC_IDLE: HC_HEAT_COOL_OFF,
CURRENT_HVAC_HEAT: HC_HEAT_COOL_HEAT,
CURRENT_HVAC_COOL: HC_HEAT_COOL_COOL,
CURRENT_HVAC_DRY: HC_HEAT_COOL_COOL,
CURRENT_HVAC_FAN: HC_HEAT_COOL_COOL,
}
HEAT_COOL_DEADBAND = 5
@TYPES.register("Thermostat")
class Thermostat(HomeAccessory):
"""Generate a Thermostat accessory for a climate."""
def __init__(self, *args):
"""Initialize a Thermostat accessory object."""
super().__init__(*args, category=CATEGORY_THERMOSTAT)
self._unit = self.hass.config.units.temperature_unit
self._state_updates = 0
self.hc_homekit_to_hass = None
self.hc_hass_to_homekit = None
min_temp, max_temp = self.get_temperature_range()
        # HomeKit only supports 10-38; overriding the max appears to work,
        # but a value below 0 causes a crash in the Home app
hc_min_temp = max(min_temp, 0)
hc_max_temp = max_temp
min_humidity = self.hass.states.get(self.entity_id).attributes.get(
ATTR_MIN_HUMIDITY, DEFAULT_MIN_HUMIDITY
)
# Add additional characteristics if auto mode is supported
self.chars = []
state = self.hass.states.get(self.entity_id)
features = state.attributes.get(ATTR_SUPPORTED_FEATURES, 0)
if features & SUPPORT_TARGET_TEMPERATURE_RANGE:
self.chars.extend(
(CHAR_COOLING_THRESHOLD_TEMPERATURE, CHAR_HEATING_THRESHOLD_TEMPERATURE)
)
if features & SUPPORT_TARGET_HUMIDITY:
self.chars.extend((CHAR_TARGET_HUMIDITY, CHAR_CURRENT_HUMIDITY))
serv_thermostat = self.add_preload_service(SERV_THERMOSTAT, self.chars)
# Current mode characteristics
self.char_current_heat_cool = serv_thermostat.configure_char(
CHAR_CURRENT_HEATING_COOLING, value=0
)
self._configure_hvac_modes(state)
# Must set the value first as setting
# valid_values happens before setting
# the value and if 0 is not a valid
# value this will throw
self.char_target_heat_cool = serv_thermostat.configure_char(
CHAR_TARGET_HEATING_COOLING, value=list(self.hc_homekit_to_hass)[0]
)
self.char_target_heat_cool.override_properties(
valid_values=self.hc_hass_to_homekit
)
# Current and target temperature characteristics
self.char_current_temp = serv_thermostat.configure_char(
CHAR_CURRENT_TEMPERATURE, value=21.0
)
self.char_target_temp = serv_thermostat.configure_char(
CHAR_TARGET_TEMPERATURE,
value=21.0,
# We do not set PROP_MIN_STEP here and instead use the HomeKit
# default of 0.1 in order to have enough precision to convert
            # temperature units and avoid a setting of 73F turning into 74F
properties={PROP_MIN_VALUE: hc_min_temp, PROP_MAX_VALUE: hc_max_temp},
)
# Display units characteristic
self.char_display_units = serv_thermostat.configure_char(
CHAR_TEMP_DISPLAY_UNITS, value=0
)
# If the device supports it: high and low temperature characteristics
self.char_cooling_thresh_temp = None
self.char_heating_thresh_temp = None
if CHAR_COOLING_THRESHOLD_TEMPERATURE in self.chars:
self.char_cooling_thresh_temp = serv_thermostat.configure_char(
CHAR_COOLING_THRESHOLD_TEMPERATURE,
value=23.0,
# We do not set PROP_MIN_STEP here and instead use the HomeKit
# default of 0.1 in order to have enough precision to convert
                # temperature units and avoid a setting of 73F turning into 74F
properties={PROP_MIN_VALUE: hc_min_temp, PROP_MAX_VALUE: hc_max_temp},
)
if CHAR_HEATING_THRESHOLD_TEMPERATURE in self.chars:
self.char_heating_thresh_temp = serv_thermostat.configure_char(
CHAR_HEATING_THRESHOLD_TEMPERATURE,
value=19.0,
# We do not set PROP_MIN_STEP here and instead use the HomeKit
# default of 0.1 in order to have enough precision to convert
                # temperature units and avoid a setting of 73F turning into 74F
properties={PROP_MIN_VALUE: hc_min_temp, PROP_MAX_VALUE: hc_max_temp},
)
self.char_target_humidity = None
self.char_current_humidity = None
if CHAR_TARGET_HUMIDITY in self.chars:
self.char_target_humidity = serv_thermostat.configure_char(
CHAR_TARGET_HUMIDITY,
value=50,
# We do not set a max humidity because
# homekit currently has a bug that will show the lower bound
# shifted upwards. For example if you have a max humidity
# of 80% homekit will give you the options 20%-100% instead
# of 0-80%
properties={PROP_MIN_VALUE: min_humidity},
)
self.char_current_humidity = serv_thermostat.configure_char(
CHAR_CURRENT_HUMIDITY, value=50
)
self._async_update_state(state)
serv_thermostat.setter_callback = self._set_chars
def _temperature_to_homekit(self, temp):
return temperature_to_homekit(temp, self._unit)
def _tem
|
KNCT-KPC/RapidHouse
|
rapidhouse/lib/ui.py
|
Python
|
mit
| 3,916
| 0.035291
|
# -*- coding: utf-8 -*-
from ..algorithm import ga
import parameter
import tune
import sys
import os
import signal
import time
class Ui(object):
"""
	This class handles both the CUI and the GUI.
	:param rc: an instance of `rapidconfig.RapidConfig`.
"""
def __init__(self, rc):
self.tuner = None
self.seq = None
self.config = None
self.rc = rc
self.fp = None if rc.log_file is None else open(rc.log_file, "a")
def get_input(self, msg, test=None):
"""
		Get an input from the user.
		:param str msg: the message printed on the console.
		:param test: for a unit test. If `test` is not `None`, it is returned as-is.
:return: the input-value from the user.
"""
return test if test is not None else raw_input("%s: " % msg)
def get_boolean(self, msg, test=None):
"""
Get a boolean input from a user.
		:param str msg: the message printed on the console.
		:param test: for a unit test. If `test` is not `None`, it is returned as-is.
:return: the boolean input-value from the user.
"""
if test is not None:
return test
tmp = self.get_input(msg + "[y/N]")
return tmp in ["y", "yes"]
def notice_score(self, score, pop):
"""
Call self.__notice(), and return a score.
		:param score: the score.
		:param pop: the population that has the score.
:return: the score.
"""
self.__notice("Score", score, pop)
return score
def notice_best(self, best_score, pop):
"""
		Call self.__notice(), and return the best score.
		:param best_score: the best score.
		:param pop: the population that has the best score.
:return: the best score.
"""
self.__notice("Best", best_score, pop)
return best_score
def notice_debug(self, type, contents):
"""
Give notice for debugging.
		:param str type: an identifier.
		:param dictionary contents: the result of an executed command.
"""
if self.rc.log_level != "debug":
return
for s in ['stdout', 'stderr']:
if contents[s] is None:
continue
for l in contents[s].splitlines():
self.__print(">[%s:%s]> %s%s" % (type, s, l, os.linesep))
def debug_print(self, string):
"""
Call self.__print() for debugging.
:param str string: the string.
"""
if self.rc.log_level != "debug":
return
self.__print(string)
def __notice(self, type, score, pop):
"""
Give notice of both a score and a population.
		:param str type: an identifier.
		:param score: the score.
		:param pop: the population that has the score.
"""
tmp = ''
for (n, p) in zip(self.seq, pop):
tmp += "%s=%s," % (self.config.get_param_name(n), p)
tmp = tmp[0:-1]
self.__print(">[%s:%s]> %s %s%s" % ('score', 'normal' if type == 'Score' else 'best', score, tmp, os.linesep))
if self.fp is None:
return
if type == 'Best':
print '---'
print '%s %s %s' % (type, score, tmp)
if type == 'Best':
print '---'
def __print(self, string):
"""
Print a string to either the `STDOUT` or a log-file.
:param str string: the string.
"""
t = time.strftime('%Y/%m/%d %H:%M:%S')
f = sys.stdout if self.fp is None else self.fp
f.write("<%s%s" % (t, string))
f.flush()
def manager(self):
"""
Manage a tuning.
"""
self.config = parameter.Parameter(self.rc.input, self.rc.output)
self.seq = self.config.get_param_list()
		# Parameter ranges
ranges = [self.config.get_param_range(n) for n in self.seq]
		# Prepare the algorithm
group = ([], 10)
if self.rc.alg_type == "GA":
algorithm = ga.GaSuneko(ranges)
else:
raise ValueError("Invalid Algorithm type: %s" % self.rc.alg_type)
		# Start the tuning loop
signal.signal(signal.SIGINT, self.stop)
self.tuner = tune.Tune(self, algorithm, self.config, self.rc, group)
self.tuner.run()
def stop(self, *args):
"""
Stop a tuning.
"""
sys.stderr.write('SIGINT%s' % os.linesep)
self.tuner.write()
if self.fp is not None:
self.fp.close()
exit()
|
jerryz1982/neutron
|
neutron/plugins/openvswitch/agent/openflow/ovs_ofctl/br_tun.py
|
Python
|
apache-2.0
| 10,433
| 0
|
# Copyright (C) 2014,2015 VA Linux Systems Japan K.K.
# Copyright (C) 2014,2015 YAMAMOTO Takashi <yamamoto at valinux co jp>
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# Copyright 2011 VMware, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import functools
import netaddr
from neutron.agent.common import ovs_lib
from neutron.plugins.openvswitch.agent.openflow.ovs_ofctl import br_dvr_process
from neutron.plugins.openvswitch.agent.openflow.ovs_ofctl import ovs_bridge
from neutron.plugins.openvswitch.common import constants
class OVSTunnelBridge(ovs_bridge.OVSAgentBridge,
br_dvr_process.OVSDVRProcessMixin):
"""openvswitch agent tunnel bridge specific logic."""
# Used by OVSDVRProcessMixin
dvr_process_table_id = constants.DVR_PROCESS
dvr_process_next_table_id = constants.PATCH_LV_TO_TUN
def setup_default_table(self, patch_int_ofport, arp_responder_enabled):
# Table 0 (default) will sort incoming traffic depending on in_port
self.add_flow(priority=1,
in_port=patch_int_ofport,
actions="resubmit(,%s)" %
constants.PATCH_LV_TO_TUN)
self.add_flow(priority=0, actions="drop")
if arp_responder_enabled:
# ARP broadcast-ed request go to the local ARP_RESPONDER table to
# be locally resolved
# REVISIT(yamamoto): arp_op=arp.ARP_REQUEST
self.add_flow(table=constants.PATCH_LV_TO_TUN,
priority=1,
proto='arp',
dl_dst="ff:ff:ff:ff:ff:ff",
actions=("resubmit(,%s)" %
constants.ARP_RESPONDER))
# PATCH_LV_TO_TUN table will handle packets coming from patch_int
# unicasts go to table UCAST_TO_TUN where remote addresses are learnt
self.add_flow(table=constants.PATCH_LV_TO_TUN,
priority=0,
dl_dst="00:00:00:00:00:00/01:00:00:00:00:00",
actions="resubmit(,%s)" % constants.UCAST_TO_TUN)
# Broadcasts/multicasts go to table FLOOD_TO_TUN that handles flooding
self.add_flow(table=constants.PATCH_LV_TO_TUN,
priority=0,
dl_dst="01:00:00:00:00:00/01:00:00:00:00:00",
actions="resubmit(,%s)" % constants.FLOOD_TO_TUN)
# Tables [tunnel_type]_TUN_TO_LV will set lvid depending on tun_id
# for each tunnel type, and resubmit to table LEARN_FROM_TUN where
# remote mac addresses will be learnt
for tunnel_type in constants.TUNNEL_NETWORK_TYPES:
self.add_flow(table=constants.TUN_TABLE[tunnel_type],
priority=0,
actions="drop")
# LEARN_FROM_TUN table will have a single flow using a learn action to
# dynamically set-up flows in UCAST_TO_TUN corresponding to remote mac
# addresses (assumes that lvid has already been set by a previous flow)
learned_flow = ("table=%s,"
"priority=1,"
"hard_timeout=300,"
"NXM_OF_VLAN_TCI[0..11],"
"NXM_OF_ETH_DST[]=NXM_OF_ETH_SRC[],"
"load:0->NXM_OF_VLAN_TCI[],"
"load:NXM_NX_TUN_ID[]->NXM_NX_TUN_ID[],"
"output:NXM_OF_IN_PORT[]" %
constants.UCAST_TO_TUN)
# Once remote mac addresses are learnt, output packet to patch_int
self.add_flow(table=constants.LEARN_FROM_TUN,
priority=1,
actions="learn(%s),output:%s" %
(learned_flow, patch_int_ofport))
# Egress unicast will be handled in table UCAST_TO_TUN, where remote
# mac addresses will be learned. For now, just add a default flow that
# will resubmit unknown unicasts to table FLOOD_TO_TUN to treat them
# as broadcasts/multicasts
self.add_flow(table=constants.UCAST_TO_TUN,
priority=0,
actions="resubmit(,%s)" %
constants.FLOOD_TO_TUN)
if arp_responder_enabled:
# If none of the ARP entries correspond to the requested IP, the
# broadcast-ed packet is resubmitted to the flooding table
self.add_flow(table=constants.ARP_RESPONDER,
priority=0,
actions="resubmit(,%s)" %
constants.FLOOD_TO_TUN)
# FLOOD_TO_TUN will handle flooding in tunnels based on lvid,
# for now, add a default drop action
self.install_drop(table_id=constants.FLOOD_TO_TUN)
def provision_local_vlan(self, network_type, lvid, segmentation_id,
distributed=False):
if distributed:
table_id = constants.DVR_NOT_LEARN
else:
table_id = constants.LEARN_FROM_TUN
self.add_flow(table=constants.TUN_TABLE[network_type],
priority=1,
tun_id=segmentation_id,
actions="mod_vlan_vid:%s,"
"resubmit(,%s)" %
(lvid, table_id))
def reclaim_local_vlan(self, network_type, segmentation_id):
self.delete_flows(table=constants.TUN_TABLE[network_type],
tun_id=segmentation_id)
@staticmethod
def _ofport_set_to_str(ports_set):
return ",".join(map(str, ports_set))
def install_flood_to_tun(self, vlan, tun_id, ports, deferred_br=None):
br = deferred_br if deferred_br else self
br.mod_flow(table=constants.FLOOD_TO_TUN,
dl_vlan=vlan,
actions="strip_vlan,set_tunnel:%s,output:%s" %
(tun_id, self._ofport_set_to_str(ports)))
def delete_flood_to_tun(self, vlan, deferred_br=None):
br = deferred_br if deferred_br else self
br.delete_flows(table=constants.FLOOD_TO_TUN, dl_vlan=vlan)
def install_unicast_to_tun(self, vlan, tun_id, port, mac,
deferred_br=None):
br = deferred_br if deferred_br else self
br.add_flow(table=constants.UCAST_TO_TUN,
priority=2,
dl_vlan=vlan,
dl_dst=mac,
actions="strip_vlan,set_tunnel:%s,output:%s" %
(tun_id, port))
def delete_unicast_to_tun(self, vlan, mac, deferred_br=None):
br = deferred_br if deferred_br else self
if mac is None:
br.delete_flows(table=constants.UCAST_TO_TUN,
dl_vlan=vlan)
else:
br.delete_flows(table=constants.UCAST_TO_TUN,
dl_vlan=vlan,
dl_dst=mac)
def install_arp_responder(self, vlan, ip, mac, deferred_br=None):
br = deferred_br if deferred_br else self
actions = constants.
|
Bernardinhouessou/Projets_Autres
|
Python-Projets/Scripts/Algorithms-master/dp/bellman_ford.py
|
Python
|
mit
| 1,115
| 0.020628
|
""" The bellman ford algorithm for calculating single source shortest
paths - CLRS style """
graph = {
's' : {'t':6, 'y':7},
't' : {'x':5, 'z':-4, 'y':8 },
'y' : {'z':9, 'x':-3},
'z' : {'x':7, 's': 2},
'x' : {'t':-2}
}
INF = float('inf')
dist = {}
predecessor = {}
def initialize_single_source(graph, s):
for v in graph:
dist[v] = INF
predecessor[v] = None
dist[s] = 0
def relax(graph, u, v):
if dist[v] > dist[u] + graph[u][v]:
dist[v] = dist[u] + graph[u][v]
predecessor[v] = u
def bellman_ford(graph, s):
initialize_single_source(graph, s)
edges = [(u, v) for u in graph for v in graph[u].keys()]
number_vertices = len(graph)
for i in range(number_vertices-1):
for (u, v) in edges:
relax(graph, u, v)
for (u, v) in edges:
if dist[v] > dist[u] + graph[u][v]:
return False # there exists a negative cycle
return True
def get_distances(graph, s):
if bellman_ford(graph, s):
return dist
return "Graph contains a negative cycle"
print get_distances(graph, 's')
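# Hedged addition (not in the original script): the predecessor map built by
# relax() can be walked backwards to recover an actual shortest path.
def get_path(target):
    path = [target]
    while predecessor[path[-1]] is not None:
        path.append(predecessor[path[-1]])
    return list(reversed(path))
print get_path('x')  # e.g. ['s', 'y', 'x'] for source 's' in the graph above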
|
USC-ACTLab/pyCreate2
|
pyCreate2/__init__.py
|
Python
|
mit
| 96
| 0
|
from .create2 import *
|
from .factory import *
__all__ = ["FactoryCreate", "FactorySimulation"]
|
platformio/platformio
|
platformio/commands/org.py
|
Python
|
apache-2.0
| 4,758
| 0.001471
|
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# pylint: disable=unused-argument
import json
import click
from tabulate import tabulate
from platformio.clients.account import AccountClient
from platformio.commands.account import validate_email, validate_username
|
@click.group("org", short_help="Manage Organizations")
def cli():
pass
def validate_orgname(value):
return validate_username(value, "Organization name")
@cli.command("create", short_help="Create a new organization")
@click.argument(
"orgname", callback=lambda _, __, value: validate_orgname(value),
)
@click.option(
"--email", callback=lambda _, __, value: validate_email(value) if value else value
)
@click.option("--displayname",)
def org_create(orgname, email, displayname):
client = AccountClient()
client.create_org(orgname, email, displayname)
return click.secho(
"The organization %s has been successfully created." % orgname, fg="green",
)
@cli.command("list", short_help="List organizations")
@click.option("--json-output", is_flag=True)
def org_list(json_output):
client = AccountClient()
orgs = client.list_orgs()
if json_output:
return click.echo(json.dumps(orgs))
if not orgs:
return click.echo("You do not have any organizations")
for org in orgs:
click.echo()
click.secho(org.get("orgname"), fg="cyan")
click.echo("-" * len(org.get("orgname")))
data = []
if org.get("displayname"):
data.append(("Display Name:", org.get("displayname")))
if org.get("email"):
data.append(("Email:", org.get("email")))
data.append(
(
"Owners:",
", ".join((owner.get("username") for owner in org.get("owners"))),
)
)
click.echo(tabulate(data, tablefmt="plain"))
return click.echo()
@cli.command("update", short_help="Update organization")
@click.argument("orgname")
@click.option(
"--new-orgname", callback=lambda _, __, value: validate_orgname(value),
)
@click.option("--email")
@click.option("--displayname",)
def org_update(orgname, **kwargs):
client = AccountClient()
org = client.get_org(orgname)
del org["owners"]
new_org = org.copy()
if not any(kwargs.values()):
for field in org:
new_org[field] = click.prompt(
field.replace("_", " ").capitalize(), default=org[field]
)
if field == "email":
validate_email(new_org[field])
if field == "orgname":
validate_orgname(new_org[field])
else:
new_org.update(
{key.replace("new_", ""): value for key, value in kwargs.items() if value}
)
client.update_org(orgname, new_org)
return click.secho(
"The organization %s has been successfully updated." % orgname, fg="green",
)
@cli.command("destroy", short_help="Destroy organization")
@click.argument("orgname")
def account_destroy(orgname):
client = AccountClient()
click.confirm(
"Are you sure you want to delete the %s organization account?\n"
"Warning! All linked data will be permanently removed and can not be restored."
% orgname,
abort=True,
)
client.destroy_org(orgname)
return click.secho("Organization %s has been destroyed." % orgname, fg="green",)
@cli.command("add", short_help="Add a new owner to organization")
@click.argument("orgname",)
@click.argument("username",)
def org_add_owner(orgname, username):
client = AccountClient()
client.add_org_owner(orgname, username)
return click.secho(
"The new owner %s has been successfully added to the %s organization."
% (username, orgname),
fg="green",
)
@cli.command("remove", short_help="Remove an owner from organization")
@click.argument("orgname",)
@click.argument("username",)
def org_remove_owner(orgname, username):
client = AccountClient()
client.remove_org_owner(orgname, username)
return click.secho(
"The %s owner has been successfully removed from the %s organization."
% (username, orgname),
fg="green",
)
|
isandlaTech/cohorte-devtools
|
qualifier/deploy/cohorte-home/repo/cohorte/composer/node/criteria/distance/history.py
|
Python
|
apache-2.0
| 7,358
| 0.000136
|
#!/usr/bin/env python
# -- Content-Encoding: UTF-8 --
"""
Gathers components according to their history.
This algorithm is a test: it can be a memory hog
:author: Thomas Calmant
:license: Apache Software License 2.0
:version: 3.0.0
..
Copyright 2014 isandlaTech
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
# Standard library
import logging
import operator
# iPOPO Decorators
from pelix.ipopo.decorators import ComponentFactory, Provides, Instantiate, \
Invalidate, Validate, Requires
# Composer
import cohorte.composer
# ------------------------------------------------------------------------------
# Bundle version
import cohorte.version
__version__=cohorte.version.__version__
# ------------------------------------------------------------------------------
_logger = logging.getLogger(__name__)
# ------------------------------------------------------------------------------
@ComponentFactory()
@Provides(cohorte.composer.SERVICE_NODE_CRITERION_DISTANCE)
@Requires('_status', cohorte.composer.SERVICE_STATUS_NODE)
@Instantiate('cohorte-composer-node-criterion-compatibility')
class HistoryCriterion(object):
"""
Gathers components which never crashed when they were in the same isolate
"""
def __init__(self):
"""
Sets up members
"""
# A set of tuples: each tuple contains the components which were in an
# isolate that crashed
self._crashes = set()
# Injected
self._status = None
def __str__(self):
"""
String representation
"""
return "Components gathering based on history"
@Validate
def validate(self, context):
"""
Component validated
"""
# TODO: load previous crashes from a file/db...
self._crashes.clear()
@Invalidate
def invalidate(self, context):
"""
Component invalidated
"""
# TODO: store crashes to a file/db...
self._crashes.clear()
def handle_event(self, event):
"""
Does nothing: this elector only cares about what is written in
configuration files
"""
# Get the implicated components
components = set(component.name for component in event.components)
if event.kind == 'isolate.lost':
self.on_crash(components)
def on_crash(self, components):
"""
An isolate has been lost
:param components: Names of the components in the crashed isolate
"""
if len(components) == 1:
# Do not forbid a group of 1 component
# (or it would never be instantiated again)
return
# Consolidate history
crash = frozenset(components)
to_remove = []
for old_crash in self._crashes:
if crash.issubset(old_crash):
to_remove.append(old_crash)
for old_crash in to_remove:
self._crashes.remove(old_crash)
# Store the isolate composition at the time of crash
self._crashes.add(tuple(sorted(components)))
_logger.info("%d crash(es) in history:\n%s", len(self._crashes),
'\n'.join('- ' + ', '.join(crash)
for crash in self._crashes))
def vote(self, candidates, subject, ballot):
"""
Votes for the isolate(s) with the minimal compatibility distance
:param candidates: Isolates to vote for
:param subject: The component to place
:param ballot: The vote ballot
"""
# Subject component name
component_name = subject.name
# Preference for candidate: (number of components, candidate)
preference = []
# Neutral isolate (last resort)
neutral_candidate = None
# Prepare a dictionary: candidate -> components
all_components = {}
for candidate in candidates:
components = sorted(component.name
for component in candidate.components)
if not components and not candidate.name:
# Found the neutral isolate (do not add it to 'all_components')
neutral_candidate = candidate
else:
if component_name in components:
# Found the isolate where the isolate already is
components.remove(component_name)
# Store information
all_components[candidate] = components
# Sort candidates by number of components already there
sorted_candidates = [(len(content), candidate)
for candidate, content in all_components.items()]
sorted_candidates.sort(key=lambda x: (-x[0], x[1].name))
# Compute candidate preference (empty or OK)
for _, candidate in sorted_candidates:
# Analyze each candidate
components = all_components[candidate]
if not components:
# No components, we're OK with it
preference.append((0, candidate))
else:
# Ensure that the content of this isolate won't be a known
# crashing solution
future_content = set(components)
future_content.add(component_name)
for crash in self._crashes:
if future_content.issuperset(crash):
# Solution is (a superset of) a crashing solution
_logger.info(
"Known bad solution for %s:\n%s\ndue to:\n%s",
component_name,
', '.join(name for name in sorted(future_content)),
                            ', '.join(name for name in sorted(crash)))
ballot.append_against(candidate)
break
else:
# Not a crashing solution
preference.append((len(components), candidate))
# TODO: tweak vote preferences to reduce the number of moves
if preference:
# Sort results (greater is better: it gathers components)
            preference.sort(key=operator.itemgetter(0), reverse=True)
_logger.info("Vote preference for %s: %s",
component_name, ', '.join(item[1].name or "Neutral"
for item in preference))
# Vote
for _, candidate in preference:
ballot.append_for(candidate)
elif neutral_candidate is not None:
# We voted for no one: vote for neutral
_logger.info("Using neutral candidate for %s", component_name)
ballot.append_for(neutral_candidate)
# Lock our vote
ballot.lock()
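# Editor's illustrative sketch (not part of the original module): how the crash
# history above behaves. The component names are hypothetical.
#
#   criterion = HistoryCriterion()
#   criterion.on_crash({'compA', 'compB', 'compC'})
#   criterion.on_crash({'compA', 'compB'})
#   # The second crash is a subset of the first, so the superset entry is
#   # dropped and only ('compA', 'compB') is kept. In vote(), any candidate
#   # isolate whose future content contains both components is voted against.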
|
PMEAL/OpenPNM
|
tests/unit/algorithms/TransientAdvectionDiffusionTest.py
|
Python
|
mit
| 2,595
| 0.001156
|
import openpnm as op
from numpy.testing import assert_allclose
class TransientAdvectionDiffusionTest:
def setup_class(self):
self.net = op.network.Cubic(shape=[4, 3, 1], spacing=1.0)
self.geo = op.geometry.GenericGeometry(network=self.net,
pores=self.net.Ps,
throats=self.net.Ts)
self.phase = op.phase.GenericPhase(network=self.net)
self.phys = op.physics.GenericPhysics(network=self.net,
phase=self.phase,
geometry=self.geo)
self.phys['throat.diffusive_conductance'] = 1e-15
self.phys['throat.hydraulic_conductance'] = 1e-15
self.geo['pore.volume'] = 1e-14
self.sf = op.algorithms.StokesFlow(network=self.net, phase=self.phase)
self.sf.settings._update({'quantity': 'pore.pressure',
'conductance': 'throat.hydraulic_conductance'})
self.sf.set_value_BC(pores=self.net.pores('right'), values=1)
self.sf.set_value_BC(pores=self.net.pores('left'), values=0)
self.sf.run()
self.phase[self.sf.settings['quantity']] = self.sf.x
mod = op.models.physics.ad_dif_conductance.ad_dif
self.phys.add_model(propname='throat.ad_dif_conductance', model=mod,
s_scheme='powerlaw')
self.phys.regenerate_models()
def test_transient_advection_diffusion(self):
ad = op.algorithms.TransientAdvectionDiffusion(network=self.net,
phase=self.phase)
ad.settings._update({
'quantity': 'pore.concentration',
'conductance':'throat.ad_dif_conductance',
'diffusive_conductance': 'throat.diffusive_conductance',
'hydraulic_conductance': 'throat.hydraulic_conductance',
'pressure': 'pore.pressure'
})
ad.set_value_BC(pores=self.net.pores('right'), values=2)
ad.set_value_BC(pores=self.net.pores('left'), values=0)
ad.run(x0=0, tspan=(0, 1))
desired = 0.55642
actual = ad.x.mean()
assert_allclose(actual, desired, rtol=1e-5)
def teardown_class(self):
        ws = op.Workspace()
ws.clear()
if __name__ == '__main__':
    t = TransientAdvectionDiffusionTest()
t.setup_class()
self = t
for item in t.__dir__():
if item.startswith('test'):
print(f'Running test: {item}')
t.__getattribute__(item)()
|
shrimo/node_image_tools
|
node_core.py
|
Python
|
gpl-3.0
| 2,317
| 0.025896
|
# Node image tools (core)
# Copyright 2013 Victor Lavrentev
import json, sys
from node_lib import *
from PIL import Image, ImageDraw, ImageFilter
import numpy
print '\nNode image tools (core) v01a\n'
# Read node file *.json
try:
file_node=sys.argv[1]
except:
print '->Error. No script'
sys.exit (0)
with open(file_node) as jdf:
data_io = json.load(jdf)
# Bringing format node to object type and sort by ID
G = graph
sorted_names=sorted(data_io, key=lambda x : data_io[x]['id'])
cached={} #create cached list
for _name in sorted_names:
node = G(data_io[_name]) #list[properties] to node.properties
if (node.type=='read'):
try:
img = Image.open(node.file)
except:
print '->Error. No such file', node.file
sys.exit (0)
|
width, height = img.size
cached[node.name] = numpy.array(img)
|
print 'cached->', node.file,'(',width, height,')'
if (node.type=='cc'):
cached[node.name]=CC_(cached[node.link],node.bright,node.contrast)
if (node.type=='size'):
cached[node.name]=size_(cached[node.link],node.size,width,height)
if (node.type=='rotate'):
cached[node.name]=rotate_(cached[node.link],node.angle)
if (node.type=='gradient'):
cached[node.name]=gradient_(node.width,node.height)
if (node.type=='composite'):
if(node.job!='mask'):
cached[node.name]=composite_(cached[node.link_a],cached[node.link_b],
0,node.job)
if(node.job=='mask'):
cached[node.name]=composite_(cached[node.link_a],cached[node.link_b],
cached[node.mask],node.job)
if (node.type=='blur'):
cached[node.name]=blur_(cached[node.link],node.size)
if (node.type=='invert'):
cached[node.name]=invert_(cached[node.link])
if (node.type=='sharpen'):
cached[node.name]=sharpen_(cached[node.link],node.size)
if (node.type=='view'):
view_img = Image.fromarray(cached[node.link])
view_img.show()
if (node.type=='write'):
out_img = Image.fromarray(cached[node.link])
out_img.save(node.file)
print 'write->', node.file
print '\nScript:',file_node,'completed'
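# Editor's illustrative sketch (not part of the original script): a minimal node
# file that the loop above could consume. The key names (id, name, type, file,
# link, size) are inferred from the attribute accesses above and may not match
# the exact schema expected by node_lib.
# {
#     "read1":  {"id": 1, "name": "read1",  "type": "read",  "file": "in.png"},
#     "blur1":  {"id": 2, "name": "blur1",  "type": "blur",  "link": "read1", "size": 3},
#     "write1": {"id": 3, "name": "write1", "type": "write", "link": "blur1", "file": "out.png"}
# }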
|
deepmind/neural_testbed
|
neural_testbed/likelihood/utils_test.py
|
Python
|
apache-2.0
| 1,794
| 0.005017
|
# python3
# pylint: disable=g-bad-file-header
# Copyright 2021 DeepMind Technologies Limited. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Tests for neural_testbed.likelihood."""
from absl.testing import absltest
from absl.testing import parameterized
import chex
import jax
import jax.numpy as jnp
from neural_testbed.likelihood import utils
class LogSumProdTest(parameterized.TestCase):
@parameterized.product(
num_centroids=[10, 100],
tau=[100, 1000],
magnitude=[-12, -10, -8],
)
def test_not_nan(self, num_centroids: int, tau: int, magnitude: float):
"""Check that we don't get Inf."""
def compute_ll(key: chex.PRNGKey) -> float:
num_obs = jax.random.poisson(key, 1, [num_centroids])
nu = num_obs / jnp.sum(num_obs)
q_hat = jnp.ones([num_centroids, tau]) * (10 ** magnitude)
q_hat += jnp.expand_dims(nu == 0, 1).astype(jnp.float32)
q_hat = jnp.clip(q_hat, 0, 1)
return utils.log_sum_prod(nu, q_hat)
keys = jax.random.split(jax.random.PRNGKey(0), 10)
log_likelihoods = jax.jit(jax.vmap(compute_ll))(keys)
assert jnp.all(jnp.isfinite(log_likelihoods))
if __name__ == '__main__':
absltest.main()
|
smjurcak/csm
|
csmserver/work_units/inventory_work_unit.py
|
Python
|
apache-2.0
| 4,795
| 0.002294
|
# =============================================================================
# Copyright (c) 2016, Cisco Systems, Inc
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
# THE POSSIBILITY OF SUCH DAMAGE.
# =============================================================================
from models import Host
from models import InventoryJob
from models import InventoryJobHistory
from constants import JobStatus
from handlers.loader import get_inventory_handler_class
from context import InventoryContext
from utils import create_log_directory
from multi_process import WorkUnit
import traceback
import sys
class InventoryWorkUnit(WorkUnit):
def __init__(self, host_id, job_id):
WorkUnit.__init__(self)
self.host_id = host_id
self.job_id = job_id
def get_unique_key(self):
return self.host_id
    def start(self, db_session, logger, process_name):
host = None
inventory_job = None
try:
inventory_job = db_session.query(InventoryJob).filter(InventoryJob.id == self.job_id).first()
if inventory_job is None:
logger.error('Unable to retrieve inventory job: %s' % self.job_id)
return
host_id = inventory_job.host_id
host = db_session.query(Host).filter(Host.id == host_id).first()
if host is None:
logger.error('Unable to retrieve host: %s' % host_id)
ctx = InventoryContext(db_session, host, inventory_job)
handler_class = get_inventory_handler_class(ctx)
if handler_class is None:
logger.error('Unable to get handler for %s, inventory job %s', host.software_platform, self.job_id)
inventory_job.set_status(JobStatus.IN_PROGRESS)
inventory_job.session_log = create_log_directory(host.connection_param[0].host_or_ip, inventory_job.id)
db_session.commit()
handler = handler_class()
handler.execute(ctx)
if ctx.success:
self.archive_inventory_job(db_session, inventory_job, JobStatus.COMPLETED)
else:
# removes the host object as host.packages may have been modified.
db_session.expunge(host)
self.archive_inventory_job(db_session, inventory_job, JobStatus.FAILED)
# Reset the pending retrieval flag
inventory_job.request_update = False
db_session.commit()
except Exception:
try:
self.log_exception(logger, host)
self.archive_inventory_job(db_session, inventory_job, JobStatus.FAILED, trace=sys.exc_info)
# Reset the pending retrieval flag
inventory_job.request_update = False
db_session.commit()
except Exception:
self.log_exception(logger, host)
finally:
db_session.close()
def log_exception(self, logger, host):
logger.exception('InventoryManager hit exception - hostname = %s, inventory job = %s',
host.hostname if host is not None else 'Unknown', self.job_id)
def archive_inventory_job(self, db_session, inventory_job, job_status, trace=None):
inventory_job.set_status(job_status)
hist = InventoryJobHistory()
hist.host_id = inventory_job.host_id
hist.set_status(job_status)
hist.session_log = inventory_job.session_log
if trace is not None:
hist.trace = traceback.format_exc()
db_session.add(hist)
|
cloudnull/tribble-api
|
tribble/common/rpc.py
|
Python
|
gpl-3.0
| 3,626
| 0
|
# =============================================================================
# Copyright [2013] [Kevin Carter]
# License Information :
# This software has no warranty, it is provided 'as is'. It is your
# responsibility to validate the behavior of the routines and its accuracy
# using the code provided. Consult the GNU General Public license for further
# details (see GNU General Public License).
# http://www.gnu.org/licenses/gpl.html
# =============================================================================
import json
import logging
import traceback
import kombu
from kombu.utils import debug
from tribble.common import system_config
CONFIG = system_config.ConfigurationSetup()
RPC_CFG = CONFIG.config_args('rpc')
LOG = logging.getLogger('tribble-common')
def rpc_logging_service(log_level, handlers):
"""Setup an RPC logger.
:param log_level: ``object``
:param handlers: ``object``
"""
debug.setup_logging(loglevel=log_level, loggers=handlers)
def load_queues(connection):
"""Load queues off of the set topic.
:param connection: ``object``
:return: ``object``
"""
if connection is False:
return False
_routing_key = get_routing_key()
_exchange = _load_exchange(connection)
return declare_queue(_routing_key, connection, _exchange)
def _load_exchange(connection):
"""Load RPC exchange.
:param connection: ``object``
:return: ``object``
"""
return exchange(conn=connection)
def connect():
"""Create the connection the AMQP.
:return: ``object``
"""
if not RPC_CFG:
return False
return kombu.Connection(
hostname=RPC_CFG.get('host', '127.0.0.1'),
        port=RPC_CFG.get('port', 5672),
userid=RPC_CFG.get('userid', 'guest'),
password=RPC_CFG.get('password', 'guest'),
virtual_host=RPC_CFG.get('virtual_host', '/')
)
def exchange(conn):
"""Bind a connection to an exchange.
:param conn: ``object``
:return: ``object``
"""
return kombu.Exchange(
RPC_CFG.get('control_exchange', 'tribble'),
type='topic',
durable=RPC_CFG.get('durable_queues', False),
channel=conn.channel()
)
def declare_queue(routing_key, conn, topic_exchange):
"""Declare working queue.
:param routing_key: ``str``
:param conn: ``object``
:param topic_exchange: ``str``
:return: ``object``
"""
return_queue = kombu.Queue(
name=routing_key,
routing_key=routing_key,
exchange=topic_exchange,
channel=conn.channel(),
durable=RPC_CFG.get('durable_queues', False),
)
return_queue.declare()
return return_queue
def publisher(message, topic_exchange, routing_key):
"""Publish Messages into AMQP.
:param message: ``str``
:param topic_exchange: ``str``
:param routing_key: ``str``
"""
try:
msg_new = topic_exchange.Message(
json.dumps(message), content_type='application/json'
)
topic_exchange.publish(msg_new, routing_key)
except Exception:
LOG.error(traceback.format_exc())
def get_routing_key(routing_key='control_exchange'):
"""Return the routing Key from config.
:param routing_key: ``str``
:return: ``str``
"""
return '%s.info' % RPC_CFG.get(routing_key)
def default_publisher(message):
"""Publish an RPC message.
:param message: ``dict``
"""
conn = connect()
_exchange = exchange(conn)
_routing_key = get_routing_key()
publisher(
message=message, topic_exchange=_exchange, routing_key=_routing_key
)
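# Editor's usage sketch (not part of the original module), assuming an [rpc]
# section in the tribble configuration and a reachable AMQP broker:
#
#   from tribble.common import rpc
#   rpc.default_publisher({'job': 'build', 'uuid': 'example'})
#
# default_publisher() opens the kombu connection, binds the topic exchange and
# publishes the JSON-encoded message on the '<control_exchange>.info' key.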
|
ndtran/compassion-modules
|
sponsorship_tracking/migrations/1.2/pre-migration.py
|
Python
|
agpl-3.0
| 1,284
| 0.000779
|
# -*- encoding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2015 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Emanuel Cino <ecino@compassion.ch>
#
# The licence is in the file __openerp__.py
#
##############################################################################
import logging
logger = logging.getLogger()
def rename_columns(cr, column_spec):
"""
Rename table columns. Taken from OpenUpgrade.
:param column_spec: a hash with table keys, with lists of tuples as \
values. Tuples consist of (old_name, new_name).
"""
for table in column_spec.keys():
for (old, new) in column_spec[table]:
            logger.info("table %s, column %s: renaming to %s", table, old, new)
cr.execute('ALTER TABLE %s RENAME %s TO %s' % (table, old, new,))
cr.execute('DROP INDEX IF EXISTS "%s_%s_index"' % (table, old))
def migrate(cr, version):
if not version:
return
# rename field last_sds_state_change_date
rename_columns(cr, {
'recurring_contract': [
('last_sds_state_change_date', 'sds_state_date_old'),
]
})
|
Lysxia/dissemin
|
notification/settings.py
|
Python
|
agpl-3.0
| 173
| 0
|
from __future__ import unicode_literals
notification_settings = {
# PostgreSQL backend with JSON fields
    'STORAGE_BACKEND': 'notification.backends.DefaultBackend'
}
|
erudit/eruditorg
|
eruditorg/core/authorization/rules.py
|
Python
|
gpl-3.0
| 501
| 0.001996
|
# -*- coding: utf-8 -*-
import rules
from rules.predicates import is_staff
from rules.predicates import is_superuser
from core.journal.predicates import is_journal_member
from .defaults import AuthorizationConfig as AC
from .predicates import HasAuthorization
# This permission check assumes a 'Journal' object is passed to perform the perm check
rules.add_perm(
"authorization.manage_authorizations",
is_superuser | is_staff | is_journal_member & HasAuthorization(AC.can_manage_authorizations),
)
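# Editor's note (not part of the original module): the rule above is checked
# like any other django-rules permission, with a Journal instance passed as the
# object, e.g.:
#
#   user.has_perm('authorization.manage_authorizations', journal)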
|
wujuguang/motor
|
test/tornado_tests/test_motor_client.py
|
Python
|
apache-2.0
| 11,652
| 0.000086
|
# Copyright 2012-2015 MongoDB, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import unicode_literals
"""Test Motor, an asynchronous driver for MongoDB and Tornado."""
import os
import unittest
import pymongo
from pymongo import CursorType
import pymongo.mongo_client
from bson import CodecOptions
from mockupdb import OpQuery
from pymongo import ReadPreference, WriteConcern
from pymongo.errors import ConfigurationError, OperationFailure
from pymongo.errors import ConnectionFailure
from tornado import gen
from tornado.concurrent import Future
from tornado.testing import gen_test
import motor
import test
from test import SkipTest
from test.test_environment import db_user, db_password, env
from test.tornado_tests import (MotorMockServerTest,
MotorTest,
remove_all_users)
from test.utils import one, get_primary_pool
class MotorClientTest(MotorTest):
@gen_test
def test_client_lazy_connect(self):
yield self.db.test_client_lazy_connect.delete_many({})
# Create client without connecting; connect on demand.
cx = self.motor_client()
collection = cx.motor_test.test_client_lazy_connect
future0 = collection.insert_one({'foo': 'bar'})
future1 = collection.insert_one({'foo': 'bar'})
yield [future0, future1]
self.assertEqual(2, (yield collection.count_documents({'foo': 'bar'})))
cx.close()
@gen_test
def test_unix_socket(self):
if env.mongod_started_with_ssl:
raise SkipTest("Server started with SSL")
mongodb_socket = '/tmp/mongodb-%d.sock' % env.port
if not os.access(mongodb_socket, os.R_OK):
raise SkipTest("Socket file is not accessible")
encoded_socket = '%2Ftmp%2Fmongodb-' + str(env.port) + '.sock'
if test.env.auth:
uri = 'mongodb://%s:%s@%s' % (db_user, db_password, encoded_socket)
else:
uri = 'mongodb://%s' % (encoded_socket,)
client = self.motor_client(uri)
yield client.motor_test.test.insert_one({"dummy": "object"})
# Confirm it fails with a missing socket.
client = motor.MotorClient(
"mongodb://%2Ftmp%2Fnon-existent.sock", io_loop=self.io_loop,
serverSelectionTimeoutMS=100)
with self.assertRaises(ConnectionFailure):
yield client.admin.command('ismaster')
def test_io_loop(self):
with self.assertRaises(TypeError):
            motor.MotorClient(test.env.uri, io_loop='foo')
def test_database_named_delegate(self):
self.assertTrue(
isinstance(self.cx.delegate, pymongo.mongo_client.MongoClient))
        self.assertTrue(isinstance(self.cx['delegate'],
motor.MotorDatabase))
@gen_test
def test_connection_failure(self):
# Assuming there isn't anything actually running on this port
client = motor.MotorClient('localhost', 8765, io_loop=self.io_loop,
serverSelectionTimeoutMS=10)
with self.assertRaises(ConnectionFailure):
yield client.admin.command('ismaster')
@gen_test(timeout=30)
def test_connection_timeout(self):
# Motor merely tries to time out a connection attempt within the
# specified duration; DNS lookup in particular isn't charged against
# the timeout. So don't measure how long this takes.
client = motor.MotorClient(
'example.com', port=12345,
serverSelectionTimeoutMS=1, io_loop=self.io_loop)
with self.assertRaises(ConnectionFailure):
yield client.admin.command('ismaster')
@gen_test
def test_max_pool_size_validation(self):
with self.assertRaises(ValueError):
motor.MotorClient(maxPoolSize=-1)
with self.assertRaises(ValueError):
motor.MotorClient(maxPoolSize='foo')
cx = self.motor_client(maxPoolSize=100)
self.assertEqual(cx.max_pool_size, 100)
cx.close()
@gen_test(timeout=30)
def test_drop_database(self):
# Make sure we can pass a MotorDatabase instance to drop_database
db = self.cx.test_drop_database
yield db.test_collection.insert_one({})
names = yield self.cx.list_database_names()
self.assertTrue('test_drop_database' in names)
yield self.cx.drop_database(db)
names = yield self.cx.list_database_names()
self.assertFalse('test_drop_database' in names)
@gen_test
def test_auth_from_uri(self):
if not test.env.auth:
raise SkipTest('Authentication is not enabled on server')
# self.db is logged in as root.
yield remove_all_users(self.db)
db = self.db
try:
test.env.create_user(db.name, 'mike', 'password',
roles=['userAdmin', 'readWrite'])
client = self.motor_client(
'mongodb://u:pass@%s:%d' % (env.host, env.port))
with self.assertRaises(OperationFailure):
yield client.db.collection.find_one()
client = self.motor_client(
'mongodb://mike:password@%s:%d/%s' %
(env.host, env.port, db.name))
yield client[db.name].collection.find_one()
finally:
test.env.drop_user(db.name, 'mike')
def test_get_database(self):
codec_options = CodecOptions(tz_aware=True)
write_concern = WriteConcern(w=2, j=True)
db = self.cx.get_database(
'foo', codec_options, ReadPreference.SECONDARY, write_concern)
self.assertTrue(isinstance(db, motor.MotorDatabase))
self.assertEqual('foo', db.name)
self.assertEqual(codec_options, db.codec_options)
self.assertEqual(ReadPreference.SECONDARY, db.read_preference)
self.assertEqual(write_concern, db.write_concern)
@gen_test
def test_list_databases(self):
yield self.collection.insert_one({})
cursor = yield self.cx.list_databases()
self.assertIsInstance(cursor, motor.motor_tornado.MotorCommandCursor)
# Make sure the cursor works, by searching for "local" database.
while (yield cursor.fetch_next):
info = cursor.next_object()
if info['name'] == self.collection.database.name:
break
else:
self.fail("'%s' database not found" % self.collection.database.name)
@gen_test
def test_list_database_names(self):
yield self.collection.insert_one({})
names = yield self.cx.list_database_names()
self.assertIsInstance(names, list)
self.assertIn(self.collection.database.name, names)
class MotorClientTimeoutTest(MotorMockServerTest):
@gen_test
def test_timeout(self):
server = self.server(auto_ismaster=True)
client = motor.MotorClient(server.uri, socketTimeoutMS=100)
with self.assertRaises(pymongo.errors.AutoReconnect) as context:
yield client.motor_test.test_collection.find_one()
self.assertIn('timed out', str(context.exception))
client.close()
class MotorClientExhaustCursorTest(MotorMockServerTest):
def primary_server(self):
primary = self.server()
hosts = [primary.address_string]
primary.autoresponds(
'ismaster', ismaster=True, setName='rs', hosts=hosts,
maxWireVersion=6)
return primary
def primary_or_standalone(self, rs):
if rs:
return self.primary_server()
else:
return self.server(auto_ismaster=True)
@ge
|
CantemoInternal/pyxb
|
tests/trac/test-trac-0218.py
|
Python
|
apache-2.0
| 1,347
| 0.012621
|
# -*- coding: utf-8 -*-
import logging
if __name__ == '__main__':
logging.basicConfig()
_log = logging.getLogger(__name__)
import pyxb.binding.generate
import pyxb.utils.domutils
from xml.dom import Node
import os.path
xst = '''<?xml version="1.0"?>
<xs:schema xmlns:xs="http://www.w3.org/2001/XMLSchema">
<xs:element name="topLevel">
<xs:complexType>
<xs:sequence>
<xs:element name="item" type="xs:int" maxOccurs="unbounded"/>
</xs:sequence>
</xs:complexType>
</xs:element>
</xs:schema>
'''
code = pyxb.binding.generate.GeneratePython(schema_text=xst)
#print code
rv = compile(code, 'test', 'exec')
eval(rv)
from pyxb.exceptions_ import *
import unittest
class TestTrac0218 (unittest.TestCase):
def testBasic (self):
instance = topLevel()
self.assertTrue(instance.item is not None)
self.assertFalse(instance.item is None)
self.assertTrue(instance.item != None)
self.assertTrue(None != instance.item)
self.assertFalse(instance.item)
instance.item.extend([1,2,3,4])
self.assertTrue(instance.item is not None)
self.assertFalse(instance.item is None)
self.assertTrue(instance.item != None)
self.assertTrue(None != instance.item)
self.assertTrue(instance.item)
if __name__ == '__main__':
unittest.main()
|
DakRomo/2017Challenges
|
challenge_3/python/kar-moore/src/challenge3.py
|
Python
|
mit
| 496
| 0.090726
|
def maj_element(array):
    maj = len(array)/2 #python does flooring on int division
num_dict = {}
for num in array:
if num in num_dict:
num_dict[num] += 1
else:
num_dict[num] = 1
for element in num_dict:
if num_dict[element] >= maj:
return element
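# Editor's note (not in the original file): maj is computed with "/", which
# floors under Python 2, so an element is returned once its count reaches at
# least floor(len(array)/2); the function implicitly returns None otherwise.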
array = [2,2,3,7,5,7,7,7,4,7,2,7,4,5,6,7,
7,8,6,7,7,8,10,12,29,30,19,10,7,7,7,7,7,7,7,7,7]
print maj_element(array)
b_array = [1,1,2,2,2,2,2]
print maj_element(b_array)
|
tejaswi2492/pmip6ns3.13new
|
src/tools/bindings/modulegen__gcc_LP64.py
|
Python
|
gpl-2.0
| 172,830
| 0.014749
|
from pybindgen import Module, FileCodeSink, param, retval, cppclass, typehandlers
import pybindgen.settings
import warnings
class ErrorHandler(pybindgen.settings.ErrorHandler):
def handle_error(self, wrapper, exception, traceback_):
warnings.warn("exception %r in wrapper %s" % (exception, wrapper))
return True
pybindgen.settings.error_handler = ErrorHandler()
import sys
def module_init():
root_module = Module('ns.tools', cpp_namespace='::ns3')
return root_module
def register_types(module):
root_module = module.get_root()
## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList [class]
module.add_class('AttributeConstructionList', import_from_module='ns.core')
## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::Item [struct]
module.add_class('Item', import_from_module='ns.core', outer_class=root_module['ns3::AttributeConstructionList'])
## buffer.h (module 'network'): ns3::Buffer [class]
module.add_class('Buffer', import_from_module='ns.network')
## buffer.h (module 'network'): ns3::Buffer::Iterator [class]
module.add_class('Iterator', import_from_module='ns.network', outer_class=root_module['ns3::Buffer'])
## packet.h (module 'network'): ns3::ByteTagIterator [class]
module.add_class('ByteTagIterator', import_from_module='ns.network')
## packet.h (module 'network'): ns3::ByteTagIterator::Item [class]
module.add_class('Item', import_from_module='ns.network', outer_class=root_module['ns3::ByteTagIterator'])
## byte-tag-list.h (module 'network'): ns3::ByteTagList [class]
module.add_class('ByteTagList', import_from_module='ns.network')
## byte-tag-list.h (module 'network'): ns3::ByteTagList::Iterator [class]
module.add_class('Iterator', import_from_module='ns.network', outer_class=root_module['ns3::ByteTagList'])
## byte-tag-list.h (module 'network'): ns3::ByteTagList::Iterator::Item [struct]
module.add_class('Item', import_from_module='ns.network', outer_class=root_module['ns3::ByteTagList::Iterator'])
## callback.h (module 'core'): ns3::CallbackBase [class]
module.add_class('CallbackBase', import_from_module='ns.core')
## delay-jitter-estimation.h (module 'tools'): ns3::DelayJitterEstimation [class]
module.add_class('DelayJitterEstimation')
## event-garbage-collector.h (module 'tools'): ns3::EventGarbageCollector [class]
module.add_class('EventGarbageCollector')
## event-id.h (module 'core'): ns3::EventId [class]
module.add_class('EventId', import_from_module='ns.core')
## gnuplot.h (module 'tools'): ns3::Gnuplot [class]
module.add_class('Gnuplot')
## gnuplot.h (module 'tools'): ns3::GnuplotCollection [class]
module.add_class('GnuplotCollection')
## gnuplot.h (module 'tools'): ns3::GnuplotDataset [class]
module.add_class('GnuplotDataset')
## object-base.h (module 'core'): ns3::ObjectBase [class]
module.add_class('ObjectBase', allow_subclassing=True, import_from_module='ns.core')
## object.h (module 'core'): ns3::ObjectDeleter [struct]
module.add_class('ObjectDeleter', import_from_module='ns.core')
## object-factory.h (module 'core'): ns3::ObjectFactory [class]
module.add_class('ObjectFactory', import_from_module='ns.core')
## packet-metadata.h (module 'network'): ns3::PacketMetadata [class]
module.add_class('PacketMetadata', import_from_module='ns.network')
## packet-metadata.h (module 'network'): ns3::PacketMetadata::Item [struct]
module.add_class('Item', import_from_module='ns.network', outer_class=root_module['ns3::PacketMetadata'])
## packet-metadata.h (module 'network'): ns3::PacketMetadata::Item [enumeration]
    module.add_enum('', ['PAYLOAD', 'HEADER', 'TRAILER'], outer_class=root_module['ns3::PacketMetadata::Item'], import_from_module='ns.network')
## packet-metadata.h (module 'network'): ns3::PacketMetadata::ItemIterator [class]
    module.add_class('ItemIterator', import_from_module='ns.network', outer_class=root_module['ns3::PacketMetadata'])
## packet.h (module 'network'): ns3::PacketTagIterator [class]
module.add_class('PacketTagIterator', import_from_module='ns.network')
## packet.h (module 'network'): ns3::PacketTagIterator::Item [class]
module.add_class('Item', import_from_module='ns.network', outer_class=root_module['ns3::PacketTagIterator'])
## packet-tag-list.h (module 'network'): ns3::PacketTagList [class]
module.add_class('PacketTagList', import_from_module='ns.network')
## packet-tag-list.h (module 'network'): ns3::PacketTagList::TagData [struct]
module.add_class('TagData', import_from_module='ns.network', outer_class=root_module['ns3::PacketTagList'])
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter> [class]
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::Object', 'ns3::ObjectBase', 'ns3::ObjectDeleter'], parent=root_module['ns3::ObjectBase'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
## simulator.h (module 'core'): ns3::Simulator [class]
module.add_class('Simulator', destructor_visibility='private', import_from_module='ns.core')
## tag.h (module 'network'): ns3::Tag [class]
module.add_class('Tag', import_from_module='ns.network', parent=root_module['ns3::ObjectBase'])
## tag-buffer.h (module 'network'): ns3::TagBuffer [class]
module.add_class('TagBuffer', import_from_module='ns.network')
## type-id.h (module 'core'): ns3::TypeId [class]
module.add_class('TypeId', import_from_module='ns.core')
## type-id.h (module 'core'): ns3::TypeId::AttributeFlag [enumeration]
module.add_enum('AttributeFlag', ['ATTR_GET', 'ATTR_SET', 'ATTR_CONSTRUCT', 'ATTR_SGC'], outer_class=root_module['ns3::TypeId'], import_from_module='ns.core')
## type-id.h (module 'core'): ns3::TypeId::AttributeInformation [struct]
module.add_class('AttributeInformation', import_from_module='ns.core', outer_class=root_module['ns3::TypeId'])
## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation [struct]
module.add_class('TraceSourceInformation', import_from_module='ns.core', outer_class=root_module['ns3::TypeId'])
## empty.h (module 'core'): ns3::empty [class]
module.add_class('empty', import_from_module='ns.core')
## int64x64-double.h (module 'core'): ns3::int64x64_t [class]
module.add_class('int64x64_t', import_from_module='ns.core')
## chunk.h (module 'network'): ns3::Chunk [class]
module.add_class('Chunk', import_from_module='ns.network', parent=root_module['ns3::ObjectBase'])
## gnuplot.h (module 'tools'): ns3::Gnuplot2dDataset [class]
module.add_class('Gnuplot2dDataset', parent=root_module['ns3::GnuplotDataset'])
## gnuplot.h (module 'tools'): ns3::Gnuplot2dDataset::Style [enumeration]
module.add_enum('Style', ['LINES', 'POINTS', 'LINES_POINTS', 'DOTS', 'IMPULSES', 'STEPS', 'FSTEPS', 'HISTEPS'], outer_class=root_module['ns3::Gnuplot2dDataset'])
## gnuplot.h (module 'tools'): ns3::Gnuplot2dDataset::ErrorBars [enumeration]
module.add_enum('ErrorBars', ['NONE', 'X', 'Y', 'XY'], outer_class=root_module['ns3::Gnuplot2dDataset'])
## gnuplot.h (module 'tools'): ns3::Gnuplot2dFunction [class]
module.add_class('Gnuplot2dFunction', parent=root_module['ns3::GnuplotDataset'])
## gnuplot.h (module 'tools'): ns3::Gnuplot3dDataset [class]
module.add_class('Gnuplot3dDataset', parent=root_module['ns3::GnuplotDataset'])
## gnuplot.h (module 'tools'): ns3::Gnuplot3dFunction [class]
module.add_class('Gnuplot3dFunction', parent=root_module['ns3::GnuplotDataset'])
## header.h (module 'network'): ns3::Header [class]
module.add_class('Header', import_from_module='ns.network', parent=root_module['ns3::Chunk'])
## object.h (module 'core'): ns3::Object [class]
module.add_class('Object', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::Object, ns3::ObjectBas
|
rdo-management/neutron
|
neutron/tests/unit/hyperv/test_hyperv_security_groups_driver.py
|
Python
|
apache-2.0
| 7,747
| 0
|
# Copyright 2014 Cloudbase Solutions SRL
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Unit tests for the Hyper-V Security Groups Driver.
"""
import mock
from oslo_config import cfg
from neutron.plugins.hyperv.agent import security_groups_driver as sg_driver
from neutron.plugins.hyperv.agent import utilsfactory
from neutron.tests import base
CONF = cfg.CONF
class TestHyperVSecurityGroupsDriver(base.BaseTestCase):
_FAKE_DEVICE = 'fake_device'
_FAKE_ID = 'fake_id'
_FAKE_DIRECTION = 'ingress'
_FAKE_ETHERTYPE = 'IPv4'
_FAKE_ETHERTYPE_IPV6 = 'IPv6'
_FAKE_DEST_IP_PREFIX = 'fake_dest_ip_prefix'
_FAKE_SOURCE_IP_PREFIX = 'fake_source_ip_prefix'
_FAKE_PARAM_NAME = 'fake_param_name'
_FAKE_PARAM_VALUE = 'fake_param_value'
_FAKE_PORT_MIN = 9001
_FAKE_PORT_MAX = 9011
def setUp(self):
super(TestHyperVSecurityGroupsDriver, self).setUp()
self._mock_windows_version = mock.patch.object(utilsfactory,
'get_hypervutils')
self._mock_windows_version.start()
self._driver = sg_driver.HyperVSecurityGroupsDriver()
self._driver._utils = mock.MagicMock()
@mock.patch('neutron.plugins.hyperv.agent.security_groups_driver'
'.HyperVSecurityGroupsDriver._create_port_rules')
def test_prepare_port_filter(self, mock_create_rules):
mock_port = self._get_port()
mock_utils_method = self._driver._utils.create_default_reject_all_rules
self._driver.prepare_port_filter(mock_port)
self.assertEqual(mock_port,
self._driver._security_ports[self._FAKE_DEVICE])
mock_utils_method.assert_called_once_with(self._FAKE_ID)
self._driver._create_port_rules.assert_called_once_with(
self._FAKE_ID, mock_port['security_group_rules'])
def test_update_port_filter(self):
mock_port = self._get_port()
new_mock_port = self._get_port()
new_mock_port['id'] += '2'
new_mock_port['security_group_rules'][0]['ethertype'] += "2"
self._driver._security_ports[mock_port['device']] = mock_port
self._driver._create_port_rules = mock.MagicMock()
self._driver._remove_port_rules = mock.MagicMock()
self._driver.update_port_filter(new_mock_port)
self._driver._remove_port_rules.assert_called_once_with(
mock_port['id'], mock_port['security_group_rules'])
self._driver._create_port_rules.assert_called_once_with(
new_mock_port['id'], new_mock_port['security_group_rules'])
self.assertEqual(new_mock_port,
self._driver._security_ports[new_mock_port['device']])
@mock.patch('neutron.plugins.hyperv.agent.security_groups_driver'
'.HyperVSecurityGroupsDriver.prepare_port_filter')
def test_update_port_filter_new_port(self, mock_method):
mock_port = self._get_port()
self._driver.prepare_port_filter = mock.MagicMock()
self._driver.update_port_filter(mock_port)
self._driver.prepare_port_filter.assert_called_once_with(mock_port)
def test_remove_port_filter(self):
mock_port = self._get_port()
self._driver._security_ports[mock_port['device']] = mock_port
self._driver.remove_port_filter(mock_port)
self.assertFalse(mock_port['device'] in self._driver._security_ports)
def test_create_port_rules_exception(self):
fake_rule = self._create_security_rule()
self._driver._utils.create_security_rule.side_effect = Exception(
'Generated Exception for testing.')
self._driver._create_port_rules(self._FAKE_ID, [fake_rule])
def test_create_param_map(self):
fake_rule = self._create_security_rule()
self._driver._get_rule_remote_address = mock.MagicMock(
return_value=self._FAKE_SOURCE_IP_PREFIX)
actual = self._driver._create_param_map(fake_rule)
expected = {
'direction': self._driver._ACL_PROP_MAP[
'direction'][self._FAKE_DIRECTION],
'acl_type': self._driver._ACL_PROP_MAP[
'ethertype'][self._FAKE_ETHERTYPE],
'local_port': '%s-%s' % (self._FAKE_PORT_MIN, self._FAKE_PORT_MAX),
'protocol': self._driver._ACL_PROP_MAP['default'],
'remote_address': self._FAKE_SOURCE_IP_PREFIX
}
self.assertEqual(expected, actual)
@mock.patch('neutron.plugins.hyperv.agent.security_groups_driver'
'.HyperVSecurityGroupsDriver._create_param_map')
    def test_create_port_rules(self, mock_method):
fake_rule = self._create_security_rule()
mock_method.return_value = {
self._FAKE_PARAM_NAME: self._FAKE_PARAM_VALUE}
|
self._driver._create_port_rules(self._FAKE_ID, [fake_rule])
self._driver._utils.create_security_rule.assert_called_once_with(
self._FAKE_ID, fake_param_name=self._FAKE_PARAM_VALUE)
def test_convert_any_address_to_same_ingress(self):
rule = self._create_security_rule()
actual = self._driver._get_rule_remote_address(rule)
self.assertEqual(self._FAKE_SOURCE_IP_PREFIX, actual)
def test_convert_any_address_to_same_egress(self):
rule = self._create_security_rule()
rule['direction'] += '2'
actual = self._driver._get_rule_remote_address(rule)
self.assertEqual(self._FAKE_DEST_IP_PREFIX, actual)
def test_convert_any_address_to_ipv4(self):
rule = self._create_security_rule()
del rule['source_ip_prefix']
actual = self._driver._get_rule_remote_address(rule)
self.assertEqual(self._driver._ACL_PROP_MAP['address_default']['IPv4'],
actual)
def test_convert_any_address_to_ipv6(self):
rule = self._create_security_rule()
del rule['source_ip_prefix']
rule['ethertype'] = self._FAKE_ETHERTYPE_IPV6
actual = self._driver._get_rule_remote_address(rule)
self.assertEqual(self._driver._ACL_PROP_MAP['address_default']['IPv6'],
actual)
def test_get_rule_protocol_icmp(self):
self._test_get_rule_protocol(
'icmp', self._driver._ACL_PROP_MAP['protocol']['icmp'])
def test_get_rule_protocol_no_icmp(self):
self._test_get_rule_protocol('tcp', 'tcp')
def _test_get_rule_protocol(self, protocol, expected):
rule = self._create_security_rule()
rule['protocol'] = protocol
actual = self._driver._get_rule_protocol(rule)
self.assertEqual(expected, actual)
def _get_port(self):
return {
'device': self._FAKE_DEVICE,
'id': self._FAKE_ID,
'security_group_rules': [self._create_security_rule()]
}
def _create_security_rule(self):
return {
'direction': self._FAKE_DIRECTION,
'ethertype': self._FAKE_ETHERTYPE,
'dest_ip_prefix': self._FAKE_DEST_IP_PREFIX,
'source_ip_prefix': self._FAKE_SOURCE_IP_PREFIX,
'port_range_min': self._FAKE_PORT_MIN,
'port_range_max': self._FAKE_PORT_MAX
}
|
mcgov/fishpaste
|
FileStats.py
|
Python
|
mit
| 3,515
| 0.013656
|
#!/usr/bin/python3
"""
FileStats object, abstraction to take in HTML and spit out all the stats and
stuff that we want.
Also has a handy function for returning a tuple for insertion into SQL databases.
@author mgmcgove
"""
import os
import get_tags as gt
import stats_lib
import url_validator as url_lib
from make_hash import make_hash
import json
def strip_non_space_whitespace(st):
st = " ".join(st.split()) #get rid of duplicate spaces and other shit.
return st
class FileStats():
"""
Container for stats we gather on each file.
"""
def __init__(self, site_url, html):
language_table = stats_lib.set_up_character_ranges_table()
tags = gt.get_tags(html)
self.url = site_url
self.title = gt.get_title( tags )
self.outgoing_links = gt.get_links(site_url, tags)
self.outgoing_link_count = len( self.outgoing_links )
self.scripts = gt.get_scripts(tags)
self.number_of_scripts = len(self.scripts)
self.langs = dict()
self.hash = make_hash(site_url)
self.alphabets_on_site = []
for script in tags(['script', 'style']):
script.extract()
self.body = strip_non_space_whitespace( tags.getText() )
text = self.body
#print( text)
for ch in text:
lang = stats_lib.check_ranges_from_table( ch, language_table )
#print( ch , lang )
if lang in self.langs:
self.langs[lang] += 1
else:
self.langs[lang] = 1
for key in self.langs:
if self.langs[key] > len(text)/70 and key not in self.alphabets_on_site:
self.alphabets_on_site.append(key)
self.n_grams = stats_lib.count_n_grams( self.body, 5 )
self.symbol_freq = stats_lib.count_symbol_frequency( self.body )
self.symbol_entropy = stats_lib.calculate_symbol_entropy( self.symbol_freq )
self.raw_html = html
def write_out_links(self, link_filename):
##this is a dumb way to do this too.
##well now we're not using it. Don't be so negative, past matthew.
links_seen = []
with open(link_filename, 'a') as linkfile:
for link in self.outgoing_links:
if link and link not in links_seen:
links_seen.append(link)
linkfile.write(link + '\n')
def print_all_stats(self):
print( "Title:", self.title )
print( self.outgoing_link_count, self.number_of_scripts , self.outgoing_links[:2] )
print( self.alphabets_on_site )
print("----------------------------------------")
print( self.body[:100] )
def format_values_for_sql( self, urls ):
#table_columns:hash,url,title,body_text,n_incoming_links,n_outgoing_links,links,visited_links_bool, n_grams_json, symbol_freq_json, symbol_entropy,html
        deduped_links = list(set(self.outgoing_links))
for url in urls:
if url in deduped_links:
|
#print( "removing",url )
deduped_links.remove(url)
else:
#print("%s wasn't in the set of links"%url)
pass
values = [self.hash, self.url, self.title, self.body, 0, self.outgoing_link_count, deduped_links, 0 , json.dumps(self.n_grams, sort_keys=True), json.dumps(self.symbol_freq, sort_keys=True), self.symbol_entropy, strip_non_space_whitespace(self.raw_html) ]
return values
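# Editor's minimal usage sketch (not part of the original module), assuming the
# repo-local helpers (get_tags, stats_lib, make_hash) are importable:
if __name__ == '__main__':
    sample_html = ("<html><head><title>Example</title></head>"
                   "<body><p>Hello FileStats</p></body></html>")
    stats = FileStats("http://example.com/", sample_html)  # hypothetical URL
    stats.print_all_stats()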
|
WarrenWeckesser/numpy
|
benchmarks/benchmarks/bench_avx.py
|
Python
|
bsd-3-clause
| 4,701
| 0.008722
|
from .common import Benchmark
import numpy as np
avx_ufuncs = ['sin',
'cos',
'exp',
'log',
'sqrt',
'absolute',
'reciprocal',
'square',
'rint',
'floor',
'ceil' ,
'trunc']
stride = [1, 2, 4]
dtype = ['f', 'd']
class AVX_UFunc(Benchmark):
params = [avx_ufuncs, stride, dtype]
param_names = ['avx_based_ufunc', 'stride', 'dtype']
timeout = 10
def setup(self, ufuncname, stride, dtype):
np.seterr(all='ignore')
try:
            self.f = getattr(np, ufuncname)
except AttributeError:
raise NotImplementedError()
N = 10000
self.arr = np.ones(stride*N, dtype)
def time_ufunc(self, ufuncname, stride, dtype):
        self.f(self.arr[::stride])
avx_bfuncs = ['maximum',
'minimum']
class AVX_BFunc(Benchmark):
params = [avx_bfuncs, dtype, stride]
param_names = ['avx_based_bfunc', 'dtype', 'stride']
timeout = 10
def setup(self, ufuncname, dtype, stride):
np.seterr(all='ignore')
try:
self.f = getattr(np, ufuncname)
except AttributeError:
raise NotImplementedError()
N = 10000
self.arr1 = np.array(np.random.rand(stride*N), dtype=dtype)
self.arr2 = np.array(np.random.rand(stride*N), dtype=dtype)
def time_ufunc(self, ufuncname, dtype, stride):
self.f(self.arr1[::stride], self.arr2[::stride])
cmplx_bfuncs = ['add',
'subtract',
'multiply',
'divide']
cmplxstride = [1, 2, 4]
cmplxdtype = ['F', 'D']
class AVX_cmplx_arithmetic(Benchmark):
params = [cmplx_bfuncs, cmplxstride, cmplxdtype]
param_names = ['bfunc', 'stride', 'dtype']
timeout = 10
def setup(self, bfuncname, stride, dtype):
np.seterr(all='ignore')
try:
self.f = getattr(np, bfuncname)
except AttributeError:
raise NotImplementedError()
N = 10000
self.arr1 = np.ones(stride*N, dtype)
self.arr2 = np.ones(stride*N, dtype)
def time_ufunc(self, bfuncname, stride, dtype):
self.f(self.arr1[::stride], self.arr2[::stride])
cmplx_ufuncs = ['reciprocal',
'absolute',
'square',
'conjugate']
class AVX_cmplx_funcs(Benchmark):
params = [cmplx_ufuncs, cmplxstride, cmplxdtype]
param_names = ['bfunc', 'stride', 'dtype']
timeout = 10
def setup(self, bfuncname, stride, dtype):
np.seterr(all='ignore')
try:
self.f = getattr(np, bfuncname)
except AttributeError:
raise NotImplementedError()
N = 10000
self.arr1 = np.ones(stride*N, dtype)
def time_ufunc(self, bfuncname, stride, dtype):
self.f(self.arr1[::stride])
class Mandelbrot(Benchmark):
def f(self,z):
return np.abs(z) < 4.0
def g(self,z,c):
return np.sum(np.multiply(z,z) + c)
def mandelbrot_numpy(self, c, maxiter):
output = np.zeros(c.shape, np.int)
z = np.empty(c.shape, np.complex64)
for it in range(maxiter):
notdone = self.f(z)
output[notdone] = it
z[notdone] = self.g(z[notdone],c[notdone])
output[output == maxiter-1] = 0
return output
def mandelbrot_set(self,xmin,xmax,ymin,ymax,width,height,maxiter):
r1 = np.linspace(xmin, xmax, width, dtype=np.float32)
r2 = np.linspace(ymin, ymax, height, dtype=np.float32)
c = r1 + r2[:,None]*1j
n3 = self.mandelbrot_numpy(c,maxiter)
return (r1,r2,n3.T)
def time_mandel(self):
self.mandelbrot_set(-0.74877,-0.74872,0.06505,0.06510,1000,1000,2048)
class LogisticRegression(Benchmark):
timeout = 1000
def train(self, max_epoch):
for epoch in range(max_epoch):
z = np.matmul(self.X_train, self.W)
A = 1 / (1 + np.exp(-z)) # sigmoid(z)
loss = -np.mean(self.Y_train * np.log(A) + (1-self.Y_train) * np.log(1-A))
dz = A - self.Y_train
dw = (1/self.size) * np.matmul(self.X_train.T, dz)
self.W = self.W - self.alpha*dw
def setup(self):
np.random.seed(42)
self.size = 250
features = 16
self.X_train = np.float32(np.random.rand(self.size,features))
self.Y_train = np.float32(np.random.choice(2,self.size))
# Initialize weights
self.W = np.zeros((features,1), dtype=np.float32)
self.b = np.zeros((1,1), dtype=np.float32)
self.alpha = 0.1
def time_train(self):
self.train(1000)
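# Editor's note (not in the original file): LogisticRegression.train() above is
# a plain batch gradient-descent loop -- sigmoid activation, binary
# cross-entropy loss, fixed learning rate -- sized so the benchmark exercises
# matmul, exp and log on float32 arrays.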
|
scigghia/account-invoicing
|
account_invoice_validation_workflow/__openerp__.py
|
Python
|
agpl-3.0
| 1,645
| 0
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Author: Joël Grand-Guillaume (Camptocamp)
# Copyright 2010-2015 Camptocamp SA
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
|
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Add "To Send" and "To Validate" states in Invoices',
'version': '8.0.1.0.1',
'category': 'Generic Modules/Invoicing',
'description':
'''
This module adds 2 states between draft and open state in invoices:
- To Validate: For invoices which need a validation
- To Send: For all invoices that need to be sent
''',
'author': "Camptocamp,Odoo Community Association (OCA)",
'website': 'http://camptocamp.com',
'license': 'AGPL-3',
'depends': ['account'],
'data': [
'invoice_wkf.xml',
'invoice_view.xml',
],
'demo': [],
'test': [],
'installable': True,
'auto_install': False,
'application': False
}
|
walterbender/turtle3D
|
TurtleArtActivity.py
|
Python
|
mit
| 79,936
| 0.000851
|
# -*- coding: utf-8 -*-
#Copyright (c) 2007, Playful Invention Company
#Copyright (c) 2008-14, Walter Bender
#Copyright (c) 2009-13 Raul Gutierrez Segales
#Copyright (c) 2012 Alan Aguiar
#Permission is hereby granted, free of charge, to any person obtaining a copy
#of this software and associated documentation files (the "Software"), to deal
#in the Software without restriction, including without limitation the rights
#to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
#copies of the Software, and to permit persons to whom the Software is
#furnished to do so, subject to the following conditions:
#The above copyright notice and this permission notice shall be included in
#all copies or substantial portions of the Software.
#THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
#IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
#FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
#AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
#LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
#OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
#THE SOFTWARE.
import pygtk
pygtk.require('2.0')
import gtk
import cairo
import gobject
import dbus
import logging
_logger = logging.getLogger('turtleart-activity')
from sugar.activity import activity
try: # 0.86 toolbar widgets
    from sugar.activity.widgets import (ActivityToolbarButton, StopButton)
from sugar.graphics.toolbarbox import (ToolbarBox, ToolbarButton)
HAS_TOOLBARBOX = True
except ImportError:
HAS_TOOLBARBOX = False
from sugar.graphics.toolbutton import ToolButton
from sugar.graphics.radiotoolbutton import RadioToolButton
from sugar.graphics.alert import (ConfirmationAlert, Alert)
from sugar.graphics import style
from sugar.graphics.icon import Icon
from sugar.graphics.xocolor import XoColor
from sugar.datastore import datastore
from sugar import profile
import os
import glob
import tarfile
import subprocess
import ConfigParser
import shutil
import tempfile
try:
import gconf
HAS_GCONF = True
except ImportError:
HAS_GCONF = False
from gettext import gettext as _
from TurtleArt.taplugin import (load_a_plugin, cancel_plugin_install,
complete_plugin_install)
from TurtleArt.tapalette import (palette_names, help_strings, help_palettes,
help_windows, default_values)
from TurtleArt.taconstants import (BLOCK_SCALE, XO1, XO15, XO175, XO4,
MIMETYPE, PASTE_OFFSET)
from TurtleArt.taexportlogo import save_logo
from TurtleArt.taexportpython import save_python
from TurtleArt.tautils import (data_to_file, data_to_string, data_from_string,
get_path, chooser_dialog, get_hardware)
from TurtleArt.tawindow import TurtleArtWindow
from TurtleArt.tacollaboration import Collaboration
from TurtleArt.taprimitive import PyExportError
if HAS_TOOLBARBOX:
from util.helpbutton import (HelpButton, add_section, add_paragraph)
class TurtleArtActivity(activity.Activity):
''' Activity subclass for Turtle Art '''
_HOVER_HELP = '/desktop/sugar/activities/turtleart/hoverhelp'
_ORIENTATION = '/desktop/sugar/activities/turtleart/orientation'
_COORDINATE_SCALE = '/desktop/sugar/activities/turtleart/coordinatescale'
def __init__(self, handle):
''' Set up the toolbars, canvas, sharing, etc. '''
try:
super(TurtleArtActivity, self).__init__(handle)
except dbus.exceptions.DBusException, e:
_logger.error(str(e))
self.tw = None
self.init_complete = False
self.bundle_path = activity.get_bundle_path()
self.error_list = []
self.palette_buttons = []
self._palette_names = []
self._overflow_buttons = []
self._check_ver_change(get_path(activity, 'data'))
self.connect("notify::active", self._notify_active_cb)
self.has_toolbarbox = HAS_TOOLBARBOX
_logger.debug('_setup_toolbar')
self._setup_toolbar()
self.label_offset = style.GRID_CELL_SIZE
_logger.debug('_setup_canvas')
self._setup_canvas(self._setup_scrolled_window())
_logger.debug('_setup_palette_toolbar')
self._setup_palette_toolbar()
self._setup_extra_controls()
_logger.debug('_setup_sharing')
if self.shared_activity:
# We're joining
if not self.get_shared():
xocolors = XoColor(profile.get_color().to_string())
share_icon = Icon(icon_name='zoom-neighborhood',
xo_color=xocolors)
self._joined_alert = Alert()
self._joined_alert.props.icon = share_icon
self._joined_alert.props.title = _('Please wait')
self._joined_alert.props.msg = _('Starting connection...')
self.add_alert(self._joined_alert)
# Wait for joined signal
self.connect("joined", self._joined_cb)
self._setup_sharing()
# Activity count is the number of times this instance has been
# accessed
count = 1
if hasattr(self, 'metadata') and self.metadata is not None:
if 'activity count' in self.metadata:
count = int(self.metadata['activity count'])
count += 1
self.metadata['activity count'] = str(count)
self._defer_palette_move = False
# Now called from lazy_init
# self.check_buttons_for_fit()
if HAS_GCONF:
self.client = gconf.client_get_default()
if self.client.get_int(self._HOVER_HELP) == 1:
self._do_hover_help_toggle(None)
if not self.client.get_int(self._COORDINATE_SCALE) in [0, 1]:
self.tw.coord_scale = 1
self.do_rescale_cb(None)
else:
self.tw.coord_scale = 0
self.do_rescale_cb(None)
self._selected_sample = None
self._sample_window = None
self.init_complete = True
def update_palette_from_metadata(self):
if HAS_GCONF:
# We have to wait to set the orientation for the palettes
# to be loaded.
self.client = gconf.client_get_default()
if self.client.get_int(self._ORIENTATION) == 1:
self.tw.set_orientation(1)
if 'palette' in self.metadata:
n = int(self.metadata['palette'])
if n == -1:
self.tw.hideshow_palette(False)
else:
# Try to set radio button to active
if n < len(self.palette_buttons):
self.palette_buttons[n].set_active(True)
else:
self.tw.show_palette(n=0)
if 'orientation' in self.metadata:
self.tw.set_orientation(int(self.metadata['orientation']))
else:
# Else start on the Turtle palette
self.tw.show_palette(n=0)
def check_buttons_for_fit(self):
''' Check to see which set of buttons to display '''
if not self.has_toolbarbox:
return
# If there are too many palettes to fit, put them in a
# scrolling window
self._setup_palette_toolbar()
if self.samples_button in self.toolbox.toolbar:
self.toolbox.toolbar.remove(self.extras_separator)
self.toolbox.toolbar.remove(self.samples_button)
self.toolbox.toolbar.remove(self.stop_separator)
self.toolbox.toolbar.remove(self.stop_button)
self._view_toolbar.remove(self._coordinates_toolitem)
if gtk.gdk.screen_width() / 14 < style.GRID_CELL_SIZE:
self.samples_button2.show()
self.samples_label2.show()
self.toolbox.toolbar.insert(self.stop_button, -1)
else:
self.samples_button2.hide()
self.samples_label2.hide()
self.toolbox.toolbar.insert(self.extras_separator, -1)
|
holmes-app/holmes-api
|
holmes/validators/title.py
|
Python
|
mit
| 2,893
| 0
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
from holmes.validators.base import Validator
from holmes.utils import _
class TitleValidator(Validator):
@classmethod
def get_violation_definitions(cls):
return {
'page.title.not_found': {
'title': _('Page title not found'),
'description': _("Title was not found on '%s'."),
'category': _('HTTP'),
'generic_description': _(
'Validates the presence of the page\'s title. '
'The <title> tag is required in all HTML documents '
'and it defines the title of the document.'
)
},
'page.title.multiple': {
'title': _('Too many titles'),
'description': _(
"Page '%(page_url)s' has %(title_count)d title tags."),
'category': _('Semantics'),
'generic_description': _(
'Validates the presence of more than one page\'s title '
'tag.'
)
},
'page.title.size': {
'title': _('Maximum size of a page title'),
'description': _(
'Title is too long on "%(page_url)s". '
'The max size is %(max_size)d characters.'),
'category': _('SEO'),
'generic_description': _(
'Validates the size of the page\'s title.'),
'unit': 'number'
}
}
@classmethod
def get_default_violations_values(cls, config):
return {
'page.title.size': {
'value': config.MAX_TITLE_SIZE,
'description': config.get_description('MAX_TITLE_SIZE')
}
}
def validate(self):
max_title_size = self.get_violation_pref('page.title.size')
title_count = self.review.data.get('page.title_count', 0)
title = self.review.data.get('page.title', None)
if not title_count or not title:
self.add_violation(
key='page.title.not_found',
value=self.reviewer.page_url,
|
points=50
)
return
if title_count > 1:
self.add_violation(
key='page.title.multiple',
value={
|
'page_url': self.reviewer.page_url,
'title_count': title_count
},
points=50
)
elif max_title_size and len(title) > int(max_title_size):
self.add_violation(
key='page.title.size',
value={
'page_url': self.reviewer.page_url,
'max_size': int(max_title_size)
},
points=10
)
|
stvstnfrd/configuration
|
playbooks/callback_plugins/sqs.py
|
Python
|
agpl-3.0
| 6,158
| 0.001786
|
# Copyright 2013 John Jarvis <john@jarv.org>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# From https://github.com/ansible/ansible/issues/31527#issuecomment-335495855
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
import sys
import time
import json
import socket
try:
import boto
except ImportError:
boto = None
else:
import boto.sqs
from boto.exception import NoAuthHandlerFound
from ansible.plugins.callback import CallbackBase
class CallbackModule(CallbackBase):
"""
This Ansible callback plugin sends task events
to SQS.
The following vars must be set in the environment:
ANSIBLE_ENABLE_SQS - enables the callback module
SQS_REGION - AWS region to connect to
SQS_MSG_PREFIX - Additional data that will be put
on the queue (optional)
The following events are put on the queue
- FAILURE events
- OK events
- TASK events
- START events
"""
def __init__(self):
self.enable_sqs = 'ANSIBLE_ENABLE_SQS' in os.environ
if not self.enable_sqs:
return
# make sure we got our imports
if not boto:
raise ImportError(
"The sqs callback module requires the boto Python module, "
"which is not installed or was not found."
)
self.start_time = time.time()
if not 'SQS_REGION' in os.environ:
print('ANSIBLE_ENABLE_SQS enabled but SQS_REGION ' \
'not defined in environment')
sys.exit(1)
self.region = os.environ['SQS_REGION']
try:
self.sqs = boto.sqs.connect_to_region(self.region)
except NoAuthHandlerFound:
print('ANSIBLE_ENABLE_SQS enabled but cannot connect ' \
                  'to AWS due to invalid credentials')
sys.exit(1)
if not 'SQS_NAME' in os.environ:
print('ANSIBLE_ENABLE_SQS enabled but SQS_NAME not ' \
'defined in environment')
sys.exit(1)
self.name = os.environ['SQS_NAME']
self.queue = self.sqs.create_queue(self.name)
if 'SQS_MSG_PREFIX' in os.environ:
self.prefix = os.environ['SQS_MSG_PREFIX']
else:
self.prefix = ''
self.last_seen_ts = {}
def runner_on_failed(self, host, res, ignore_errors=False):
if self.enable_sqs:
if not ignore_errors:
self._send_queue_message(res, 'FAILURE')
def runner_on_ok(self, host, res):
if self.enable_sqs:
# don't send the setup results
if 'invocation' in res and 'module_name' in res['invocation'] and res['invocation']['module_name'] != "setup":
self._send_queue_message(res, 'OK')
def playbook_on_task_start(self, name, is_conditional):
if self.enable_sqs:
self._send_queue_message(name, 'TASK')
def playbook_on_play_start(self, pattern):
if self.enable_sqs:
self._send_queue_message(pattern, 'START')
def playbook_on_stats(self, stats):
if self.enable_sqs:
d = {}
delta = time.time() - self.start_time
d['delta'] = delta
            for s in ['changed', 'failures', 'ok', 'processed', 'skipped']:
d[s] = getattr(stats, s)
self._send_queue_message(d, 'STATS')
def _send_queue_message(self, msg, msg_type):
|
if self.enable_sqs:
from_start = time.time() - self.start_time
payload = {msg_type: msg}
payload['TS'] = from_start
payload['PREFIX'] = self.prefix
# update the last seen timestamp for
# the message type
self.last_seen_ts[msg_type] = time.time()
if msg_type in ['OK', 'FAILURE']:
# report the delta between the OK/FAILURE and
# last TASK
if 'TASK' in self.last_seen_ts:
from_task = \
self.last_seen_ts[msg_type] - self.last_seen_ts['TASK']
payload['delta'] = from_task
for output in ['stderr', 'stdout']:
if output in payload[msg_type]:
# only keep the last 1000 characters
# of stderr and stdout
# Some modules set the value of stdout or stderr to booleans in
# which case the len will fail. Check to see if there is content
# before trying to clip it.
if payload[msg_type][output] and len(payload[msg_type][output]) > 1000:
payload[msg_type][output] = "(clipping) ... " \
+ payload[msg_type][output][-1000:]
if 'stdout_lines' in payload[msg_type]:
# only keep the last 20 or so lines to avoid payload size errors
if len(payload[msg_type]['stdout_lines']) > 20:
payload[msg_type]['stdout_lines'] = ['(clipping) ... '] + payload[msg_type]['stdout_lines'][-20:]
while True:
try:
self.sqs.send_message(self.queue, json.dumps(payload))
break
except socket.gaierror as e:
                    print('socket.gaierror will retry: ' + str(e))
time.sleep(1)
except Exception as e:
raise e
|
tedlaz/pyted
|
misthodosia/pyMisthodosia/src/tst.py
|
Python
|
gpl-3.0
| 11,303
| 0.045007
|
#!/usr/bin/env python
#coding=utf-8
'''
Created on 06 Ιουν 2011
@author: tedlaz
'''
import functions as f
import decimal as decim
from datetime import datetime, date, time
class d():
def __init__(self,val=0, txt='',desc='',decimals=4):
self.decimals = decimals
self.val = f.dec(val,self.decimals)
self.txt = txt
self.desc= desc
def d(self,txt):
self.txt=txt
def __add__(self,x):
if isinstance(x,int):
|
return d(self.val+x,'','%s<%s> + %s' % (self.txt,self.val,x ))
elif isinstance(x,decim.Decimal):
return d(self.val+x,'','%s<%s> + %s' % (self.txt,self.val,x ))
elif isinstance(x,d):
            return d(self.val+x.val,'','%s<%s> + %s<%s>' %(self.txt,self.val,x.txt,x.val))
else:
return d(0,'Error')
def __sub__(self,x):
if isinstance(x,int):
return d(self.val-x,'','%s<%s> - %s' % (self.txt,self.val,x ))
elif isinstance(x,decim.Decimal):
return d(self.val-x,'','%s<%s> - %s' % (self.txt,self.val,x ))
elif isinstance(x,d):
return d(self.val-x.val,'','%s<%s> - %s<%s>' %(self.txt,self.val,x.txt,x.val))
else:
return d(0,'Error')
def __mul__(self,x):
if isinstance(x,int):
return d(f.dec(self.val*f.dec(x),4),'','%s<%s> X %s' % (self.txt,self.val,x ))
elif isinstance(x,decim.Decimal):
return d(f.dec(self.val*f.dec(x),4),'','%s<%s> X %s' % (self.txt,self.val,x ))
elif isinstance(x,d):
return d(f.dec(self.val*x.val,4),'','%s<%s> X %s<%s>' %(self.txt,self.val,x.txt,x.val))
else:
return d(0,'Error')
def __div__(self,x):
if isinstance(x,int):
return d(f.dec(self.val/f.dec(x),4),'','%s<%s> / %s' % (self.txt,self.val,x ))
elif isinstance(x,decim.Decimal):
return d(f.dec(self.val/f.dec(x),4),'','%s<%s> / %s' % (self.txt,self.val,x ))
elif isinstance(x,d):
return d(f.dec(self.val/x.val,4),'','%s<%s> / %s<%s>' %(self.txt,self.val,x.txt,x.val))
else:
return d(0,'Error')
def __str__(self):
if len(self.desc.strip()) == 0:
return '%s = %s '% (self.txt,self.val)
else:
return '%s = %s (%s)'% (self.txt,self.val,self.desc)
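# Minimal usage sketch of the d() wrapper above (hypothetical figures, not from the payroll code):
#   misthos = d(1000, 'Μισθός')
#   imeromisthio = misthos / 25
#   print imeromisthio   # prints the numeric value plus the human-readable trace kept in .desc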
def calcMisNormal(misth,misths,mer,brad=0,kyrMer=0,kyrOr=0,yp=0,ypBrad=0,ypKy=0):
code = d(901,'Τακτικές αποδοχές')
misthos =d(misth,'Μισθός') ; print misthos
misthoss = d(misths,'Συμβατικός μισθός') ; print misthoss
imergasias = d(mer,'Ημέρες εργασίας') ; print imergasias
brady = d(brad,'Ώρες για Βραδυνή προσαύξηση') ; print brady
kyriakiMeres = d(kyrMer,'Μέρες Κυριακές-Αργίες για προσαύξηση') ; print kyriakiMeres
kyriakiOres = d(kyrOr,'ώρες για προσαύξηση Κυριακών-Αργιών') ; print kyriakiOres
yperories = d(yp,'Υπερωρίες') ; print yperories
ypBrady = d(ypBrad,'Υπερωρίες για βραδυνή προσαύξηση') ; print ypBrady
ypKyr = d(ypKy,'Υπερωρίες για προσαύξηση Κυριακών-Αργιών') ; print ypKyr
meresSymbasisMina =d(25,'Ημέρες εργασίας ανά μήνα') ; print meresSymbasisMina
meresIKABdomada =d(6,'Ημέρες εργασίας (ΙΚΑ) ανά βδομάδα') ; print meresIKABdomada
meresSymbasiBdomada =d(5,'Ημέρες εργασίας πραγματικές ανά βδομάδα') ; print meresSymbasiBdomada
oresSymbasiBdomada = d(40,'Ώρες εργασίας ανά βδομάδα') ; print oresSymbasiBdomada
posostoNyxta = d(.25,'Ποσοστό νυχτερινής προσαύξησης') ; print posostoNyxta
posostoKyriaki = d(.75,'Ποσοστό προσαύξησης Κυριακών-Αργιών') ; print posostoKyriaki
posostoYperoria= d(.5,'Ποσοστό προσαύξησης υπερωριών') ;print posostoYperoria
imeromisthio = misthos / meresSymbasisMina ; imeromisthio.d('Ημερομίσθιο') ; print imeromisthio
imeromisthios= misthoss/ meresSymbasisMina ; imeromisthios.d('Συμβατικό Ημερομίσθιο') ; print imeromisthios
bdomadiatiko = imeromisthio * meresIKABdomada ; bdomadiatiko.d('Βδομαδιάτικο') ; print bdomadiatiko
bdomadiatikos = imeromisthios * meresIKABdomada ; bdomadiatikos.d('Συμβατικό Βδομαδιάτικο'); print bdomadiatikos
oromisthio = bdomadiatiko / oresSymbasiBdomada ; oromisthio.d('Ωρομίσθιο') ; print oromisthio
oromisthios = bdomadiatikos / oresSymbasiBdomada ; oromisthios.d('Συμβατικό Ωρομίσθιο') ; print oromisthios
apodoxes = imeromisthio * imergasias ; apodoxes.d('Τακτικές αποδοχές περιόδου'); print apodoxes
nyxta1 = oromisthios * brady ; nyxta1.d('Νυχτερινές αποδοχές για προσαύξηση') ; print nyxta1
nyxta2 = nyxta1 * posostoNyxta ; nyxta2.d('Προσαύξηση νυχτερινής απασχόλησης') ; print nyxta2
kyrm1 = imeromisthio * posostoKyriaki ; kyrm1.d('Ημερομίσθιο προσαύξησης Κυριακής');print kyrm1
kyrm2 = kyriakiMeres * kyrm1 ; kyrm2.d('Προσαύξηση Κυριακών'); print kyrm2
kyr1 = oromisthios * kyriakiOres ; kyr1.d('Αποδοχές Κυριακών-Αργιών για προσαύξηση') ; print kyr1
kyr2 = kyr1 * posostoKyriaki ; kyr2.d('Προσαύξηση Κυριακών-Αργιών') ; print kyr2
yp1 = oromisthio * yperories ; yp1.d('Κανονική αμοιβή υπερωριών') ; print yp1
ypkyr1 = oromisthios * ypKyr ; ypkyr1.d('Αποδοχές υπερωρίας Κυριακών-Αργιών για προσαύξηση') ; print ypkyr1
ypkyr2 = ypkyr1 * posostoKyriaki ; ypkyr2.d('Προσαύξηση υπερωριών Κυριακών-Αργιών') ; print ypkyr2
ypnyxta1 = oromisthios * ypBrady ; ypnyxta1.d('Νυχτερινές αποδοχές υπερωριών για προσαύξηση') ; print ypnyxta1
ypnyxta2 = ypnyxta1 * posostoNyxta ; ypnyxta2.d('Προσαύξηση υπερωριών νυχτερινής απασχόλησης') ; print ypnyxta2
yp2 = yp1+ypkyr2+ypnyxta2 ; yp2.d('Συνολική αξία υπερωριών για προσαύξηση') ; print yp2
yp3 = yp2 * posostoYperoria ; yp3.d('Προσαύξηση Υπερωριών') ; print yp3
yp4 = yp1+yp3 ; yp4.d('Συνολικό κόστος υπερωριών'); print yp4
totalApodoxes = apodoxes +nyxta2+kyr2+yp4; totalApodoxes.d('Συνολικές μικτές αποδοχές περιόδου') ; print totalApodoxes
pika = d(.4406,'Ποσοστό ΙΚΑ') ; print pika
pikaenos = d(.16,'Ποσοστό ΙΚΑ εργαζομένου') ; print pikaenos
pikaetis = pika - pikaenos ; pikaetis.d('Ποσοστό ΙΚΑ εργοδότη');print pikaetis
ika = totalApodoxes * pika ; ika.d('ΙΚΑ'); print ika
ikaenos = totalApodoxes * pikaenos ; ikaenos.d('ΙΚΑ εργαζομένου'); print ikaenos
ikaetis = ika - ikaenos ; ikaetis.d('ΙΚΑ εργοδότη'); print ikaetis
forologiteo = totalApodoxes - ikaenos ; forologiteo.d('Φορολογητέο') ; print forologiteo
def meresImerologiakes(apo,eos):
yapo ,yeos = int(apo[:4]) , int(eos[:4])
mapo ,meos = int(apo[4:6]), int(eos[4:6])
dapo ,deos = int(apo[6:]) , int(eos[6:])
dat_apo = date(yapo,mapo,dapo)
dat_eos = date(yeos,meos,deos)
delta = dat_eos - dat_apo
meres = delta.days+1
return meres
def ores(apo,eos): # yyyymmddhhmm
yapo ,yeos = int(apo[:4]) , int(eos[:4] )
mapo ,meos = int(apo[4:6]), int(eos[4:6] )
dapo ,deos = int(apo[6:8]) , int(eos[6:8] )
hapo, heos = int(apo[8:10]), int(eos[8:10] )
lapo, leos = int(apo[10:]), int(eos[10:] )
brEnarksi = 22
brLiksi = 6
dat_apo = datetime(yapo,mapo,dapo,hapo,lapo)
dat_eos = datetime(yeos,meos,deos,heos,leos)
delta = dat_eos - dat_apo
ores = (delta.seconds / 3600.0)+ (delta.days*24)
nyxterines = 0
loipes = 0
v1 = lapo / 60.0
v2 = leos / 60.0
#if v2 == 0 : v2 =1
for i in range(hapo,int(ores+hapo)):
modi = i % 24
if modi < 6 or modi >=22:
nyxterines += 1
else:
loipes += 1
if hapo < 6 or hapo >=22:
nyxterines = nyxterines - v1
else:
loipes = loipes - v1
if heos < 6 or heos >= 22:
nyxterines = nyxterines + v2
else:
loipes = loipes + v2
yperories = ores - 8
if yperories < 0: yperories = 0
return ores, loipes, nyxterines,yperories
def calcMisDoroXrist(apo,eos,_meres,_poso):
code = d(903,'Δώρο Χριστουγέννων')
total_meres = d(meresImerologiakes('20110501','20111231'),'Συνολικές μέρες για Δώρο')
meres = d(_meres,'Ημέρες εργασίας')
poso = d(_poso,'Αποδοχές περιόδου')
pmeres = d(meresImerologiakes(apo,eos),'Ημέρες α
|
erigones/esdc-ce
|
api/mon/backends/zabbix/__init__.py
|
Python
|
apache-2.0
| 379
| 0.002639
|
from api.mon.backends.zabbix.monitor import Zabbix, get_zabbix, del_zabbix
from api.mon.backends.zabbix.server import ZabbixMonitoringServer
get_monitoring = get_zabbix
del_monitoring = del_zabbix
MonitoringBackendClass = Zabbix
MonitoringServerClass = ZabbixMonitoringServer
__all__ = ('get_monitoring', 'del_monitoring', 'MonitoringBackendClass', 'MonitoringServerClass')
|
jujumo/gpsbip-configurator
|
source/task/path_tools_test.py
|
Python
|
mit
| 2,315
| 0.000864
|
#!/usr/bin/env python
import unittest
from os.path import realpath
from path_tools import create_filepath
class TestCreateFilepath(unittest.TestCase):
def test_identity(self):
original_filepath = realpath('E:/GoPro/2018-04-18/GH010926.MP4')
dest_filepath = create_filepath(original_filepath)
dest_filepath = realpath(dest_filepath)
self.assertEqual(original_filepath, dest_filepath)
def test_prefix(self):
original_filepath = realpath('E:/GoPro/2018-04-18/GH010926.MP4')
expected_filepath = realpath('E:/GoPro/2018-04-18/prefix_GH010926.MP4')
dest_filepath = create_filepath(original_filepath, prefix='prefix_')
dest_filepath = realpath(dest_filepath)
self.assertEqual(expected_filepath, dest_filepath)
def test_suffix(self):
original_filepath = realpath('E:/GoPro/2018-04-18/GH010926.MP4')
expected_filepath = realpath('E:/GoPro/2018-04-18/GH010926_suffix.MP4')
dest_filepath = create_filepath(original_filepath, suffix='_suffix')
dest_filepath = realpath(dest_filepath)
self.assertEqual(expected_filepath, dest_filepath)
def test_ext(self):
original_filepath = realpath('E:/GoPro/2018-04-18/GH010926.MP4')
expected_filepath = realpath('E:/GoPro/2018-04-18/GH010926.mp3')
dest_filepath = create_filepath(original_filepath, ext='.mp3')
dest_filepath = realpath(dest_filepath)
self.assertEqual(expected_filepath, dest_filepath)
def test_another_dir(self):
original_filepath = realpath('E:/GoPro/2018-04-18/GH010926.MP4')
        expected_filepath = realpath('C:/temp/GH010926.MP4')
dest_filepath = create_filepath(original_filepath, dest_dirpath='C:/temp')
dest_filepath = realpath(dest_filepath)
self.assertEqual(expected_filepath, dest_filepath)
def test_another_relative_dir(self):
original_filepath = realpath('E:/GoPro/2018-04-18/GH010926.MP4')
expected_filepath = realpath('C:/temp/2018-04-18/GH010926.MP4')
        dest_filepath = create_filepath(original_filepath, original_rootpath='E:/GoPro/', dest_dirpath='C:/temp')
dest_filepath = realpath(dest_filepath)
self.assertEqual(expected_filepath, dest_filepath)
if __name__ == '__main__':
unittest.main()
|
digling/cddb
|
datasets/Allen2007/raw/inventories.py
|
Python
|
gpl-3.0
| 838
| 0.002387
|
from lingpy import *
from collections import defaultdict
invs = csv2list('inventories.tsv', strip_lines=False)
segments = defaultdict(dict)
langs = set()
for d, t, v, a, n in invs[1:]:
if a.strip():
allophone, context = a.split('/')
segments[allophone, t][d] = (v, context)
segments[v, t][d] = (a, n or 'X')
langs.add(d)
langs = sorted(langs)
with open('Allen2007.prf', 'w') as f:
    f.write('GRAPHEMES\tTYPE\tAllen2007\tCLPA\tSTRUCTURE\t'+'\t'.join(langs)+'\n')
for a, b in sorted(segments):
f.write('\t'.join([a, b, a, a, '']))
for lang in langs:
vals = segments[a, b].get(lang, ['', ''])
if vals[0] and vals[1]:
value = '/'.join(vals)
else:
value = ''.join(vals)
f.write('\t'+value)
f.write('\n')
|
gkc1000/pyscf
|
pyscf/nao/test/test_0100_fireball.py
|
Python
|
apache-2.0
| 441
| 0.013605
|
from __future__ import print_function, division
import unittest, numpy as np
from pyscf.nao import mf
class KnowValues(unittest.TestCase):
def test_fireball(self):
""" Test computation of matrix elements of overlap after fireball """
    sv = mf(fireball="fireball.out", gen_pb=False)
    s_ref = sv.hsx.s4_csr.toarray()
s = sv.overlap_coo().toarray()
#print(abs(s-s_ref).sum())
if __name__ == "__main__": unittest.main()
|
SurfasJones/djcmsrc3
|
venv/lib/python2.7/site-packages/djangocms_link/forms.py
|
Python
|
mit
| 1,320
| 0.00303
|
from django.forms.models import ModelForm
from django.utils.translation import ugettext_lazy as _
from djangocms_link.models import Link
from cms.models import Page
from django.forms.widgets import Media
class LinkForm(ModelForm):
try:
from djangocms_link.fields import PageSearchField
page_link = PageSearchField(queryset=Page.objects.drafts(), label=_("page"), required=False)
except ImportError:
from cms.forms.fields import PageSelectFormField
page_link = PageSelectFormField(queryset=Page.objects.drafts(), label=_("page"), required=False)
def for_site(self, site):
        # override the page_link field's queryset to contain just pages for
# current site
from cms.models import Page
self.fields['page_link'].queryset = Page.objects.drafts().on_site(site)
class Meta:
model = Link
exclude = ('page', 'position', 'placeholder', 'language', 'plugin_type')
def _get_media(self):
"""
Provide a description of all media required to render the widgets on this form
"""
media = Media()
for field in self.fields.values():
media = media + field.widget.media
        media._js = ['cms/js/libs/jquery.min.js'] + media._js
        return media
media = property(_get_media)
|
JaDogg/__py_playground
|
reference/ply-3.8/test/yacc_error7.py
|
Python
|
mit
| 1,790
| 0.013966
|
# -----------------------------------------------------------------------------
# yacc_error7.py
#
# Panic mode recovery test using deprecated functionality
# -----------------------------------------------------------------------------
import sys
if ".." not in sys.path: sys.path.insert(0,"..")
import ply.yacc as yacc
from calclex import tokens
# Parsing rules
precedence = (
('left','PLUS','MINUS'),
('left','TIMES','DIVIDE'),
('right','UMINUS'),
)
def p_statements(t):
    'statements : statements statement'
pass
def p_statements_1(t):
'statements : statement'
pass
def p_statement_assign(p):
'statement : LPAREN NAME EQUALS expression RPAREN'
print("%s=%s" % (p[2],p[4]))
def p_statement_expr(t):
'statement : LPAREN expression RPAREN'
print(t[1])
def p_expression_binop(t):
'''expression : expression PLUS expression
| expression MINUS expression
| expression TIMES expression
| expression DIVIDE expression'''
if t[2] == '+' : t[0] = t[1] + t[3]
elif t[2] == '-': t[0] = t[1] - t[3]
elif t[2] == '*': t[0] = t[1] * t[3]
elif t[2] == '/': t[0] = t[1] / t[3]
def p_expression_uminus(t):
'expression : MINUS expression %prec UMINUS'
t[0] = -t[2]
def p_expression_number(t):
'expression : NUMBER'
t[0] = t[1]
def p_error(p):
if p:
print("Line %d: Syntax error at '%s'" % (p.lineno, p.value))
# Scan ahead looking for a name token
while True:
tok = yacc.token()
if not tok or tok.type == 'RPAREN':
break
if tok:
yacc.restart()
return None
parser = yacc.yacc()
import calclex
calclex.lexer.lineno=1
parser.parse("""
(a = 3 + 4)
(b = 4 + * 5 - 6 + *)
(c = 10 + 11)
""")
|
xxjcaxx/openerp-learning-module2
|
parking/__openerp__.py
|
Python
|
gpl-2.0
| 372
| 0.02957
|
{
"name" : "parking",
"version" : "0.1",
"author" : "Castillo",
"website"
|
: "http://openerp.com",
"category" : "Unknown",
"description": """ Module for a parking """
|
,
"depends" : ['base'],
"init_xml" : [ ],
"demo_xml" : [ ],
"update_xml" : ['parking_view.xml'],
"installable": True
}
|
ArthurZey/toyproblems
|
projecteuler/0021_amicable_numbers.py
|
Python
|
mit
| 1,028
| 0.013645
|
#!/usr/bin/env python
'''
https://projecteuler.net/problem=21
Let d(n) be defined as the sum of proper divisors of n (numbers less than n which divide evenly into n).
If d(a) = b and d(b) = a, where a ≠ b, then a and b are an amicable pair and each of a and b are called amicable numbers.
For example, the proper divisors of 220 are 1, 2, 4, 5, 10, 11, 20, 22, 44, 55 and 110; therefore d(220) = 284. The proper divisors of 284 are 1, 2, 4, 71 and 142; so d(284) = 220.
Evaluate the sum of all the amicable numbers under 10000.
'''
import math
amicable_numbers = list()
def d(n):
    sum_of_divisors = 1 # since 1 is always a divisor, no need to check
    for possible_divisor in range(2, int(math.sqrt(n)) + 1):
        if n % possible_divisor == 0:
            sum_of_divisors += possible_divisor
            paired_divisor = int(n / possible_divisor)
            if paired_divisor != possible_divisor:
                # avoid counting the square root twice for perfect squares
                sum_of_divisors += paired_divisor
    return sum_of_divisors
sum_of_amicable_numbers = 0
for i in range(1, 10000):
if d(d(i)) == i and d(i) != i:
sum_of_amicable_numbers += i
print(sum_of_amicable_numbers)
|
Alberto-Beralix/Beralix
|
i386-squashfs-root/usr/lib/python2.7/dist-packages/twisted/__init__.py
|
Python
|
gpl-3.0
| 46
| 0.021739
|
../../../../share/pyshared/twisted/__init__.py
|
NINAnor/sentinel4nature
|
Tree canopy cover/regression/GBRT_Hjerkinn_manual_FCLS.py
|
Python
|
gpl-2.0
| 8,852
| 0.0061
|
# GBRT for Hjerkinn case study site
# Training data: manually digitized training areas, including water pixels
# Predictors: results of FCLS spectral unmixing
# Authors: Stefan Blumentrath
import numpy as np
import matplotlib.pyplot as plt
from sklearn import ensemble
from sklearn import datasets
from sklearn.utils import shuffle
from sklearn.metrics import mean_squared_error
from sklearn.metrics import r2_score
from sklearn.ensemble.partial_dependence import plot_partial_dependence
from sklearn.model_selection import GridSearchCV
from grass.pygrass import raster as r
from grass.pygrass.utils import getenv
import grass.script as gs
from cStringIO import StringIO
from subprocess import PIPE
from io import BytesIO
from itertools import combinations
def setParamDict():
params = {}
for p in ['learning_rate', 'max_depth', 'loss', 'subsample',
'min_samples_leaf', 'max_features', 'n_estimators']:
if p in ['max_depth', 'min_samples_leaf', 'n_estimators']:
params[p] = map(int, options[p].split(','))
elif p in ['learning_rate', 'max_features', 'subsample']:
params[p] = map(float, options[p].split(','))
else:
params[p] = options[p].split(',')
return params
def writeMap(name, x,y,z):
result = BytesIO()
np.savetxt(result,
np.column_stack((x,
y,
z)))
result.seek(0)
gs.write_command('r.in.xyz', stdin=result.getvalue(), input='-', output=name,
method='mean', separator=' ', overwrite=True)
# #############################################################################
# Define variables
# List of input maps has to start with Y
# Initial settings for automated model selection
options = {'cores': '6',
'learning_rate': '0.013,0.011,0.009,0.007,0.005,0.003,0.002,0.001',
'max_depth': '5,7,9,11,13,15,17,19',
'min_samples_leaf': '1,2,3,4,5,6',
'max_features': '0.9,0.8,0.7,0.6,0.5,0.4,0.3,0.2,0.1',
'subsample': '0.5',
'loss': 'huber',
'n_estimators': '3000',
'y': 'test_area_hjerkinn_water_grid_25833_10m@p_Sentinel4Nature_S2_Hjerkinn',
'x': 'unmix_pysptools_bands_NDVI_VVVH_10000_10_NFINDR_FCLS_mask_1,unmix_pysptools_bands_NDVI_VVVH_10000_10_NFINDR_FCLS_mask_2,unmix_pysptools_bands_NDVI_VVVH_10000_10_NFINDR_FCLS_mask_3,unmix_pysptools_bands_NDVI_VVVH_10000_10_NFINDR_FCLS_mask_4,unmix_pysptools_bands_NDVI_VVVH_10000_10_NFINDR_FCLS_mask_5,unmix_pysptools_bands_NDVI_VVVH_10000_10_NFINDR_FCLS_mask_6,unmix_pysptools_bands_NDVI_VVVH_10000_10_NFINDR_FCLS_mask_7,unmix_pysptools_bands_NDVI_VVVH_10000_10_NFINDR_FCLS_mask_8,unmix_pysptools_bands_NDVI_VVVH_10000_10_NFINDR_FCLS_mask_9,unmix_pysptools_bands_NDVI_VVVH_10000_10_NFINDR_FCLS_mask_10',
           'deviance': '/data/R/GeoSpatialData/Orthoimagery/Fenoscandia_Sentinel_2/temp_Avd15GIS/Case_Hjerkinn/regression/Hjerkinn_water_FCLS_GBRT_deviance.pdf',
'featureimportance': '/data/R/GeoSpatialData/Orthoimagery/Fenoscandia_Sentinel_2/temp_Avd15GIS/Case_Hjerkinn/regression/Hjerkinn_water_FCLS_GBRT_featureimportance.pdf',
           'partialdependence': '/data/R/GeoSpatialData/Orthoimagery/Fenoscandia_Sentinel_2/temp_Avd15GIS/Case_Hjerkinn/regression/Hjerkinn_water_FCLS_GBRT_partial_dependence.pdf',
'crossval': '0.25',
'output': 'ForestCover_Hjerkinn_water_FCLS',
'spatial_term': None
}
cores = int(options['cores'])
spatial_term = options['spatial_term']
output = options['output']
deviance = options['deviance']
featureimportance = options['featureimportance']
partialdependence = options['partialdependence']
crossval = float(options['crossval'])
params = setParamDict()
# #############################################################################
# Load data
maps = [options['y']] + options['x'].rstrip('\n').split(',')
data = np.genfromtxt(BytesIO(gs.read_command('r.stats',
flags='1Ng',
input=maps)), delimiter=" ")
y = 2
if spatial_term:
x = [0,1] + range(3,len(data[0]))
else:
x = range(3,len(data[0]))
# Create a mask for NoData in either x or y
mask_y = np.isnan(data[:,y])
for i in range(3,len(data[0])):
if i == 3:
mask_x = np.isnan(data[:,i])
else:
mask_x = np.logical_or((np.isnan(data[:,i])), mask_x)
all_y_idx = np.where(np.logical_or(mask_x, mask_y)==False)
all_x_idx = np.where(mask_x==False)
# Random shuffle data points with training data, excluding all NoData
all_y = shuffle(data[all_y_idx])
# Training and test set
offset = int(all_y.shape[0] * (1 - crossval))
X_train, y_train, coor_train = all_y[:offset,x], all_y[:offset,y], all_y[:offset,[0,1]]
X_test, y_test, coor_test= all_y[offset:,x], all_y[offset:,y], all_y[offset:,[0,1]]
# Set for predictions
predict, coor_predict = data[all_x_idx][:,x], data[all_x_idx][:,[0,1]]
# Run model selection process if requested
model_selection = False
for k in params.keys():
if len(params[k]) > 1:
model_selection = True
if model_selection:
gs.message('Running model selection ...')
clf = ensemble.GradientBoostingRegressor()
# this may take some minutes
gs_cv = GridSearchCV(clf, params, n_jobs=cores).fit(X_train, y_train)
# best hyperparameter setting
best_params = gs_cv.best_params_
print('Best hyper-parameter set is:')
print(best_params)
else:
best_params = {}
for k in params.keys():
best_params[k] = params[k][0]
# #############################################################################
# Fit regression model
gs.message('Fitting regression model ...')
clf = ensemble.GradientBoostingRegressor(**best_params)
clf.fit(X_train, y_train)
mse = mean_squared_error(y_test, clf.predict(X_test))
r2 = r2_score(y_test, clf.predict(X_test))
print("MSE: %.4f" % mse)
print("R2: %.4f" % r2)
# #############################################################################
# Generate requested plots
# Plot training deviance
# compute test set deviance
if deviance:
test_score = np.zeros((best_params['n_estimators'],), dtype=np.float64)
for i, y_pred in enumerate(clf.staged_predict(X_test)):
test_score[i] = clf.loss_(y_test, y_pred)
plt.figure(figsize=(12, 6))
plt.rcParams.update({'figure.autolayout': True})
plt.title('Deviance')
plt.plot(np.arange(best_params['n_estimators']) + 1, clf.train_score_, 'b-',
label='Training Set Deviance')
plt.plot(np.arange(best_params['n_estimators']) + 1, test_score, 'r-',
label='Test Set Deviance')
plt.legend(loc='upper right')
plt.xlabel('Boosting Iterations')
plt.ylabel('Deviance')
plt.savefig(deviance)
# #############################################################################
# Plot feature importance
if featureimportance:
if spatial_term:
cols = ['x', 'y'] + maps[1:]
else:
cols = maps[1:]
plt.figure(figsize=(12, 12))
plt.rcParams.update({'figure.autolayout': True})
feature_importance = clf.feature_importances_
# make importances relative to max importance
feature_importance = 100.0 * (feature_importance / feature_importance.max())
sorted_idx = np.argsort(feature_importance)
pos = np.arange(sorted_idx.shape[0]) + .5
#plt.subplot(1, 2, 2)
plt.barh(pos, feature_importance[sorted_idx], align='center')
plt.yticks(pos, np.array(cols)[sorted_idx])
plt.xlabel('Relative Importance')
plt.title('Variable Importance')
plt.savefig(featureimportance)
if partialdependence:
if spatial_term:
cols = ['x', 'y'] + maps[1:]
else:
cols = maps[1:]
fig, axs = plot_partial_dependence(clf, X_train, cols, n_jobs=cores, n_cols=2,
feature_names=cols, figsize=(len(cols), len(cols)*2))
fig.savefig(partialdependence)
sorted_idx = np.argsort(clf.feature_importances_)
twoway = list(combinations(list(reversed(sorted_idx[-6:])), 2))
fig, axs = plot_partial_dependence(clf, X_tr
|
Zanzibar82/streamondemand.test
|
servers_sports/iguide.py
|
Python
|
gpl-3.0
| 2,410
| 0.025726
|
# -*- coding: utf-8 -*-
#------------------------------------------------------------
# streamondemand - XBMC Plugin
# Conector para iguide
# http://blog.tvalacarta.info/plugin-xbmc/pelisalacarta/
#------------------------------------------------------------
import urlparse,urllib2,urllib,re
import os
from core import scrapertools
from core import logger
from core import config
DEBUG = config.get_setting("debug")
def find_url_play(data, headers):
logger.info("[iguide.py] find_url_play")
fid = scrapertools.find_single_match (data, 'src="http://www.iguide.to/embed/([^&]+)') #http://www.iguide.to/embed/29586&width=730&height=430&autoplay=true
if fid == '':
return ''
pageurl = 'http://www.iguide.to/embedplayer.php?width=730&height=430&channel=%s&autoplay=true' % fid
data2 = scrapertools.cachePage(pageurl, headers=headers)
    if (DEBUG): logger.info("data2="+data2)
'''
var token = "";
$.getJSON("http://www.iguide.to/serverfile.php?id=1422862766", function(json){
token = json.token;
setStream(token);
});
|
function setStream(token) {
jwplayer('dplayer').setup({
'id': 'dplayer',
'autostart': 'true',
'width': '730',
'height': '430',
'controlbar':'bottom',
'provider': 'rtmp',
'streamer': 'rtmp://live2.iguide.to/redirect',
'rtmp.tunneling':false,
'bufferLength':0.1,
'file': '0zznd3dk4sqr3xg.flv',
'modes': [
{type: 'flash', src: 'http://www.iguide.to/player/secure_player_iguide_embed_token.swf'},
{
type: 'html5',
config: {
'file': 'http://mobilestreaming.ilive.to:1935/edge/0zznd3dk4sqr3xg/playplist.m3u8',
'provider': 'video'
}
}
]
});
}
'''
#url = scrapertools.find_single_match (data2, "'file': '([^']+)'", 1)
#return url
tokenurl = scrapertools.find_single_match (data2, 'getJSON\("([^"]+)"')
data3 = scrapertools.cache_page(tokenurl,headers=headers)
if (DEBUG): logger.info("data3="+data3)
tokenvalue = scrapertools.find_single_match (data3, '"token":"([^"]+)"')
swfurl = 'http://www.iguide.to/player/secure_player_iguide_embed_token.swf'
rtmpurl = scrapertools.find_single_match (data2, "'streamer': '([^']+)'")
fileflv = scrapertools.find_single_match (data2, "'file': '([^'\.]+)")
url = '%s playpath=%s swfUrl=%s live=1 token=%s timeout=15 swfVfy=1 pageUrl=%s' % (rtmpurl, fileflv, swfurl, tokenvalue, pageurl)
return url
|
metacloud/python-keystoneclient
|
keystoneclient/apiclient/exceptions.py
|
Python
|
apache-2.0
| 1,124
| 0
|
# Copyright 2010 Jacob Kaplan-Moss
# Copyright 2011 Nebula, Inc.
# Copyright 2013 Alessio Ababilov
# Copyright 2013 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Exception definitions.
Deprecated since v0.7.1. Use 'keystoneclient.exceptions' instead of
this module.
"""
import warnings
from keystoneclient.exceptions import * # noqa
warnings.warn("The 'keystoneclient.apiclient.exceptions' module is deprecated "
"since v.0.7.1. Use 'keystoneclient.exceptions' instead of this "
"module.", DeprecationWarning)
|
suellenf/hearmecode
|
playtime/lesson2_pbj.py
|
Python
|
mit
| 958
| 0.004175
|
# Goal 1
bread = 4
pb = 3
jelly = 10
sandwich = 0
while bread >= 2 and pb >= 1 and jelly >= 1:
sandwich = sandwich + 1
print "I am making sandwich number {0}".format(sandwich)
bread = bread - 2
pb = pb - 1
    jelly = jelly - 1
print "All done! I made {0} sandwich(es)".format(sandwich)
# Goal 2
bread = 10
pb = 10
jelly = 4
sandwich = 0
ran_out = [""]
while bread >= 2 and pb >= 1 and jelly >= 1:
sandwich = sandwich + 1
print "I am making sandwich number {0}".format(sandwich)
bread = bread - 2
pb = pb - 1
jelly = jelly - 1
print "I have enough bread for {0} sandwiches, enough pb for {0} sandwiches, and enough jelly for {0} sandwiches".format(bread/2,pb,jelly)
if bread == 0:
ran_out.append("bread")
if pb == 0:
ran_out.append("pb")
if jelly == 0:
ran_out.append("jelly")
print "All done! I made {0} sandwich(es) and ran out of {1}".format(sandwich, ran_out[1:])
|
EmreAtes/spack
|
var/spack/repos/builtin/packages/viennarna/package.py
|
Python
|
lgpl-2.1
| 2,706
| 0.002217
|
##############################################################################
# Copyright (c) 2013-2018, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class Viennarna(AutotoolsPackage):
"""The ViennaRNA Package consists of a C code library and several
stand-alone programs for the prediction and comparison of RNA secondary
structures.
"""
homepage = "https://www.tbi.univie.ac.at/RNA/"
url = "https://www.tbi.univie.ac.at/RNA/download/sourcecode/2_4_x/ViennaRNA-2.4.3.tar.gz"
version('2.4.3', '41be2fd36a5323a35ed50debfc7bd118')
version('2.3.5', '4542120adae9b7abb605e2304c2a1326')
    variant('sse', default=True, description='Enable SSE in order to substantially speed up execution')
variant('perl', default=True, description='Build ViennaRNA with Perl interface')
    variant('python', default=True, description='Build ViennaRNA with Python interface')
depends_on('perl', type=('build', 'run'))
depends_on('python', type=('build', 'run'))
depends_on('libsvm')
depends_on('gsl')
def url_for_version(self, version):
url = 'https://www.tbi.univie.ac.at/RNA/download/sourcecode/{0}_x/ViennaRNA-{1}.tar.gz'
return url.format(version.up_to(2).underscored, version)
def configure_args(self):
args = self.enable_or_disable('sse')
args += self.with_or_without('python')
args += self.with_or_without('perl')
if self.spec.satisfies('@2.4.3:'):
args.append('--without-swig')
if 'python@3:' in self.spec:
args.append('--with-python3')
return args
|
marioharper182/ComputationalMethodsFinance
|
Homework2/Options_American.py
|
Python
|
apache-2.0
| 1,728
| 0.004051
|
__author__ = 'HarperMain'
import numpy as np
from scipy.stats import binom
class AmericanOption(object):
def __init__(self, strike, X, rate, volatility, T, n):
self.strike = strike
self.X = X
self.rate = rate
self.volatility = volatility
self.T = T
self.n = float(n)
h = self.T/self.n
# self.spot = self.CalcSpot()
        u = np.exp((self.rate * h) + self.volatility * np.sqrt(h))
d = np.exp((self.rate * h) - self.volatility * np.sqrt(h))
nodes = self.T+1
P = (np.exp(self.rate*h) - d) / (u - d)
Pc = 1-P
self.CallMatrix = []
self.PutMatrix = []
for j in range(0, T):
Call = []
Put = []
# jnodes = nodes-(T-j)
for i in range(0, j):
spot = self.strike * (u ** (j-i))* (d ** i)
                callvar = self.CallPayOff(spot, X) * binom.pmf(self.T - i, self.T, P)
putvar = self.PutPayOff(spot, X) * binom.pmf(self.T - i, self.T, Pc)
Call.append(callvar)
Put.append(putvar)
self.CallMatrix.append(Call)
self.PutMatrix.append(Put)
print('Loop Completed')
print(self.CallMatrix)
print(self.PutMatrix)
def GetStrike(self):
return self.strike
def SetStrike(self, strike):
self.strike = strike
def GetPrice(self):
return (self.CallMatrix, self.PutMatrix)
def CallPayOff(self, spot, strike):
return max(spot-strike, 0.0)
def PutPayOff(self, spot, strike):
return max(strike-spot, 0.0)
def main():
AmericanOption(100, 80, .03, .25, 5, 1)
if __name__ == '__main__':
main()
|
7fever/script.pseudotv.live
|
utilities.py
|
Python
|
gpl-3.0
| 6,697
| 0.00881
|
# Copyright (C) 2015 Kevin S. Graer
#
#
# This file is part of PseudoTV Live.
#
# PseudoTV is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# PseudoTV is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with PseudoTV Live. If not, see <http://www.gnu.org/licenses/>.
import xbmc, xbmcgui, xbmcaddon, xbmcvfs
import os, sys, time, fileinput, re
import urllib, urllib2
from resources.lib.Globals import *
from resources.lib.utils import *
def showText(heading, text):
log("showText")
id = 10147
xbmc.executebuiltin('ActivateWindow(%d)' % id)
xbmc.sleep(100)
win = xbmcgui.Window(id)
retry = 50
while (retry > 0):
try:
xbmc.sleep(10)
retry -= 1
win.getControl(1).setLabel(heading)
win.getControl(5).setText(text)
return
except:
pass
def showChangelog(addonID=None):
log("showChangelog")
try:
if addonID:
ADDON = xbmcaddon.Addon(addonID)
else:
ADDON = xbmcaddon.Addon(ADDONID)
f = open(ADDON.getAddonInfo('changelog'))
text = f.read()
title = "Changelog - PseudoTV Live"
showText(title, text)
except:
pass
#DonorDownload
DonorURLPath = (PTVLURL + 'Donor.py')
LinkPath = (os.path.join(ADDON_PATH, 'resources', 'lib', 'links.py'))
DonorPath = (os.path.join(ADDON_PATH, 'resources', 'lib', 'Donor.pyo'))
DL_DonorPath = (os.path.join(ADDON_PATH, 'resources', 'lib', 'Donor.py'))
def DDautopatch():
log("DDautopatch")
REAL_SETTINGS.setSetting("AT_Donor", "false")
REAL_SETTINGS.setSetting("COM_Donor", "false")
REAL_SETTINGS.setSetting("TRL_Donor", "false")
REAL_SETTINGS.setSetting("CAT_Donor", "false")
try:
if xbmcvfs.exists(xbmc.translatePath(DL_DonorPath)):
xbmcvfs.delete(xbmc.translatePath(DL_DonorPath))
log('Removed DL_DonorPath')
if xbmcvfs.exists(xbmc.translatePath(DonorPath)):
xbmcvfs.delete(xbmc.translatePath(DonorPath))
log('Removed DonorPath')
except Exception:
pass
try:
urllib.urlretrieve(DonorURLPath, (xbmc.translatePath(DL_DonorPath)))
if xbmcvfs.exists(DL_DonorPath):
log('DL_DonorPath Downloaded')
|
            REAL_SETTINGS.setSetting("AT_Donor", "true")
REAL_SETTINGS.setSetting("COM_Donor", "true")
REAL_SETTINGS.setSetting("TRL_Donor", "true")
REAL_SETTINGS.setSetting("CAT_Donor", "true")
xbmc.executebuiltin("Notification( %s, %s, %d, %s)" % ("PseudoTV Live", "Donor Autoupdate Complete", 4000, THUMB) )
except Exception:
pass
def DonorDownloader():
log('DonorDownloader')
REAL_SETTINGS.setSetting("AT_Donor", "false")
REAL_SETTINGS.setSetting("COM_Donor", "false")
REAL_SETTINGS.setSetting("TRL_Donor", "false")
REAL_SETTINGS.setSetting("CAT_Donor", "false")
Install = False
Verified = False
InstallStatusMSG = 'Activate'
if xbmcvfs.exists(DonorPath):
InstallStatusMSG = 'Update'
if dlg.yesno("PseudoTV Live", str(InstallStatusMSG) + " Donor Features?"):
try:
xbmcvfs.delete(xbmc.translatePath(DonorPath))
log('Removed DonorPath')
Install = True
except Exception:
pass
else:
Install = True
if Install == True:
try:
urllib.urlretrieve(DonorURLPath, (xbmc.translatePath(DL_DonorPath)))
if xbmcvfs.exists(DL_DonorPath):
log('DL_DonorPath Downloaded')
REAL_SETTINGS.setSetting("AT_Donor", "true")
REAL_SETTINGS.setSetting("COM_Donor", "true")
REAL_SETTINGS.setSetting("TRL_Donor", "true")
REAL_SETTINGS.setSetting("CAT_Donor", "true")
xbmc.executebuiltin("UpdateLocalAddons")
if REAL_SETTINGS.getSetting('AT_Donor') and REAL_SETTINGS.getSetting('COM_Donor') and REAL_SETTINGS.getSetting('TRL_Donor') and REAL_SETTINGS.getSetting('CAT_Donor'):
Verified = True
if Verified == True:
MSG = "Donor Features " + str(InstallStatusMSG) + "d"
else:
MSG = "Donor Features Not " + str(InstallStatusMSG) + "d"
xbmc.executebuiltin("Notification( %s, %s, %d, %s)" % ("PseudoTV Live", MSG, 1000, THUMB) )
REAL_SETTINGS.openSettings()
except Exception:
pass
def LogoDownloader():
log('LogoDownloader')
if dlg.yesno("PseudoTV Live", "Download Color Logos or No, Download Mono Logos"):
LogoDEST = os.path.join(LOCK_LOC,'PTVL_Color.zip')
URLPath = PTVLURL + 'PTVL_Color.zip'
else:
LogoDEST = os.path.join(LOCK_LOC,'PTVL_Mono.zip')
URLPath = PTVLURL + 'PTVL_Mono.zip'
if not xbmcvfs.exists(LOCK_LOC):
log('Creating LogoPath')
xbmcvfs.mkdir(LOCK_LOC)
try:
xbmcvfs.delete(xbmc.translatePath(LogoDEST))
log('Removed old LogoDEST')
except Exception:
pass
try:
download(URLPath, LogoDEST)
all(LogoDEST, LOCK_LOC)
REAL_SETTINGS.setSetting("ChannelLogoFolder", LOCK_LOC + 'logos')
try:
xbmcvfs.delete(LogoDEST)
log('Removed LogoDEST')
except Exception:
pass
except Exception:
pass
# Return to PTVL Settings
REAL_SETTINGS.openSettings()
if sys.argv[1] == '-DDautopatch':
DDautopatch()
elif sys.argv[1] == '-DonorDownloader':
if xbmcgui.Window(10000).getProperty("PseudoTVRunning") != "True":
DonorDownloader()
else:
xbmc.executebuiltin("Notification( %s, %s, %d, %s)" % ("PseudoTV Live", "Not available while running.", 1000, THUMB) )
elif sys.argv[1] == '-LogoDownloader':
LogoDownloader()
elif sys.argv[1] == '-SimpleDownloader':
xbmcaddon.Addon(id='script.module.simple.downloader').openSettings()
elif sys.argv[1] == '-showChangelog':
showChangelog(ADDON_ID)
|
amitsaha/learning
|
python/search/find_median.py
|
Python
|
unlicense
| 401
| 0.004988
|
'''
Find the median element in an unsorted array
'''
import heapq
def find_median(arr):
    # A heapified list is not sorted, so indexing into it does not give
    # order statistics; take the k smallest elements with the heap instead.
    num_elements = len(arr)
    smallest = heapq.nsmallest(num_elements // 2 + 1, arr)
    if num_elements % 2 != 0:
        return smallest[-1]
    else:
        return (smallest[-2] + smallest[-1]) / 2.0
assert find_median([1, -1, 2, 3, 4]) == 2
assert find_median([1, -1, 2, 3, 4, 5]) == 2.5
|
MrIliev/AIPY-Music-Player
|
AIPY.py
|
Python
|
apache-2.0
| 1,128
| 0.036348
|
import sys
from PyQt4 import Qt
from PyQt4.phonon import Phonon
class AIPYbody():
# in the class body are all functionalities of the AIPY Music Player
# body will be one window from PyQT
def __init__ (self):
pass
def Display():
        # used for showing the remaining or elapsed time of a song; it is possible to
        # have visualisations
pass
def PlayList():
# defines play list with files selected by user
pass
def PlayButton():
        # button to start the music file; if there is none, you will be able to choose one
pass
def PauseButton():
# button to pause the music file, can be together with play
pass
def StopButton():
        # button to stop the music file; when you push play again it starts from the beginning
pass
def NextButton():
        # button for the next song in the playlist; if there is none, starts the same song again
pass
def PrevButton():
        # button for the previous song in the playlist; if there is none, starts the same song again
pass
def OnlineStreaming():
# button which opens new window to select source for online streaming
pass
def EQ():
        # button which opens a window with an equalizer (optional)
pass
|
STiago/Python
|
Code/temperatures.py
|
Python
|
gpl-3.0
| 571
| 0.008757
|
# Read inputs from Standard Input.
# Write outputs to Standard Output.
# Please, do not use fileinput module to read Standard Input.
import sys
try:
n = int(raw_input())
lis = raw_input().split()
except ValueError:
print "0 - Cannot process the data"
position = 0
less = 0
more = 0
s = map(int, lis)
s.append(0)
s.sort()
# Position of 0
for i in range(0,n):
if s[i] == 0:
position = i
break
# Number close to 0
less = s[position-1]
more = s[position+1]
|
if abs(more) <= abs(less):
print more
else:
print less
|
dbarrosop/pySIR
|
pySIR/call.py
|
Python
|
apache-2.0
| 1,069
| 0.001871
|
import requests
import json
import sir_exceptions
class Call:
def __init__(self, url, verify_ssl, method, params=None):
if method == 'GET':
r = requests.get(url, params=params, verify=verify_ssl)
        elif method == 'POST':
r = requests.post(url, json=params, verify=verify_ssl)
elif method == 'PUT':
r = requests.put(url, json=params, verify=verify_ssl)
elif method == 'DELETE':
            r = requests.delete(url, verify=verify_ssl)
if r.status_code != 200:
raise sir_exceptions.WrongCallException(r.status_code, r.content)
elif r.headers['content-type'] != 'application/json':
raise sir_exceptions.WrongEndpointException('Wrong content-type: {}', format(r.headers['content-type']))
self.raw_data = r.json()
if self.raw_data['meta']['error']:
raise Exception('Something went wrong')
self.meta = self.raw_data['meta']
self.parameters = self.raw_data['parameters']
self.result = self.raw_data['result']
|
GoogleCloudPlatform/covid-19-open-data
|
src/scripts/cloud_error_processing.py
|
Python
|
apache-2.0
| 3,450
| 0.003768
|
import os
import requests
import sys
from google.cloud import firestore
from googleapiclient.discovery import build
from google.cloud import secretmanager
# Add our library utils to the path
sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), "..")))
from lib.constants import ISSUES_API_URL
from lib.error_logger import ErrorLogger
class GithubIssueHandler(ErrorLogger):
""" Handles posting issues to github using the personal access token stored in SecretsManager. """
def __init__(self, gcs_project_name):
self._username = "automation" # This is ignored because we use an access token.
self._gcs_project_name = gcs_project_name
self._password = self._get_github_token()
def _error_group_to_github_issue(self, error_group):
title = "Automated error report"
body = error_group["representative"]["message"]
return {"title": title, "body": body}
def _get_github_token(self):
client = secretmanager.SecretManagerServiceClient()
name = client.secret_version_path(self._gcs_project_name, "github-token", "latest")
response = client.access_secret_version(name)
return response.payload.data
def post_error_to_github(self, error_group):
""" Returns the issue url if successfully posted, else raises a ConnectionError exception """
session = requests.Session()
session.auth = (self._username, self._password)
response = session.post(ISSUES_API_URL, json=self._error_group_to_github_issue(error_group))
if response.status_code != 201:
self.log_error("Could not create github issue.", status_code=response.status_code)
raise ConnectionError()
return response.json()["html_url"]
def register_new_errors(gcs_project_name):
""" If new error groups are reported, log an issue on github with the details """
service = build("clouderrorreporting", "v1beta1")
errors_past_day = (
service.projects()
.groupStats()
.list(projectName="projects/{}".format(gcs_project_name), timeRange_period="PERIOD_1_DAY")
.execute()
)
gh_issue_handler = GithubIssueHandler(gcs_project_name)
db = firestore.Client()
errors_db = db.collection("errors")
for error_group in errors_past_day["errorGroupStats"]:
|
group_id = error_group["group"]["groupId"]
if int(error_group["count"]) < 2:
# Don't add one-off errors to the db
continue
doc = errors_db.document(group_id)
if not doc.get().exists and error_group["group"]["resolutionStatus"] == "OPEN":
try:
error_group["group"]["trackingIssues"] = [
{"url": gh_issue_handler.post_error_to_
|
github(error_group)}
]
error_group["group"]["resolutionStatus"] = "ACKNOWLEDGED"
except ConnectionError:
# Could not create an issue
# Don't add it to our known errors db, we can retry on the next scheduled job.
continue
# Now set it to acknowledged and link to the issue in Cloud Error Reporting.
service.projects().groups().update(
name=error_group["group"]["name"], body=error_group["group"]
).execute()
doc.set(error_group)
if __name__ == "__main__":
import os
register_new_errors(os.getenv("GOOGLE_CLOUD_PROJECT"))
|
Bam4d/Roxxy
|
tools/hammer_test.py
|
Python
|
gpl-3.0
| 335
| 0.01791
|
import requests
import json
import time
if __name__ == '__main__':
count = 0
#Non-threaded test
for i in range(0,1000):
res = requests.get('http://localhost:8055/?url=https://magic.import.io')
count += 1
print count
        time.sleep(0.1)
if count%1000 == 0:
|
print count
|
GuillaumeSeren/linux
|
scripts/gdb/linux/cpus.py
|
Python
|
gpl-2.0
| 4,543
| 0
|
#
# gdb helper commands and functions for Linux kernel debugging
#
# per-cpu tools
#
# Copyright (c) Siemens AG, 2011-2013
#
# Authors:
# Jan Kiszka <jan.kiszka@siemens.com>
#
# This work is licensed under the terms of the GNU GPL version 2.
#
import gdb
from linux import tasks, utils
MAX_CPUS = 4096
def get_current_cpu():
if utils.get_gdbserver_type() == utils.GDBSERVER_QEMU:
return gdb.selected_thread().num - 1
elif utils.get_gdbserver_type() == utils.GDBSERVER_KGDB:
tid = gdb.selected_thread().ptid[2]
if tid > (0x100000000 - MAX_CPUS - 2):
return 0x100000000 - tid - 2
else:
return tasks.get_thread_info(tasks.get_task_by_pid(tid))['cpu']
else:
raise gdb.GdbError("Sorry, obtaining the current CPU is not yet "
"supported with this gdb server.")
def per_cpu(var_ptr, cpu):
if cpu == -1:
cpu = get_current_cpu()
if utils.is_target_arch("sparc:v9"):
offset = gdb.parse_and_eval(
"trap_block[{0}].__per_cpu_base".format(str(cpu)))
else:
try:
offset = gdb.parse_and_eval(
"__per_cpu_offset[{0}]".format(str(cpu)))
except gdb.error:
# !CONFIG_SMP case
offset = 0
pointer = var_ptr.cast(utils.get_long_type()) + offset
return pointer.cast(var_ptr.type).dereference()
cpu_mask = {}
def cpu_mask_invalidate(event):
global cpu_mask
cpu_mask = {}
gdb.events.stop.disconnect(cpu_mask_invalidate)
if hasattr(gdb.events, 'new_objfile'):
gdb.events.new_objfile.disconnect(cpu_mask_invalidate)
def cpu_list(mask_name):
global cpu_mask
mask = None
if mask_name in cpu_mask:
mask = cpu_mask[mask_name]
if mask is None:
mask = gdb.parse_and_eval(mask_name + ".bits")
if hasattr(gdb, 'events'):
cpu_mask[mask_name] = mask
gdb.events.stop.connect(cpu_mask_invalidate)
            if hasattr(gdb.events, 'new_objfile'):
|
gdb.events.new_objfile.connect(cpu_mask_invalidate)
bits_per_entry = mask[0].type.sizeof * 8
num_entries = mask.type.sizeof * 8 / bits_per_entry
entry = -1
bits = 0
while True:
while bits == 0:
entry += 1
if entry == num_entries:
return
bits = mask[entry]
if bits != 0:
bit = 0
break
while bits & 1 == 0:
bits >>= 1
bit += 1
cpu = entry * bits_per_entry + bit
bits >>= 1
bit += 1
yield int(cpu)
def each_online_cpu():
for cpu in cpu_list("__cpu_online_mask"):
yield cpu
def each_present_cpu():
for cpu in cpu_list("__cpu_present_mask"):
yield cpu
def each_possible_cpu():
for cpu in cpu_list("__cpu_possible_mask"):
yield cpu
def each_active_cpu():
for cpu in cpu_list("__cpu_active_mask"):
yield cpu
class LxCpus(gdb.Command):
"""List CPU status arrays
Displays the known state of each CPU based on the kernel masks
and can help identify the state of hotplugged CPUs"""
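    # Typical invocation (output below is illustrative; it depends on the target kernel):
    #   (gdb) lx-cpus
    #   Possible CPUs : [0, 1, 2, 3]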
def __init__(self):
super(LxCpus, self).__init__("lx-cpus", gdb.COMMAND_DATA)
def invoke(self, arg, from_tty):
gdb.write("Possible CPUs : {}\n".format(list(each_possible_cpu())))
gdb.write("Present CPUs : {}\n".format(list(each_present_cpu())))
gdb.write("Online CPUs : {}\n".format(list(each_online_cpu())))
gdb.write("Active CPUs : {}\n".format(list(each_active_cpu())))
LxCpus()
class PerCpu(gdb.Function):
"""Return per-cpu variable.
$lx_per_cpu("VAR"[, CPU]): Return the per-cpu variable called VAR for the
given CPU number. If CPU is omitted, the CPU of the current context is used.
Note that VAR has to be quoted as string."""
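    # Illustrative gdb session (the symbol name is just an example):
    #   (gdb) p $lx_per_cpu("runqueues")
    #   (gdb) p $lx_per_cpu("runqueues", 2)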
def __init__(self):
super(PerCpu, self).__init__("lx_per_cpu")
def invoke(self, var_name, cpu=-1):
var_ptr = gdb.parse_and_eval("&" + var_name.string())
return per_cpu(var_ptr, cpu)
PerCpu()
class LxCurrentFunc(gdb.Function):
"""Return current task.
$lx_current([CPU]): Return the per-cpu task variable for the given CPU
number. If CPU is omitted, the CPU of the current context is used."""
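    # Illustrative use (the field accesses are just examples):
    #   (gdb) p $lx_current().pid
    #   (gdb) p $lx_current(1).comm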
def __init__(self):
super(LxCurrentFunc, self).__init__("lx_current")
def invoke(self, cpu=-1):
        var_ptr = gdb.parse_and_eval("&current_task")
return per_cpu(var_ptr, cpu).dereference()
LxCurrentFunc()
|
deeplearning4j/deeplearning4j
|
jumpy/jumpy/spark/fast_impl.py
|
Python
|
apache-2.0
| 4,802
| 0.002707
|
################################################################################
# Copyright (c) 2015-2018 Skymind, Inc.
#
# This program and the accompanying materials are made available under the
# terms of the Apache License, Version 2.0 which is available at
# https://www.apache.org/licenses/LICENSE-2.0.
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# SPDX-License-Identifier: Apache-2.0
##########################
|
######################################################
import numpy as np
from ..java_classes import ArrayList
from ..java_classes import ArrayDescriptor as getArrayDescriptor
from ..java_classes import DatasetDescriptor as getDatasetDescriptor
from ..java_classes import DataType
from ..java_classes import spark_utils as get_spark_utils
from ..java_classes import JDataset
from ..ndarray import array
from .utils import np2desc
from .utils import py2j_ds_desc
from .utils import j2py_ds_desc
from .utils import j2py_arr_desc
from .utils import py2j_arr_desc
from .utils import desc2np
from .utils import desc2ds
from .utils import ds2desc
ArrayDescriptor = None
JDatasetDescriptor = None
spark_utils = None
def java2pyArrayRDD(java_rdd, py_sc):
'''
Arguments
`java_rdd`: JavaRDD<INDArray> instance
`py_sc`: Pyspark context instance
Returns
pyspark.RDD instance
'''
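    # Rough usage sketch (names are illustrative, not from this module):
    #   py_rdd = java2pyArrayRDD(java_rdd, sc)   # sc: pyspark.SparkContext
    #   first_array = py_rdd.first()             # numpy.ndarray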
global spark_utils
if spark_utils is None:
spark_utils = get_spark_utils()
desc_rdd = spark_utils.getArrayDescriptorRDD(java_rdd)
descriptors = desc_rdd.collect()
num_descriptors = descriptors.size()
nparrays = []
pydescriptors = []
for i in range(num_descriptors):
jdesc = descriptors.get(i)
pydesc = j2py_arr_desc(jdesc)
nparrays.append(desc2np(pydesc))
#pydescriptors.append(pydesc)
#pyrdd = py_sc.parallelize(pydescriptors)
#pyrdd = pyrdd.map(desc2np)
pyrdd = py_sc.parallelize(nparrays)
return pyrdd
def py2javaArrayRDD(py_rdd, java_sc):
'''
Arguments
`py_rdd`: pyspark.RDD instance
`java_sc`: JavaSparkContext instance
Returns
JavaRDD<INDArray> instance
'''
global ArrayDescriptor, spark_utils
if ArrayDescriptor is None:
ArrayDescriptor = getArrayDescriptor()
if spark_utils is None:
spark_utils = get_spark_utils()
#desc_rdd = py_rdd.map(np2desc)
#descriptors = desc_rdd.collect()
arrlist = ArrayList()
nparrays = py_rdd.collect()
for nparr in nparrays:
arrlist.add(array(nparr).array)
return java_sc.parallelize(arrlist)
for d in descriptors:
#arrlist.add(array(desc2np(d)).array)
arrlist.add(ArrayDescriptor(d[0], d[1], d[2], dtype_map[d[3]], 'c').getArray())
java_rdd = java_sc.parallelize(arrlist)
#return java_rdd
java_rdd = spark_utils.getArrayRDD(java_rdd)
return java_rdd
def java2pyDatasetRDD(java_rdd, py_sc):
global spark_utils, JDatasetDescriptor
if spark_utils is None:
spark_utils = get_spark_utils()
if JDatasetDescriptor is None:
JDatasetDescriptor = getDatasetDescriptor()
jdatasets = java_rdd.collect()
num_ds = jdatasets.size()
pydatasets = []
for i in range(num_ds):
jds = jdatasets.get(i)
jdesc = JDatasetDescriptor(jds)
pydesc = j2py_ds_desc(jdesc)
pyds = desc2ds(pydesc)
pydatasets.append(pyds)
return py_sc.parallelize(pydatasets)
####
desc_rdd = spark_utils.getDataSetDescriptorRDD(java_rdd)
descriptors = desc_rdd.collect()
num_descriptors = descriptors.size()
pydescriptors = []
for i in range(num_descriptors):
jdesc = descriptors.get(i)
pydesc = j2py_ds_desc(jdesc)
pydescriptors.append(pydesc)
pyrdd = py_sc.parallelize(pydescriptors)
pyrdd = pyrdd.map(desc2ds)
return pyrdd
def py2javaDatasetRDD(py_rdd, java_sc):
global spark_utils
if spark_utils is None:
spark_utils = get_spark_utils()
###
pydatasets = py_rdd.collect()
jdatasets = ArrayList()
for pyds in pydatasets:
pydesc = ds2desc(pyds)
jdesc = py2j_ds_desc(pydesc)
jds = jdesc.getDataSet()
jdatasets.add(jds)
return java_sc.parallelize(jdatasets)
###
desc_rdd = py_rdd.map(ds2desc)
pydescriptors = desc_rdd.collect()
jdescriptors = ArrayList()
for pydesc in pydescriptors:
jdescriptors.add(py2j_ds_desc(pydesc))
java_rdd = java_sc.parallelize(jdescriptors)
java_rdd = spark_utils.getDataSetRDD(java_rdd)
return java_rdd
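# --- Hedged usage sketch (not part of the original module) ---
# A minimal illustration of the intended round trip, assuming a pyspark
# SparkContext `sc` and a py4j JavaSparkContext `jsc` already exist in the
# surrounding application; both names are hypothetical here.
#
#   import numpy as np
#   py_rdd = sc.parallelize([np.ones((2, 3)), np.zeros((2, 3))])
#   j_rdd = py2javaArrayRDD(py_rdd, jsc)   # pyspark.RDD -> JavaRDD<INDArray>
#   back = java2pyArrayRDD(j_rdd, sc)      # JavaRDD<INDArray> -> pyspark.RDD
#   assert np.allclose(back.collect()[0], np.ones((2, 3)))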
|
lyst/shovel
|
src/shovel/config.py
|
Python
|
apache-2.0
| 138
| 0
|
# -*- coding: utf-8 -*-
class Config(object):
def __init__(self, bucket, root):
self.bucket = bucket
self.root = root
| |
chainer/chainercv
|
chainercv/links/model/fpn/misc.py
|
Python
|
mit
| 974
| 0
|
from __future__ import division
import numpy as np
from chainer.backends import cuda
import chainer.functions as F
from chainercv import transforms
exp_clip = np.log(1000 / 16)
def smooth_l1(x, t, beta):
return F.huber_loss(x, t, beta, reduce='no') / beta
# to avoid out of memory
def argsort(x):
xp = cuda.get_array_module(x)
i = np.argsort(cuda.to_cpu(x))
if xp is np:
return i
else:
return cuda.to_gpu(i)
# to avoid out of memory
def choice(x, size):
xp = cuda.get_array_module(x)
y = np.random.choice(cuda.to_cpu(x), size, replace=False)
if xp is np:
        return y
else:
return cuda.to_gpu(y)
def scale_img(img, min_size, max_size):
"""Process image."""
_, H, W = img.shape
scale = min_size / min(H, W)
if scale * max(H, W) > max_size:
        scale = max_size / max(H, W)
H, W = int(H * scale), int(W * scale)
img = transforms.resize(img, (H, W))
return img, scale
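# --- Hedged usage sketch (not part of the original module) ---
# scale_img picks a scale so the shorter side reaches `min_size`, but caps it
# so the longer side never exceeds `max_size`; the returned `scale` is the
# factor actually applied. The numbers below are illustrative only.
#
#   img = np.zeros((3, 400, 600), dtype=np.float32)        # CHW image
#   out, scale = scale_img(img, min_size=800, max_size=1000)
#   # 800 / 400 = 2.0 would make the longer side 1200 > 1000,
#   # so scale falls back to 1000 / 600 ~= 1.67 and out.shape is roughly (3, 666, 1000)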
|
steelion/python-tools
|
mao/toolbox/__init__.py
|
Python
|
mit
| 27
| 0
|
__author__ = 'Ofner Mario'
|
abtreece/ansible
|
lib/ansible/plugins/strategy/__init__.py
|
Python
|
mit
| 44,580
| 0.003993
|
# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import threading
import time
from collections import deque
from multiprocessing import Lock
from jinja2.exceptions import UndefinedError
from ansible import constants as C
from ansible.compat.six.moves import queue as Queue
from ansible.compat.six import iteritems, string_types
from ansible.errors import AnsibleError, AnsibleParserError, AnsibleUndefinedVariable
from ansible.executor import action_write_locks
from ansible.executor.process.worker import WorkerProcess
from ansible.executor.task_result import TaskResult
from ansible.inventory.host import Host
from ansible.inventory.group import Group
from ansible.playbook.helpers import load_list_of_blocks
from ansible.playbook.included_file import IncludedFile
from ansible.playbook.task_include import TaskInclude
from ansible.playbook.role_include import IncludeRole
from ansible.plugins import action_loader, connection_loader, filter_loader, lookup_loader, module_loader, test_loader
from ansible.template import Templar
from ansible.vars import combine_vars, strip_internal_keys
from ansible.module_utils._text import to_text
try:
from __main__ import display
except ImportError:
from ansible.utils.display import Display
display = Display()
__all__ = ['StrategyBase']
# TODO: this should probably be in the plugins/__init__.py, with
# a smarter mechanism to set all of the attributes based on
# the loaders created there
class SharedPluginLoaderObj:
'''
    A simple object to make passing the various plugin loaders to
    the forked processes over the queue easier
'''
def __init__(self):
self.action_loader = action_loader
self.connection_loader = connection_loader
self.filter_loader = filter_loader
self.test_loader = test_loader
self.lookup_loader = lookup_loader
self.module_loader = module_loader
_sentinel = object()
def results_thread_main(strategy):
while True:
try:
result = strategy._final_q.get()
if type(result) == object:
break
else:
strategy._results_lock.acquire()
strategy._results.append(result)
strategy._results_lock.release()
except (IOError, EOFError):
break
except Queue.Empty:
pass
class StrategyBase:
'''
This is the base class for strategy plugins, which contains some common
code useful to all strategies like running handlers, cleanup actions, etc.
'''
def __init__(self, tqm):
self._tqm = tqm
self._inventory = tqm.get_inventory()
self._workers = tqm.get_workers()
self._notified_handlers = tqm._notified_handlers
self._listening_handlers = tqm._listening_handlers
self._variable_manager = tqm.get_variable_manager()
self._loader = tqm.get_loader()
self._final_q = tqm._final_q
self._step = getattr(tqm._options, 'step', False)
self._diff = getattr(tqm._options, 'diff', False)
# Backwards compat: self._display isn't really needed, just import the global display and use that.
self._display = display
# internal counters
self._pending_results = 0
self._cur_worker = 0
# this dictionary is used to keep track of hosts that have
# outstanding tasks still in queue
self._blocked_hosts = dict()
self._results = deque()
self._results_lock = threading.Condition(threading.Lock())
# create the result processing thread for reading results in the background
self._results_thread = threading.Thread(target=results_thread_main, args=(self,))
self._results_thread.daemon = True
self._results_thread.start()
def cleanup(self):
self._final_q.put(_sentinel)
self._results_thread.join()
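    # Hedged note (not part of the original file): cleanup() relies on the
    # sentinel pattern above -- putting the module-level `_sentinel` object on
    # `_final_q` makes results_thread_main() take its `type(result) == object`
    # branch and return, so the join() cannot block forever. A minimal
    # standalone illustration of the same shutdown idea, stdlib only:
    #
    #   import queue, threading
    #   _stop = object()
    #   q = queue.Queue()
    #   def worker():
    #       while True:
    #           if q.get() is _stop:
    #               break
    #   t = threading.Thread(target=worker)
    #   t.start()
    #   q.put(_stop)
    #   t.join()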
def run(self, iterator, play_context, result=0):
# execute one more pass through the iterator without peeking, to
# make sure that all of the hosts are advanced to their final task.
# This should be safe, as everything should be ITERATING_COMPLETE by
# this point, though the strategy may not advance the hosts itself.
[iterator.get_next_task_for_host(host) for host in self._inventory.get_hosts(iterator._play.hosts) if host.name not in self._tqm._unreachable_hosts]
# save the failed/unreachable hosts, as the run_handlers()
# method will clear that information during its execution
failed_hosts = iterator.get_failed_hosts()
unreachable_hosts = self._tqm._unreachable_hosts.keys()
display.debug("running handlers")
handler_result = self.run_handlers(iterator, play_context)
if isinstance(handler_result, bool) and not handler_result:
result |= self._tqm.RUN_ERROR
elif not handler_result:
result |= handler_result
# now update with the hosts (if any) that failed or were
# unreachable during the handler execution phase
failed_hosts = set(failed_hosts).union(iterator.get_failed_hosts())
unreachable_hosts = set(unreachable_hosts).union(self._tqm._unreachable_hosts.keys())
# return the appropriate code, depending on the status hosts after the run
if not isinstance(result, bool) and result != self._tqm.RUN_OK:
return result
elif len(unreachable_hosts) > 0:
return self._tqm.RUN_UNREACHABLE_HOSTS
elif len(failed_hosts) > 0:
return self._tqm.RUN_FAILED_HOSTS
else:
return self._tqm.RUN_OK
    def get_hosts_remaining(self, play):
return [host for host in self._inventory.get_hosts(play.hosts)
if host.name not in self._tqm._failed_hosts and host.name not in self._tqm._unreachable_hosts]
def get_failed_hosts(self, play):
return [host for host in self._inventory.get_hosts(play.hosts) if host.name in self._tqm._failed_hosts]
    def add_tqm_variables(self, vars, play):
'''
Base class method to add extra variables/information to the list of task
vars sent through the executor engine regarding the task queue manager state.
'''
vars['ansible_current_hosts'] = [h.name for h in self.get_hosts_remaining(play)]
vars['ansible_failed_hosts'] = [h.name for h in self.get_failed_hosts(play)]
def _queue_task(self, host, task, task_vars, play_context):
''' handles queueing the task up to be sent to a worker '''
display.debug("entering _queue_task() for %s/%s" % (host.name, task.action))
# Add a write lock for tasks.
# Maybe this should be added somewhere further up the call stack but
# this is the earliest in the code where we have task (1) extracted
# into its own variable and (2) there's only a single code path
# leading to the module being run. This is called by three
# functions: __init__.py::_do_handler_run(), linear.py::run(), and
# free.py::run() so we'd have to add to all three to do it there.
# The next common higher level is __init__.py::run() and that has
# tasks inside of play_iterator so we'd have to extract them to
|
asmacdo/pulp-automation
|
tests/consumer_agent_tests/test_09_consumer.py
|
Python
|
gpl-2.0
| 9,939
| 0.008452
|
import unittest
from pulp_auto.consumer.consumer_class import (Consumer, Binding, Event, ConsumersApplicability)
from pulp_auto.task import Task
from pulp_auto.pulp import Request
from pulp_auto import path_join
from tests.pulp_test import (ConsumerAgentPulpTest, agent_test)
class TestConsumer(ConsumerAgentPulpTest):
def test_00_none(self):
pass
def test_01_update_consumer(self):
# update causes private key loss; do not change self.consumer
consumer = self.consumer | {'display_name': "A %s consumer" % type(self).__name__}
with self.pulp.asserting(True):
consumer.delta_update(self.pulp)
self.assertEqual(Consumer.get(self.pulp, consumer.id), consumer)
### binding
@agent_test(catching=True)
def test_02_bind_distributor(self):
with self.pulp.asserting(True):
response = self.consumer.bind_distributor(self.pulp, self.repo.id, self.distributor.id)
self.assertPulp(code=202)
Task.wait_for_report(self.pulp, response)
@agent_test(catching=True)
def test_02_bind_non_existant_distributor_1115528(self):
# https://bugzilla.redhat.com/show_bug.cgi?id=1115528
self.consumer.bind_distributor(self.pulp, self.repo.id, 'some_dist')
self.assertPulp(code=400)
def test_03_get_repo_bindings(self):
with self.pulp.asserting(True):
bindings = self.consumer.get_repo_bindings(self.pulp, self.repo.id)
binding = Binding(data={
'repo_id': self.repo.id,
'consumer_id': self.consumer.id,
'distributor_id': self.distributor.id,
'id': '123'
})
self.assertIn(binding, bindings)
def test_03_get_nonexistant_repo_bindings_bz1094264(self):
# https://bugzilla.redhat.com/show_bug.cgi?id=1094264
with self.assertRaises(AssertionError):
self.consumer.get_repo_bindings(self.pulp, 'some_repo')
self.assertPulp(code=404)
def test_04_get_single_binding(self):
with self.pulp.asserting(True):
single_binding = self.consumer.get_single_binding(self.pulp, self.repo.id, self.distributor.id)
binding = Binding(data={
'repo_id': self.repo.id,
'consumer_id': self.consumer.id,
'distributor_id': self.distributor.id,
'id': '123'
})
self.assertEqual(binding, single_binding)
def test_04_get_nonexistant_binding(self):
with self.assertRaises(AssertionError):
self.consumer.get_single_binding(self.pulp, self.repo.id, 'some_dist')
self.assertPulp(code=404)
def test_05_list_bindings(self):
with self.pulp.asserting(True):
bindings = self.consumer.list_bindings(self.pulp)
binding = Binding(data={
'repo_id': self.repo.id,
'consumer_id': self.consumer.id,
'distributor_id': self.distributor.id,
'id': '123'
})
self.assertIn(binding, bindings)
@agent_test(catching=True)
def test_06_unbind_distributor(self):
with self.pulp.asserting(True):
response = self.consumer.unbind_distributor(self.pulp, self.repo.id, self.distributor.id)
self.assertPulp(code=202)
Task.wait_for_report(self.pulp, response)
@agent_test(catching=True)
def test_06_unbind_non_existant_distributor(self):
self.consumer.unbind_distributor(self.pulp, self.repo.id, 'some_dist')
self.assertPulp(code=404)
### consumer info
def test_07_get_consumer_info(self):
consumer = Consumer.get(self.pulp, self.consumer.id)
self.assertEqual(consumer.id, self.consumer.id)
def test_07_get_nonesistant_consumer_info(self):
with self.assertRaises(AssertionError):
Consumer.get(self.pulp, 'some_consumer')
self.assertPulp(code=404)
def test_08_get_list_consumers(self):
self.assertIn(Consumer.get(self.pulp, self.consumer.id), Consumer.list(self.pulp))
def test_09_search_consumer(self):
        #perform search of the consumer by its id
        #check that the filter works properly and returns the right consumer id
consumer = Consumer.search(self.pulp, data={"criteria": {"sort": None, "fields": None, "limit": None, "filters": {"id": self.consumer.id}, "skip": None}})
self.assertIn(Consumer({"id": self.consumer.id}, ['id'], ['id']), consumer)
        #check that the search result contains exactly one consumer with this id
self.assertTrue(len(consumer) == 1)
### history
def test_10_event_history(self):
events = self.consumer.get_history(self.pulp)
self.assertPulp(code=200)
def test_11_event_history_filter_type(self):
with self.pulp.asserting(True):
events = self.consumer.get_history(self.pulp, {'event_type': 'consumer_registered'})
assert [event for event in events if event.data['type'] == "consumer_registered"], "consumer_registered event not found"
assert [event for event in events if event.data['type'] != "consumer_unregistered"], "consumer_unregistered event found"
assert [event for event in events if event.data['type'] != "repo_bound"], "repo_bound event found"
def test_12_event_history_filter_limit(self):
with self.pulp.asserting(True):
events = self.consumer.get_history(self.pulp, {'limit': '2'})
self.assertEqual(len(events), 2, "limit fail")
def test_13_event_history_filter_sort(self):
with self.pulp.asserting(True):
events = self.consumer.get_history(self.pulp, {'sort': 'ascending'})
self.assertEqual(events, sorted(events, key = lambda event: event.data['timestamp']))
def test_14_event_history_filter_all(self):
with self.pulp.asserting(True):
events = self.consumer.get_history(self.pulp, {'event_type': 'consumer_registered', 'limit': '1', 'sort': 'descending'})
assert [event for event in events if event.data['type'] == "consumer_registered"], "consumer_registered event not found"
assert [event for event in events if event.data['type'] != "repo_bound"], "repo_bound event found"
        # important! if this testcase is run for the first time there will be only one 'consumer_registered' event in the history
        # next runs will just accumulate previous consumer history until the data reaper cleans it up.
self.assertEqual(len(events), 1, "limit fail")
self.assertEqual(events, sorted(events, key = lambda event: event.data['timestamp'], reverse=True))
### profiles
#def test_15_create_profile(self):
#def test_16_replace_profile(self):
#def test_17_list_profiles(self):
#def test_18_get_profile(self):
### applicability
def test_19_applicabilty_single_consumer(self):
#Generate Content Applicability for a single Consumer
response = self.consumer.applicability(self.pulp)
self.assertPulp(code=202)
Task.wait_for_report(self.pulp, response)
class ConsumerApplicabilityTest(ConsumerAgentPulpTest):
def test_01_applicabilty_consumers(self):
#Generate Content Applicability for Updated Consumers
        response = ConsumersApplicability.regenerate(self.pulp, data={
            "consumer_criteria": {"filters": {"id": {"$in": ["sunflower", "voyager"]}}}
}
)
self.assertPulp(code=202)
Task.wait_for_report(self.pulp, response)
# TODO: assert applicability tags in task response
# TODO: assert the applicability applies OK :) or is sane
def test_02_applicabilty_consumers_invalid_param(self):
#Generate Content Applicability for Updated Consumers
#if one or more of the parameters is invalid
ConsumersApplicability.regenerate(self.pulp, data={
"inv
|