repo_name stringlengths 5 100 | path stringlengths 4 231 | language stringclasses 1 value | license stringclasses 15 values | size int64 6 947k | score float64 0 0.34 | prefix stringlengths 0 8.16k | middle stringlengths 3 512 | suffix stringlengths 0 8.17k |
|---|---|---|---|---|---|---|---|---|
supermari0/ironic | ironic/tests/matchers.py | Python | apache-2.0 | 3,436 | 0 | # Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# Copyright 2012 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Matcher classes to be used inside of the testtools assertThat framework."""
import pprint
class DictKeysMismatch(object):
def __init__(self, d1only, d2only):
self.d1only = d1only
self.d2only = d2only
def describe(self):
return ('Keys in d1 and not d2: %(d1only)s.'
' Keys in d2 and not d1: %(d2only)s' % self.__dict__)
def get_details(self):
return {}
class DictMismatch(object):
def __init__(self, key, d1_value, d2_value):
self.key = key
self.d1_value = d1_value
self.d2_value = d2_value
def describe(self):
return ("Dictionaries do not match at %(key)s."
" d1: %(d1_value)s d2: %(d2_value)s" % self.__dict__)
def get_details(self):
return {}
class DictMatches(object):
def __init__(self, d1, approx_equal=False, tolerance=0.001):
self.d1 = d1
self.approx_equal = approx_equal
| self.tolerance = tolerance
def __str__(self):
return 'DictMatches(%s)' % (pprint.pformat(self.d1))
# Useful assertions
def match(self, d2):
"""Assert two dicts are equivalent.
Thi | s is a 'deep' match in the sense that it handles nested
dictionaries appropriately.
NOTE:
If you don't care (or don't know) a given value, you can specify
the string DONTCARE as the value. This will cause that dict-item
to be skipped.
"""
d1keys = set(self.d1.keys())
d2keys = set(d2.keys())
if d1keys != d2keys:
d1only = d1keys - d2keys
d2only = d2keys - d1keys
return DictKeysMismatch(d1only, d2only)
for key in d1keys:
d1value = self.d1[key]
d2value = d2[key]
try:
error = abs(float(d1value) - float(d2value))
within_tolerance = error <= self.tolerance
except (ValueError, TypeError):
# If both values aren't convertible to float, just ignore
# ValueError if arg is a str, TypeError if it's something else
# (like None)
within_tolerance = False
if hasattr(d1value, 'keys') and hasattr(d2value, 'keys'):
matcher = DictMatches(d1value)
did_match = matcher.match(d2value)
if did_match is not None:
return did_match
elif 'DONTCARE' in (d1value, d2value):
continue
elif self.approx_equal and within_tolerance:
continue
elif d1value != d2value:
return DictMismatch(key, d1value, d2value)
|
jileiwang/CJ-Glo | tools/AnalogyData.py | Python | apache-2.0 | 5,375 | 0.005767 |
class AnalogyData():
def __init__(self):
self.read_analogy()
self.translationPairs = None
self.crossZh = None
self.crossJa = None
self.cross2Zh2Ja = None
self.cross1Zh3Ja = None
self.cross3Zh1Ja = None
def read_analogy(self):
self.records = []
fin = open("data/analogy_02/analogy_source.txt", 'r')
for i in xrange(7):
line = fin.readline()
words = line.split()
# in cjboc data file, 0 - ja, 1 - zh
# in cjglo data file, 1 - zh, 2 - ja
zh_county = '1' + words[0]
zh_city = '1' + words[1]
ja_county = '2' + words[2]
ja_city = '2' + words[3]
record = [[zh_county, zh_city], [ja_county, ja_city]]
self.records.append(record)
# in the translation pairs
def generateTranslationPairs(self):
self.translationPairs = []
for record in self.records:
# self.translationPairs.append((token.zh_county, token.zh_city, token.ja_county, token.ja_city))
# self.translationPairs.append((token.zh_city, token.zh_county, token.ja_city, token.ja_county))
# self.translationPairs.append((token.ja_county, token.ja_city, token.zh_county, token.zh_city))
# self.translationPairs.append((token.ja_city, token.ja_county, token.zh_city, token.zh_county))
for lang in [0, 1]:
for c in [0, 1]:
self.translationPairs.append(record[lang][c], record[lang][1-c], record[1-lang][c], record[1-lang][1-c])
def getTranslationPairs(self):
if not self.translationPairs:
self.generateTranslationPairs()
def generateCrossSingleLang(self, lang):
result = []
for record1 in self.records:
for record2 in self.records:
if record1 == record2:
continue
for c in [0, 1]:
result.append((record1[lang][c], record1[lang][1-c], record2[lang][c], record2[lang][1-c]))
return result
def generateCrossZh(self):
self.crossZh = self.generateCrossSingleLang(0)
def getCrossZh(self):
if not self.crossZh:
self.generateCrossZh()
return self.crossZh
def generateCrossJa(self):
self.crossJa = self.generateCrossSingleLang(1)
def getCrossJa(self):
if not self.crossJa:
self.generateCrossJa()
return self.crossJa
def generateCross2Zh2Ja(self):
self.cross2Zh2Ja = []
for record1 in self.records:
for record2 in self.records:
if record1 == record2:
continue
for c in [0, 1]:
self.cross2Zh2Ja.append((record1[0][c], record1[0][1-c], record2[1][c], record2[1][1-c]))
self.cross2Zh2Ja.append((record1[0][c], record1[1][1-c], record2[0][c], record2[1][1-c]))
self.cross2Zh2Ja.append((record1[0][c], record1[1][1-c], record2[1][c], record2[0][1-c]))
self.cross2Zh2Ja.append((record1[1][c], record1[0][1-c], record2[0][c], record2[1][1-c]))
self.cross2Zh2Ja.append((record1[1][c], record1[0][1-c], record2[1][c], record2[0][1-c]))
self.cross2Zh2Ja.append((record1[1][c], record1[1][1-c], record2[0][c], record2[0][1-c]))
def getCross2Zh2Ja(self):
if not self.cross2Zh2Ja:
self.generateCross2Zh2Ja()
return self.cross2Zh2Ja
def generateCr | oss1vs3(self, lang):
# lang has 1 token
result = []
for | record1 in self.records:
for record2 in self.records:
if record1 == record2:
continue
for c in [0, 1]:
result.append((record1[lang][c], record1[1-lang][1-c], record2[1-lang][c], record2[1-lang][1-c]))
result.append((record1[1-lang][c], record1[lang][1-c], record2[1-lang][c], record2[1-lang][1-c]))
result.append((record1[1-lang][c], record1[1-lang][1-c], record2[lang][c], record2[1-lang][1-c]))
result.append((record1[1-lang][c], record1[1-lang][1-c], record2[1-lang][c], record2[lang][1-c]))
return result
def generateCross1Zh3Ja(self):
self.cross1Zh3Ja = self.generateCross1vs3(0)
def getCross1Zh3Ja(self):
if not self.cross1Zh3Ja:
self.generateCross1Zh3Ja()
return self.cross1Zh3Ja
def generateCross3Zh1Ja(self):
self.cross3Zh1Ja = self.generateCross1vs3(1)
def getCross3Zh1Ja(self):
if not self.cross3Zh1Ja:
self.generateCross3Zh1Ja()
return self.cross3Zh1Ja
def getByZhCount(self, zhCount):
if zhCount == 0:
return self.getCrossJa()
elif zhCount == 1:
return self.getCross1Zh3Ja()
elif zhCount == 2:
return self.getCross2Zh2Ja()
elif zhCount == 3:
return self.getCross3Zh1Ja()
else:
return self.getCrossZh()
if __name__ == '__main__':
data = AnalogyData()
#for t in data.getCross3Zh1Ja():
#for t in data.getCross1Zh3Ja():
#for t in data.getCross2Zh2Ja():
for t in data.getCrossZh():
print t[0], t[1], t[2], t[3]
|
fhcrc/taxtastic | taxtastic/subcommands/rollforward.py | Python | gpl-3.0 | 2,393 | 0.000418 | # This file is part of taxtastic.
#
# taxtastic is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# taxtastic is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with taxtastic. If not, see <http://www.gnu.org/licenses/>.
"""Restore a change to a refpkg immediately after being reverted
Restore the last ``N`` rolled back operations on ``refpkg``, or the
last operation if ``-n`` is omitted. If there are not at least ``N``
operations that can be rolled forward on this refpkg, then an error is
returned and no changes are made to the refpkg.
Note that operations can only be rolled forward immediately after
being rolled bac | k. If any operation besides a rollback occurs, all
roll forward information is removed.
"""
import logging
from taxtastic import refpkg
log = logging.getLogger(__name__)
def build_parser(p | arser):
parser.add_argument('refpkg', action='store', metavar='refpkg',
help='the reference package to operate on')
parser.add_argument('-n', action='store', metavar='int',
default=1, type=int,
help='Number of operations to roll back')
def action(args):
"""Roll forward previously rolled back commands on a refpkg.
*args* should be an argparse object with fields refpkg (giving the
path to the refpkg to operate on) and optionall n (giving the
number of operations to roll forward.
"""
log.info('loading reference package')
r = refpkg.Refpkg(args.refpkg, create=False)
# First check if we can do n rollforwards
q = r.contents
for i in range(args.n):
if q['rollforward'] is None:
log.error(
'Cannot rollforward {} changes; '
'refpkg only records {} rolled back changes.'.format(args.n, i))
return 1
else:
q = q['rollforward'][1]
for i in range(args.n):
r.rollforward()
return 0
|
autogestion/sh_ctracker | ctracker/migrations/0001_initial.py | Python | bsd-3-clause | 3,369 | 0.003859 | # -*- coding: utf-8 -*-
# Generated by Django 1.10.8 on 2017-10-06 14:43
from __future__ import unicode_literals
import django.contrib.gis.db.models.fields
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='ClaimType',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=555)),
('icon', models.FileField(blank=True, null=True, upload_to='icons/')),
],
),
migrations.CreateModel(
name='Organization',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=255)),
('url', models.URLField(blank=True, null=True)),
('is_verified', models.BooleanField(default=True)),
('updated', models.DateTimeField(auto_now=True)),
],
),
migrations.CreateModel(
name='OrganizationType',
fields=[
('type_id', models.CharField(max_length=155, primary_key=True, serialize=False)),
('name', models.CharField(max_length=255)),
],
),
migrations.CreateModel(
name='Polygon',
fields=[
('polygon_id', models.CharField(max_length=50, primary_key=True, serialize=False)),
('shape', django.contrib.gis.db.models.fields.PolygonField(blank=True, null=True, srid=4326)),
('centroid', django.contrib.gis.db.models.fields.PointField(blank=True, null=True, srid=4326)),
('address', models.CharField(blank=True, max_length=800, null=True)),
('level', models.IntegerField(choices=[(0, 'Root polygon'), (1, 'Regions of country'), (2, 'Subregions or big sities'), (3, 'Towns or districts of city'), (4, 'Houses')], default=4)),
('is_verified', models.BooleanField(default=True)),
('updated', models.DateTimeField(auto_now=True)),
('layer', models.ForeignKey(blank=True, null=True, on_ | delete=django.db.models.deletion.CASCADE, to='ctracker.Polygon')),
('organizations', models.ManyToManyField(blank=True, to='ctracker.Organization')),
],
),
migrations.Creat | eModel(
name='Uploader',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('json_file', models.FileField(upload_to='geojsons')),
],
options={
'verbose_name_plural': 'Uploader',
},
),
migrations.AddField(
model_name='organization',
name='org_type',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='ctracker.OrganizationType'),
),
migrations.AddField(
model_name='claimtype',
name='org_type',
field=models.ManyToManyField(to='ctracker.OrganizationType'),
),
]
|
pypy90/graphite-web | webapp/graphite/settings.py | Python | apache-2.0 | 5,977 | 0.010373 | """Copyright 2008 Orbitz WorldWide
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License."""
# Django settings for graphite project.
# DO NOT MODIFY THIS FILE DIRECTLY - use local_settings.py instead
import sys, os
from django import VERSION as DJANGO_VERSION
from os.path import abspath, dirname, join
GRAPHITE_WEB_APP_SETTINGS_LOADED = False
WEBAPP_VERSION = '0.10.0-alpha'
DEBUG = False
JAVASCRIPT_DEBUG = False
# Filesystem layout
WEB_DIR = dirname( abspath(__file__) )
WEBAPP_DIR = dirname(WEB_DIR)
GRAPHITE_ROOT = dirname(WEBAPP_DIR)
THIRDPARTY_DIR = join(WEB_DIR,'thirdparty')
# Initialize additional path variables
# Defaults for these are set after local_settings is imported
CONTENT_DIR = ''
CSS_DIR = ''
CONF_DIR = ''
DASHBOARD_CONF = ''
GRAPHTEMPLATES_CONF = ''
STORAGE_DIR = ''
WHITELIST_FILE = ''
INDEX_FILE = ''
LOG_DIR = ''
CERES_DIR = ''
WHISPER_DIR = ''
RRD_DIR = ''
STANDARD_DIRS = []
CLUSTER_SERVERS = []
sys.path.insert(0, WEBAPP_DIR)
# Allow local versions of | the libs shipped in thirdparty to take precedence
sys.path.append(THIRDPARTY_DIR)
# Cluster settings
CLUSTER_SERVERS = []
REMOTE_FIND_TIMEOUT = 3.0
REMOTE_FETCH_TIMEOUT = 6.0
REMOTE_RETRY_DELAY = 60.0
REMOTE_READER_CACHE_SIZE_LIMIT = 1000
CARBONLINK_HOSTS = ["127.0.0.1:7002"]
CARBONLINK_TIMEOUT = 1.0
CARBONLINK_HASHING_KEYFUNC = None
CARBONLINK | _RETRY_DELAY = 15
REPLICATION_FACTOR = 1
MEMCACHE_HOSTS = []
FIND_CACHE_DURATION = 300
FIND_TOLERANCE = 2 * FIND_CACHE_DURATION
DEFAULT_CACHE_DURATION = 60 #metric data and graphs are cached for one minute by default
LOG_CACHE_PERFORMANCE = False
#Remote rendering settings
REMOTE_RENDERING = False #if True, rendering is delegated to RENDERING_HOSTS
RENDERING_HOSTS = []
REMOTE_RENDER_CONNECT_TIMEOUT = 1.0
LOG_RENDERING_PERFORMANCE = False
#Miscellaneous settings
SMTP_SERVER = "localhost"
DOCUMENTATION_URL = "http://graphite.readthedocs.org/"
ALLOW_ANONYMOUS_CLI = True
LOG_METRIC_ACCESS = False
LEGEND_MAX_ITEMS = 10
#Authentication settings
USE_LDAP_AUTH = False
LDAP_SERVER = "" # "ldapserver.mydomain.com"
LDAP_PORT = 389
LDAP_SEARCH_BASE = "" # "OU=users,DC=mydomain,DC=com"
LDAP_BASE_USER = "" # "CN=some_readonly_account,DC=mydomain,DC=com"
LDAP_BASE_PASS = "" # "my_password"
LDAP_USER_QUERY = "" # "(username=%s)" For Active Directory use "(sAMAccountName=%s)"
LDAP_URI = None
#Set this to True to delegate authentication to the web server
USE_REMOTE_USER_AUTHENTICATION = False
# Override to link a different URL for login (e.g. for django_openid_auth)
LOGIN_URL = '/account/login'
#Initialize database settings - Old style (pre 1.2)
DATABASE_ENGINE = 'django.db.backends.sqlite3' # 'postgresql', 'mysql', 'sqlite3' or 'ado_mssql'.
DATABASE_NAME = '' # Or path to database file if using sqlite3.
DATABASE_USER = '' # Not used with sqlite3.
DATABASE_PASSWORD = '' # Not used with sqlite3.
DATABASE_HOST = '' # Set to empty string for localhost. Not used with sqlite3.
DATABASE_PORT = '' # Set to empty string for default. Not used with sqlite3.
# If using rrdcached, set to the address or socket of the daemon
FLUSHRRDCACHED = ''
## Load our local_settings
try:
from graphite.local_settings import *
except ImportError:
print >> sys.stderr, "Could not import graphite.local_settings, using defaults!"
## Load Django settings if they werent picked up in local_settings
if not GRAPHITE_WEB_APP_SETTINGS_LOADED:
from graphite.app_settings import *
## Set config dependent on flags set in local_settings
# Path configuration
if not CONTENT_DIR:
CONTENT_DIR = join(WEBAPP_DIR, 'content')
if not CSS_DIR:
CSS_DIR = join(CONTENT_DIR, 'css')
if not CONF_DIR:
CONF_DIR = os.environ.get('GRAPHITE_CONF_DIR', join(GRAPHITE_ROOT, 'conf'))
if not DASHBOARD_CONF:
DASHBOARD_CONF = join(CONF_DIR, 'dashboard.conf')
if not GRAPHTEMPLATES_CONF:
GRAPHTEMPLATES_CONF = join(CONF_DIR, 'graphTemplates.conf')
if not STORAGE_DIR:
STORAGE_DIR = os.environ.get('GRAPHITE_STORAGE_DIR', join(GRAPHITE_ROOT, 'storage'))
if not WHITELIST_FILE:
WHITELIST_FILE = join(STORAGE_DIR, 'lists', 'whitelist')
if not INDEX_FILE:
INDEX_FILE = join(STORAGE_DIR, 'index')
if not LOG_DIR:
LOG_DIR = join(STORAGE_DIR, 'log', 'webapp')
if not WHISPER_DIR:
WHISPER_DIR = join(STORAGE_DIR, 'whisper/')
if not CERES_DIR:
CERES_DIR = join(STORAGE_DIR, 'ceres/')
if not RRD_DIR:
RRD_DIR = join(STORAGE_DIR, 'rrd/')
if not STANDARD_DIRS:
try:
import whisper
if os.path.exists(WHISPER_DIR):
STANDARD_DIRS.append(WHISPER_DIR)
except ImportError:
print >> sys.stderr, "WARNING: whisper module could not be loaded, whisper support disabled"
try:
import rrdtool
if os.path.exists(RRD_DIR):
STANDARD_DIRS.append(RRD_DIR)
except ImportError:
pass
# Default sqlite db file
# This is set here so that a user-set STORAGE_DIR is available
if 'sqlite3' in DATABASE_ENGINE \
and not DATABASE_NAME:
DATABASE_NAME = join(STORAGE_DIR, 'graphite.db')
# Caching shortcuts
if MEMCACHE_HOSTS:
CACHE_BACKEND = 'memcached://' + ';'.join(MEMCACHE_HOSTS) + ('/?timeout=%d' % DEFAULT_CACHE_DURATION)
# Authentication shortcuts
if USE_LDAP_AUTH and LDAP_URI is None:
LDAP_URI = "ldap://%s:%d/" % (LDAP_SERVER, LDAP_PORT)
if USE_REMOTE_USER_AUTHENTICATION:
MIDDLEWARE_CLASSES += ('django.contrib.auth.middleware.RemoteUserMiddleware',)
AUTHENTICATION_BACKENDS.insert(0,'django.contrib.auth.backends.RemoteUserBackend')
if USE_LDAP_AUTH:
AUTHENTICATION_BACKENDS.insert(0,'graphite.account.ldapBackend.LDAPBackend')
|
ducksboard/libsaas | libsaas/services/recurly/subscriptions.py | Python | mit | 2,990 | 0 | from libsaas import http, parsers
from libsaas.services import base
from . import resource
class SubscriptionsBase(resource.RecurlyResource):
path = 'subscriptions'
def delete(self, *args, **kwargs):
raise base.MethodNotSupported()
class Subscriptions(SubscriptionsBase):
@base.apimethod
def get(self, state='live', cursor=None, per_page=None):
"""
Fetch all your subscription.
:var state: The state of subscriptions to return:
"active", "canceled", "expired", "future", "in_trial", "live",
or "past_due". A subscription will belong to more than one state.
:vartype state: str
"""
params = base.get_params(None, locals())
request = http.Request('GET', self.get_url(), params)
return request, parsers.parse_xml
def update(self, *args, **kwargs):
raise base.MethodNotSupported()
class Subscription(SubscriptionsBase):
def create(self, *args, **kwargs):
raise base.MethodNotSupported()
@base.apimethod
def cancel(self):
"""
Cancel a subscription, remaining it as active until next billing cycle.
"""
self.require_item()
url = '{0}/cancel'.format(self.get_url())
request = http.Request('PUT', url)
request.use_xml = False
return request, parsers.parse_empty
@base.apimethod
def reactivate(self):
"""
Reactivating a canceled subscription.
"""
self.require_item()
url = '{0}/reactivate'.format(self.get_url())
request = http.Request('PUT', url)
return request, parsers.parse_empty
@base.apimethod
def terminate(self, refund=None):
"""
Terminate a subsciription, removing any sto | red billing information.
:var refund: The type of the refund to perform: 'full' or 'partial'
Defaults to 'none'.
:vartype refund: str
"""
self.require_item()
url = '{0}/terminate'.format(self.get_url())
params = {
'refund': refund if refu | nd else 'none'
}
url = url + '?' + http.urlencode_any(params)
request = http.Request('PUT', url)
return request, parsers.parse_empty
@base.apimethod
def postpone(self, next_renewal_date):
"""
Postpone a subscription
:var next_renewal_date: The next renewal date that will be applied
:vartype next_renewal_date: str
"""
self.require_item()
url = '{0}/postpone'.format(self.get_url())
params = {'next_renewal_date': next_renewal_date}
url = url + '?' + http.urlencode_any(params)
request = http.Request('PUT', url)
return request, parsers.parse_empty
class AccountSubscriptions(Subscriptions):
def create(self, *args, **kwargs):
raise base.MethodNotSupported()
def update(self, *args, **kwargs):
raise base.MethodNotSupported()
|
italomandara/mysite | myresume/routers.py | Python | mit | 456 | 0 | from .viewsets import *
from rest_framework import routers
# Routers provide an easy way of automat | ically determining the URL conf.
router = routers.DefaultRouter()
router.register(r'person', PersonViewSet)
router.register(r'skill', SkillViewSet)
router.register(r'mycontent', MyContentViewSet)
router.register(r'job', JobViewSet)
router.register(r'course', CourseViewSet)
router.register(r'post', PostViewSet)
router.register(r'conta | ct', ContactViewSet)
|
tehp/reddit | SubmissionRatio/submissionratio.py | Python | mit | 6,743 | 0.026991 | #/u/GoldenSights
import praw
import time
import sqlite3
import datetime
import traceback
'''USER CONFIGURATION'''
USERNAME = ""
#This is the bot's Username. In order to send mail, he must have some amount of Karma.
PASSWORD = ""
#This is the bot's Password.
USERAGENT = ""
#This is a short description of what the bot does. For example "/u/GoldenSights' Newsletter bot"
SUBREDDIT = "GoldTesting"
#This is the sub or list of subs to scan for new posts. For a single sub, use "sub1". For multiple subreddits, use "sub1+sub2+sub3+..."
RATIO = 2
#This is the required ratio of COMMENTS divided by SUBMISSIONS
PUNISHMENTREPORT = False
#Should the bot report the comment? (Use True or False. Use Capitals, no quotations.)
PUNISHMENTREMOVE = False
#Should the bot remove the comment? (Use True or False. Use Capitals, no quotations.)
PUNISHMENTREPLY = False
PUNISHMENTREPLYSTR = "You have fallen below the comment/submission ratio of " + str(RATIO) + "."
PUNISHMENTREPLYDISTINGUISH = True
#Should the bot reply to the comment? If True, it will use this string.
PUSHTOWIKI = True
PUSHTOWIKIFILE = "wiki.txt"
PUSHTOWIKISUBREDDIT = "GoldTesting"
PUSHTOWIKIPAGE = "heyo"
#Should the database be displayed on a subreddit wiki page?
#The wiki page (PUSHTOWIKIPAGE) will be updated in accordance to a file (PUSHTOWIKIFILE) stored in the same directory as this .py
#You must restart this bot if you edit the wiki file
PUSHTOWIKIWAIT = 120
#This is how many seconds you will wait between wiki page updates. The wiki does not need to be updated on every run
TABLESORT = 3
#This is how the Table on the wiki will be sorted
#0 = Username
#1 = Comment count
#2 = Submission count
#3 = Comment/Submission ratio
MAXPOSTS = 100
#This is how many posts you want to retrieve all at once. PRAW can download 100 at a time.
WAIT = 20
#This is how many seconds you will wait between cycles. The bot is completely inactive during this time.
'''All done!'''
WAITS = str(WAIT)
lastwikiupdate = 0
try:
import bot #This is a file in my python library which contains my Bot's username and password. I can push code to Git without showing credentials
USERNAME = bot.uG
PASSWORD = bot.pG
USERAGENT = bot.aG
except ImportError:
pass
sql = sqlite3.connect('sql.db')
print('Loaded SQL Database')
cur = sql.cursor()
cur.execute('CREATE TABLE IF NOT EXISTS oldposts(ID TEXT)')
cur.execute('CREATE TABLE IF NOT EXISTS users(NAME TEXT, COMMENTS INT, SUBMISSIONS INT, RATIO REAL)')
print('Loaded Completed table')
if PUSHTOWIKI:
try:
wikifile = open(PUSHTOWIKIFILE, 'r')
print('Loaded Wiki file')
except FileNotFoundError:
wikifile = open(PUSHTOWIKIFILE, 'w')
print('Wiki File was not found, and has been created')
sql.commit()
r = praw.Reddit(USERAGENT)
r.login(USERNAME, PASSWORD)
def getTime(bool):
timeNow = datetime.datetime.now(datetime.timezone.utc)
timeUnix = timeNow.timestamp()
if bool == False:
return timeNow
else:
return timeUnix
def updatewiki():
global lastwikiupdate
if PUSHTOWIKI:
now = getTime(True)
if now - lastwikiupdate > PUSHTOWIKIWAIT:
print('Updating wiki page "' + PUSHTOWIKIPAGE + '"')
with open(PUSHTOWIKIFILE, 'r') as temp:
lines = [line.strip() for line in temp]
for pos in range(len(lines)):
line = lines[pos]
try:
if line[0] == '#':
lines[pos] = ''
else:
if "__BUILDTABLE__" in line:
print('\tBuilding Table')
cur.execute('SELECT * FROM users')
fetched = cur.fetchall()
try:
fetched.sort(key=lambda x: x[TABLESORT].lower())
except:
fetched.sort(key=lambda x: x[TABLESORT])
if TABLESO | RT != 0:
fetched.reverse()
table = '\n\nUsername | Comments | Submissions | Ratio\n'
tabl | e += ':- | -: | -: | -:\n'
for item in fetched:
table += '/u/' + item[0] + ' | ' + str(item[1]) + ' | ' + str(item[2]) + ' | ' + str(item[3]) + '\n'
table += '\n\n'
lines[pos] = line.replace('__BUILDTABLE__', table)
if "__STRFTIME" in line:
print('\tBuilding timestamp')
form = line.split('"')[1]
now = getTime(False)
now = now.strftime(form)
lines[pos] = line.replace('__STRFTIME("' + form + '")__', now)
except:
pass
final = '\n\n'.join(lines)
r.edit_wiki_page(PUSHTOWIKISUBREDDIT, PUSHTOWIKIPAGE, final, reason=str(now))
lastwikiupdate = now
print('\tDone')
else:
print('Wiki page will update in ' + str(round(PUSHTOWIKIWAIT - (now-lastwikiupdate))) + ' seconds.')
def updatebase(l):
for post in l:
cur.execute('SELECT * FROM oldposts WHERE ID=?', [post.fullname])
if not cur.fetchone():
print(post.id)
try:
pauthor = post.author.name
cur.execute('SELECT * FROM users WHERE NAME=?', [pauthor])
fetched = cur.fetchone()
if not fetched:
print('\tNew user: ' + pauthor)
cur.execute('INSERT INTO users VALUES(?, ?, ?, ?)', [pauthor, 0, 0, 0])
fetched = [pauthor, 0, 0, 0]
comments = fetched[1]
submissions = fetched[2]
if type(post) == praw.objects.Comment:
comments = comments + 1
if type(post) == praw.objects.Submission:
submissions = submissions + 1
denominator = (1 if submissions == 0 else submissions)
ratio = "%0.2f" % (comments / denominator)
print('\t' + pauthor)
print('\t' + str(comments) + 'c / ' + str(denominator) + 's = ' + str((comments / denominator))[:3])
ratio = float(ratio)
cur.execute('UPDATE users SET COMMENTS=?, SUBMISSIONS=?, RATIO=? WHERE NAME=?', [comments, submissions, ratio, pauthor])
if ratio < RATIO:
print("\tUser's ratio is too low!")
if PUNISHMENTREPORT:
print('\tReporting post')
post.report()
if PUNISHMENTREMOVE:
print('\tRemoving post')
post.remove()
if PUNISHMENTREPLY:
print('\tReplying to post')
if type(post) == praw.objects.Submission:
new = post.add_comment(PUNISHMENTREPLYSTR)
if type(post) == praw.objects.Comment:
new = post.reply(PUNISHMENTREPLYSTR)
if PUNISHMENTREPLYDISTINGUISH:
print('\tDistinguishing reply')
new.distinguish()
except AttributeError:
print('\tComment or Author has been deleted')
cur.execute('INSERT INTO oldposts VALUES(?)', [post.fullname])
sql.commit()
def scan():
print('Scanning ' + SUBREDDIT)
subreddit = r.get_subreddit(SUBREDDIT)
print('\tGathering submissions')
posts = list(subreddit.get_new(limit=MAXPOSTS))
updatebase(posts)
print()
print('\tGathering comments')
comments = list(subreddit.get_comments(limit=MAXPOSTS))
updatebase(comments)
while True:
try:
scan()
print()
updatewiki()
except:
traceback.print_exc()
print('Running again in ' + WAITS + ' seconds.\n')
time.sleep(WAIT) |
mne-tools/mne-tools.github.io | 0.20/_downloads/7bb433a8c5a4cf876b244e99c2c7c8b7/plot_stats_spatio_temporal_cluster_sensors.py | Python | bsd-3-clause | 7,444 | 0 | """
=====================================================
Spatiotemporal permutation F-test on full sensor data
=====================================================
Tests for differential evoked responses in at least
one condition using a permutation clustering test.
The FieldTrip neighbor templates will be used to determine
the adjacency between sensors. This serves as a spatial prior
to the clustering. Spatiotemporal clusters will then
be visualized using custom matplotlib code.
See the `FieldTrip website`_ for a caveat regarding
the possible interpretation of "significant" clusters.
"""
# Authors: Denis Engemann <denis.engemann@gmail.com>
# Jona Sassenhagen <jona.sassenhagen@gmail.com>
#
# License: BSD (3-clause)
import numpy as np
import matplotlib.pyplot as plt
from mpl_toolkits.axes_grid1 import make_axes_locatable
import mne
from mne.stats import spatio_temporal_cluster_test
from mne.datasets import sample
from mne.channels import find_ch_connectivity
from mne.viz import plot_compare_evokeds
print(__doc__)
###############################################################################
# Set parameters
# --------------
# Paths into the MNE "sample" dataset (downloaded on first use).
data_path = sample.data_path()
raw_fname = data_path + '/MEG/sample/sample_audvis_filt-0-40_raw.fif'
event_fname = data_path + '/MEG/sample/sample_audvis_filt-0-40_raw-eve.fif'
event_id = {'Aud/L': 1, 'Aud/R': 2, 'Vis/L': 3, 'Vis/R': 4}
# Epoch window relative to stimulus onset, in seconds.
tmin = -0.2
tmax = 0.5
# Setup for reading the raw data
raw = mne.io.read_raw_fif(raw_fname, preload=True)
raw.filter(1, 30, fir_design='firwin')
events = mne.read_events(event_fname)
###############################################################################
# Read epochs for the channel of interest
# ---------------------------------------
picks = mne.pick_types(raw.info, meg='mag', eog=True)
# Amplitude rejection thresholds (standard MNE units: T for mag, V for EOG).
reject = dict(mag=4e-12, eog=150e-6)
epochs = mne.Epochs(raw, events, event_id, tmin, tmax, picks=picks,
                    baseline=None, reject=reject, preload=True)
epochs.drop_channels(['EOG 061'])
# Equal trial counts per condition keep the permutation F-test balanced.
epochs.equalize_event_counts(event_id)
X = [epochs[k].get_data() for k in event_id]  # as 3D matrix
X = [np.transpose(x, (0, 2, 1)) for x in X]  # transpose for clustering
###############################################################################
# Find the FieldTrip neighbor definition to setup sensor connectivity
# -------------------------------------------------------------------
connectivity, ch_names = find_ch_connectivity(epochs.info, ch_type='mag')
print(type(connectivity))  # it's a sparse matrix!
# Visualize the sensor adjacency that serves as the spatial prior
# for clustering.
plt.imshow(connectivity.toarray(), cmap='gray', origin='lower',
           interpolation='nearest')
plt.xlabel('{} Magnetometers'.format(len(ch_names)))
plt.ylabel('{} Magnetometers'.format(len(ch_names)))
plt.title('Between-sensor adjacency')
###############################################################################
# Compute permutation statistic
# -----------------------------
#
# How does it work? We use clustering to `bind` together features which are
# similar. Our features are the magnetic fields measured over our sensor
# array at different times. This reduces the multiple comparison problem.
# To compute the actual test-statistic, we first sum all F-values in all
# clusters. We end up with one statistic for each cluster.
# Then we generate a distribution from the data by shuffling our conditions
# between our samples and recomputing our clusters and the test statistics.
# We test for the significance of a given cluster by computing the probability
# of observing a cluster of that size. For more background read:
# Maris/Oostenveld (2007), "Nonparametric statistical testing of EEG- and
# MEG-data" Journal of Neuroscience Methods, Vol. 164, No. 1., pp. 177-190.
# doi:10.1016/j.jneumeth.2007.03.024
# set cluster threshold
threshold = 50.0  # very high, but the test is quite sensitive on this data
# set family-wise p-value
p_accept = 0.01
cluster_stats = spatio_temporal_cluster_test(X, n_permutations=1000,
                                             threshold=threshold, tail=1,
                                             n_jobs=1, buffer_size=None,
                                             connectivity=connectivity)
# Unpack: observed F-values, cluster index arrays, per-cluster p-values.
T_obs, clusters, p_values, _ = cluster_stats
good_cluster_inds = np.where(p_values < p_accept)[0]
###############################################################################
# Note. The same functions work with source estimate. The only differences
# are the origin of the data, the size, and the connectivity definition.
# It can be used for single trials or for groups of subjects.
#
# Visualize clusters
# ------------------
# configure variables for visualization
colors = {"Aud": "crimson", "Vis": 'steelblue'}
linestyles = {"L": '-', "R": '--'}
# organize data for plotting
evokeds = {cond: epochs[cond].average() for cond in event_id}
# loop over clusters
for i_clu, clu_idx in enumerate(good_cluster_inds):
    # unpack cluster information, get unique indices
    time_inds, space_inds = np.squeeze(clusters[clu_idx])
    ch_inds = np.unique(space_inds)
    time_inds = np.unique(time_inds)
    # get topography for F stat (mean over the cluster's time samples)
    f_map = T_obs[time_inds, ...].mean(axis=0)
    # get signals at the sensors contributing to the cluster
    sig_times = epochs.times[time_inds]
    # create spatial mask marking the cluster's sensors
    mask = np.zeros((f_map.shape[0], 1), dtype=bool)
    mask[ch_inds, :] = True
    # initialize figure
    fig, ax_topo = plt.subplots(1, 1, figsize=(10, 3))
    # plot average test statistic and mark significant sensors
    f_evoked = mne.EvokedArray(f_map[:, np.newaxis], epochs.info, tmin=0)
    f_evoked.plot_topomap(times=0, mask=mask, axes=ax_topo, cmap='Reds',
                          vmin=np.min, vmax=np.max, show=False,
                          colorbar=False, mask_params=dict(markersize=10))
    image = ax_topo.images[0]
    # create additional axes (for ERF and colorbar)
    divider = make_axes_locatable(ax_topo)
    # add axes for colorbar
    ax_colorbar = divider.append_axes('right', size='5%', pad=0.05)
    plt.colorbar(image, cax=ax_colorbar)
    ax_topo.set_xlabel(
        'Averaged F-map ({:0.3f} - {:0.3f} s)'.format(*sig_times[[0, -1]]))
    # add new axis for time courses and plot time courses
    ax_signals = divider.append_axes('right', size='300%', pad=1.2)
    title = 'Cluster #{0}, {1} sensor'.format(i_clu + 1, len(ch_inds))
    if len(ch_inds) > 1:
        title += "s (mean)"
    plot_compare_evokeds(evokeds, title=title, picks=ch_inds, axes=ax_signals,
                         colors=colors, linestyles=linestyles, show=False,
                         split_legend=True, truncate_yaxis='auto')
    # plot temporal cluster extent as a shaded span
    ymin, ymax = ax_signals.get_ylim()
    ax_signals.fill_betweenx((ymin, ymax), sig_times[0], sig_times[-1],
                             color='orange', alpha=0.3)
    # clean up viz
    mne.viz.tight_layout(fig=fig)
    fig.subplots_adjust(bottom=.05)
plt.show()
###############################################################################
# Exercises
# ----------
#
# - What is the smallest p-value you can obtain, given the finite number of
# permutations?
# - use an F distribution to compute the threshold by traditional significance
# levels. Hint: take a look at :obj:`scipy.stats.f`
#
# .. _fieldtrip website:
# http://www.fieldtriptoolbox.org/faq/
# how_not_to_interpret_results_from_a_cluster-based_permutation_test
|
tsoporan/tehorng | blog/urls.py | Python | agpl-3.0 | 421 | 0.011876 | from django.conf.urls.defaults import *
urlpatterns = patterns('',
    # Blog landing page: reverse-chronological list of entries.
    url(r'^$', 'blog.views.entry_list', name="entry-list"),
    # Yearly archive, e.g. /archive/2013/.
    url(r'^archive/(?P<year>\d{4})/$', 'blog.views.entry_archive_year', name="year-archive"),
    # Monthly archive, e.g. /archive/2013/7/.
    url(r'^archive/(?P<year>\d{4})/(?P<month>\d{1,2})/$', 'blog.views.entry_archive_month', name="month-archive"),
    # Entry detail by slug; kept last so it does not shadow the archives.
    url(r'^(?P<slug>[-\w]+)/$', 'blog.views.entry_detail', name="entry-detail"),
)
|
teeple/pns_server | work/install/Python-2.7.4/Lib/lib2to3/fixes/fix_ne.py | Python | gpl-2.0 | 573 | 0 | # Copyright 2006 Google, Inc. All Rights Reserved.
# Licensed to PSF under a Contributor Agreement.
"""Fixer that turns <> into !=."""
# Local imports
from .. import pytree
from ..pgen2 import token
from .. import fixer_base
class FixNe(fixer_base.BaseFix):
    """Replace the deprecated ``<>`` inequality operator with ``!=``."""

    # Accepting the raw NOTEQUAL token type directly avoids running the
    # pattern compiler for such a trivial fixer.
    _accept_type = token.NOTEQUAL

    def match(self, node):
        # Override: only the old-style spelling needs rewriting.
        return u"<>" == node.value

    def transform(self, node, results):
        replacement = pytree.Leaf(token.NOTEQUAL, u"!=", prefix=node.prefix)
        return replacement
|
benwilburn/fitist | FITist_project/user_profiles/forms.py | Python | mit | 1,871 | 0 | from django import forms
# from django.core import validators
from django.contrib.auth.models import User
from django.contrib.auth.forms import AuthenticationForm
class LoginForm(AuthenticationForm):
    """Login form that attaches Bootstrap CSS classes to the widgets.

    Authentication behavior is inherited unchanged from Django's
    AuthenticationForm; only widget attributes are customized.
    """
    username = forms.CharField(
        label="Username", max_length=30,
        widget=forms.TextInput(
            attrs={'class': 'form-control',
                   'name': 'username'}
        )
    )
    password = forms.CharField(
        label="Password", max_length=30,
        widget=forms.PasswordInput(
            attrs={'class': 'form-control', 'name': 'password'}
        )
    )
class UserForm(forms.ModelForm):
    """User registration form with password/email confirmation fields.

    Raises a form-level ValidationError when either confirmation field
    does not match its counterpart.
    """
    password = forms.CharField(widget=forms.PasswordInput)
    confirm_password = forms.CharField(widget=forms.PasswordInput)
    email = forms.CharField(widget=forms.EmailInput)
    confirm_email = forms.CharField(widget=forms.EmailInput)

    class Meta:
        model = User
        fields = [
            'username',
            'first_name',
            'last_name',
            'email',
            'confirm_email',
            'password',
            'confirm_password',
        ]

    def clean(self):
        """Cross-field validation for the confirmation pairs."""
        cleaned_data = super(UserForm, self).clean()
        password = cleaned_data.get('password')
        confirm_password = cleaned_data.get('confirm_password')
        email = cleaned_data.get('email')
        confirm_email = cleaned_data.get('confirm_email')
        if password != confirm_password:
            raise forms.ValidationError(
                "password and confirm_password fields do not match"
            )
        if email != confirm_email:
            raise forms.ValidationError(
                "email and confirm_email fields do not match"
            )
        # clean() must hand the cleaned data back so form processing can
        # continue (mandatory on older Django, conventional everywhere).
        return cleaned_data
|
alanbowman/home-assistant | homeassistant/components/camera/__init__.py | Python | mit | 6,728 | 0 | # pylint: disable=too-many-lines
"""
homeassistant.components.camera
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Component to interface with various cameras.
The following features are supported:
- Returning recorded camera images and streams
- Proxying image requests via HA for external access
- Converting a still image url into a live video stream
Upcoming features
- Recording
- Snapshot
- Motion Detection Recording(for supported cameras)
- Automatic Configuration(for supported cameras)
- Creation of child entities for supported functions
- Collating motion event images passed via FTP into time based events
- A service for calling camera functions
- Camera movement(panning)
- Zoom
- Light/Nightvision toggling
- Support for more devices
- Expanded documentation
"""
import requests
import logging
import time
import re
from homeassistant.helpers.entity import Entity
from homeassistant.const import (
ATTR_ENTITY_PICTURE,
HTTP_NOT_FOUND,
ATTR_ENTITY_ID,
)
from homeassistant.helpers.entity_component import EntityComponent
DOMAIN = 'camera'
DEPENDENCIES = ['http']
GROUP_NAME_ALL_CAMERAS = 'all_cameras'
SCAN_INTERVAL = 30
ENTITY_ID_FORMAT = DOMAIN + '.{}'
SWITCH_ACTION_RECORD = 'record'
SWITCH_ACTION_SNAPSHOT = 'snapshot'
SERVICE_CAMERA = 'camera_service'
STATE_RECORDING = 'recording'
DEFAULT_RECORDING_SECONDS = 30
# Maps discovered services to their platforms
DISCOVERY_PLATFORMS = {}
FILE_DATETIME_FORMAT = '%Y-%m-%d_%H-%M-%S-%f'
DIR_DATETIME_FORMAT = '%Y-%m-%d_%H-%M-%S'
| REC_DIR_PREFI | X = 'recording-'
REC_IMG_PREFIX = 'recording_image-'
STATE_STREAMING = 'streaming'
STATE_IDLE = 'idle'
CAMERA_PROXY_URL = '/api/camera_proxy_stream/{0}'
CAMERA_STILL_URL = '/api/camera_proxy/{0}'
ENTITY_IMAGE_URL = '/api/camera_proxy/{0}?time={1}'
MULTIPART_BOUNDARY = '--jpegboundary'
MJPEG_START_HEADER = 'Content-type: {0}\r\n\r\n'
# pylint: disable=too-many-branches
def setup(hass, config):
    """ Track states and offer events for sensors. """
    # EntityComponent manages discovery/polling of camera platform entities.
    component = EntityComponent(
        logging.getLogger(__name__), DOMAIN, hass, SCAN_INTERVAL,
        DISCOVERY_PLATFORMS)
    component.setup(config)
    # -------------------------------------------------------------------------
    # CAMERA COMPONENT ENDPOINTS
    # -------------------------------------------------------------------------
    # The following defines the endpoints for serving images from the camera
    # via the HA http server. This is means that you can access images from
    # your camera outside of your LAN without the need for port forwards etc.
    # Because the authentication header can't be added in image requests these
    # endpoints are secured with session based security.
    # pylint: disable=unused-argument
    def _proxy_camera_image(handler, path_match, data):
        """ Proxies the camera image via the HA server. """
        entity_id = path_match.group(ATTR_ENTITY_ID)
        camera = None
        if entity_id in component.entities.keys():
            camera = component.entities[entity_id]
        if camera:
            # Writes the raw image bytes straight to the response body.
            # NOTE(review): no status line/headers are emitted on this
            # path -- presumably the HTTP framework handles that; confirm.
            response = camera.camera_image()
            handler.wfile.write(response)
        else:
            handler.send_response(HTTP_NOT_FOUND)
    hass.http.register_path(
        'GET',
        re.compile(r'/api/camera_proxy/(?P<entity_id>[a-zA-Z\._0-9]+)'),
        _proxy_camera_image)
    # pylint: disable=unused-argument
    def _proxy_camera_mjpeg_stream(handler, path_match, data):
        """ Proxies the camera image as an mjpeg stream via the HA server.
        This function takes still images from the IP camera and turns them
        into an MJPEG stream. This means that HA can return a live video
        stream even with only a still image URL available.
        """
        entity_id = path_match.group(ATTR_ENTITY_ID)
        camera = None
        if entity_id in component.entities.keys():
            camera = component.entities[entity_id]
        if not camera:
            handler.send_response(HTTP_NOT_FOUND)
            handler.end_headers()
            return
        try:
            camera.is_streaming = True
            camera.update_ha_state()
            # Hand-rolled multipart/x-mixed-replace response; the loop below
            # pushes one JPEG part per still image until the client hangs up.
            # NOTE(review): the backslash continuation embeds the next line's
            # leading whitespace inside the Content-type header string.
            handler.request.sendall(bytes('HTTP/1.1 200 OK\r\n', 'utf-8'))
            handler.request.sendall(bytes(
                'Content-type: multipart/x-mixed-replace; \
                    boundary=--jpgboundary\r\n\r\n', 'utf-8'))
            handler.request.sendall(bytes('--jpgboundary\r\n', 'utf-8'))
            # MJPEG_START_HEADER.format()
            while True:
                img_bytes = camera.camera_image()
                headers_str = '\r\n'.join((
                    'Content-length: {}'.format(len(img_bytes)),
                    'Content-type: image/jpeg',
                )) + '\r\n\r\n'
                handler.request.sendall(
                    bytes(headers_str, 'utf-8') +
                    img_bytes +
                    bytes('\r\n', 'utf-8'))
                handler.request.sendall(
                    bytes('--jpgboundary\r\n', 'utf-8'))
        except (requests.RequestException, IOError):
            # Client disconnected or camera fetch failed: stop streaming.
            camera.is_streaming = False
            camera.update_ha_state()
        camera.is_streaming = False
    hass.http.register_path(
        'GET',
        re.compile(
            r'/api/camera_proxy_stream/(?P<entity_id>[a-zA-Z\._0-9]+)'),
        _proxy_camera_mjpeg_stream)
    return True
class Camera(Entity):
    """Base class that all camera platform implementations derive from."""

    def __init__(self):
        self.is_streaming = False

    @property
    # pylint: disable=no-self-use
    def is_recording(self):
        """Whether the camera is currently recording; platforms override."""
        return False

    @property
    # pylint: disable=no-self-use
    def brand(self):
        """Camera brand as a string, or None when unknown."""
        return None

    @property
    # pylint: disable=no-self-use
    def model(self):
        """Camera model as a string, or None when unknown."""
        return None

    def camera_image(self):
        """Return the current frame as raw image bytes; must be overridden."""
        raise NotImplementedError()

    @property
    def state(self):
        """Map the recording/streaming flags onto an entity state string."""
        if self.is_recording:
            return STATE_RECORDING
        if self.is_streaming:
            return STATE_STREAMING
        return STATE_IDLE

    @property
    def state_attributes(self):
        """Optional state attributes, with a cache-busting image URL."""
        picture = ENTITY_IMAGE_URL.format(self.entity_id, time.time())
        attr = {ATTR_ENTITY_PICTURE: picture}
        if self.model:
            attr['model_name'] = self.model
        if self.brand:
            attr['brand'] = self.brand
        return attr
|
mesnardo/PetIBM | examples/ibpm/cylinder2dRe3000_GPU/scripts/plotDragCoefficient.py | Python | bsd-3-clause | 2,006 | 0 | """
Plots the instantaneous drag coefficient between 0 and 3 time-units of flow
simulation and compares with numerical results from
Koumoutsakos and Leonard (1995).
_References:_
* Koumoutsakos, P., & Leonard, A. (1995).
High-resolution simulations of the flow around an impulsively started
cylinder using vortex methods.
Journal of Fluid Mechanics, 296, 1-38.
"""
import os
import pathlib
import numpy
import collections
from matplotlib import pyplot
# Locate the simulation directory and the repository root holding the
# reference data.
simu_dir = pathlib.Path(__file__).absolute().parents[1]
root_dir = os.environ.get('PETIBM_EXAMPLES')
if not root_dir:
    root_dir = simu_dir.parents[1]
else:
    # The environment variable is a plain string; convert it to a Path so
    # the ``root_dir / 'data' / filename`` arithmetic below works (str / str
    # raises TypeError).
    root_dir = pathlib.Path(root_dir)
data = collections.OrderedDict({})
# Reads forces from file.
label = 'PetIBM'
filepath = simu_dir / 'forces-0.txt'
with open(filepath, 'r') as infile:
    t, fx = numpy.loadtxt(infile, dtype=numpy.float64,
                          unpack=True, usecols=(0, 1))
# Drag coefficient is twice the drag force (unit freestream and diameter).
data[label] = {'t': t, 'cd': 2 * fx}
data[label]['kwargs'] = {}
# Reads drag coefficient of Koumoutsakos and Leonard (1995) for Re=3000.
label = 'Koumoutsakos and Leonard (1995)'
filename = 'koumoutsakos_leonard_1995_cylinder_dragCoefficientRe3000.dat'
filepath = root_dir / 'data' / filename
with open(filepath, 'r') as infile:
    t, cd = numpy.loadtxt(infile, dtype=numpy.float64, unpack=True)
# Reference time axis is rescaled by half to match this convention.
data[label] = {'t': 0.5 * t, 'cd': cd}
data[label]['kwargs'] = {'linewidth': 0, 'marker': 'o',
                         'markerfacecolor': 'none', 'markeredgecolor': 'black'}
pyplot.rc('font', family='serif', size=16)
# Plots the instantaneous drag coefficients.
fig, ax = pyplot.subplots(figsize=(8.0, 6.0))
ax.grid()
ax.set_xlabel('Non-dimensional time')
ax.set_ylabel('Drag coefficient')
for label, subdata in data.items():
    ax.plot(subdata['t'], subdata['cd'], label=label, **subdata['kwargs'])
ax.axis((0.0, 3.0, 0.0, 2.0))
ax.legend()
pyplot.show()
# Save figure.
fig_dir = simu_dir / 'figures'
fig_dir.mkdir(parents=True, exist_ok=True)
filepath = fig_dir / 'dragCoefficient.png'
fig.savefig(str(filepath), dpi=300)
linkfloyd/linkfloyd | linkfloyd/notifications/templatetags/notification_tags.py | Python | bsd-3-clause | 427 | 0.002342 | # -*- | coding: utf-8 -*-
from django.template import Library
from notifications.models import Notification
register = Library()
@register.assignment_tag(takes_context=True)
def get_unread_notifications_count(context):
    """Return the number of unseen notifications for the context's user.

    Returns an empty string (renders as nothing) when the template
    context has no user or the user is anonymous.
    """
    if 'user' in context:
        user = context['user']
        if not user.is_anonymous():
            unseen = Notification.objects.filter(recipient=user, seen=False)
            return unseen.count()
    return ''
|
STIXProject/python-stix | stix/common/kill_chains/__init__.py | Python | bsd-3-clause | 4,359 | 0.001376 | # Copyright (c) 2017, The MITRE Corporation. All rights reserved.
# See LICENSE.txt for complete terms.
from mixbox import fields
from mixbox import typedlist
from mixbox import entities
# internal
import stix
import stix.bindings.stix_common as common_binding
class KillChain(stix.Entity):
    """Models the STIX KillChainType: a named, ordered set of attack
    phases with an id, definer, and reference."""

    __hash__ = entities.Entity.__hash__
    _binding = common_binding
    _namespace = 'http://stix.mitre.org/common-1'
    _binding_class = _binding.KillChainType

    # mixbox-typed attributes/elements of the Kill_Chain XML element.
    id_ = fields.TypedField("id")
    name = fields.TypedField("name")
    definer = fields.TypedField("definer")
    reference = fields.TypedField("reference")
    number_of_phases = fields.TypedField("number_of_phases")
    kill_chain_phases = fields.TypedField("Kill_Chain_Phase", type_="stix.common.kill_chains.KillChainPhase", multiple=True, key_name="kill_chain_phases")

    def __init__(self, id_=None, name=None, definer=None, reference=None):
        super(KillChain, self).__init__()
        self.id_ = id_
        self.name = name
        self.definer = definer
        self.reference = reference
        self.number_of_phases = None  # can we just do len(self.kill_chain_phases)?

    def add_kill_chain_phase(self, value):
        """Append a KillChainPhase to this chain."""
        self.kill_chain_phases.append(value)

    def __eq__(self, other):
        # Equality by serialized content rather than object identity.
        if self is other:
            return True
        if not isinstance(other, self.__class__):
            return False
        return other.to_dict() == self.to_dict()

    def __ne__(self, other):
        return not self.__eq__(other)
class KillChains(stix.EntityList):
    """Collection of KillChain entries (STIX KillChainsType)."""

    _binding = common_binding
    _namespace = 'http://stix.mitre.org/common-1'
    _binding_class = _binding.KillChainsType

    kill_chain = fields.TypedField("Kill_Chain", KillChain, multiple=True, key_name="kill_chains")

    @classmethod
    def _dict_as_list(cls):
        # Serialize as a keyed dict ({'kill_chains': [...]}) rather than
        # a bare list.
        return False
class KillChainPhase(stix.Entity):
    """One phase (step) of a kill chain, identified by phase_id and
    ordered within its chain by ordinality."""

    __hash__ = entities.Entity.__hash__
    _binding = common_binding
    _namespace = 'http://stix.mitre.org/common-1'
    _binding_class = _binding.KillChainPhaseType

    phase_id = fields.TypedField("phase_id")
    name = fields.TypedField("name")
    ordinality = fields.IntegerField("ordinality")

    def __init__(self, phase_id=None, name=None, ordinality=None):
        super(KillChainPhase, self).__init__()
        self.phase_id = phase_id
        self.name = name
        self.ordinality = ordinality

    def __eq__(self, other):
        # Equality by serialized content rather than object identity.
        if other is self:
            return True
        if not isinstance(other, KillChainPhase):
            return False
        return other.to_dict() == self.to_dict()

    def __ne__(self, other):
        return not self.__eq__(other)
class KillChainPhaseReference(KillChainPhase):
    """A reference to a phase of a kill chain defined elsewhere.

    Extends KillChainPhase with the id and name of the referenced chain.
    """
    _binding = common_binding
    _namespace = 'http://stix.mitre.org/common-1'
    _binding_class = _binding.KillChainPhaseReferenceType

    kill_chain_id = fields.TypedField("kill_chain_id")
    kill_chain_name = fields.TypedField("kill_chain_name")

    def __init__(self, phase_id=None, name=None, ordinality=None, kill_chain_id=None, kill_chain_name=None):
        super(KillChainPhaseReference, self).__init__(phase_id, name, ordinality)
        self.kill_chain_id = kill_chain_id
        self.kill_chain_name = kill_chain_name
class _KillChainPhaseReferenceList(typedlist.TypedList):
    """TypedList that coerces full KillChainPhase items into references."""

    def __init__(self, *args):
        super(_KillChainPhaseReferenceList, self).__init__(type=KillChainPhaseReference, *args)

    def _fix_value(self, value):
        # Full KillChainPhase objects are converted into lightweight
        # references keyed by phase_id; anything else is delegated to the
        # base class coercion.
        if not isinstance(value, KillChainPhase):
            return super(_KillChainPhaseReferenceList, self)._fix_value(value)
        if value.phase_id:
            return KillChainPhaseReference(phase_id=value.phase_id)
        raise ValueError("KillChainPhase must have a phase_id.")
class KillChainPhasesReference(stix.EntityList):
    """Collection of KillChainPhaseReference objects."""

    _binding = common_binding
    _namespace = 'http://stix.mitre.org/common-1'
    _binding_class = _binding.KillChainPhasesReferenceType

    # The custom listfunc coerces appended KillChainPhase objects into
    # references (see _KillChainPhaseReferenceList).
    kill_chain_phase = fields.TypedField(
        name="Kill_Chain_Phase",
        type_=KillChainPhaseReference,
        multiple=True,
        listfunc=_KillChainPhaseReferenceList,
        key_name="kill_chain_phases"
    )

    @classmethod
    def _dict_as_list(cls):
        # Serialize as a keyed dict rather than a bare list.
        return False
# NOT AN ACTUAL STIX TYPE!
class _KillChainPhases(stix.TypedList):
    """Internal helper list constrained to KillChainPhase items."""
    _contained_type = KillChainPhase
|
mage2k/pg_partitioner | pg_partitioner/sql_util.py | Python | bsd-3-clause | 2,074 | 0.007715 |
def table_exists(curs, table_name=''):
    '''
    If table_name is a schema qualified table name and it exists it is returned,
    else if it is not schema qualified and the table name exists in the search
    path then that schema qualified table name is returned, else None.
    '''
    curs.execute('SELECT pgpartitioner.table_exists(%s)', (table_name,))
    row = curs.fetchone()
    # Unwrap the single-column row, matching the docstring and the other
    # helpers in this module: returning the raw tuple would make a NULL
    # result look truthy ((None,)) to callers.
    return row[0] if row else None
def get_column_type(curs, table_name, column_name):
    '''
    Return the SQL type of column_name on table_name.  The underlying
    database function raises if the column does not exist.
    '''
    curs.execute('SELECT pgpartitioner.get_column_type(%s, %s);',
                 (table_name, column_name))
    row = curs.fetchone()
    return row[0]
def get_constraint_defs(curs, table_name, fkeys=True):
    '''
    Return a list of constraint definition fragments for table_name,
    suitable for use in CREATE TABLE or ALTER TABLE statements.  Foreign
    key constraints are omitted when fkeys is False.
    '''
    curs.execute('SELECT * FROM pgpartitioner.get_table_constraint_defs(%s, %s);',
                 (table_name, fkeys))
    return [row[0] for row in curs.fetchall()]
def get_index_defs(curs, table_name):
    '''
    Returns a list of index creation statements for any non-primary key or
    unique indexes on the given table.

    NOTE(review): only the first result column (the definition) is
    returned; the docstring previously promised (def, name) 2-tuples,
    which the code never produced.
    '''
    curs.execute('SELECT * FROM pgpartitioner.get_table_index_defs(%s);', (table_name,))
    return [res[0] for res in curs.fetchall()]
def table_attributes(curs, table_name):
    '''
    Return a tuple of the given table's attribute (column) names.
    '''
    curs.execute('SELECT * FROM pgpartitioner.get_table_attributes(%s);',
                 (table_name,))
    return tuple(row[0] for row in curs.fetchall())
def normalize_date(curs, date_str, fmt, units='month', diff='0 months'):
    '''
    Format date_str according to fmt after truncating it to `units` and
    shifting it by the interval `diff`; all parsing and formatting is
    delegated to the database.
    '''
    query = \
    '''
    SELECT to_char(date_trunc(%s, %s::timestamp + %s), %s);
    '''
    curs.execute(query, (units, date_str, diff, fmt))
    row = curs.fetchone()
    return row[0]
|
nicholas-leonard/hyperopt | hyperopt/ipy.py | Python | bsd-3-clause | 7,175 | 0.002787 | """Utilities for Parallel Model Selection with IPython
Author: James Bergstra <james.bergstra@gmail.com>
Licensed: MIT
"""
from time import sleep, time
import numpy as np
from IPython.parallel import interactive
#from IPython.parallel import TaskAborted
#from IPython.display import clear_output
from .base import Trials
from .base import Domain
from .base import JOB_STATE_NEW
from .base import JOB_STATE_RUNNING
from .base import JOB_STATE_DONE
from .base import JOB_STATE_ERROR
from .base import spec_from_misc
from .utils import coarse_utcnow
import sys
print >> sys.stderr, "WARNING: IPythonTrials is not as complete, stable"
print >> sys.stderr, " or well tested as Trials or MongoTrials."
class LostEngineError(RuntimeError):
    """An IPEngine disappeared during computation, and a job with it.

    Raised (or logged, depending on ``job_error_reaction``) by
    IPythonTrials.refresh() when an engine that was running a trial is
    no longer registered with the client.
    """
class IPythonTrials(Trials):
    """Trials database that evaluates hyperopt trials on an IPython
    parallel cluster, handing one trial to each idle engine."""

    def __init__(self, client,
            job_error_reaction='raise',
            save_ipy_metadata=True):
        # client: IPython.parallel Client connected to the cluster.
        # job_error_reaction: 'raise' or 'log' -- what to do when a job
        # fails or its engine disappears.
        # save_ipy_metadata: store AsyncResult metadata on finished trials.
        self._client = client
        self.job_map = {}
        self.job_error_reaction = job_error_reaction
        self.save_ipy_metadata = save_ipy_metadata
        Trials.__init__(self)
        self._testing_fmin_was_called = False

    def _insert_trial_docs(self, docs):
        # Append docs directly to the in-memory trial list; returns tids.
        rval = [doc['tid'] for doc in docs]
        self._dynamic_trials.extend(docs)
        return rval

    def refresh(self):
        """Reconcile job_map with the cluster: harvest finished jobs,
        and raise/log for jobs whose engine vanished."""
        job_map = {}
        # -- carry over state for active engines
        for eid in self._client.ids:
            job_map[eid] = self.job_map.pop(eid, (None, None))
        # -- deal with lost engines, abandoned promises
        for eid, (p, tt) in self.job_map.items():
            if self.job_error_reaction == 'raise':
                raise LostEngineError(p)
            elif self.job_error_reaction == 'log':
                tt['error'] = 'LostEngineError (%s)' % str(p)
                tt['state'] = JOB_STATE_ERROR
            else:
                raise ValueError(self.job_error_reaction)
        # -- remove completed jobs from job_map
        for eid, (p, tt) in job_map.items():
            if p is None:
                continue
            #print p
            #assert eid == p.engine_id
            if p.ready():
                try:
                    tt['result'] = p.get()
                    tt['state'] = JOB_STATE_DONE
                except Exception, e:
                    if self.job_error_reaction == 'raise':
                        raise
                    elif self.job_error_reaction == 'log':
                        tt['error'] = str(e)
                        tt['state'] = JOB_STATE_ERROR
                    else:
                        raise ValueError(self.job_error_reaction)
                if self.save_ipy_metadata:
                    tt['ipy_metadata'] = p.metadata
                tt['refresh_time'] = coarse_utcnow()
                # Mark the engine idle again.
                job_map[eid] = (None, None)
        self.job_map = job_map
        Trials.refresh(self)

    def fmin(self, fn, space, algo, max_evals,
        rstate=None,
        verbose=0,
        wait=True,
        pass_expr_memo_ctrl=None,
        ):
        """Minimize fn over space with algo, farming evaluations out to
        idle engines until max_evals trials exist; returns self.argmin."""
        if rstate is None:
            rstate = np.random
        # -- used in test_ipy
        self._testing_fmin_was_called = True
        if pass_expr_memo_ctrl is None:
            try:
                pass_expr_memo_ctrl = fn.pass_expr_memo_ctrl
            except AttributeError:
                pass_expr_memo_ctrl = False
        domain = Domain(fn, space, rseed=rstate.randint(2 ** 31 - 1),
                pass_expr_memo_ctrl=pass_expr_memo_ctrl)
        last_print_time = 0
        while len(self._dynamic_trials) < max_evals:
            self.refresh()
            # Status line: new/running/done/error counts and best loss,
            # printed at most once per second.
            if verbose and last_print_time + 1 < time():
                print 'fmin: %4i/%4i/%4i/%4i %f' % (
                    self.count_by_state_unsynced(JOB_STATE_NEW),
                    self.count_by_state_unsynced(JOB_STATE_RUNNING),
                    self.count_by_state_unsynced(JOB_STATE_DONE),
                    self.count_by_state_unsynced(JOB_STATE_ERROR),
                    min([float('inf')] + [l for l in self.losses() if l is not None])
                    )
                last_print_time = time()
            # Engines whose promise slot is None are idle.
            idles = [eid for (eid, (p, tt)) in self.job_map.items() if p is None]
            if idles:
                new_ids = self.new_trial_ids(len(idles))
                new_trials = algo(new_ids, domain, self)
                if len(new_trials) == 0:
                    # The suggestion algorithm is exhausted.
                    break
                else:
                    assert len(idles) == len(new_trials)
                    for eid, new_trial in zip(idles, new_trials):
                        now = coarse_utcnow()
                        new_trial['book_time'] = now
                        new_trial['refresh_time'] = now
                        promise = self._client[eid].apply_async(
                            call_domain,
                            domain,
                            config=spec_from_misc(new_trial['misc']),
                            )
                        # -- XXX bypassing checks because 'ar'
                        # is not ok for SONify... but should check
                        # for all else being SONify
                        tid, = self.insert_trial_docs([new_trial])
                        tt = self._dynamic_trials[-1]
                        assert tt['tid'] == tid
                        self.job_map[eid] = (promise, tt)
                        tt['state'] = JOB_STATE_RUNNING
        if wait:
            if verbose:
                print 'fmin: Waiting on remaining jobs...'
            self.wait(verbose=verbose)
        return self.argmin

    def wait(self, verbose=False, verbose_print_interval=1.0):
        """Block (polling every 0.1s) until no trial is new or running."""
        last_print_time = 0
        while True:
            self.refresh()
            if verbose and last_print_time + verbose_print_interval < time():
                print 'fmin: %4i/%4i/%4i/%4i %f' % (
                    self.count_by_state_unsynced(JOB_STATE_NEW),
                    self.count_by_state_unsynced(JOB_STATE_RUNNING),
                    self.count_by_state_unsynced(JOB_STATE_DONE),
                    self.count_by_state_unsynced(JOB_STATE_ERROR),
                    min([float('inf')]
                        + [l for l in self.losses() if l is not None])
                    )
                last_print_time = time()
            if self.count_by_state_unsynced(JOB_STATE_NEW):
                sleep(1e-1)
                continue
            if self.count_by_state_unsynced(JOB_STATE_RUNNING):
                sleep(1e-1)
                continue
            break

    def __getstate__(self):
        # Drop the unpicklable client and live job bookkeeping; the
        # trial documents themselves are preserved.
        rval = dict(self.__dict__)
        del rval['_client']
        del rval['_trials']
        del rval['job_map']
        #print rval.keys()
        return rval

    def __setstate__(self, dct):
        # NOTE(review): _client is not restored -- presumably the caller
        # re-attaches a client after unpickling; confirm.
        self.__dict__ = dct
        self.job_map = {}
        Trials.refresh(self)
Trials.refresh(self)
@interactive
def call_domain(domain, config):
    """Evaluate ``config`` on ``domain``; executed remotely on an IPEngine."""
    ctrl = None  # -- not implemented yet
    return domain.evaluate(
        config=config,
        ctrl=ctrl,
        attach_attachments=False,  # -- Not implemented yet
        )
|
SmartInfrastructures/fuel-web-dev | nailgun/nailgun/test/integration/test_node_handler.py | Python | apache-2.0 | 12,873 | 0.000078 | # -*- coding: utf-8 -*-
# Copyright 2013 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_serialization import jsonutils
from nailgun import consts
from nailgun import objects
from nailgun.db.sqlalchemy.models import Node
from nailgun.test.base import BaseIntegrationTest
from nailgun.test.base import fake_tasks
from nailgun.utils import reverse
class TestHandlers(BaseIntegrationTest):
    def test_node_get(self):
        """GET on a single node returns its full serialized representation."""
        node = self.env.create_node(api=False)
        resp = self.app.get(
            reverse('NodeHandler', kwargs={'obj_id': node.id}),
            headers=self.default_headers)
        self.assertEqual(200, resp.status_code)
        self.assertEqual(node.id, resp.json_body['id'])
        self.assertEqual(node.name, resp.json_body['name'])
        self.assertEqual(node.mac, resp.json_body['mac'])
        self.assertEqual(
            node.pending_addition, resp.json_body['pending_addition'])
        self.assertEqual(
            node.pending_deletion, resp.json_body['pending_deletion'])
        self.assertEqual(node.status, resp.json_body['status'])
        self.assertEqual(
            node.meta['cpu']['total'],
            resp.json_body['meta']['cpu']['total']
        )
        self.assertEqual(node.meta['disks'], resp.json_body['meta']['disks'])
        self.assertEqual(node.meta['memory'], resp.json_body['meta']['memory'])
    def test_node_creation_fails_with_wrong_id(self):
        """Node POST with a malformed id is rejected with 400."""
        node_id = '080000000003'
        resp = self.app.post(
            reverse('NodeCollectionHandler'),
            jsonutils.dumps({'id': node_id,
                             'mac': self.env.generate_random_mac(),
                             'status': 'discover'}),
            headers=self.default_headers,
            expect_errors=True)
        self.assertEqual(400, resp.status_code)
    def test_node_deletion(self):
        """DELETE on an existing node succeeds with 200."""
        node = self.env.create_node(api=False)
        resp = self.app.delete(
            reverse('NodeHandler', kwargs={'obj_id': node.id}),
            "",
            headers=self.default_headers,
            expect_errors=True
        )
        self.assertEqual(resp.status_code, 200)
    def test_node_valid_metadata_gets_updated(self):
        """PUT with a valid meta dict replaces the node's metadata in DB."""
        new_metadata = self.env.default_metadata()
        node = self.env.create_node(api=False)
        resp = self.app.put(
            reverse('NodeHandler', kwargs={'obj_id': node.id}),
            jsonutils.dumps({'meta': new_metadata}),
            headers=self.default_headers)
        self.assertEqual(resp.status_code, 200)
        self.db.refresh(node)
        nodes = self.db.query(Node).filter(
            Node.id == node.id
        ).all()
        self.assertEqual(len(nodes), 1)
        self.assertEqual(nodes[0].meta, new_metadata)
    def test_node_hostname_gets_updated(self):
        """Hostname can be set via PUT; re-setting the same value is OK."""
        node = self.env.create_node(api=False)
        resp = self.app.put(
            reverse('NodeHandler', kwargs={'obj_id': node.id}),
            jsonutils.dumps({'hostname': 'new-name'}),
            headers=self.default_headers)
        self.assertEqual(200, resp.status_code)
        self.db.refresh(node)
        # lets put the same hostname again
        resp = self.app.put(
            reverse('NodeHandler', kwargs={'obj_id': node.id}),
            jsonutils.dumps({'hostname': 'new-name'}),
            headers=self.default_headers)
        self.assertEqual(200, resp.status_code)
        self.db.refresh(node)
        nodes = self.db.query(Node).filter(
            Node.id == node.id
        ).all()
        self.assertEqual(len(nodes), 1)
        self.assertEqual(nodes[0].hostname, 'new-name')
    def test_node_hostname_gets_updated_invalid(self):
        """Hostname with forbidden characters is rejected with 400."""
        node = self.env.create_node(api=False)
        resp = self.app.put(
            reverse('NodeHandler', kwargs={'obj_id': node.id}),
            jsonutils.dumps({'hostname': '!#invalid_%&name'}),
            headers=self.default_headers,
            expect_errors=True)
        self.assertEqual(400, resp.status_code)
def test_node_hostname_gets_updated_ssl_conflict(self):
cluster = self.env.create_cluster(api=False)
node = self.env.create_node(cluster_id=cluster.id)
cluster_attrs = objects.Cluster.get_attributes(cluster).editable
test_hostname = 'test-hostname'
cluster_attrs['public_ssl']['hostname']['value'] = test_hostname
objects.Cluster.update_attributes(
cluster, {'editable': cluster_attrs})
resp = self.app.put(
reverse('NodeHandler', kwargs={'obj_id': node.id}),
jsonutils.dumps({'hostname': test_hostname}),
headers=self.default_headers,
expect_errors=True)
self.assertEqual(400, resp.status_code)
self.assertEqual(
"New hostname '{0}' conflicts with public TLS endpoint"
.format(test_hostname), resp.json_body['message'])
def test_node_hostname_gets_updated_after_provisioning_starts(self):
node = self.env.create_node(api=False,
status=consts.NODE_STATUSES.provisioning)
resp = self.app.put(
reverse('NodeHandler', kwargs={'obj_id': node.id}),
jsonutils.dumps({'hostname': 'new-name'}),
headers=self.default_headers,
expect_errors=True)
self.assertEqual(403, resp.status_code)
self.assertEqual(
'Node hostname may be changed only before provisioning.',
resp.json_body['message'])
def test_node_hostname_gets_updated_duplicate(self):
node = self.env.create_node(api=False)
resp = self.app.put(
reverse('NodeHandler', kwargs={'obj_id': node.id}),
jsonutils.dumps({'hostname': 'new-name'}),
headers=self.default_headers)
self.assertEqual(200, resp.status_code)
self.db.refresh(node)
node_2 = self.env.create_node(api=False)
resp = self.app.put(
reverse('NodeHandler', kwargs={'obj_id': node_2.id}),
jsonutils.dumps({'hostname': 'new-name'}),
headers=self.default_headers,
expect_errors=True)
self.assertEqual(409, resp.status_code)
def test_node_valid_status_gets_updated(self):
node = self.env.create_node(api=False)
params = {'status': 'error'}
resp = self.app.put(
reverse('NodeHandler', kwargs={'obj_id': node.id}),
jsonutils.dumps(params),
headers=self.default_headers)
self.assertEqual(resp.status_code, 200)
def test_node_action_flags_are_set(self):
flags = ['pending_addition', 'pending_deletion']
node = self.env.create_node(api=False)
for flag in flags:
resp = self.app.put(
reverse('NodeHandler', kwargs={'obj_id': node.id}),
jsonutils.dumps({flag: True}),
headers=self.default_headers
)
self.assertEqual(resp.status_code, 200)
self.db.refresh(node)
node_from_db = self.db.query(Node).filter(
Node.id == node.id
).first()
for flag in flags:
self.assertEqual(getattr(node_from_db, flag), True)
def test_put_returns_400_if_no_body(self):
node = self.env.create_node(api=False)
resp = self.app.put(
reverse('NodeHandler', kwargs={'obj_id': node.id}),
"",
headers=self.default_headers,
expect_errors=True)
self.assertEqual(resp.status_code, 400)
def test_put_returns_400_if_wrong_status(self):
node = self.env.create_node(api=False)
pa |
pierg75/pier-sosreport | sos/plugins/openvswitch.py | Python | gpl-2.0 | 6,609 | 0 | # Copyright (C) 2014 Adam Stokes <adam.stokes@ubuntu.com>
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
from sos.plugins import Plugin, RedHatPlugin, DebianPlugin, UbuntuPlugin
class OpenVSwitch(Plugin):
    """ OpenVSwitch networking
    """
    plugin_name = "openvswitch"
    profiles = ('network', 'virt')

    def setup(self):
        """Collect OVS logs, runtime state, and per-bridge/per-port output."""
        all_logs = self.get_option("all_logs")
        limit = self.get_option("log_size")

        # Individual log files by default; the whole directory when the
        # user asked for all logs.
        if not all_logs:
            self.add_copy_spec("/var/log/openvswitch/*.log",
                               sizelimit=limit)
        else:
            self.add_copy_spec("/var/log/openvswitch/",
                               sizelimit=limit)

        self.add_copy_spec([
            "/var/run/openvswitch/ovsdb-server.pid",
            "/var/run/openvswitch/ovs-vswitchd.pid"
        ])

        self.add_cmd_output([
            # The '-s' option enables dumping of packet counters on the
            # ports.
            "ovs-dpctl -s show",
            # Capture the in-kernel flow information if it exists
            "ovs-dpctl dump-flows -m",
            # The '-t 5' adds an upper bound on how long to wait to connect
            # to the Open vSwitch server, avoiding hangs when running sos.
            "ovs-vsctl -t 5 show",
            # Gather the database.
            "ovsdb-client -f list dump",
            # List the contents of runtime directory
            "ls -laZ /var/run/openvswitch",
            # List devices and their drivers
            "dpdk_nic_bind --status",
            "dpdk_devbind.py --status",
            "driverctl list-devices",
            "driverctl list-overrides",
            # Capture a list of all bond devices
            "ovs-appctl bond/list",
            # Capture more details from bond devices
            "ovs-appctl bond/show",
            # Capture LACP details
            "ovs-appctl lacp/show",
            # Capture coverage stats"
            "ovs-appctl coverage/show",
            # Capture cached routes
            "ovs-appctl ovs/route/show",
            # Capture tnl arp table"
            "ovs-appctl tnl/arp/show",
            # Capture a list of listening ports"
            "ovs-appctl tnl/ports/show",
            # Capture upcall information
            "ovs-appctl upcall/show",
            # Capture DPDK and other parameters
            "ovs-vsctl -t 5 get Open_vSwitch . other_config",
            # Capture OVS list
            "ovs-vsctl list Open_vSwitch",
            # Capture DPDK datapath packet counters and config
            "ovs-appctl dpctl/show -s",
            # Capture DPDK queue to pmd mapping
            "ovs-appctl dpif-netdev/pmd-rxq-show",
            # Capture DPDK pmd stats
            "ovs-appctl dpif-netdev/pmd-stats-show"
        ])

        # Gather systemd services logs
        self.add_journal(units="openvswitch")
        self.add_journal(units="openvswitch-nonetwork")
        self.add_journal(units="ovs-vswitchd")
        self.add_journal(units="ovsdb-server")

        # Gather additional output for each OVS bridge on the host.
        br_list_result = self.call_ext_prog("ovs-vsctl list-br")
        if br_list_result['status'] == 0:
            for br in br_list_result['output'].splitlines():
                self.add_cmd_output([
                    "ovs-appctl fdb/show %s" % br,
                    "ovs-ofctl dump-flows %s" % br,
                    "ovs-ofctl dump-ports-desc %s" % br,
                    "ovs-ofctl dump-ports %s" % br,
                    "ovs-ofctl queue-get-config %s" % br,
                    "ovs-ofctl queue-stats %s" % br,
                    "ovs-ofctl show %s" % br
                ])

                # Flow protocols currently supported
                flow_versions = [
                    "OpenFlow10",
                    "OpenFlow11",
                    "OpenFlow12",
                    "OpenFlow13"
                ]

                # List protocols currently in use, if any.  Reset per
                # bridge: previously the value leaked between iterations
                # and was undefined when no "protocols" line matched.
                br_protos = []
                ovs_list_bridge_cmd = "ovs-vsctl list bridge %s" % br
                br_info_file = self.get_cmd_output_now(ovs_list_bridge_cmd)
                if br_info_file:
                    # Use a context manager so the capture file is closed
                    # instead of leaking a file handle per bridge.
                    with open(br_info_file) as br_info_fp:
                        br_info = br_info_fp.read()
                    for line in br_info.splitlines():
                        if "protocols" in line:
                            br_protos_ln = line[line.find("[")+1:line.find("]")]
                            br_protos = br_protos_ln.replace('"', '').split(", ")

                # Collect flow information for relevant protocol versions only
                for flow in flow_versions:
                    if flow in br_protos:
                        self.add_cmd_output([
                            "ovs-ofctl -O %s show %s" % (flow, br),
                            "ovs-ofctl -O %s dump-groups %s" % (flow, br),
                            "ovs-ofctl -O %s dump-group-stats %s" % (flow, br),
                            "ovs-ofctl -O %s dump-flows %s" % (flow, br),
                            "ovs-ofctl -O %s dump-ports-desc %s" % (flow, br)
                        ])

        # Gather info on the DPDK mempools associated with each DPDK port
        br_list_result = self.call_ext_prog("ovs-vsctl -t 5 list-br")
        if br_list_result['status'] == 0:
            for br in br_list_result['output'].splitlines():
                port_list_result = self.call_ext_prog("ovs-vsctl -t 5 "
                                                      "list-ports %s" % br)
                if port_list_result['status'] == 0:
                    for port in port_list_result['output'].splitlines():
                        self.add_cmd_output(
                            "ovs-appctl netdev-dpdk/get-mempool-info %s" % port
                        )
class RedHatOpenVSwitch(OpenVSwitch, RedHatPlugin):
    """Red Hat variant; triggered by either the kernel or DPDK package."""
    packages = ('openvswitch', 'openvswitch-dpdk')
class DebianOpenVSwitch(OpenVSwitch, DebianPlugin, UbuntuPlugin):
    """Debian/Ubuntu variant; triggered by the openvswitch-switch package."""
    packages = ('openvswitch-switch',)
# vim: set et ts=4 sw=4 :
|
valmynd/pyrel | setup.py | Python | agpl-3.0 | 311 | 0.016077 | from distutils.core import setup
from distutils.extension import Extension
from Cython.Distutils import build_ext
setup(
    cmdclass = {'build_ext': build_ext},
    # NOTE(review): Cython's build_ext conventionally takes a .pyx source;
    # the commented-out line below used "database.pyx".  Passing
    # "database.py" only works for pure-Python-mode compilation — verify
    # this is intentional before building.
    #ext_modules = [Extension("database", ["database.pyx", "cescape.c"])]
    ext_modules = [Extension("database", ["database.py", "cescape.c"])]
)
|
docusign/docusign-python-client | docusign_esign/models/payment_gateway_accounts_info.py | Python | mit | 3,706 | 0.00027 | # coding: utf-8
"""
DocuSign REST API
The DocuSign REST API provides you with a powerful, convenient, and simple Web services API for interacting with DocuSign. # noqa: E501
OpenAPI spec version: v2.1
Contact: devcenter@docusign.com
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
class PaymentGatewayAccountsInfo(object):
    """NOTE: This class is auto generated by the swagger code generator program.
    Do not edit the class manually.
    """
    """
    Attributes:
      swagger_types (dict): The key is attribute name
                            and the value is attribute type.
      attribute_map (dict): The key is attribute name
                            and the value is json key in definition.
    """
    swagger_types = {
        'payment_gateway_accounts': 'list[PaymentGatewayAccount]'
    }
    attribute_map = {
        'payment_gateway_accounts': 'paymentGatewayAccounts'
    }
    def __init__(self, payment_gateway_accounts=None):  # noqa: E501
        """PaymentGatewayAccountsInfo - a model defined in Swagger"""  # noqa: E501
        self._payment_gateway_accounts = None
        # No polymorphic subtypes for this model, so no discriminator field.
        self.discriminator = None
        if payment_gateway_accounts is not None:
            self.payment_gateway_accounts = payment_gateway_accounts
    @property
    def payment_gateway_accounts(self):
        """Gets the payment_gateway_accounts of this PaymentGatewayAccountsInfo.  # noqa: E501
          # noqa: E501
        :return: The payment_gateway_accounts of this PaymentGatewayAccountsInfo.  # noqa: E501
        :rtype: list[PaymentGatewayAccount]
        """
        return self._payment_gateway_accounts
    @payment_gateway_accounts.setter
    def payment_gateway_accounts(self, payment_gateway_accounts):
        """Sets the payment_gateway_accounts of this PaymentGatewayAccountsInfo.
          # noqa: E501
        :param payment_gateway_accounts: The payment_gateway_accounts of this PaymentGatewayAccountsInfo.  # noqa: E501
        :type: list[PaymentGatewayAccount]
        """
        self._payment_gateway_accounts = payment_gateway_accounts
    def to_dict(self):
        """Returns the model properties as a dict"""
        result = {}
        # Recursively convert nested models, lists and dicts of models.
        for attr, _ in six.iteritems(self.swagger_types):
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = list(map(
                    lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                    value
                ))
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = dict(map(
                    lambda item: (item[0], item[1].to_dict())
                    if hasattr(item[1], "to_dict") else item,
                    value.items()
                ))
            else:
                result[attr] = value
        # Generator boilerplate: only taken if the model subclasses dict
        # (this model does not, so the branch is effectively dead code).
        if issubclass(PaymentGatewayAccountsInfo, dict):
            for key, value in self.items():
                result[key] = value
        return result
    def to_str(self):
        """Returns the string representation of the model"""
        return pprint.pformat(self.to_dict())
    def __repr__(self):
        """For `print` and `pprint`"""
        return self.to_str()
    def __eq__(self, other):
        """Returns true if both objects are equal"""
        if not isinstance(other, PaymentGatewayAccountsInfo):
            return False
        return self.__dict__ == other.__dict__
    def __ne__(self, other):
        """Returns true if both objects are not equal"""
        return not self == other
|
163gal/Time-Line | libs_arm/dependencies/pysvg-0.2.1/pysvg/gradient.py | Python | gpl-3.0 | 5,902 | 0.009658 | #!/usr/bin/python
# -*- coding: iso-8859-1 -*-
'''
(C) 2008, 2009 Kerim Mansour
For licensing information please refer to license.txt
'''
from attributes import *
from core import BaseElement, PointAttrib, DimensionAttrib
class linearGradient(BaseElement, CoreAttrib, XLinkAttrib, PaintAttrib, StyleAttrib, ExternalAttrib):
    """
    Class representing the linearGradient element of an svg doc.
    """
    def __init__(self, x1=None, y1=None, x2=None, y2=None, **kwargs):
        # (x1, y1)-(x2, y2) define the gradient vector; values may be
        # coordinates or percentages, per the SVG spec.
        BaseElement.__init__(self, 'linearGradient')
        self.set_x1(x1)
        self.set_y1(y1)
        self.set_x2(x2)
        self.set_y2(y2)
        self.setKWARGS(**kwargs)
    def set_x1(self, x1):
        self._attributes['x1'] = x1
    def get_x1(self):
        # .get() so an unset attribute yields None rather than KeyError.
        return self._attributes.get('x1')
    def set_y1(self, y1):
        self._attributes['y1'] = y1
    def get_y1(self):
        return self._attributes.get('y1')
    def set_x2(self, x2):
        self._attributes['x2'] = x2
    def get_x2(self):
        return self._attributes.get('x2')
    def set_y2(self, y2):
        self._attributes['y2'] = y2
    def get_y2(self):
        return self._attributes.get('y2')
    def set_gradientUnits(self, gradientUnits):
        # 'userSpaceOnUse' or 'objectBoundingBox' per the SVG spec.
        self._attributes['gradientUnits'] = gradientUnits
    def get_gradientUnits(self):
        return self._attributes.get('gradientUnits')
    def set_gradientTransform(self, gradientTransform):
        self._attributes['gradientTransform'] = gradientTransform
    def get_gradientTransform(self):
        return self._attributes.get('gradientTransform')
    def set_spreadMethod(self, spreadMethod):
        # 'pad', 'reflect' or 'repeat' per the SVG spec.
        self._attributes['spreadMethod'] = spreadMethod
    def get_spreadMethod(self):
        return self._attributes.get('spreadMethod')
class radialGradient(BaseElement, CoreAttrib, XLinkAttrib, PaintAttrib, StyleAttrib, ExternalAttrib):
    """
    Class representing the radialGradient element of an svg doc.
    """
    def __init__(self, cx='50%', cy='50%', r='50%', fx='50%', fy='50%', **kwargs):
        # (cx, cy, r) define the outer circle; (fx, fy) the focal point.
        # Defaults ('50%') match the SVG specification.
        BaseElement.__init__(self, 'radialGradient')
        self.set_cx(cx)
        self.set_cy(cy)
        self.set_fx(fx)
        self.set_fy(fy)
        self.set_r(r)
        self.setKWARGS(**kwargs)
    def set_cx(self, cx):
        self._attributes['cx'] = cx
    def get_cx(self):
        # .get() so an unset attribute yields None rather than KeyError.
        return self._attributes.get('cx')
    def set_cy(self, cy):
        self._attributes['cy'] = cy
    def get_cy(self):
        return self._attributes.get('cy')
    def set_r(self, r):
        self._attributes['r'] = r
    def get_r(self):
        return self._attributes.get('r')
    def set_fx(self, fx):
        self._attributes['fx'] = fx
    def get_fx(self):
        return self._attributes.get('fx')
    def set_fy(self, fy):
        self._attributes['fy'] = fy
    def get_fy(self):
        return self._attributes.get('fy')
    def set_gradientUnits(self, gradientUnits):
        self._attributes['gradientUnits'] = gradientUnits
    def get_gradientUnits(self):
        return self._attributes.get('gradientUnits')
    def set_gradientTransform(self, gradientTransform):
        self._attributes['gradientTransform'] = gradientTransform
    def get_gradientTransform(self):
        return self._attributes.get('gradientTransform')
    def set_spreadMethod(self, spreadMethod):
        self._attributes['spreadMethod'] = spreadMethod
    def get_spreadMethod(self):
        return self._attributes.get('spreadMethod')
class stop(BaseElement, CoreAttrib, StyleAttrib, PaintAttrib, GradientAttrib):
    """
    Class representing the stop element of an svg doc.
    """
    def __init__(self, offset=None, **kwargs):
        # offset locates this color stop along the gradient vector
        # (number in [0, 1] or a percentage).
        BaseElement.__init__(self, 'stop')
        self.set_offset(offset)
        self.setKWARGS(**kwargs)
    def set_offset(self, offset):
        self._attributes['offset'] = offset
    def get_offset(self):
        return self._attributes.get('offset')
class pattern(BaseElement, CoreAttrib, XLinkAttrib, ConditionalAttrib, ExternalAttrib, StyleAttrib, PresentationAttributes_All, PointAttrib, DimensionAttrib):
    """
    Class representing the pattern element of an svg doc.
    """
    def __init__(self, x=None, y=None, width=None, height=None, patternUnits=None, patternContentUnits=None, patternTransform=None, viewBox=None, preserveAspectRatio=None, **kwargs):
        # x/y/width/height setters come from the PointAttrib and
        # DimensionAttrib mixins.
        BaseElement.__init__(self, 'pattern')
        self.set_x(x)
        self.set_y(y)
        self.set_width(width)
        self.set_height(height)
        self.set_patternUnits(patternUnits)
        self.set_patternContentUnits(patternContentUnits)
        self.set_patternTransform(patternTransform)
        self.set_viewBox(viewBox)
        self.set_preserveAspectRatio(preserveAspectRatio)
        self.setKWARGS(**kwargs)
    def set_viewBox(self, viewBox):
        self._attributes['viewBox'] = viewBox
    def get_viewBox(self):
        # Use .get() for consistency with the other classes in this module:
        # an unset attribute returns None instead of raising KeyError.
        return self._attributes.get('viewBox')
    def set_preserveAspectRatio(self, preserveAspectRatio):
        self._attributes['preserveAspectRatio'] = preserveAspectRatio
    def get_preserveAspectRatio(self):
        return self._attributes.get('preserveAspectRatio')
    def set_patternUnits(self, patternUnits):
        # 'userSpaceOnUse' or 'objectBoundingBox' per the SVG spec.
        self._attributes['patternUnits'] = patternUnits
    def get_patternUnits(self):
        return self._attributes.get('patternUnits')
    def set_patternContentUnits(self, patternContentUnits):
        self._attributes['patternContentUnits'] = patternContentUnits
    def get_patternContentUnits(self):
        return self._attributes.get('patternContentUnits')
    def set_patternTransform(self, patternTransform):
        self._attributes['patternTransform'] = patternTransform
    def get_patternTransform(self):
        return self._attributes.get('patternTransform')
|
iulian787/spack | var/spack/repos/builtin/packages/py-tap-py/package.py | Python | lgpl-2.1 | 1,128 | 0.003546 | # Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class PyTapPy(PythonPackage):
    """Python TAP interface module for unit tests"""
    homepage = "https://github.com/python-tap/tappy"
    url = "https://pypi.io/packages/source/t/tap.py/tap.py-3.0.tar.gz"
    version('3.0', sha256='f5eeeeebfd64e53d32661752bb4c288589a3babbb96db3f391a4ec29f1359c70')
    version('2.6.2', sha256='5f219d92dbad5e378f8f7549cdfe655b0d5fd2a778f9c83bee51b61c6ca40efb')
    version('1.6', sha256='3ee315567cd1cf444501c405b7f7146ffdb2e630bac58d0840d378a3b9a0dbe4')
    extends('python', ignore='bin/nosetests|bin/pygmentize')
    # Supported interpreter ranges track upstream: 3.0 is Python-3-only,
    # while older releases still support Python 2.
    depends_on('python@3.5:3.7', when='@3.0:')
    depends_on('python@2.7:2.8,3.5:3.7', when='@2.6')
    depends_on('python@2.6:2.8,3.2:3.4', when='@:1.8')
    # nose/pygments were runtime dependencies only of the 1.x series.
    depends_on('py-nose', type=('build', 'run'), when='@:1.99')
    depends_on('py-pygments', type=('build', 'run'), when='@:1.99')
    depends_on('py-setuptools', type=('build', 'run'))
|
Jasper-Koops/THESIS_LIFEBOAT | DJANGO_GUI/django_gui/gui/migrations/0002_auto_20170526_2213.py | Python | mit | 621 | 0.00161 | # -*- coding: utf-8 -*-
# Generated by Djan | go 1.11 on 2017-05-26 22:13
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    dependencies = [
        ('gui', '0001_initial'),
    ]
    operations = [
        # Drop the auto-generated surrogate key so that row_id can become
        # the primary key of SavedData below.
        migrations.RemoveField(
            model_name='saveddata',
            name='id',
        ),
        migrations.AlterField(
            model_name='saveddata',
            name='row_id',
            field=models.IntegerField(blank=True, default=0, primary_key=True, serialize=False),
            preserve_default=False,
        ),
    ]
|
odahoda/noisicaa | noisicaa/core/ipc_perftest.py | Python | gpl-2.0 | 1,459 | 0.001371 | #!/usr/bin/python3
# @begin:license
#
# Copyright (c) 2015-2019, Benjamin Niemann <pink@odahoda.de>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# @end:license
import logging
import random
from . import ipc_test_pb2
from . import ipc_test
logger = logging.getLogger(__name__)
class IPCPerfTest(ipc_test.IPCPerfTestBase):
    """Throughput tests for the IPC layer with small and large payloads."""

    async def test_small_messages(self):
        # One randomized fraction per request, many round-trips.
        req = ipc_test_pb2.TestRequest()
        req.t.add(numerator=random.randint(0, 4),
                  denominator=random.randint(1, 2))
        await self.run_test(req, 5000)

    async def test_large_messages(self):
        # 10k randomized fractions per request, few round-trips.
        req = ipc_test_pb2.TestRequest()
        count = 0
        while count < 10000:
            req.t.add(numerator=random.randint(0, 4),
                      denominator=random.randint(1, 2))
            count += 1
        await self.run_test(req, 100)
|
ZompaSenior/fask | fask/src/fask_dj/dashboard/urls.py | Python | agpl-3.0 | 1,461 | 0.002053 | """fask_dj URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.11/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url
from task.views import task_project
from . import views
urlpatterns = [
url(r'^$', views.dashboard, name='dashboard'),
url(r'^dashboard/$', views.dashboard, name='dashboard'),
url(r'^task_calendar/$', views.task_calendar, name='task_calendar'),
url(r'^task_calendar_details_request/(?P<s | elect_day>[0-9]+)/(?P<negative>[\w]+)/$', views.task_calendar_details_request, name='task_calendar_details_request'),
url(r'^task_project/(?P<task_id>[0-9]+)/(?P<project_id>[0-9]+)/(?P<redirect_name>[\w]+)/$', task_project, name='task_project'),
url(r'^import_export_page/$', views.import_export_page, name='import_export_page'),
url(r'^import/$', views.import_fask, name='import'),
url(r'^export/$', views.export_fask, name='expo | rt'),
]
|
saurabh6790/erpnext | erpnext/erpnext_integrations/doctype/shopify_settings/test_shopify_settings.py | Python | gpl-3.0 | 3,757 | 0.024754 | # -*- coding: utf-8 -*-
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# See license.txt
from __future__ import unicode_literals
import frappe
import unittest, os, json
from frappe.utils import cstr
from erpnext.erpnext_integrations.connectors.shopify_connection import create_order
from erpnext.erpnext_integrations.doctype.shopify_settings.sync_product import make_item
from erpnext.erpnext_integrations.doctype.shopify_settings.sync_customer import create_customer
from frappe.core.doctype.data_import.data_import import import_doc
class ShopifySettings(unittest.TestCase):
	"""Integration test: a Shopify order flows into SO/SINV/DN documents."""
	def setUp(self):
		frappe.set_user("Administrator")
		# use the fixture data
		import_doc(path=frappe.get_app_path("erpnext", "erpnext_integrations/doctype/shopify_settings/test_data/custom_field.json"),
			ignore_links=True, overwrite=True)
		# Reload so the freshly imported custom fields are visible.
		frappe.reload_doctype("Customer")
		frappe.reload_doctype("Sales Order")
		frappe.reload_doctype("Delivery Note")
		frappe.reload_doctype("Sales Invoice")
		self.setup_shopify()
	def setup_shopify(self):
		"""Configure the singleton Shopify Settings doc with test values."""
		shopify_settings = frappe.get_doc("Shopify Settings")
		shopify_settings.taxes = []
		shopify_settings.update({
			"app_type": "Private",
			"shopify_url": "test.myshopify.com",
			"api_key": "17702c7c4452b9c5d235240b6e7a39da",
			"password": "17702c7c4452b9c5d235240b6e7a39da",
			"shared_secret": "17702c7c4452b9c5d235240b6e7a39da",
			"price_list": "_Test Price List",
			"warehouse": "_Test Warehouse - _TC",
			"cash_bank_account": "Cash - _TC",
			"account": "Cash - _TC",
			"customer_group": "_Test Customer Group",
			"cost_center": "Main - _TC",
			"taxes": [
				{
					"shopify_tax": "International Shipping",
					"tax_account":"Legal Expenses - _TC"
				}
			],
			"enable_shopify": 0,
			"sales_order_series": "SO-",
			"sync_sales_invoice": 1,
			"sales_invoice_series": "SINV-",
			"sync_delivery_note": 1,
			"delivery_note_series": "DN-"
		}).save(ignore_permissions=True)
		self.shopify_settings = shopify_settings
	def test_order(self):
		"""Replays fixture JSON and checks SO, SINV and DN are created."""
		### Create Customer ###
		with open (os.path.join(os.path.dirname(__file__), "test_data", "shopify_customer.json")) as shopify_customer:
			shopify_customer = json.load(shopify_customer)
		create_customer(shopify_customer.get("customer"), self.shopify_settings)
		### Create Item ###
		with open (os.path.join(os.path.dirname(__file__), "test_data", "shopify_item.json")) as shopify_item:
			shopify_item = json.load(shopify_item)
		make_item("_Test Warehouse - _TC", shopify_item.get("product"))
		### Create Order ###
		with open (os.path.join(os.path.dirname(__file__), "test_data", "shopify_order.json")) as shopify_order:
			shopify_order = json.load(shopify_order)
		create_order(shopify_order.get("order"), self.shopify_settings, False, company="_Test Company")
		sales_order = frappe.get_doc("Sales Order", {"shopify_order_id": cstr(shopify_order.get("order").get("id"))})
		self.assertEqual(cstr(shopify_order.get("order").get("id")), sales_order.shopify_order_id)
		#check for customer
		shopify_order_customer_id = cstr(shopify_order.get("order").get("customer").get("id"))
		sales_order_customer_id = frappe.get_value("Customer", sales_order.customer, "shopify_customer_id")
		self.assertEqual(shopify_order_customer_id, sales_order_customer_id)
		#check sales invoice
		sales_invoice = frappe.get_doc("Sales Invoice", {"shopify_order_id": sales_order.shopify_order_id})
		self.assertEqual(sales_invoice.rounded_total, sales_order.rounded_total)
		#check delivery note
		delivery_note_count = frappe.db.sql("""select count(*) from `tabDelivery Note`
			where shopify_order_id = %s""", sales_order.shopify_order_id)[0][0]
		self.assertEqual(delivery_note_count, len(shopify_order.get("order").get("fulfillments")))
|
JamesNickerson/py-junos-eznc | tests/unit/utils/test_fs.py | Python | apache-2.0 | 13,790 | 0 | __author__ = "Nitin Kumar, Rick Sherman"
__credits__ = "Jeremy Schulman"
import unittest
from nose.plugins.attrib import attr
import os
from ncclient.manager import Manager, make_device_handler
from ncclient.transport import SSHSession
from jnpr.junos import Device
from jnpr.junos.utils.fs import FS
from mock import patch, MagicMock, call
@attr('unit')
class TestFS(unittest.TestCase):
    @patch('ncclient.manager.connect')
    def setUp(self, mock_connect):
        """Open a Device backed by the mocked NETCONF transport, wrap in FS."""
        mock_connect.side_effect = self._mock_manager
        self.dev = Device(host='1.1.1.1', user='rick', password='password123',
                          gather_facts=False)
        self.dev.open()
        self.fs = FS(self.dev)
    def test_cat_wrong_path_return_none(self):
        """cat() on a nonexistent path returns None rather than raising."""
        path = 'test/report'
        self.assertEqual(self.fs.cat(path), None)
    def test_cat(self):
        """cat() issues file-show for the path and returns its contents."""
        self.fs._dev.rpc.file_show = MagicMock(side_effect=self._mock_manager)
        path = 'test/cat.txt'
        self.assertTrue('testing cat functionality' in self.fs.cat(path))
        self.fs._dev.rpc.file_show.assert_called_with(filename='test/cat.txt')
    def test_cwd(self):
        """cwd() delegates to the set-cli-working-directory RPC."""
        self.fs._dev.rpc.set_cli_working_directory = MagicMock()
        folder = 'test/report'
        self.fs.cwd(folder)
        self.fs._dev.rpc.set_cli_working_directory.\
            assert_called_with(directory='test/report')
    @patch('jnpr.junos.Device.execute')
    def test_pwd(self, mock_execute):
        """pwd() returns the working directory parsed from the mocked RPC."""
        mock_execute.side_effect = MagicMock(side_effect=self._mock_manager)
        # First call's result is deliberately discarded; only the second
        # call's return value is asserted.
        self.fs.pwd()
        self.assertEqual(self.fs.pwd(), '/cf/var/home/rick')
    def test_checksum_return_none(self):
        """checksum() on a path with no RPC response returns None."""
        path = 'test/report'
        self.assertEqual(self.fs.checksum(path), None)
    def test_checksum_unknown_calc(self):
        """An unsupported checksum algorithm name raises ValueError."""
        path = 'test/report'
        self.assertRaises(ValueError, self.fs.checksum, path=path, calc='abc')
    def test_checksum_return_rsp(self):
        """checksum(..., 'sha256') calls the sha256 RPC with the path."""
        self.fs.dev.rpc.get_sha256_checksum_information = \
            MagicMock(side_effect=self._mock_manager)
        path = 'test/checksum'
        self.assertEqual(self.fs.checksum(path, 'sha256'), 'xxxx')
        self.fs.dev.rpc.get_sha256_checksum_information.\
            assert_called_with(path='test/checksum')
    def test_stat_calling___decode_file(self):
        """stat() decodes a plain-file listing into the expected dict."""
        path = 'test/stat/decode_file'
        self.fs.dev.rpc.file_list = \
            MagicMock(side_effect=self._mock_manager)
        self.assertEqual(self.fs.stat(path),
                         {'owner': 'pqr', 'path': '/var/abc.sh',
                          'permissions': 755,
                          'permissions_text': '-rwxr-xr-x', 'size': 2,
                          'ts_date': 'Mar 13 06:54',
                          'ts_epoc': '1394693680',
                          'type': 'file'})
    def test_stat_calling___decode_dir(self):
        """stat() decodes a directory listing into the expected dict."""
        path = 'test/stat/decode_dir'
        self.fs.dev.rpc.file_list = \
            MagicMock(side_effect=self._mock_manager)
        self.assertEqual(self.fs.stat(path),
                         {'path': '/var', 'type': 'dir', 'file_count': 1,
                          'size': 2})
    def test_stat_return_none(self):
        """stat() returns None when the RPC reply has no usable element."""
        path = 'test/abc'
        self.fs.dev.rpc.file_list = MagicMock()
        self.fs.dev.rpc.file_list.find.return_value = 'output'
        self.assertEqual(self.fs.stat(path), None)
    def test_ls_calling___decode_file(self):
        """ls() on a single file yields the same dict shape as stat()."""
        path = 'test/stat/decode_file'
        self.fs.dev.rpc.file_list = \
            MagicMock(side_effect=self._mock_manager)
        self.assertEqual(self.fs.ls(path),
                         {'owner': 'pqr', 'path': '/var/abc.sh',
                          'permissions': 755,
                          'permissions_text': '-rwxr-xr-x', 'size': 2,
                          'ts_date': 'Mar 13 06:54',
                          'ts_epoc': '1394693680',
                          'type': 'file'})
    def test_ls_calling___decode_dir(self):
        """ls() on a directory returns per-entry dicts under 'files'."""
        path = 'test/stat/decode_dir'
        self.fs.dev.rpc.file_list = \
            MagicMock(side_effect=self._mock_manager)
        self.assertEqual(self.fs.ls(path),
                         {'files':
                          {'abc': {'permissions_text': 'drwxr-xr-x',
                                   'ts_date': 'Feb 17 15:30',
                                   'ts_epoc': '1392651039',
                                   'owner': 'root', 'path': 'abc',
                                   'size': 2, 'type': 'dir',
                                   'permissions': 555}},
                          'path': '/var', 'type': 'dir',
                          'file_count': 1,
                          'size': 2})
    def test_ls_return_none(self):
        """ls() returns None when the RPC reply has no usable element."""
        path = 'test/abc'
        self.fs.dev.rpc.file_list = MagicMock()
        self.fs.dev.rpc.file_list.find.return_value = 'output'
        self.assertEqual(self.fs.ls(path), None)
    @patch('jnpr.junos.utils.fs.FS._decode_file')
    def test_ls_link_path_false(self, mock_decode_file):
        """With followlink=False, ls() inspects the entry's 'link' field."""
        mock_decode_file.get.return_value = False
        path = 'test/stat/decode_file'
        self.fs.dev.rpc.file_list = \
            MagicMock(side_effect=self._mock_manager)
        self.fs.ls(path, followlink=False)
        # NOTE(review): assert_has_calls normally takes a *list* of calls;
        # passing a single chained call object looks suspect — verify this
        # assertion actually exercises what it intends to.
        mock_decode_file.assert_has_calls(call().get('link'))
    def test_ls_brief_true(self):
        """With brief=True, 'files' collapses to a list of names."""
        path = 'test/stat/decode_dir'
        self.fs.dev.rpc.file_list = \
            MagicMock(side_effect=self._mock_manager)
        self.assertEqual(self.fs.ls(path, brief=True),
                         {'files': ['abc'], 'path': '/var',
                          'type': 'dir', 'file_count': 1, 'size': 2})
    def test_ls_calling___decode_dir_type_symbolic_link(self):
        """Symlink entries decode with type 'link' and a 'link' target."""
        path = 'test/stat/decode_symbolic_link'
        self.fs.dev.rpc.file_list = \
            MagicMock(side_effect=self._mock_manager)
        self.assertEqual(self.fs.ls(path),
                         {'files':
                          {'abc': {'permissions_text': 'drwxr-xr-x',
                                   'ts_date': 'Feb 17 15:30',
                                   'link': 'symlink test',
                                   'ts_epoc': '1392651039',
                                   'owner': 'root', 'path': 'abc',
                                   'size': 2, 'type': 'link',
                                   'permissions': 555}},
                          'path': '/var', 'type': 'dir', 'file_count': 1,
                          'size': 2})
    def test_rm_return_true(self):
        """rm() returns True when the file-delete RPC succeeds."""
        self.fs.dev.rpc.file_delete = MagicMock(return_value=True)
        path = 'test/abc'
        self.assertTrue(self.fs.rm(path))
        self.fs.dev.rpc.file_delete.assert_called_once_with(
            path='test/abc')
    def test_rm_return_false(self):
        """rm() returns False when the file-delete RPC fails."""
        path = 'test/abc'
        self.fs.dev.rpc.file_delete = MagicMock(return_value=False)
        self.assertFalse(self.fs.rm(path))
        self.fs.dev.rpc.file_delete.assert_called_once_with(
            path='test/abc')
    def test_copy_return_true(self):
        """cp() returns True when the file-copy RPC completes."""
        self.fs.dev.rpc.file_copy = MagicMock()
        initial = 'test/abc'
        final = 'test/xyz'
        self.assertTrue(self.fs.cp(initial, final))
        self.fs.dev.rpc.file_copy.assert_called_once_with(
            source='test/abc',
            destination='test/xyz')
    def test_copy_return_false(self):
        """cp() returns False when the file-copy RPC raises."""
        initial = 'test/abc'
        final = 'test/xyz'
        self.fs.dev.rpc.file_copy = MagicMock(side_effect=Exception)
        self.assertFalse(self.fs.cp(initial, final))
        self.fs.dev.rpc.file_copy.assert_called_once_with(
            source='test/abc',
            destination='test/xyz')
    def test_move_return_true(self):
        """mv() returns True when the file-rename RPC succeeds."""
        self.fs.dev.rpc.file_rename = MagicMock(return_value=True)
        initial = 'test/abc'
        final = 'test/xyz'
        self.assertTrue(self.fs.mv(initial, final))
        self.fs.dev.rpc.file_rename.assert_called_once_with(
            source='test/abc',
            destination='test/xyz')
def test_move_return_false(self):
initial = 'test/abc'
final = 'test/xyz'
self.fs.dev.rpc.file_rename = MagicMock(return_value=False)
|
################################
# These variables are overwritten by Zenoss when the ZenPack is exported
# or saved.  Do not modify them directly here.
# NB: PACKAGES is deprecated
NAME = 'ZenPacks.community.TokyoTyrant'
VERSION = '1.0'
AUTHOR = 'B Maqueira'
LICENSE = "GPLv2"
NAMESPACE_PACKAGES = ['ZenPacks', 'ZenPacks.community']
PACKAGES = ['ZenPacks', 'ZenPacks.community', 'ZenPacks.community.TokyoTyrant']
INSTALL_REQUIRES = []
COMPAT_ZENOSS_VERS = '>=2.5.2'
PREV_ZENPACK_NAME = ''
# STOP_REPLACEMENTS
################################
# Zenoss will not overwrite any changes you make below here.

from setuptools import setup, find_packages

setup(
    # This ZenPack metadata should usually be edited with the Zenoss
    # ZenPack edit page.  Whenever the edit page is submitted it will
    # overwrite the values below (the ones it knows about) with new values.
    name = NAME,
    version = VERSION,
    author = AUTHOR,
    license = LICENSE,

    # This is the version spec which indicates what versions of Zenoss
    # this ZenPack is compatible with
    compatZenossVers = COMPAT_ZENOSS_VERS,

    # previousZenPackName is a facility for telling Zenoss that the name
    # of this ZenPack has changed.  If no ZenPack with the current name is
    # installed then a zenpack of this name if installed will be upgraded.
    prevZenPackName = PREV_ZENPACK_NAME,

    # Indicate to setuptools which namespace packages the zenpack
    # participates in
    namespace_packages = NAMESPACE_PACKAGES,

    # Tell setuptools what packages this zenpack provides.
    packages = find_packages(),

    # Tell setuptools to figure out for itself which files to include
    # in the binary egg when it is built.
    include_package_data = True,

    # The MANIFEST.in file is the recommended way of including additional files
    # in your ZenPack.  package_data is another.
    #package_data = {}

    # Indicate dependencies on other python modules or ZenPacks.  This line
    # is modified by zenoss when the ZenPack edit page is submitted.  Zenoss
    # tries to put add/delete the names it manages at the beginning of this
    # list, so any manual additions should be added to the end.  Things will
    # go poorly if this line is broken into multiple lines or modified to
    # dramatically.
    install_requires = INSTALL_REQUIRES,

    # Every ZenPack egg must define exactly one zenoss.zenpacks entry point
    # of this form.
    entry_points = {
        'zenoss.zenpacks': '%s = %s' % (NAME, NAME),
    },

    # All ZenPack eggs must be installed in unzipped form.
    zip_safe = False,
)
|
# Generated by Django 2.2.2 on 2019-06-07 06:46
from django.db import migrations, models


class Migration(migrations.Migration):
    # Adds the optional free-text ``description`` column to ArtProject.

    dependencies = [
        ("example", "0006_auto_20181228_0752"),
    ]

    operations = [
        migrations.AddField(
            model_name="artproject",
            name="description",
            field=models.CharField(max_length=100, null=True),
        ),
    ]
|
JasonGross/time-worked | pastWeekTime.py | Python | mit | 128 | 0 | #!/usr/bin/python
from TimeWorked import *
import os
files = [T | IME_WORKED]
print get_total_time_by_day_files(files)
raw_i | nput()
|
Azure/azure-sdk-for-python | sdk/cognitiveservices/azure-cognitiveservices-vision-customvision/azure/cognitiveservices/vision/customvision/prediction/models/_models.py | Python | mit | 10,932 | 0.000091 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
from msrest.exceptions import HttpOperationError
class BoundingBox(Model):
    """Bounding box that defines a region of an image.

    All required parameters must be populated in order to send to Azure.

    :param left: Required. Coordinate of the left boundary.
    :type left: float
    :param top: Required. Coordinate of the top boundary.
    :type top: float
    :param width: Required. Width.
    :type width: float
    :param height: Required. Height.
    :type height: float
    """

    # All four coordinates are mandatory in the wire format.
    _validation = {
        'left': {'required': True},
        'top': {'required': True},
        'width': {'required': True},
        'height': {'required': True},
    }

    _attribute_map = {
        'left': {'key': 'left', 'type': 'float'},
        'top': {'key': 'top', 'type': 'float'},
        'width': {'key': 'width', 'type': 'float'},
        'height': {'key': 'height', 'type': 'float'},
    }

    def __init__(self, **kwargs):
        super(BoundingBox, self).__init__(**kwargs)
        # Same contract as the generated code: absent keys become None.
        for name in ('left', 'top', 'width', 'height'):
            setattr(self, name, kwargs.get(name, None))
# NOTE: AutoRest-generated model -- hand edits will be lost on regeneration.
class CustomVisionError(Model):
    """CustomVisionError.

    All required parameters must be populated in order to send to Azure.

    :param code: Required. The error code. Possible values include: 'NoError',
     'BadRequest', 'BadRequestExceededBatchSize', 'BadRequestNotSupported',
     'BadRequestInvalidIds', 'BadRequestProjectName',
     'BadRequestProjectNameNotUnique', 'BadRequestProjectDescription',
     'BadRequestProjectUnknownDomain',
     'BadRequestProjectUnknownClassification',
     'BadRequestProjectUnsupportedDomainTypeChange',
     'BadRequestProjectUnsupportedExportPlatform',
     'BadRequestProjectImagePreprocessingSettings',
     'BadRequestProjectDuplicated', 'BadRequestIterationName',
     'BadRequestIterationNameNotUnique', 'BadRequestIterationDescription',
     'BadRequestIterationIsNotTrained', 'BadRequestIterationValidationFailed',
     'BadRequestWorkspaceCannotBeModified', 'BadRequestWorkspaceNotDeletable',
     'BadRequestTagName', 'BadRequestTagNameNotUnique',
     'BadRequestTagDescription', 'BadRequestTagType',
     'BadRequestMultipleNegativeTag', 'BadRequestMultipleGeneralProductTag',
     'BadRequestImageTags', 'BadRequestImageRegions',
     'BadRequestNegativeAndRegularTagOnSameImage',
     'BadRequestUnsupportedDomain', 'BadRequestRequiredParamIsNull',
     'BadRequestIterationIsPublished', 'BadRequestInvalidPublishName',
     'BadRequestInvalidPublishTarget', 'BadRequestUnpublishFailed',
     'BadRequestIterationNotPublished', 'BadRequestSubscriptionApi',
     'BadRequestExceedProjectLimit',
     'BadRequestExceedIterationPerProjectLimit',
     'BadRequestExceedTagPerProjectLimit', 'BadRequestExceedTagPerImageLimit',
     'BadRequestExceededQuota', 'BadRequestCannotMigrateProjectWithName',
     'BadRequestNotLimitedTrial', 'BadRequestImageBatch',
     'BadRequestImageStream', 'BadRequestImageUrl', 'BadRequestImageFormat',
     'BadRequestImageSizeBytes', 'BadRequestImageDimensions',
     'BadRequestImageExceededCount', 'BadRequestTrainingNotNeeded',
     'BadRequestTrainingNotNeededButTrainingPipelineUpdated',
     'BadRequestTrainingValidationFailed',
     'BadRequestClassificationTrainingValidationFailed',
     'BadRequestMultiClassClassificationTrainingValidationFailed',
     'BadRequestMultiLabelClassificationTrainingValidationFailed',
     'BadRequestDetectionTrainingValidationFailed',
     'BadRequestTrainingAlreadyInProgress',
     'BadRequestDetectionTrainingNotAllowNegativeTag',
     'BadRequestInvalidEmailAddress',
     'BadRequestDomainNotSupportedForAdvancedTraining',
     'BadRequestExportPlatformNotSupportedForAdvancedTraining',
     'BadRequestReservedBudgetInHoursNotEnoughForAdvancedTraining',
     'BadRequestExportValidationFailed', 'BadRequestExportAlreadyInProgress',
     'BadRequestPredictionIdsMissing', 'BadRequestPredictionIdsExceededCount',
     'BadRequestPredictionTagsExceededCount',
     'BadRequestPredictionResultsExceededCount',
     'BadRequestPredictionInvalidApplicationName',
     'BadRequestPredictionInvalidQueryParameters',
     'BadRequestInvalidImportToken', 'BadRequestExportWhileTraining',
     'BadRequestImageMetadataKey', 'BadRequestImageMetadataValue',
     'BadRequestOperationNotSupported', 'BadRequestInvalidArtifactUri',
     'BadRequestCustomerManagedKeyRevoked', 'BadRequestInvalid',
     'UnsupportedMediaType', 'Forbidden', 'ForbiddenUser',
     'ForbiddenUserResource', 'ForbiddenUserSignupDisabled',
     'ForbiddenUserSignupAllowanceExceeded', 'ForbiddenUserDoesNotExist',
     'ForbiddenUserDisabled', 'ForbiddenUserInsufficientCapability',
     'ForbiddenDRModeEnabled', 'ForbiddenInvalid', 'NotFound',
     'NotFoundProject', 'NotFoundProjectDefaultIteration', 'NotFoundIteration',
     'NotFoundIterationPerformance', 'NotFoundTag', 'NotFoundImage',
     'NotFoundDomain', 'NotFoundApimSubscription', 'NotFoundInvalid',
     'Conflict', 'ConflictInvalid', 'ErrorUnknown', 'ErrorIterationCopyFailed',
     'ErrorPreparePerformanceMigrationFailed', 'ErrorProjectInvalidWorkspace',
     'ErrorProjectInvalidPipelineConfiguration', 'ErrorProjectInvalidDomain',
     'ErrorProjectTrainingRequestFailed', 'ErrorProjectImportRequestFailed',
     'ErrorProjectExportRequestFailed', 'ErrorFeaturizationServiceUnavailable',
     'ErrorFeaturizationQueueTimeout', 'ErrorFeaturizationInvalidFeaturizer',
     'ErrorFeaturizationAugmentationUnavailable',
     'ErrorFeaturizationUnrecognizedJob',
     'ErrorFeaturizationAugmentationError', 'ErrorExporterInvalidPlatform',
     'ErrorExporterInvalidFeaturizer', 'ErrorExporterInvalidClassifier',
     'ErrorPredictionServiceUnavailable', 'ErrorPredictionModelNotFound',
     'ErrorPredictionModelNotCached', 'ErrorPrediction',
     'ErrorPredictionStorage', 'ErrorRegionProposal', 'ErrorUnknownBaseModel',
     'ErrorInvalid'
    :type code: str or
     ~azure.cognitiveservices.vision.customvision.prediction.models.CustomVisionErrorCodes
    :param message: Required. A message explaining the error reported by the
     service.
    :type message: str
    """

    # Both fields are mandatory in the service's error payload.
    _validation = {
        'code': {'required': True},
        'message': {'required': True},
    }

    _attribute_map = {
        'code': {'key': 'code', 'type': 'str'},
        'message': {'key': 'message', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(CustomVisionError, self).__init__(**kwargs)
        self.code = kwargs.get('code', None)
        self.message = kwargs.get('message', None)
class CustomVisionErrorException(HttpOperationError):
    """Raised when the server responds with a ``CustomVisionError`` body.

    :param deserialize: A deserializer
    :param response: Server response to be deserialized.
    """

    def __init__(self, deserialize, response, *args):
        super(CustomVisionErrorException, self).__init__(
            deserialize, response, 'CustomVisionError', *args)
class ImagePrediction(Model):
"""Result of an image prediction request.
Variables are only populated by the server, and will be ignored when
sending a request.
:ivar id: Prediction Id.
:vartype id: str
:ivar project: Project Id.
:vartype project: str
:ivar iteration: Iteration Id.
:vartype iteration: str
:ivar created: Date this prediction was created.
:vartype created: datetime
:ivar predictions: List of predictions.
:vartype predictions:
list[~azure.cognitiveservices.vision.customvision.prediction.models.Prediction]
"""
|
_validation = | {
'id' |
wesley1001/dokomoforms | tests/db_test.py | Python | gpl-3.0 | 47,704 | 0 | """
Tests for the dokomo database
"""
import unittest
import uuid
from sqlalchemy import cast, Text, Boolean
from datetime import timedelta
from passlib.hash import bcrypt_sha256
from dokomoforms.db import update_record, delete_record
from dokomoforms import db
from dokomoforms.db.answer import answer_insert, answer_table, get_answers, \
get_geo_json, \
get_answers_for_question
from dokomoforms.db.answer_choice import answer_choice_insert, \
get_answer_choices, \
get_answer_choices_for_choice_id
from dokomoforms.db.auth_user import auth_user_table, get_auth_user, \
create_auth_user, \
generate_api_token, set_api_token, verify_api_token, \
get_auth_user_by_email, \
UserDoesNotExistError
from dokomoforms.db.question import get_questions_no_credentials, \
question_select, \
question_table, \
get_free_sequence_number, question_insert, get_questions, \
QuestionDoesNotExistError, get_required
from dokomoforms.db.question_branch import get_branches, \
question_branch_insert, \
question_branch_table
from dokomoforms.db.question_choice import get_choices, \
question_choice_select, \
question_choice_insert, question_choice_table, \
QuestionChoiceDoesNotExistError
from dokomoforms.db.submission import submission_table, submission_insert, \
submission_select, get_submissions_by_email, get_number_of_submissions, \
SubmissionDoesNotExistError
from dokomoforms.db.survey import survey_table, survey_insert, survey_select, \
get_surveys_by_email, display, SurveyDoesNotExistError, \
get_survey_id_from_prefix, SurveyPrefixDoesNotIdentifyASurveyError, \
SurveyPrefixTooShortError, get_email_address, get_free_title, \
get_number_of_surveys
connection = db.engine.connect()
class TestAnswer(unittest.TestCase):
def tearDown(self):
    # Wipe all submissions created by the test so each test case starts
    # from a clean submission table.
    connection.execute(submission_table.delete())
def testAnswerInsert(self):
    """An integer answer can be inserted and yields a primary key."""
    survey_id = connection.execute(survey_table.select().where(
        survey_table.c.survey_title == 'test_title')).first().survey_id
    q_where = question_table.select().where(
        question_table.c.type_constraint_name == 'integer')
    question = connection.execute(q_where).first()
    question_id = question.question_id
    tcn = question.type_constraint_name
    seq = question.sequence_number
    mul = question.allow_multiple
    submission_exec = connection.execute(
        submission_insert(submitter='test_submitter',
                          submitter_email='anon@anon.org',
                          survey_id=survey_id))
    submission_id = submission_exec.inserted_primary_key[0]
    answer_exec = connection.execute(answer_insert(
        answer=1, question_id=question_id,
        answer_metadata={},
        submission_id=submission_id,
        survey_id=survey_id,
        type_constraint_name=tcn,
        is_type_exception=False,
        sequence_number=seq,
        allow_multiple=mul))
    answer_id = answer_exec.inserted_primary_key[0]
    self.assertIsNotNone(answer_id)
def testAnswerInsertNoMetadata(self):
    """Inserting an answer with ``answer_metadata=None`` still succeeds."""
    survey_row = connection.execute(survey_table.select().where(
        survey_table.c.survey_title == 'test_title')).first()
    question_row = connection.execute(question_table.select().where(
        question_table.c.type_constraint_name == 'integer')).first()
    submission_id = connection.execute(submission_insert(
        submitter='test_submitter',
        submitter_email='anon@anon.org',
        survey_id=survey_row.survey_id)).inserted_primary_key[0]
    inserted = connection.execute(answer_insert(
        answer=1,
        question_id=question_row.question_id,
        answer_metadata=None,
        submission_id=submission_id,
        survey_id=survey_row.survey_id,
        type_constraint_name=question_row.type_constraint_name,
        is_type_exception=False,
        sequence_number=question_row.sequence_number,
        allow_multiple=question_row.allow_multiple))
    self.assertIsNotNone(inserted.inserted_primary_key[0])
def testAnswerInsertOther(self):
    # A "type exception" answer stores free text ('one') against an
    # integer question, with the reason recorded in answer_metadata.
    survey_id = connection.execute(survey_table.select().where(
        survey_table.c.survey_title == 'test_title')).first().survey_id
    q_where = question_table.select().where(
        question_table.c.type_constraint_name == 'integer')
    question = connection.execute(q_where).first()
    question_id = question.question_id
    tcn = question.type_constraint_name
    seq = question.sequence_number
    mul = question.allow_multiple
    submission_exec = connection.execute(
        submission_insert(submitter='test_submitter',
                          submitter_email='anon@anon.org',
                          survey_id=survey_id))
    submission_id = submission_exec.inserted_primary_key[0]
    answer_exec = connection.execute(answer_insert(
        answer='one', question_id=question_id,
        answer_metadata={'type_exception': 'dont_know'},
        submission_id=submission_id,
        survey_id=survey_id,
        type_constraint_name=tcn,
        is_type_exception=True,
        sequence_number=seq,
        allow_multiple=mul))
    answer_id = answer_exec.inserted_primary_key[0]
    self.assertIsNotNone(answer_id)
def testInsertLocation(self):
    # Insert a location answer and check it round-trips through the
    # GeoJSON helper; a second, null location must come back as an empty
    # MultiPoint rather than raising.
    survey_id = connection.execute(survey_table.select().where(
        survey_table.c.survey_title == 'test_title')).first().survey_id
    q_where = question_table.select().where(
        question_table.c.type_constraint_name == 'location')
    question = connection.execute(q_where).first()
    question_id = question.question_id
    tcn = question.type_constraint_name
    seq = question.sequence_number
    mul = question.allow_multiple
    submission_exec = connection.execute(submission_insert(
        submitter='test_submitter', submitter_email='anon@anon.org',
        survey_id=survey_id))
    submission_id = submission_exec.inserted_primary_key[0]
    answer_exec = connection.execute(answer_insert(
        answer={'lon': 90, 'lat': 0},
        answer_metadata={},
        question_id=question_id,
        submission_id=submission_id,
        survey_id=survey_id,
        type_constraint_name=tcn,
        is_type_exception=False,
        sequence_number=seq,
        allow_multiple=mul))
    answer_id = answer_exec.inserted_primary_key[0]
    self.assertIsNotNone(answer_id)
    condition = answer_table.c.answer_id == answer_id
    answer = connection.execute(
        answer_table.select().where(condition)).first()
    # GeoJSON coordinates are ordered [lon, lat].
    location = get_geo_json(connection, answer)['coordinates']
    self.assertEqual(location, [90, 0])
    submission_2_exec = connection.execute(
        submission_insert(submitter='test_submitter',
                          submitter_email='anon@anon.org',
                          survey_id=survey_id))
    submission_2_id = submission_2_exec.inserted_primary_key[0]
    answer_2_exec = connection.execute(answer_insert(
        answer=None, question_id=question_id,
        answer_metadata={},
        submission_id=submission_2_id,
        survey_id=survey_id,
        type_constraint_name=tcn,
        is_type_exception=False,
        sequence_number=seq,
        allow_multiple=mul))
    answer_2_id = answer_2_exec.inserted_primary_key[0]
    condition_2 = answer_table.c.answer_id == answer_2_id
    answer_2 = connection.execute(
        answer_table.select().where(condition_2)).first()
    location_2 = get_geo_json(connection, answer_2)
    # Null location -> empty GeoJSON MultiPoint.
    self.assertEqual(location_2,
                     {'coordinates': [], 'type': 'MultiPoint'})
|
sanguinariojoe/aquagpusph | examples/3D/spheric_testcase2_dambreak_mpi/cMake/plot_h.py | Python | gpl-3.0 | 4,882 | 0.003892 | #******************************************************************************
# *
# * ** * * * * *
# * * * * * * * * * *
# ***** * * * * ***** ** *** * * ** *** *** *
# * * * * * * * * * * * * * * * * * * * *
# * * * * * * * * * * * * * * * * * * * *
# * * ** * ** * * *** *** *** ** *** * * *
# * * * *
# ** * * *
# *
#******************************************************************************
# *
# This file is part of AQUAgpusph, a free CFD program based on SPH. *
# Copyright (C) 2012 Jose Luis Cercos Pita <jl.cercos@upm.es> *
# *
# AQUAgpusph is free software: you can redistribute it and/or modify *
# it under the terms of the GNU General Public License as published by *
# the Free Software Foundation, either version 3 of the License, or *
# (at your option) any later version. *
# *
# AQUAgpusph is distributed in the hope that it will be useful, *
# but WITHOUT ANY WARRANTY; without even the implied warranty of | *
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
# GNU General Public License for more details. *
# | *
# You should have received a copy of the GNU General Public License *
# along with AQUAgpusph. If not, see <http://www.gnu.org/licenses/>. *
# *
#******************************************************************************
import os
from os import path
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.animation as animation
def readFile(filepath):
    """Read a whitespace-separated numeric data file and return its columns.

    The first line (header) and the last line (which may still be being
    written by the running simulation) are skipped, as are rows containing
    non-numeric fields.

    :param filepath: File to read; a relative path is resolved against the
        directory containing this script.
    :return: List of columns (the transposed numeric rows).
    """
    abspath = filepath
    if not path.isabs(filepath):
        abspath = path.join(path.dirname(path.abspath(__file__)), filepath)
    # Read the file by lines; `with` guarantees the handle is closed.
    with open(abspath, "r") as f:
        lines = f.readlines()
    data = []
    # Skip the header line and the last line, which may be unready.
    for raw in lines[1:-1]:
        fields = raw.split()
        try:
            # Convert eagerly so malformed rows are rejected here: the
            # original lazy map(float, ...) deferred the ValueError past
            # this try/except, defeating the row filtering on Python 3.
            data.append([float(x) for x in fields])
        except ValueError:
            continue
    # Transpose the data (rows -> columns).
    return [list(col) for col in zip(*data)]
lines = []
def update(frame_index):
    # Animation callback: re-read the sensor output file and refresh the
    # four H-sensor traces.  `frame_index` comes from FuncAnimation and
    # is unused here.
    plt.tight_layout()
    try:
        data = readFile('sensors_h_0.out')
        t = data[0]
        # The last four columns are the H1..H4 sensor signals.
        hh = (data[-4], data[-3], data[-2], data[-1])
    except IndexError:
        # File exists but holds no complete rows yet.
        return
    except FileNotFoundError:
        # The simulation has not produced the sensor file yet.
        return
    for i, h in enumerate(hh):
        lines[i].set_data(t, h)
# Build a 2x2 grid of subplots, one per sensor (H1..H4), sharing axes so
# they pan/zoom together.
fig = plt.figure()
ax11 = fig.add_subplot(221)
ax21 = fig.add_subplot(222, sharey=ax11)
ax12 = fig.add_subplot(223, sharex=ax11)
ax22 = fig.add_subplot(224, sharex=ax21, sharey=ax12)
axes = (ax11, ax21, ax12, ax22)

FNAME = path.join('@EXAMPLE_DEST_DIR@', 'test_case_2_exp_data.dat')
# NOTE: the columns of the experimental data file are not stored in
# H1..H4 order, so they are mapped explicitly here.
T,_,_,_,_,_,_,_,_,H3,H2,H1,H4, = readFile(FNAME)
exp_t = T
exp_h = (H1, H2, H3, H4)

titles = ('H1', 'H2', 'H3', 'H4')
for i, ax in enumerate(axes):
    # Static experimental reference curve (red).
    ax.plot(exp_t,
            exp_h[i],
            label=r'$H_{Exp}$',
            color="red",
            linewidth=1.0)
    # Live SPH curve (black); kept in `lines` so update() can refresh it.
    t = [0.0]
    h = [0.0]
    line, = ax.plot(t,
                    h,
                    label=r'$H_{SPH}$',
                    color="black",
                    linewidth=1.0)
    lines.append(line)
    # Set some options
    ax.grid()
    ax.legend(loc='best')
    ax.set_title(titles[i])
    ax.set_xlim(0, 6)
    ax.set_ylim(0.0, 0.6)
    ax.set_autoscale_on(False)
    # Only the outer edges of the grid get axis labels/tick labels.
    if i > 1:
        ax.set_xlabel(r"$t \, [\mathrm{s}]$")
    else:
        plt.setp(ax.get_xticklabels(), visible=False)
    if i in (0, 2):
        ax.set_ylabel(r"$H \, [\mathrm{m}]$")
    else:
        plt.setp(ax.get_yticklabels(), visible=False)

update(0)
ani = animation.FuncAnimation(fig, update, interval=5000)
plt.show()
|
# Day 19 of Advent of Code 2016, part 1.
# Appears to implement the O(n) Josephus-style elimination recurrence for
# 3012210 elves; the printed answer is 1-based.
i = 0
for o in range(1, 3012211):
    i = (i + 2) % o
print(i + 1)
|
klen/adrest | tests/rpc/api.py | Python | lgpl-3.0 | 1,511 | 0.000662 | from adrest.api import Api
from adrest.utils.auth import AnonimousAuthenticator
from adrest.utils.emitter import XMLEmitter, JSONTemplateEmitter
from adrest.views import ResourceView
from adrest.resources.rpc import RPCResource, JSONEmitter, JSONPEmitter
# Single Api instance (v1.0.0) that every resource below registers with.
API = Api('1.0.0', api_rpc=True, emitters=XMLEmitter)
class TestAuth(AnonimousAuthenticator):
    """Header-based authenticator used by the RPC tests.

    A request counts as authenticated whenever it carries an
    ``Authorization`` header; the header value doubles as the identifier.
    """

    def authenticate(self, request):
        header = request.META.get('HTTP_AUTHORIZATION')
        return header

    def configure(self, request):
        header = request.META.get('HTTP_AUTHORIZATION')
        self.resource.identifier = header
class TestResource(ResourceView):
    # Resource over the ``rpc.test`` model used to exercise the RPC API.

    class Meta:
        allowed_methods = 'GET', 'POST', 'PUT'
        model = 'rpc.test'

    def get(self, request, **resources):
        # A request carrying ?error=... fails on purpose so tests can
        # check how errors propagate through the RPC layer.
        assert not 'error' in request.GET, "Custom error"
        return True
class RootResource(ResourceView):
    # Parent resource for ChildResource below.

    class Meta:
        allowed_methods = 'GET', 'POST', 'PUT'
        model = 'rpc.root'
@API.register
class ChildResource(ResourceView):
    # Resource nested under RootResource (registered via the decorator).

    class Meta:
        allowed_methods = 'GET', 'POST', 'PUT'
        parent = RootResource
        model = 'rpc.child'
class CustomResource(ResourceView):
    # Bare resource over the ``rpc.custom`` model.

    class Meta:
        model = 'rpc.custom'
# Register the resources (and the JSON-RPC endpoint) with the API.
API.register(ChildResource)
# NOTE(review): ChildResource is also registered via the @API.register
# decorator above -- confirm the duplicate registration is intentional.
API.register(CustomResource, emitters=JSONTemplateEmitter)
API.register(RootResource, authenticators=TestAuth)
API.register(RPCResource, url_regex=r'^rpc2$', url_name='rpc2',
             scheme='tests.rpc.dummy', emitters=(JSONEmitter, JSONPEmitter))
API.register(TestResource)

# lint_ignore=C
|
tadashi-aikawa/gemini | jumeaux/addons/res2res/json.py | Python | mit | 2,536 | 0.001183 | # -*- coding:utf-8 -*-
import json
import sys
from importlib import import_module
from importlib.util import find_spec
from owlmixin import OwlMixin, TOption
from owlmixin.util import load_json
from jumeaux.addons.res2res import Res2ResExecutor
from jumeaux.logger import Logger
from jumeaux.models import Res2ResAddOnPayload, Response, Request
from jumeaux.utils import when_filter
# Module logger and the prefix used in all log messages from this add-on.
logger: Logger = Logger(__name__)
LOG_PREFIX = "[res2res/json]"
def wrap(anything: bytes, encoding: str) -> str:
    """Example implementation for ``Transformer.function``.

    Decodes the body, parses it as JSON, and nests the parsed value under
    a top-level ``"wrap"`` key.
    """
    decoded = load_json(anything.decode(encoding))
    return json.dumps({"wrap": decoded}, ensure_ascii=False)
class Transformer(OwlMixin):
    # Where to find the user-supplied transform: dotted module path plus
    # the function name inside that module (defaults to "transform").
    module: str
    function: str = "transform"
class Config(OwlMixin):
    # Add-on configuration: the transformer to apply, the encoding used
    # when the response does not declare one, and an optional `when`
    # filter expression restricting which exchanges get transformed.
    transformer: Transformer
    default_encoding: str = "utf8"
    when: TOption[str]
class Executor(Res2ResExecutor):
    """res2res add-on: pass the response body through a user-supplied
    transformer function and re-emit the result as a JSON response.

    The transformer is resolved once at construction time; configuration
    errors abort the process immediately (fail fast, not per-request).
    """

    def __init__(self, config: dict) -> None:
        self.config: Config = Config.from_dict(config or {})
        t: Transformer = self.config.transformer
        try:
            if not find_spec(t.module):
                raise ModuleNotFoundError
        except ModuleNotFoundError:
            logger.error(f"{LOG_PREFIX} Module {t.module} is not existed.")
            sys.exit(1)
        try:
            self.module = getattr(import_module(t.module), t.function)
        except AttributeError:
            logger.error(f"{LOG_PREFIX} {t.function} is not existed in {t.module} module")
            sys.exit(1)

    def exec(self, payload: Res2ResAddOnPayload) -> Res2ResAddOnPayload:
        req: Request = payload.req
        res: Response = payload.response

        # Skip transformation when a `when` filter is configured and this
        # request/response pair does not match it.
        if not self.config.when.map(lambda x: when_filter(x, {"req": req, "res": res})).get_or(
            True
        ):
            return payload

        json_str: str = self.module(res.body, res.encoding.get())
        new_encoding: str = res.encoding.get_or(self.config.default_encoding)

        return Res2ResAddOnPayload.from_dict(
            {
                "response": {
                    "body": json_str.encode(new_encoding, errors="replace"),
                    "type": "json",
                    "encoding": new_encoding,
                    "headers": res.headers,
                    "url": res.url,
                    "status_code": res.status_code,
                    "elapsed": res.elapsed,
                    "elapsed_sec": res.elapsed_sec,
                },
                "req": req,
                "tags": payload.tags,
            }
        )
|
jadnohra/connect | proto_2/ddq/fol/equality.py | Python | unlicense | 119 | 0 | from .predicate import Predicate
class Equality(Predicate):
    """The first-order-logic equality predicate: symbol '=' with arity 2."""

    def __init__(self):
        super().__init__('=', 2)
|
the-zebulan/CodeWars | tests/kyu_7_tests/test_sum_of_numbers_from_zero_to_n.py | Python | mit | 566 | 0 | import unittest
from katas.kyu_7.sum_of_numbers_from_zero_to_n import show_sequence
class ShowSequenceTestCase(unittest.TestCase):
    """Tests for show_sequence(): renders '0+1+...+n = sum', with the
    special forms '0=0' for n == 0 and 'n<0' for negative n."""

    def test_equal_1(self):
        self.assertEqual(show_sequence(6), '0+1+2+3+4+5+6 = 21')

    def test_equal_2(self):
        self.assertEqual(show_sequence(7), '0+1+2+3+4+5+6+7 = 28')

    def test_equal_3(self):
        self.assertEqual(show_sequence(0), '0=0')

    def test_equal_4(self):
        self.assertEqual(show_sequence(-1), '-1<0')

    def test_equal_5(self):
        self.assertEqual(show_sequence(-10), '-10<0')
|
micvision/neo-sdk | neopy/neopy/__main__.py | Python | mit | 3,657 | 0.011485 | #!/usr/bin/env python
# coding=utf-8
import itertools, threading
import sys, math
import neopy
def main():
    # USAGE: python -m neopy /dev/ttyACM0
    # Spins the scanner motor up, prints ten scans, then shuts it down.
    if len(sys.argv) < 2:
        sys.exit('python -m neopy /dev/ttyACM0')
    dev = sys.argv[1]
    # Open the device; the context manager releases it on exit.
    with neopy.neo(dev) as neo_device:
        # Set motor speed to 5 (Hz, per the commented read-back below).
        neo_device.set_motor_speed(5)
        # Get temp motor speed
        # speed = neo_device.get_motor_speed()
        # Print out speed
        # print('Motor Speed: {} Hz'.format(speed))
        # Start scanning and print the first ten scans.
        neo_device.start_scanning()
        for scan in itertools.islice(neo_device.get_scans(), 10):
            print(scan)
        # Stop scanning and park the motor.
        neo_device.stop_scanning()
        neo_device.set_motor_speed(0)
import wx
from matplotlib.figure import Figure
import matplotlib.font_manager as font_manager
import numpy as np
from matplotlib.backends.backend_wxagg import FigureCanvasWxAgg as FigureCanvas
TIMER_ID = wx.NewId()
# Shared scan buffers (one slot per degree); written by the updateData
# thread and drawn by AngleCircle.onTimer.
x = [None] * 360
y = [None] * 360
class AngleCircle(wx.Frame):
    """Fixed-size wxPython frame that renders the LiDAR point cloud.

    A matplotlib canvas is embedded in the frame; ``onTimer`` blits the
    shared ``x``/``y`` point buffers onto a cached background so only the
    scan points are redrawn each tick.
    """

    def __init__(self):
        wx.Frame.__init__(self, None,
                          style=wx.SYSTEM_MENU | wx.CAPTION | wx.CLOSE_BOX,
                          title='Neo LiDAR demo(python)', size=(800, 800))
        self.fig = Figure((8, 8), 100)
        self.canvas = FigureCanvas(self, wx.ID_ANY, self.fig)
        self.ax = self.fig.add_subplot(111)
        # Fixed +/-10 unit viewport centred on the scanner.
        self.ax.set_ylim([-10, 10])
        self.ax.set_xlim([-10, 10])
        self.ax.set_autoscale_on(False)
        self.ax.set_xticks(range(-10, 11, 2))
        self.ax.set_yticks(range(-10, 11, 2))
        self.ax.grid(True)
        self.draw_data, = self.ax.plot(x, y, '.', ms=3.0, mec='RED')
        self.canvas.draw()
        # Cache the empty axes so onTimer can restore them cheaply (blitting).
        self.bg = self.canvas.copy_from_bbox(self.ax.bbox)
        wx.EVT_TIMER(self, TIMER_ID, self.onTimer)
        # NOTE(review): relies on the module-level thread `th` existing
        # before this frame is constructed -- confirm initialisation order.
        th.start()

    def onTimer(self, evt):
        # Repaint: restore the cached background, redraw only the scan
        # points (updateData refreshes them via draw_data.set_data), blit.
        self.canvas.restore_region(self.bg)
        self.ax.draw_artist(self.draw_data)
        self.canvas.blit(self.ax.bbox)
class updateData(threading.Thread):
    # Background thread: continuously reads scans from the global
    # neo_device and converts each (angle, distance) sample to Cartesian
    # coordinates in the shared x/y buffers.

    def __init__(self):
        threading.Thread.__init__(self)

    def run(self):
        global x, y
        while True:
            scan = neo_device.get_scans()
            x = [None] * 360
            y = [None] * 360
            for i, s in enumerate(scan.next()[0]):
                if i >= 360: break
                # angle arrives in millidegrees; convert to radians.
                angle_rad = s.angle / 1000.0 * math.pi / 180.0
                # distance scaled by 100 to fit the +/-10 unit axes
                # (presumably cm -> m; confirm against the neopy docs).
                distance = s.distance / 100.0
                x[i] = distance * math.cos(angle_rad)
                y[i] = distance * math.sin(angle_rad)
                # print "angle: ", angle_rad, " , distance: ", distance
            # Push the new point cloud to the plot; onTimer blits it.
            frame.draw_data.set_data(x, y)
            #self.datax[i] = np.random.randint(-40, 40)
            #self.datay[i] = np.random.randint(-40, 40)
# Script body: open the scanner and start it spinning/scanning.
neo_device = neopy.neo('/dev/ttyACM0')
neo_device.set_motor_speed(5)
neo_device.start_scanning()

# Reader thread (started from inside AngleCircle.__init__).
th = updateData()

# wx application: frame plus a 50 ms repaint timer.
app = wx.App()
frame = AngleCircle()
t = wx.Timer(frame, TIMER_ID)
t.Start(50)
frame.Show()
app.MainLoop()

# Shut the device down once the GUI loop exits.
neo_device.stop_scanning()
neo_device.set_motor_speed(0)

# main()
#viewer()
|
dstrockis/outlook-autocategories | shared/auth.py | Python | apache-2.0 | 5,109 | 0.007046 | from models import User
from google.appengine.ext import ndb
from datetime import timedelta, datetime
import logging
import urllib
import urllib2
import socket
import os
import json
import config
from shared import config
import socket
# Development mode when the hostname looks like a local dev server
# (the `in` test already yields a bool; the `True if ... else False`
# wrapper was redundant).
isDevEnv = 'localhost' in socket.gethostname()
# Get AT from cache or RT
def get_access_token(userId):
    """Return a valid Outlook access token for *userId*.

    Uses the token cached in ndb when it is younger than ~an hour;
    otherwise redeems the stored refresh_token against the AAD /token
    endpoint and caches the new pair.  Raises when no refresh token is
    stored (the user must sign in again).
    """
    key = ndb.Key('User', userId)
    entry = key.get()
    # Tokens live 3600s; treat them as stale slightly early.
    delta = timedelta(seconds=3590)
    if (entry is not None
            and entry.acquired is not None
            and entry.access_token is not None
            and (datetime.utcnow() - entry.acquired) < delta):
        return entry.access_token
    if entry is not None and entry.refresh_token is not None:
        logging.debug('Sending POST to /token')
        url = 'https://login.microsoftonline.com/common/oauth2/token'
        values = {
            'refresh_token': entry.refresh_token,
            'grant_type': 'refresh_token',
            'redirect_uri': config.get('aad_local_redirect_uri') if isDevEnv else config.get('aad_prod_redirect_uri'),
            'client_id': config.get('aad_client_id'),
            'client_secret': config.get('aad_client_secret'),
            'resource': 'https://outlook.office365.com'
        }
        data = urllib.urlencode(values)
        req = urllib2.Request(url, data)
        req.add_header('Content-Type', 'application/x-www-form-urlencoded')
        # Send /token request
        try:
            response = urllib2.urlopen(req)
        except urllib2.HTTPError as ex:
            logging.error('Error requesting token via refresh_token')
            logging.error(ex.reason)
            logging.error(ex.read())
            raise ex
        # Parse token response
        auth_resp = json.loads(response.read())
        # Store the fresh tokens so subsequent calls hit the cache.
        logging.debug('Storing refresh_token in ndb')
        entry.access_token = auth_resp['access_token']
        entry.refresh_token = auth_resp['refresh_token']
        entry.acquired = datetime.utcnow()
        entry.put()
        return auth_resp['access_token']
    raise Exception('Unable to get an access_token, sign in required')
def redeem_auth_code_for_token(code):
    """Exchange an OAuth2 authorization code for tokens and persist them.

    Calls the AAD /token endpoint, looks up the user's mailbox address via
    the Outlook API, decodes the id_token to get a stable user id ('sub'),
    and creates or updates the ndb User entry.
    """
    # Construct /token request
    logging.debug('Sending POST to /token')
    url = 'https://login.microsoftonline.com/common/oauth2/token'
    values = {
        'code': code,
        'grant_type': 'authorization_code',
        'redirect_uri': config.get('aad_local_redirect_uri') if isDevEnv else config.get('aad_prod_redirect_uri'),
        'client_id': config.get('aad_client_id'),
        'client_secret': config.get('aad_client_secret'),
        'resource': 'https://outlook.office365.com'
    }
    data = urllib.urlencode(values)
    req = urllib2.Request(url, data)
    req.add_header('Content-Type', 'application/x-www-form-urlencoded')
    # Send /token request
    try:
        response = urllib2.urlopen(req)
    except urllib2.HTTPError as ex:
        logging.error('Error requesting token via auth code')
        logging.error(ex.reason)
        logging.error(ex.read())
        raise ex
    # Parse token response
    auth_resp = json.loads(response.read())
    logging.info(auth_resp)
    # Resolve the mailbox address with the freshly issued access token.
    try:
        url = 'https://outlook.office.com/api/v2.0/me'
        req = urllib2.Request(url)
        req.add_header('Authorization', 'Bearer {0}'.format(auth_resp['access_token']))
        response = urllib2.urlopen(req)
    except urllib2.HTTPError as ex:
        logging.error('Error getting mailbox address')
        logging.error(ex.reason)
        logging.error(ex.read())
        raise ex
    me_resp = json.loads(response.read())
    mailbox_address = me_resp['EmailAddress']
    # The id_token is a JWT; take its payload segment and restore base64
    # padding before decoding.
    id_token_body = auth_resp['id_token'].split('.')[1]
    i = 0
    # NOTE(review): `i` is never incremented -- the loop actually pads
    # until len(id_token_body) % 4 == 0.  It terminates, but confirm the
    # padding logic is intended (valid base64 never needs 3 '=').
    while i < (len(id_token_body)%4):
        id_token_body += '='
    profile = json.loads(id_token_body.decode('base64'))
    # Store tokens
    logging.debug('Storing refresh_token in ndb')
    key = ndb.Key('User', profile['sub'])
    entry = key.get()
    if entry is None:
        entry = User(
            id=profile['sub'],
            access_token=auth_resp['access_token'],
            refresh_token=auth_resp['refresh_token'],
            acquired=datetime.utcnow(),
            mailbox_address=mailbox_address,
            sort_threshold=0.20)
        entry.put()
    else:
        entry.access_token=auth_resp['access_token']
        entry.refresh_token=auth_resp['refresh_token']
        entry.acquired=datetime.utcnow()
        entry.mailbox_address=mailbox_address
        entry.put()
    return
def generate_auth_request():
    """Build the AAD authorize-endpoint URL for the OAuth2 code flow."""
    base = ("https://login.microsoftonline.com/common/oauth2/authorize?"
            "client_id=d797131e-b01d-4259-b35a-595672904e8b"
            "&response_mode=query"
            "&response_type=code"
            "&prompt=consent"
            "&redirect_uri=%(redirect_uri)s")
    if isDevEnv:
        redirect = config.get('aad_local_redirect_uri')
    else:
        redirect = config.get('aad_prod_redirect_uri')
    return base % {'redirect_uri': redirect}
bp-kelley/rdkit | rdkit/sping/PS/psmetrics.py | Python | bsd-3-clause | 17,605 | 0.009543 | # $Id$
# Christopher Lee clee@users.sourceforge.net
# based upon pdfmetrics.py by Andy Robinson
from . import fontinfo
from . import latin1MetricsCache
##############################################################
#
# PDF Metrics
# This is a preamble to give us a stringWidth function.
# loads and caches AFM files, but won't need to as the
# standard fonts are there already
##############################################################
_stdenc_widths = {
'courier':
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600,
600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600,
600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600,
600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600,
600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 0, 600, 600, 600,
600, 0, 600, 600, 600, 600, 600, 600, 600, 600, 0, 600, 0, 600, 600, 600, 600, 600, 600, 600,
600, 0, 600, 600, 0, 600, 600, 600, 600, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 600, 0,
600, 0, 0, 0, 0, 600, 600, 600, 600, 0, 0, 0, 0, 0, 600, 0, 0, 0, 600, 0, 0, 600, 600, 600, 600,
0, 0, 600],
'courier-bold':
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600,
600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600,
600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600,
600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600,
600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 0, 600, 600, 600,
600, 0, 600, 600, 600, 600, 600, 600, 600, 600, 0, 600, 0, 600, 600, 600, 600, 600, 600, 600,
600, 0, 600, 600, 0, 600, 600, 600, 600, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 600, 0,
600, 0, 0, 0, 0, 600, 600, 600, 600, 0, 0, 0, 0, 0, 600, 0, 0, 0, 600, 0, 0, 600, 600, 600, 600,
0, 0, 600],
'courier-boldoblique':
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600,
600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600,
600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600,
600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600,
600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 0, 600, 600, 600,
600, 0, 600, 600, 600, 600, 600, 600, 600, 600, 0, 600, 0, 600, 600, 600, 600, 600, 600, 600,
600, 0, 600, 600, 0, 600, 600, 600, 600, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 600, 0,
600, 0, 0, 0, 0, 600, 600, 600, 600, 0, 0, 0, 0, 0, 600, 0, 0, 0, 600, 0, 0, 600, 600, 600, 600,
0, 0, 600],
'courier-oblique':
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600,
600, 600, | 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600,
600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600,
600, 600, 600, 600, 600, 600, 600, 600, 600, 60 | 0, 600, 600, 600, 600, 600, 600, 600, 600, 600,
600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 0, 600, 600, 600,
600, 0, 600, 600, 600, 600, 600, 600, 600, 600, 0, 600, 0, 600, 600, 600, 600, 600, 600, 600,
600, 0, 600, 600, 0, 600, 600, 600, 600, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 600, 0,
600, 0, 0, 0, 0, 600, 600, 600, 600, 0, 0, 0, 0, 0, 600, 0, 0, 0, 600, 0, 0, 600, 600, 600, 600,
0, 0, 600],
'helvetica':
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
278, 278, 355, 556, 556, 889, 667, 222, 333, 333, 389, 584, 278, 333, 278, 278, 556, 556, 556,
556, 556, 556, 556, 556, 556, 556, 278, 278, 584, 584, 584, 556, 1015, 667, 667, 722, 722, 667,
611, 778, 722, 278, 500, 667, 556, 833, 722, 778, 667, 778, 722, 667, 611, 722, 667, 944, 667,
667, 611, 278, 278, 278, 469, 556, 222, 556, 556, 500, 556, 556, 278, 556, 556, 222, 222, 500,
222, 833, 556, 556, 556, 556, 333, 500, 278, 556, 500, 722, 500, 500, 500, 334, 260, 334, 584, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 333, 556, 556, 167, 556, 556, 556, 556, 191, 333, 556, 333, 333, 500, 500, 0, 556, 556, 556,
278, 0, 537, 350, 222, 333, 333, 556, 1000, 1000, 0, 611, 0, 333, 333, 333, 333, 333, 333, 333,
333, 0, 333, 333, 0, 333, 333, 333, 1000, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1000,
0, 370, 0, 0, 0, 0, 556, 778, 1000, 365, 0, 0, 0, 0, 0, 889, 0, 0, 0, 278, 0, 0, 222, 611, 944,
611, 0, 0, 834],
'helvetica-bold':
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
278, 333, 474, 556, 556, 889, 722, 278, 333, 333, 389, 584, 278, 333, 278, 278, 556, 556, 556,
556, 556, 556, 556, 556, 556, 556, 333, 333, 584, 584, 584, 611, 975, 722, 722, 722, 722, 667,
611, 778, 722, 278, 556, 722, 611, 833, 722, 778, 667, 778, 722, 667, 611, 722, 667, 944, 667,
667, 611, 333, 278, 333, 584, 556, 278, 556, 611, 556, 611, 556, 333, 611, 611, 278, 278, 556,
278, 889, 611, 611, 611, 611, 389, 556, 333, 611, 556, 778, 556, 556, 500, 389, 280, 389, 584, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 333, 556, 556, 167, 556, 556, 556, 556, 238, 500, 556, 333, 333, 611, 611, 0, 556, 556, 556,
278, 0, 556, 350, 278, 500, 500, 556, 1000, 1000, 0, 611, 0, 333, 333, 333, 333, 333, 333, 333,
333, 0, 333, 333, 0, 333, 333, 333, 1000, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1000,
0, 370, 0, 0, 0, 0, 611, 778, 1000, 365, 0, 0, 0, 0, 0, 889, 0, 0, 0, 278, 0, 0, 278, 611, 944,
611, 0, 0, 834],
'helvetica-boldoblique':
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
278, 333, 474, 556, 556, 889, 722, 278, 333, 333, 389, 584, 278, 333, 278, 278, 556, 556, 556,
556, 556, 556, 556, 556, 556, 556, 333, 333, 584, 584, 584, 611, 975, 722, 722, 722, 722, 667,
611, 778, 722, 278, 556, 722, 611, 833, 722, 778, 667, 778, 722, 667, 611, 722, 667, 944, 667,
667, 611, 333, 278, 333, 584, 556, 278, 556, 611, 556, 611, 556, 333, 611, 611, 278, 278, 556,
278, 889, 611, 611, 611, 611, 389, 556, 333, 611, 556, 778, 556, 556, 500, 389, 280, 389, 584, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 333, 556, 556, 167, 556, 556, 556, 556, 238, 500, 556, 333, 333, 611, 611, 0, 556, 556, 556,
278, 0, 556, 350, 278, 500, 500, 556, 1000, 1000, 0, 611, 0, 333, 333, 333, 333, 333, 333, 333,
333, 0, 333, 333, 0, 333, 333, 333, 1000, 0, 0, 0, 0, 0, 0, 0 |
thehq/python-branchio | tests/test_client.py | Python | mit | 5,185 | 0.002314 | import unittest
import branchio
import os
import mock
class ClientCheckParamTests(unittest.TestCase):
    """Unit tests for the branchio.Client._check_param() validation helper."""
    def setUp(self):
        # The branch key is irrelevant here: only _check_param is exercised.
        self.branch_client = branchio.Client(None)
        self.params = {}
    def test_optional(self):
        """A None value is silently skipped when optional, rejected otherwise."""
        self.branch_client._check_param("test", None, self.params)
        self.assertFalse("test" in self.params)
        with self.assertRaises(Exception) as context:
            self.branch_client._check_param("test", None, self.params, optional=False)
    def test_max_length(self):
        """Values longer than max_length are rejected."""
        self.branch_client._check_param("test", "test_length", self.params, max_length=50)
        self.assertTrue("test" in self.params)
        with self.assertRaises(Exception) as context:
            self.branch_client._check_param("test", "test_length", self.params, max_length=5)
    def test_type(self):
        """Values of the wrong type are rejected."""
        self.branch_client._check_param("test", "test_length", self.params, type=str)
        self.assertTrue("test" in self.params)
        with self.assertRaises(Exception) as context:
            self.branch_client._check_param("test", "test_length", self.params, type=list)
    def test_gte(self):
        """The gte lower bound applies to integer values only."""
        # Ignored unless it is an integer
        self.branch_client._check_param("test", "string", self.params, gte=5)
        self.assertTrue("test" in self.params)
        self.params = {}
        self.branch_client._check_param("test", 10, self.params, gte=5)
        self.assertTrue("test" in self.params)
        with self.assertRaises(Exception) as context:
            self.branch_client._check_param("test", 10, self.params, gte=15)
    def test_lte(self):
        """The lte upper bound applies to integer values only."""
        # Ignored unless it is an integer
        self.branch_client._check_param("test", "string", self.params, lte=5)
        self.assertTrue("test" in self.params)
        self.params = {}
        self.branch_client._check_param("test", 10, self.params, lte=15)
        self.assertTrue("test" in self.params)
        with self.assertRaises(Exception) as context:
            self.branch_client._check_param("test", 10, self.params, lte=5)
    def test_sub_type(self):
        """sub_type/sub_max_length validate each element of a list value."""
        self.branch_client._check_param("test", ["something", "something else"], self.params, sub_type=str, sub_max_length=15)
        self.assertTrue("test" in self.params)
        with self.assertRaises(Exception) as context:
            self.branch_client._check_param("test", ["something", "something else"], self.params, sub_type=str, sub_max_length=5)
    def test_sub_type_no_name_or_params(self):
        """Calling without the name/params positional args must not raise."""
        no_exception = True
        try:
            self.branch_client._check_param(["something", "something else"], type=list, sub_type=str)
        except Exception:
            no_exception = False
        self.assertTrue(no_exception)
class ClientCheckApiTests(unittest.TestCase):
    """Tests for Client deep-link creation; the HTTP layer is stubbed with mock."""
    def get_client(self, return_value):
        """Return a Client whose make_api_call is mocked to yield return_value."""
        branch_key = os.environ.get('BRANCH_IO_KEY')
        if branch_key is None:
            print("WARNING! Environment variable 'BRANCH_IO_KEY' is not defined."
                  " Branch API Tests will return stubbed responses.")
            branch_key = "FAKE KEY"
        branchio.Client.make_api_call = mock.MagicMock(return_value=return_value)
        return branchio.Client(branch_key)
    def test_create_deep_link_skip_api(self):
        """skip_api_call=True returns the request params instead of calling out."""
        client = self.get_client(None)
        params = client.create_deep_link_url(
            data={
                branchio.DATA_BRANCH_IOS_URL: "https://www.google.com",
                "custom": "payload"
            },
            channel="facebook",
            skip_api_call=True
        )
        self.assertTrue("data" in params)
        self.assertEqual(params["data"][branchio.DATA_BRANCH_IOS_URL], "https://www.google.com")
        self.assertEqual(params["data"]["custom"], "payload")
        self.assertEqual(params["channel"], "facebook")
    def test_create_deep_link(self):
        """A single deep-link response contains the returned URL key."""
        client = self.get_client({"url": "https://www.example.com/"})
        response = client.create_deep_link_url(
            data={
                branchio.DATA_BRANCH_IOS_URL: "https://www.google.com",
                "custom": "payload"
            },
            channel="facebook",
            tags=["signup"]
        )
        self.assertTrue(branchio.RETURN_URL in response)
    def test_create_deep_links(self):
        """Bulk creation returns one URL entry per submitted params dict."""
        # NOTE(review): the first URL literal below contains stray " | "
        # sequences -- looks like data corruption; presumably it should read
        # "https://www.example.com/". Left as-is pending confirmation.
        client = self.get_client([{"url": "https://w | ww.example | .com/"}, {"url": "https://www.example.com/"}])
        params1 = client.create_deep_link_url(
            data={
                branchio.DATA_BRANCH_IOS_URL: "https://www.google.com",
                "custom": "payload"
            },
            channel="facebook",
            tags=["signup"],
            skip_api_call=True
        )
        params2 = client.create_deep_link_url(
            data={
                branchio.DATA_BRANCH_IOS_URL: "https://www.google.com",
                "custom": "payload"
            },
            channel="facebook",
            tags=["signup"],
            skip_api_call=True
        )
        response = client.create_deep_linking_urls([params1, params2])
        self.assertTrue(branchio.RETURN_URL in response[0])
        self.assertTrue(branchio.RETURN_URL in response[1])
|
avagin/p.haul | p_haul_img.py | Python | lgpl-2.1 | 2,640 | 0.030682 | #
# images driver for migration (without FS transfer)
#
import os
import tempfile
import rpyc
import tarfile
import time
import shutil
import time
# Location for per-migration image working directories, the tarball name
# used when shipping images to the target, and the default copy chunk size.
img_path = "/var/local/p.haul-fs/"
img_tarfile = "images.tar"
xfer_size = 64 * 1024
def copy_file(s, d, chunk_size=None):
	"""Copy the full contents of file-like s into file-like d.

	Reads in fixed-size chunks to bound memory use.

	@param s: source object supporting read(n)
	@param d: destination object supporting write(data)
	@param chunk_size: bytes per read; defaults to module-level xfer_size
	"""
	if chunk_size is None:
		chunk_size = xfer_size
	while True:
		chunk = s.read(chunk_size)
		if not chunk:
			break
		d.write(chunk)
class phaul_images:
def __init__(self):
self.current_iter = 0
self.current_dir = None
prefix = time.strftime("%y.%m.%d-%H.%M-", time.localtime())
self.wdir = tempfile.mkdtemp("", prefix, img_path)
self.img_path = os.path.join(self.wdir, "img")
os.mkdir(self.img_path)
self.sync_time = 0.0
def close(self, keep_images):
if not keep_images:
print "Removing images"
shutil.rmtree(self.wdir)
else:
print "Images are kept in %s" % self.wdir
pass
def img_sync_time(self):
return self.sync_time
def new_image_dir(self):
self.current_iter += 1
img_dir = "%s/%d" % (self.img_path, self.current_iter)
print "\tMaking directory %s" % img_dir
self.current_dir = img_dir
os.mkdir(img_dir)
def image_dir_fd(self):
return os.open(self.current_dir, os.O_DIRECTORY)
def work_dir_fd(self):
return os.open(self.wdir, os.O_DIRECTORY)
def image_dir(self):
return self.current_dir
def work_dir(self):
return self.wdir
def prev_image_dir(self):
if self.current_iter == 1:
return None
else:
return "../%d" % (self.current_iter - 1)
# Images transfer
# Are there better ways for doing this?
def sync_imgs_to_target(self, th, htype):
# Pre-dump doesn't generate a | ny images (yet?)
# so copy only those from the top dir
print "Sending images to target"
start = time.time()
print "\tPack"
tf_name = os.path.join(self.current_dir, img_tarfile)
tf = tarfile.open(tf_name, "w")
for img in os.listdir(self.current_dir):
if img.endswith(".img"):
tf.add(os.path.join(self.current_dir, img), img)
print "\tAdd htype images"
for | himg in htype.get_meta_images(self.current_dir):
tf.add(himg[0], himg[1])
tf.close()
print "\tCopy"
lfh = open(tf_name, "rb")
os.unlink(tf_name)
rfh = th.open_image_tar()
copy_file(lfh, rfh)
print "\tUnpack"
rfh.unpack_and_close()
self.sync_time = time.time() - start
# This one is created by target
class exposed_images_tar():
	"""Receives an images tarball over rpyc and unpacks it in place."""
	def __init__(self, dir):
		self.dir = dir
		self.fname = os.path.join(dir, img_tarfile)
		self.fh = open(self.fname, "wb")
	def exposed_write(self, chunk):
		"""Append one chunk of the incoming tar stream."""
		return self.fh.write(chunk)
	def exposed_unpack_and_close(self):
		"""Finish the upload: extract the tar and drop the temporary file."""
		self.fh.close()
		archive = tarfile.open(self.fname, "r")
		os.unlink(self.fname)
		archive.extractall(self.dir)
		archive.close()
|
kargakis/test-infra | experiment/fix_testgrid_config.py | Python | apache-2.0 | 2,759 | 0.000725 | #!/usr/bin/env python
# Copyright 2018 The Kubernetes Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
""" This script maps prow jobs to user readable testgrid tab names
Run this script when set up jobs for new releases
usage:
1. update the replace mapping in MAP
2. bazel run //experiment:fix_testgrid_config
3. if some testgroup name doesn't make sense, just manually edit the change.
"""
import argparse
import ruamel.yaml as yaml
# Substring -> replacement pairs applied (in order of iteration) to each
# test_group_name to produce a human-readable tab name: release-channel
# aliases map to version numbers, and common job prefixes are stripped.
MAP = {
    "-beta": "-1.13",
    "-stable1": "-1.12",
    "-stable2": "-1.11",
    "-stable3": "-1.10",
    "-k8sbeta": "-1.13",
    "-k8sstable1": "-1.12",
    "-k8sstable2": "-1.11",
    "-k8sstable3": "-1.10",
    "-1-12": "-1.12",
    "-1-11": "-1.11",
    "-1-10": "-1.10",
    "-1-9": "-1.9",
    "ci-cri-": "",
    "ci-kubernetes-": "",
    "e2e-": "",
    "periodic-kubernetes-": "periodic-",
}
# Only dashboards whose name contains one of these prefixes get rewritten.
DASHBOARD_PREFIX = [
    "google-aws",
    "google-gce",
    "google-gke",
    "google-unit",
    "sig-cluster-lifecycle-all",
    "sig-cluster-lifecycle-kops",
    "sig-cluster-lifecycle-upgrade-skew",
    "sig-gcp-release-1.",
    "sig-instrumentation",
    "sig-release-1.",
    "sig-network-gce",
    "sig-network-gke",
    "sig-node-cri-1.",
    "sig-node-kubelet",
    "sig-release-master-upgrade",
    "sig-scalability-gce",
]
def main(testgrid):
    """Round-trip the testgrid config, rewriting dashboard tab names.

    For every dashboard whose name matches DASHBOARD_PREFIX, each tab's
    name is derived from its test_group_name with the MAP substitutions
    applied. The file is rewritten in place, preserving comments via
    ruamel's round-trip loader.

    @param testgrid: path to testgrid/config.yaml
    """
    with open(testgrid) as fp:
        config = yaml.round_trip_load(fp)
    for dashboard in config['dashboards']:
        # Fixed: the identifier here was corrupted ("DASHBOAR | D_PREFIX").
        if any(prefix in dashboard['name'] for prefix in DASHBOARD_PREFIX):
            for tab in dashboard['dashboard_tab']:
                name = tab['test_group_name']
                # NOTE(review): replacements apply in dict iteration order;
                # current keys do not overlap, but verify when adding new ones.
                for key, val in MAP.iteritems():
                    name = name.replace(key, val)
                tab['name'] = name
    # write out yaml
    with open(testgrid, 'w') as fp:
        yaml.dump(
            config, fp, Dumper=yaml.RoundTripDumper, width=float("inf"))
        fp.write('\n')
if __name__ == '__main__':
    # Fixed: the description string was corrupted ("Hack t | estgrid configs").
    PARSER = argparse.ArgumentParser(
        description='Hack testgrid configs')
    PARSER.add_argument(
        '--testgrid-config',
        default='./testgrid/config.yaml',
        help='Path to testgrid/config.yaml')
    ARGS = PARSER.parse_args()
    main(ARGS.testgrid_config)
|
johnstonskj/PyDL7 | setup.py | Python | mit | 915 | 0 | from setuptools import setup
def readme():
    """Return the contents of README.md, used as the long description."""
    with open('README.md') as readme_file:
        return readme_file.read()
# Package metadata; stray " | " data-corruption separators that broke the
# call syntax have been removed. long_description comes from README.md.
setup(
    name='PyDL7',
    version='0.0.3',
    description='Python API for parsing DAN Dive Log files.',
    long_description=readme(),
    author='Simon Johnston',
    author_email='johnstonskj@gmail.com',
    download_url='https://pypi.python.org/pypi/PyDL7',
    url='https://github.com/johnstonskj/PyDL7',
    license='MIT',
    classifiers=[
        'Development Status :: 3 - Alpha',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: MIT License',
    ],
    packages=['divelog'],
    setup_requires=['pytest-runner'],
    tests_require=[
        'pytest',
        'pytest-cov',
        'pytest-catchlog',
        'pytest-pep8'
    ],
    entry_points={
        'console_scripts': [
            'dl7dump=divelog.command_line:main',
        ],
    }
)
|
tschmorleiz/amcat | amcat/models/coding/codedarticle.py | Python | agpl-3.0 | 12,691 | 0.003703 | ###########################################################################
# (C) Vrije Universiteit, Amsterdam (the Netherlands) #
# #
# This file is part of AmCAT - The Amsterdam Content Analysis Toolkit #
# #
# AmCAT is free software: you can redistribute it and/or modify it under #
# the terms of the GNU Affero General Public License as published by the #
# Free Software Foundation, either version 3 of the License, or (at your #
# option) any later version. #
# #
# AmCAT is distributed in the hope that it will be useful, but WITHOUT #
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or #
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Affero General Public #
# License for more details. #
# #
# You should have received a copy of the GNU Affero General Public #
# License along with AmCAT. If not, see <http://www.gnu.org/licenses/>. #
###########################################################################
import collections
from functools import partial
from django.db import models, transaction, connection, IntegrityError
import logging
from django.db.models import sql
import itertools
from amcat.models.coding.codingschemafield import CodingSchemaField
from amcat.models.coding.coding import CodingValue, Coding
from amcat.tools.model import AmcatModel
log = logging.getLogger(__name__)
STATUS_NOTSTARTED, STATUS_INPROGRESS, STATUS_COMPLETE, STATUS_IRRELEVANT = 0, 1, 2, 9
class CodedArticleStatus(AmcatModel):
    """Lookup table for the coding status of a CodedArticle.

    Known ids are the module-level STATUS_* constants
    (0=not started, 1=in progress, 2=complete, 9=irrelevant).
    """
    id = models.IntegerField(primary_key=True, db_column='status_id')
    label = models.CharField(max_length=50)
    class Meta():
        db_table = 'coded_article_status'
        app_label = 'amcat'
def _to_coding(coded_article, coding):
    """
    Takes a dictionary with keys 'sentence_id', 'start', 'end', and creates
    an (unsaved) Coding object attached to coded_article.
    @type coded_article: CodedArticle
    @type coding: dict
    """
    return Coding(
        coded_article=coded_article, sentence_id=coding.get("sentence_id"),
        start=coding.get("start"), end=coding.get("end")
    )
def _to_codingvalue(coding, codingvalue):
    """
    Takes a dictionary with keys 'codingschemafield_id', 'intval', 'strval'
    and creates an (unsaved) CodingValue object attached to coding.
    @type coding: Coding
    @type codingvalue: dict
    """
    return CodingValue(
        coding=coding,
        field_id=codingvalue.get("codingschemafield_id"),
        intval=codingvalue.get("intval"),
        strval=codingvalue.get("strval"),
    )
def _to_codingvalues(coding, values):
    """
    Takes an iterator with codingvalue dictionaries (see _to_codingvalue) and
    a coding, and returns an iterator with (unsaved) CodingValue objects.
    """
    return map(partial(_to_codingvalue, coding), values)
class CodedArticle(models.Model):
    """
    A CodedArticle is an article in the context of two other objects: a codingjob and an
    article. It exists for every (codingjob, article) in {codingjobs} X {codingjobarticles}
    and is created when creating a codingjob (see `create_coded_articles` in codingjob.py).
    Each coded article contains codings (1:N) and each coding contains codingvalues (1:N).
    """
    comments = models.TextField(blank=True, null=True)
    status = models.ForeignKey(CodedArticleStatus, default=STATUS_NOTSTARTED)
    article = models.ForeignKey("amcat.Article", related_name="coded_articles")
    codingjob = models.ForeignKey("amcat.CodingJob", related_name="coded_articles")
    def __unicode__(self):
        return "Article: {self.article}, Codingjob: {self.codingjob}".format(**locals())
    def set_status(self, status):
        """Set the status of this coded article, deserialising an int pk into
        a CodedArticleStatus as needed, and save immediately."""
        if type(status) == int:
            status = CodedArticleStatus.objects.get(pk=status)
        self.status = status
        self.save()
    def get_codings(self):
        """Returns a generator yielding tuples (coding, [codingvalues])"""
        codings = Coding.objects.filter(coded_article=self)
        values = CodingValue.objects.filter(coding__in=codings)
        # Bucket values by coding id so each coding is paired with its values
        # in a single pass (avoids one query per coding).
        values_dict = collections.defaultdict(list)
        for value in values:
            values_dict[value.coding_id].append(value)
        for coding in codings:
            yield (coding, values_dict[coding.id])
    def _replace_codings(self, new_codings):
        # Updating tactic: delete all existing codings and codingvalues, then insert
        # the new ones. This prevents calculating a delta, and confronting the
        # database with (potentially) many update queries.
        CodingValue.objects.filter(coding__coded_article=self).delete()
        Coding.objects.filter(coded_article=self).delete()
        # NOTE(review): on Python 2 (see izip below) map() returns a list;
        # the result is iterated more than once, so it must stay a list.
        new_coding_objects = map(partial(_to_coding, self), new_codings)
        # Saving each coding is pretty inefficient, but Django doesn't allow retrieving
        # id's when using bulk_create. See Django ticket #19527.
        if connection.vendor == "postgresql":
            query = sql.InsertQuery(Coding)
            query.insert_values(Coding._meta.fields[1:], new_coding_objects)
            raw_sql, params = query.sql_with_params()[0]
            new_coding_objects = Coding.objects.raw("%s %s" % (raw_sql, "RETURNING coding_id"), params)
        else:
            # Do naive O(n) approach
            for coding in new_coding_objects:
                coding.save()
        coding_values = itertools.chain.from_iterable(
            _to_codingvalues(co, c["values"]) for c, co in itertools.izip(new_codings, new_coding_objects)
        )
        return (new_coding_objects, CodingValue.objects.bulk_create(coding_values))
    def replace_codings(self, coding_dicts):
        """
        Creates codings and replaces currently existing ones. It takes one parameter
        which has to be an iterator of dictionaries with each dictionary following
        a specific format:
        {
            "sentence_id" : int,
            "start" : int,
            "end" : int,
            "values" : [CodingDict]
        }
        with CodingDict being:
        {
            "codingschemafield_id" : int,
            "intval" : int / NoneType,
            "strval" : str / NoneType
        }
        @raises IntegrityError: codingschemafield_id is None
        @raises ValueError: intval == strval == None
        @raises ValueError: intval != None and strval != None
        @returns: ([Coding], [CodingValue])
        """
        coding_dicts = tuple(coding_dicts)
        values = tuple(itertools.chain.from_iterable(cd["values"] for cd in coding_dicts))
        if any(v.get("intval") == v.get("strval") == None for v in values):
            raise ValueError("intval and strval cannot both be None")
        if any(v.get("intval") is not None and v.get("strval") is not None for v in values):
            raise ValueError("intval and strval cannot both be not None")
        # None is an allowed field id here; the database rejects it later
        # (see @raises IntegrityError in the docstring).
        schemas = (self.codingjob.unitschema_id, self.codingjob.articleschema_id)
        fields = CodingSchemaField.objects.filter(codingschema__id__in=schemas)
        field_ids = set(fields.values_list("id", flat=True)) | {None}
        if any(v.get("codingschemafield_id") not in field_ids for v in values):
            raise ValueError("codingschemafield_id must be in codingjob")
        with transaction.atomic():
            return self._replace_codings(coding_dicts)
    class Meta():
        db_table = 'coded_articles'
        app_label = 'amcat'
        unique_together = ("codingjob", "article")
###########################################################################
# U N I T T E S T S #
###########################################################################
from amcat.tool |
shanezhiu/pyspider | pyspider/database/mysql/projectdb.py | Python | apache-2.0 | 2,401 | 0.001249 | #!/usr/bin/env python
# -*- encoding: utf-8 -*-
# vim: set et sw=4 ts=4 sts=4 ff=unix fenc=utf8:
# Author: Binux<i@binux.me>
# http://binux.me
# Created on 2014-07-17 21:06:43
import time
import mysql.connector
from pyspider.database.base.projectdb import ProjectDB as BaseProjectDB
from pyspider.database.basedb import BaseDB
from .mysqlbase import MySQLMixin
class ProjectDB(MySQLMixin, BaseProjectDB, BaseDB):
    """MySQL-backed project store; creates its database and table on connect.

    Fixed: stray FIM-corruption separators ("fi | elds" in get_all and a
    lone "|" line before check_update) that broke the class.
    """
    __tablename__ = 'projectdb'
    def __init__(self, host='localhost', port=3306, database='projectdb',
                 user='root', passwd=None):
        self.database_name = database
        self.conn = mysql.connector.connect(user=user, password=passwd,
                                            host=host, port=port, autocommit=True)
        # Create the database on first use, then bind the connection to it.
        if database not in [x[0] for x in self._execute('show databases')]:
            self._execute('CREATE DATABASE %s' % self.escape(database))
        self.conn.database = database
        self._execute('''CREATE TABLE IF NOT EXISTS %s (
            `name` varchar(64) PRIMARY KEY,
            `group` varchar(64),
            `status` varchar(16),
            `script` TEXT,
            `comments` varchar(1024),
            `rate` float(11, 4),
            `burst` float(11, 4),
            `updatetime` double(16, 4)
            ) ENGINE=MyISAM CHARSET=utf8''' % self.escape(self.__tablename__))
    def insert(self, name, obj={}):
        """Insert project `name`; obj is copied before mutation, so the
        shared {} default is safe here."""
        obj = dict(obj)
        obj['name'] = name
        obj['updatetime'] = time.time()
        return self._insert(**obj)
    def update(self, name, obj={}, **kwargs):
        """Update project `name`; returns the number of affected rows."""
        obj = dict(obj)
        obj.update(kwargs)
        obj['updatetime'] = time.time()
        ret = self._update(where="`name` = %s" % self.placeholder, where_values=(name, ), **obj)
        return ret.rowcount
    def get_all(self, fields=None):
        return self._select2dic(what=fields)
    def get(self, name, fields=None):
        """Return the project dict for `name`, or None when missing."""
        where = "`name` = %s" % self.placeholder
        for each in self._select2dic(what=fields, where=where, where_values=(name, )):
            return each
        return None
    def drop(self, name):
        where = "`name` = %s" % self.placeholder
        return self._delete(where=where, where_values=(name, ))
    def check_update(self, timestamp, fields=None):
        """Return projects whose updatetime is at or after `timestamp`."""
        where = "`updatetime` >= %f" % timestamp
        return self._select2dic(what=fields, where=where)
|
tdi/cfn-inspect | cfn_inspect/__main__.py | Python | mit | 58 | 0 | from .cli import cli
# Fixed: the dunder name was corrupted ("__n | ame__") in the source.
if __name__ == "__main__":
    cli()
steveb/heat | heat/tests/engine/service/test_stack_resources.py | Python | apache-2.0 | 32,282 | 0 | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from oslo_config import cfg
from oslo_messaging.rpc import dispatcher
import six
from heat.common import exception
from heat.common import identifier
from heat.engine.clients.os import keystone
from heat.engine import dependencies
from heat.engine import resource as res
from heat.engine import service
from heat.engine import stack
from heat.engine import stack_lock
from heat.engine import template as templatem
from heat.objects import stack as stack_object
from heat.tests import common
from heat.tests.engine import tools
from heat.tests import fakes as test_fakes
from heat.tests import generic_resource as generic_rsrc
from heat.tests import utils
policy_template = '''
{
"AWSTemplateFormatVersion" : "2010-09-09",
"Description" : "alarming",
"Resources" : {
"WebServerScaleDownPolicy" : {
"Type" : "AWS::AutoScaling::ScalingPolicy",
"Properties" : {
"AdjustmentType" : "ChangeInCapacity",
"AutoScalingGroupName" : "",
"Cooldown" : "60",
"ScalingAdjustment" : "-1"
}
},
"Random" : {
"Type" : "OS::Heat::RandomString"
}
}
}
'''
class StackResourcesServiceTest(common.HeatTestCase):
def setUp(self):
super(StackResourcesServiceTest, self).setUp()
self.ctx = utils.dummy_context(tenant_id='stack_resource_test_tenant')
self.eng = service.EngineService('a-host', 'a-topic')
self.eng.thread_group_mgr = tools.DummyThreadGroupManager()
self.eng.engine_id = 'engine-fake-uuid'
cfg.CONF.set_default('heat_stack_user_role', 'stack_user_role')
@mock.patch.object(stack.Stack, 'load')
def _test_describe_stack_resource(self, mock_load):
mock_load.return_value = self.stack
# Patch _resolve_all_attributes or it tries to call novaclient
self.patchobject(res.Resource, '_resolve_all_attributes',
return_value=None)
r = self.eng.describe_stack_resource(self.ctx, self.stack.identifier(),
'WebServer', with_attr=None)
self.assertIn('resource_identity', r)
self.assertIn('description', r)
self.assertIn('updated_time', r)
self.assertIn('stack_identity', r)
self.assertIsNotNone(r['stack_identity'])
self.assertIn('stack_name', r)
self.assertEqual(self.stack.name, r['stack_name'])
self.assertIn('metadata', r)
self.assertIn('resource_status', r)
self.assertIn('resource_status_reason', r)
self.assertIn('resource_type', r)
self.assertIn('physical_resource_id', r)
self.assertIn('resource_name', r)
self.assertIn('attributes', r)
self.assertEqual('WebServer', r['resource_name'])
mock_load.assert_called_once_with(self.ctx, stack=mock.ANY)
@tools.stack_context('service_stack_resource_describe__test_stack')
def test_stack_resource_describe(self):
self._test_describe_stack_resource()
@mock.patch.object(service.EngineService, '_get_stack')
def test_stack_resource_describe_nonexist_stack(self, mock_get):
non_exist_identifier = identifier.HeatIdentifier(
self.ctx.tenant_id, 'wibble',
'18d06e2e-44d3-4bef-9fbf-52480d604b02')
mock_get.side_effect = exception.EntityNotFound(
entity='Stack', name='test')
ex = self.assertRaises(dispatcher.ExpectedException,
self.eng.describe_stack_resource,
self.ctx, non_exist_identifier, 'WebServer')
self.assertEqual(exception.EntityNotFound, ex.exc_info[0])
mock_get.assert_called_once_with(self.ctx, non_exist_identifier)
@mock.patch.object(stack.Stack, 'load')
@tools.stack_context('service_resource_describe_nonexist_test_stack')
def test_stack_resource_describe_nonexist_resource(self, mock_load):
mock_load.return_value = self.stack
ex = self.assert | Raises(dispatcher.ExpectedException,
self.eng.describe_stack_resource,
self.ctx, self.stack.identifier(), 'foo')
self.assertEqual(exception.ResourceNotFound, ex.exc_info[0])
mock_load.assert_called_once_with(self.ctx, stack=mock.ANY)
@tools.stack_context('service_resou | rce_describe_noncreated_test_stack',
create_res=False)
def test_stack_resource_describe_noncreated_resource(self):
self._test_describe_stack_resource()
@mock.patch.object(service.EngineService, '_authorize_stack_user')
@tools.stack_context('service_resource_describe_user_deny_test_stack')
def test_stack_resource_describe_stack_user_deny(self, mock_auth):
self.ctx.roles = [cfg.CONF.heat_stack_user_role]
mock_auth.return_value = False
ex = self.assertRaises(dispatcher.ExpectedException,
self.eng.describe_stack_resource,
self.ctx, self.stack.identifier(), 'foo')
self.assertEqual(exception.Forbidden, ex.exc_info[0])
mock_auth.assert_called_once_with(self.ctx, mock.ANY, 'foo')
@mock.patch.object(stack.Stack, 'load')
@tools.stack_context('service_resources_describe_test_stack')
def test_stack_resources_describe(self, mock_load):
mock_load.return_value = self.stack
resources = self.eng.describe_stack_resources(self.ctx,
self.stack.identifier(),
'WebServer')
self.assertEqual(1, len(resources))
r = resources[0]
self.assertIn('resource_identity', r)
self.assertIn('description', r)
self.assertIn('updated_time', r)
self.assertIn('stack_identity', r)
self.assertIsNotNone(r['stack_identity'])
self.assertIn('stack_name', r)
self.assertEqual(self.stack.name, r['stack_name'])
self.assertIn('resource_status', r)
self.assertIn('resource_status_reason', r)
self.assertIn('resource_type', r)
self.assertIn('physical_resource_id', r)
self.assertIn('resource_name', r)
self.assertEqual('WebServer', r['resource_name'])
mock_load.assert_called_once_with(self.ctx, stack=mock.ANY)
@mock.patch.object(stack.Stack, 'load')
@tools.stack_context('service_resources_describe_no_filter_test_stack')
def test_stack_resources_describe_no_filter(self, mock_load):
mock_load.return_value = self.stack
resources = self.eng.describe_stack_resources(
self.ctx, self.stack.identifier(), None)
self.assertEqual(1, len(resources))
r = resources[0]
self.assertIn('resource_name', r)
self.assertEqual('WebServer', r['resource_name'])
mock_load.assert_called_once_with(self.ctx, stack=mock.ANY)
@mock.patch.object(service.EngineService, '_get_stack')
def test_stack_resources_describe_bad_lookup(self, mock_get):
mock_get.side_effect = TypeError
self.assertRaises(TypeError,
self.eng.describe_stack_resources,
self.ctx, None, 'WebServer')
mock_get.assert_called_once_with(self.ctx, None)
def test_stack_resources_describe_nonexist_stack(self):
non_exist_identifier = identifier.HeatIdentifier(
self.ctx.tenant_id, 'wibble',
'18d06e2e-44d3-4bef-9fbf-52480d604b02')
ex = self.assertRaises(dispatcher.ExpectedException,
self.eng.describe_stack_resources,
|
flumotion-mirror/flumotion | flumotion/worker/checks/gst010.py | Python | lgpl-2.1 | 8,378 | 0.000716 | # -*- Mode: Python -*-
# vi:si:et:sw=4:sts=4:ts=4
# Flumotion - a streaming media server
# Copyright (C) 2004,2005,2006,2007,2008,2009 Fluendo, S.L.
# Copyright (C) 2010,2011 Flumotion Services, S.A.
# All rights reserved.
#
# This file may be distributed and/or modified under the terms of
# the GNU Lesser General Public License version 2.1 as published by
# the Free Software Foundation.
# This file is distributed without any warranty; without even the implied
# warranty of merchantability or fitness for a particular purpose.
# See "LICENSE.LGPL" in the source distribution for more information.
#
# Headers in this file shall remain intact.
import gobject
import gst
import gst.interfaces
from twisted.internet.threads import deferToThread
from twisted.internet import defer
from flumotion.common import gstreamer, errors, log, messages
from flumotion.common.i18n import N_, gettexter
from flumotion.twisted import defer as fdefer
from flumotion.worker.checks import check
__version__ = "$Rev$"
T_ = gettexter()
class BusResolution(fdefer.Resolution):
    """Resolution that owns a GStreamer pipeline and its bus signal watch.

    cleanup() disconnects the watch and drops the pipeline back to NULL
    so the worker releases the underlying resources.
    """
    pipeline = None
    signal_id = None

    def cleanup(self):
        pipe = self.pipeline
        if not pipe:
            return
        if self.signal_id:
            bus = pipe.get_bus()
            bus.remove_signal_watch()
            bus.disconnect(self.signal_id)
            self.signal_id = None
        pipe.set_state(gst.STATE_NULL)
        self.pipeline = None
def do_element_check(pipeline_str, element_name, check_proc, state=None,
set_state_deferred=False):
"""
Parse the given pipeline and set it to the given state.
When the bin reaches that state, perform the given check function on the
element with the given name.
@param pipeline_str: description of the pipeline used to test
@param element_name: name of the element being checked
@param check_proc: a function to call with the GstElement as argument.
@param state: an unused keyword parameter that will be removed when
support for GStreamer 0.8 is dropped.
@param set_state_deferred: a flag to say whether the set_state is run in
a deferToThread
@type | set_state_deferred: bool
@returns: a deferred that will fire with the result of check_proc, or
fail.
@rtype: L{twisted.internet.defer.Deferred}
"""
def run_check(pipeline, resolution):
element = pipeline.get_by_name(element_name)
try:
retval = check_proc(element)
resolution.callback(retval)
except check.Chec | kProcError, e:
log.debug('check', 'CheckProcError when running %r: %r',
check_proc, e.data)
resolution.errback(errors.RemoteRunError(e.data))
except Exception, e:
log.debug('check', 'Unhandled exception while running %r: %r',
check_proc, e)
resolution.errback(errors.RemoteRunError(
log.getExceptionMessage(e)))
# set pipeline state to NULL so worker does not consume
# unnecessary resources
pipeline.set_state(gst.STATE_NULL)
def message_rcvd(bus, message, pipeline, resolution):
t = message.type
if t == gst.MESSAGE_STATE_CHANGED:
if message.src == pipeline:
old, new, pending = message.parse_state_changed()
if new == gst.STATE_PLAYING:
run_check(pipeline, resolution)
elif t == gst.MESSAGE_ERROR:
gerror, debug = message.parse_error()
# set pipeline state to NULL so worker does not consume
# unnecessary resources
pipeline.set_state(gst.STATE_NULL)
resolution.errback(errors.GStreamerGstError(
message.src, gerror, debug))
elif t == gst.MESSAGE_EOS:
resolution.errback(errors.GStreamerError(
"Unexpected end of stream"))
else:
log.debug('check', 'message: %s: %s:' % (
message.src.get_path_string(),
message.type.value_nicks[1]))
if message.structure:
log.debug('check', 'message: %s' %
message.structure.to_string())
else:
log.debug('check', 'message: (no structure)')
return True
resolution = BusResolution()
log.debug('check', 'parsing pipeline %s' % pipeline_str)
try:
pipeline = gst.parse_launch(pipeline_str)
log.debug('check', 'parsed pipeline %s' % pipeline_str)
except gobject.GError, e:
resolution.errback(errors.GStreamerError(e.message))
return resolution.d
bus = pipeline.get_bus()
bus.add_signal_watch()
signal_id = bus.connect('message', message_rcvd, pipeline, resolution)
resolution.signal_id = signal_id
resolution.pipeline = pipeline
log.debug('check', 'setting state to playing')
if set_state_deferred:
d = deferToThread(pipeline.set_state, gst.STATE_PLAYING)
def stateChanged(res):
return resolution.d
d.addCallback(stateChanged)
return d
else:
pipeline.set_state(gst.STATE_PLAYING)
return resolution.d
def check1394(mid, guid):
"""
Probe the firewire device.
Return a deferred firing a result.
The result is either:
- succesful, with a None value: no device found
- succesful, with a dictionary of width, height, and par as a num/den pair
- failed
@param mid: the id to set on the message.
@param guid: the id of the selected device.
@rtype: L{twisted.internet.defer.Deferred} of
L{flumotion.common.messages.Result}
"""
result = messages.Result()
def do_check(demux):
pad = demux.get_pad('video')
if not pad or pad.get_negotiated_caps() == None:
raise errors.GStreamerError('Pipeline failed to negotiate?')
caps = pad.get_negotiated_caps()
s = caps.get_structure(0)
w = s['width']
h = s['height']
par = s['pixel-aspect-ratio']
# FIXME: not a good idea to reuse the result name which
# also exists in the parent context.
# pychecker should warn; however it looks like
# the parent result doesn't get stored as name,
# but instead with STORE_DEREF
result = dict(width=w, height=h, par=(par.num, par.denom))
log.debug('check', 'returning dict %r' % result)
return result
pipeline = \
'dv1394src guid=%s ! dvdemux name=demux .video ! fakesink' % guid
d = do_element_check(pipeline, 'demux', do_check)
def errbackResult(failure):
log.debug('check', 'returning failed Result, %r' % failure)
m = None
if failure.check(errors.GStreamerGstError):
source, gerror, debug = failure.value.args
log.debug('check', 'GStreamer GError: %s (debug: %s)' % (
gerror.message, debug))
if gerror.domain == "gst-resource-error-quark":
if gerror.code == int(gst.RESOURCE_ERROR_NOT_FOUND):
# dv1394src was fixed after gst-plugins-good 0.10.2
# to distinguish NOT_FOUND and OPEN_READ
version = gstreamer.get_plugin_version('1394')
if version >= (0, 10, 0, 0) and version <= (0, 10, 2, 0):
m = messages.Error(T_(
N_("Could not find or open the Firewire device. "
"Check the device node and its permissions.")))
else:
m = messages.Error(T_(
N_("No Firewire device found.")))
elif gerror.code == int(gst.RESOURCE_ERROR_OPEN_READ):
m = messages.Error(T_(
N_("Could not open Firewire device for reading. "
"Check permissions on the device.")))
if not m:
m = check.handleGStreamerDeviceError(failure, 'Firewire',
mid=mid)
if not m:
m = messages.Error(T_(N_("Could not probe Firewire device.")),
|
therewillbecode/ichnaea | ichnaea/models/observation.py | Python | apache-2.0 | 6,285 | 0 | import operator
import colander
import mobile_codes
from ichnaea import geocalc
from ichnaea.models.base import (
CreationMixin,
ValidationMixin,
ValidPositionSchema,
)
from ichnaea.models.cell import (
decode_radio_dict,
encode_radio_dict,
CellKeyPsc,
ValidCellKeySchema,
ValidCellSignalSchema,
)
from ichnaea.models import constants
from ichnaea.models.hashkey import (
HashKey,
HashKeyMixin,
)
from ichnaea.models.schema import (
DefaultNode,
)
from ichnaea.models.wifi import (
WifiMacNode,
ValidWifiSignalSchema,
)
class WifiKey(HashKey):
_fields = ('key', )
class ValidReportSchema(ValidPositionSchema):
"""A schema which validates the fields present in a report."""
accuracy = DefaultNode(
colander.Float(), missing=None, validator=colander.Range(
0, constants.MAX_ACCURACY))
altitude = DefaultNode(
colander.Float(), missing=None, validator=colander.Range(
constants.MIN_ALTITUDE, constants.MAX_ALTITUDE))
altitude_ | accuracy = DefaultNode(
colander.Float(), missing=None, validator=colander.Range(
0, constants.MAX_ALTITUDE_ACCURACY))
heading = DefaultNode( |
colander.Float(), missing=None, validator=colander.Range(
0, constants.MAX_HEADING))
speed = DefaultNode(
colander.Float(), missing=None, validator=colander.Range(
0, constants.MAX_SPEED))
def validator(self, node, cstruct):
super(ValidReportSchema, self).validator(node, cstruct)
for field in ('lat', 'lon'):
if (cstruct[field] is None or
cstruct[field] is colander.null): # pragma: no cover
raise colander.Invalid(node, 'Report %s is required.' % field)
class Report(HashKey, CreationMixin, ValidationMixin):
    """Position report fields shared by cell and wifi observations."""

    _valid_schema = ValidReportSchema()
    _fields = (
        'lat',
        'lon',
        'accuracy',
        'altitude',
        'altitude_accuracy',
        'heading',
        'speed',
    )

    def _to_json_value(self):
        """Return a sparse dict holding only the fields that are not None."""
        sparse = {}
        for name in self._fields:
            field_value = getattr(self, name, None)
            if field_value is not None:
                sparse[name] = field_value
        return sparse

    @classmethod
    def combine(cls, *reports):
        """Merge reports left-to-right; later reports overwrite earlier values."""
        merged = {}
        for rep in reports:
            merged.update(rep.__dict__)
        return cls(**merged)
class ValidCellReportSchema(ValidCellKeySchema, ValidCellSignalSchema):
"""A schema which validates the cell specific fields in a report."""
def validator(self, node, cstruct):
super(ValidCellReportSchema, self).validator(node, cstruct)
for field in ('radio', 'mcc', 'mnc', 'lac', 'cid'):
if (cstruct[field] is None or
cstruct[field] is colander.null):
raise colander.Invalid(node, 'Cell %s is required.' % field)
class CellReport(HashKey, HashKeyMixin, CreationMixin, ValidationMixin):
    """Cell tower observation fields, hash-keyed via CellKeyPsc."""

    _hashkey_cls = CellKeyPsc
    _valid_schema = ValidCellReportSchema()
    _fields = (
        'radio',
        'mcc',
        'mnc',
        'lac',
        'cid',
        'psc',
        'asu',
        'signal',
        'ta',
    )

    def better(self, other):
        """Is self better than the other?

        Checks, in order: lower timing advance, stronger signal, higher ASU.
        """
        for attr, preferred in (('ta', operator.lt),
                                ('signal', operator.gt),
                                ('asu', operator.gt)):
            mine = getattr(self, attr, None)
            theirs = getattr(other, attr, None)
            if (mine is not None and theirs is not None
                    and preferred(mine, theirs)):
                return True
        return False

    @classmethod
    def _from_json_value(cls, value):
        # Radio field is stored encoded; decode before generic handling.
        return super(CellReport, cls)._from_json_value(
            decode_radio_dict(value))

    def _to_json_value(self):
        # Re-encode the radio field on the way out.
        return encode_radio_dict(
            super(CellReport, self)._to_json_value())
class ValidCellObservationSchema(ValidCellReportSchema, ValidReportSchema):
"""A schema which validates the fields present in a cell observation."""
def validator(self, node, cstruct):
super(ValidCellObservationSchema, self).validator(node, cstruct)
in_country = False
for code in mobile_codes.mcc(str(cstruct['mcc'])):
in_country = in_country or geocalc.country_matches_location(
cstruct['lat'], cstruct['lon'], code.alpha2, 1)
if not in_country:
raise colander.Invalid(node, (
'Lat/lon must be inside one of '
'the bounding boxes for the MCC'))
class CellObservation(CellReport, Report):
_valid_schema = ValidCellObservationSchema()
_fields = CellReport._fields + Report._fields
class ValidWifiReportSchema(ValidWifiSignalSchema):
"""A schema which validates the wifi specific fields in a report."""
key = WifiMacNode(colander.String())
def validator(self, node, cstruct):
super(ValidWifiReportSchema, self).validator(node, cstruct)
if (cstruct['key'] is None or
cstruct['key'] is colander.null): # pragma: no cover
raise colander.Invalid(node, 'Wifi key is required.')
class WifiReport(HashKey, HashKeyMixin, CreationMixin, ValidationMixin):
    """Wifi network observation fields, hash-keyed via WifiKey."""

    _hashkey_cls = WifiKey
    _valid_schema = ValidWifiReportSchema()
    _fields = (
        'key',
        'channel',
        'signal',
        'snr',
    )

    def better(self, other):
        """Is self better than the other? A strictly stronger signal wins."""
        mine = getattr(self, 'signal', None)
        theirs = getattr(other, 'signal', None)
        return mine is not None and theirs is not None and mine > theirs

    @property
    def mac(self):
        # BBB: backwards-compatible alias for the `key` field.
        return self.key
class ValidWifiObservationSchema(ValidWifiReportSchema, ValidReportSchema):
"""A schema which validates the fields in wifi observation."""
class WifiObservation(WifiReport, Report):
_valid_schema = ValidWifiObservationSchema()
_fields = WifiReport._fields + Report._fields
|
Etenil/anvil | anvillib/avatar.py | Python | mit | 1,254 | 0.00319 | from hashlib import md5
from urllib import urlencode
import common
import httplib
import re
import config
def url_exists(site, path):
    """Return True when a HEAD request for ``path`` on ``site`` answers 200.

    Any network or protocol failure is treated as "does not exist" so
    callers can safely probe arbitrary hosts.
    """
    try:
        conn = httplib.HTTPConnection(site)
        try:
            conn.request('HEAD', path)
            response = conn.getresponse()
            return response.status == 200
        finally:
            # Original leaked the socket when request/getresponse raised.
            conn.close()
    except Exception:
        # Narrowed from a bare except: no longer swallows KeyboardInterrupt.
        return False
def pavatar(url, avatar="pavatar.png"):
    """Probe ``url`` for a pavatar image at its root.

    Returns the full image URL when the file exists, False otherwise.
    """
    if not re.match("^http://.+", str(url)):
        return False
    remainder = url[7:]              # everything after the 'http://' prefix
    slash = remainder.find('/', 1)   # first path separator after the host
    if slash == -1:
        host, path = remainder, ""
    else:
        host, path = remainder[:slash], remainder[slash:]
    if not path.endswith('/'):
        path += "/"
    path += avatar
    if url_exists(host, path):
        return "http://" + host + path
    return False
def gravatar(email):
    """Return the Gravatar image URL for ``email``.

    The address is lower-cased before hashing, per the Gravatar protocol,
    and explicitly encoded to UTF-8 bytes so hashing also works for
    unicode addresses (and under Python 3).
    """
    params = {'gravatar_id': md5(email.lower().encode('utf-8')).hexdigest(),
              'size': str(80)}
    return "http://www.gravatar.com/avatar.php?" + urlencode(params)
def avatar(url, email):
    """Prefer the site's pavatar; fall back to the email's gravatar."""
    # pavatar() returns either a URL string or False, so `or` picks
    # the gravatar exactly when no pavatar was found.
    return pavatar(url) or gravatar(email)
def logo(url):
    """Return the site's logo.png pavatar, or the stock project image."""
    site_logo = pavatar(url, "logo.png")
    if site_logo:
        return site_logo
    return config.prefix + "/static/img/project.png"
|
shahsaifi/handystuff | wc.py | Python | unlicense | 388 | 0.03866 | imp | ort sys
def linecount(filename):
    """Return the number of lines in ``filename``.

    Uses a context manager so the file handle is closed deterministically
    (the original left it to the garbage collector).
    """
    with open(filename) as f:
        return len(f.readlines())
def wordcount(filename):
    """Return the number of whitespace-separated words in ``filename``.

    Uses a context manager so the file handle is closed deterministically.
    """
    with open(filename) as f:
        return len(f.read().split())
def charcount(filename):
    """Return the number of characters in ``filename``.

    Uses a context manager so the file handle is closed deterministically.
    """
    with open(filename) as f:
        return len(f.read())
def main():
    """Print line, word and char counts plus the filename, like `wc`."""
    # argv[1] is required: the file to count.
    filename = sys.argv[1]
    print linecount(filename), wordcount(filename), charcount(filename), filename

if __name__ == "__main__":
    main()
|
trep/opentrep | test/i18n/utf8/pyutf8.py | Python | lgpl-2.1 | 263 | 0.007605 | #!/usr/bin/python
# UTF-8 test string spelled out byte-by-byte (appears to be Arabic text --
# TODO confirm), terminated by a stray NUL byte.
charList = [ '\xd9', '\x83', '\xd8', '\xa7', '\xd9', '\x81', '\x20', '\xd8',
             '\xa7', '\xd9', '\x84', '\xd8', '\xac', '\xd8', '\xa7', '\xd8',
             '\xb9', '\x00' ]
# NOTE(review): this first assignment is redundant; join() below rebinds it.
location = ''
location = ''.join(charList)
print location
|
sdoran35/hate-to-hugs | venv/lib/python3.6/site-packages/nltk/classify/decisiontree.py | Python | mit | 12,314 | 0.002193 | # Natural Language Toolkit: Decision Tree Classifiers
#
# Copyright (C) 2001-2017 NLTK Project
# Author: Edward Loper <edloper@gmail.com>
# URL: <http://nltk.org/>
# For license information, see LICENSE.TXT
"""
A classifier model that decides which label to assign to a token on
the basis of a tree structure, where branches correspond to conditions
on feature values, and leaves correspond to label assignments.
"""
from __future__ import print_function, unicode_literals, division
from collections import defaultdict
from nltk.probability import FreqDist, MLEProbDist, entropy
from nltk.classify.api import ClassifierI
from nltk.compat import python_2_unicode_compatible
@python_2_unicode_compatible
class DecisionTreeClassifier(ClassifierI):
def __init__(self, label, feature_name=None, decisions=None, default=None):
"""
:param label: The most likely label for tokens that reach
this node in the decision tree. If this decision tree
has no children, then this label will be assigned to
any token that reaches this decision tree.
:param feature_name: The name of the feature that this
decision tree selects for.
:param decisions: A dictionary mapping from feature values
for the feature identified by ``feature_name`` to
child decision trees.
:param default: The child that will be used if the value of
feature ``feature_name`` does not match any of the keys in
``decisions``. This is used when constructing binary
decision trees.
"""
self._label = label
self._fname = feature_name
self._decisions = decisions
self._default = default
def labels(self):
labels = [self._label]
if self._decisions is not None:
for dt in self._decisions.values():
labels.extend(dt.labels())
if self._default is not None:
labels.extend(self._default.labels())
return list(set(labels))
def classify(self, featureset):
# Decision leaf:
if self._fname is None:
return self._label
# Decision tree:
fval = featureset.get(self._fname)
if fval in self._decisions:
return self._decisions[fval].classify(featureset)
elif self._default is not None:
return self._default.classify(featureset)
else:
return self._label
def error(self, labeled_featuresets):
errors = 0
for featureset, label in labeled_featuresets:
if self.classify(featureset) != label:
errors += 1
return errors/len(labeled_featuresets)
def pretty_format(self, width=70, prefix='', depth=4):
"""
Return a string containing a pretty-printed version of this
decision tree. Each line in this string corresponds to a
single decision tree node or leaf, and indentation is used to
display the structure of the decision tree.
"""
# [xx] display default!!
if self._fname is None:
n = width-len(prefix)-15
return '{0}{1} {2}\n'.format(prefix, '.'*n, self._label)
s = ''
for i, (fval, result) in enumerate(sorted(self._decisions.items())):
hdr = '{0}{1}={2}? '.format(prefix, self._fname, fval)
n = width-15-len(hdr)
s += '{0}{1} {2}\n'.format(hdr, '.'*(n), result._label)
if result._fname is not None and depth>1:
s += result.pretty_format(width, prefix+' ', depth-1)
if self._default is not None:
n = width-len(prefix)-21
s += '{0}else: {1} {2}\n'.format(prefix, '.'*n, self._default._label)
if self._default._fname is not None and depth>1:
s += self._default.pretty_format(width, prefix+' ', depth-1)
return s
def pseudocode(self, prefix='', depth=4):
"""
Return a string representation of this decision tree that
expresses the decisions it makes as a nested set of pseudocode
if statements.
"""
if self._fname is None:
return "{0}return {1!r}\n".format(prefix, self._label)
s = ''
for (fval, result) in sorted(self._decisions.items()):
s += '{0}if {1} == {2!r}: '.format(prefix, self._fname, fval)
if result._fname is not None and depth>1:
s += '\n'+result.pseudocode(prefix+' ', depth-1)
else:
s += 'return {0!r}\n'.format(result._label)
if self._default is not None:
if len(self._decisions) == 1:
s += '{0}if {1} != {2!r}: '.format(prefix, self._fname,
list(self._decisions.keys())[0])
else:
s += '{0}else: '.format(prefix)
if self._default._fname is not None and depth>1:
s += '\n'+self._default.pseudocode(prefix+' ', depth-1)
else:
s += 'return {0!r}\n'.format(self._default._label)
return s
def __str__(self):
return self.pretty_format()
@staticmethod
def train(labeled_featuresets, entropy_cutoff=0.05, depth_cutoff=100,
support_cutoff=10, binary=False, feature_values=None,
verbose=False):
"""
:param binary: If true, then treat all feature/value pairs as
individual binary features, rather than using a single n-way
branch for each feature.
"""
# Collect a list of all feature names.
feature_names = set()
for featureset, label in labeled_featuresets:
for fname in featureset:
feature_names.add(fname)
# Collect a list of the values each feature can take.
if feature_values is None and binary:
feature_values = defaultdict(set)
for featureset, label in labeled_featuresets:
for fname, fval in featureset.items():
feature_values[fname].add(fval)
# Start with a stump.
if not binary:
tree = DecisionTreeClassifier.best_stump(
feature_names, labeled_featuresets, verbose)
else:
tree = DecisionTreeClassifier.best_binary_stump(
feature_names, labeled_featuresets, feature_values, verbose)
# Refine the stump.
tree.refine(labeled_featuresets, entropy_cutoff, depth_cutoff-1,
support_cutoff, binary, feature_values, verbose)
# Return it
return tree
| @staticmethod
def leaf(labeled_featuresets):
label = FreqDist(label for (featureset, label)
in labeled_featuresets).max()
return Decisio | nTreeClassifier(label)
@staticmethod
def stump(feature_name, labeled_featuresets):
label = FreqDist(label for (featureset, label)
in labeled_featuresets).max()
# Find the best label for each value.
freqs = defaultdict(FreqDist) # freq(label|value)
for featureset, label in labeled_featuresets:
feature_value = featureset.get(feature_name)
freqs[feature_value][label] += 1
decisions = dict((val, DecisionTreeClassifier(freqs[val].max()))
for val in freqs)
return DecisionTreeClassifier(label, feature_name, decisions)
def refine(self, labeled_featuresets, entropy_cutoff, depth_cutoff,
support_cutoff, binary=False, feature_values=None,
verbose=False):
if len(labeled_featuresets) <= support_cutoff: return
if self._fname is None: return
if depth_cutoff <= 0: return
for fval in self._decisions:
fval_featuresets = [(featureset, label) for (featureset, label)
in labeled_featuresets
if featureset.get(self._fname) == fval]
label_freqs = FreqDist(label for (featureset, label)
in fval_featuresets)
if entropy(ML |
ProfessorX/Config | .PyCharm30/system/python_stubs/-1247972723/samba/dcerpc/netlogon/netr_ChangeLogEntry.py | Python | gpl-2.0 | 1,801 | 0.007773 | # encoding: utf-8
# module samba.dcerpc.netlogon
# from /usr/lib/python2.7/dist-packages/samba/dcerpc/netlogon.so
# by generator 1.135
""" netlogon DCE/RPC """
# imports
import dcerpc as __dcerpc
import talloc as __talloc
class netr_ChangeLogEntry(__talloc.Object):
# no doc
def __init__(self, *args, **kwargs): # real signature unknown
pass
def __ndr_pack__(self, *args, **kwargs): # real signature unknown
"""
S.ndr_pack(object) -> blob
NDR pack
"""
pass
def __ndr_print__(self, *args, **kwargs): # real signature unknown
"""
S.ndr_print(object) -> None
NDR print
"""
pass
def __ndr_unpack__(self, *args, **kwargs): # real signature unknown
"""
S.ndr_unpack(class, blob, allow_remaining=False) -> None
NDR unpack
"""
pass
@staticmethod # known case of __new__
def __new__(S, *more): # real signature unknown; restored from __doc__
""" T.__new__(S, ...) -> a new object with type S, a subtype of T """
pass
db_index = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
delta_type = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
flags = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
object = property(lambda self: object(), lambda self, v: None, lambda self: None) | # default
object_rid = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
serial_number1 = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
serial_number2 = property(lambda self: object(), lambda self, v: | None, lambda self: None) # default
|
b0ttl3z/SickRage | sickbeard/clients/generic.py | Python | gpl-3.0 | 12,050 | 0.002656 | # coding=utf-8
# URL: https://sickrage.github.io
#
# This file is part of SickRage.
#
# SickRage is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SickRage is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SickRage. If not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
import re
import time
from base64 import b16encode, b32decode
from hashlib import sha1
import bencode
import six
from requests.compat import urlencode
from requests.models import HTTPError
import sickbeard
from sickbeard import helpers, logger
class GenericClient(object): # pylint: disable=too-many-instance-attributes
def __init__(self, name, host=None, username=None, password=None):
"""
Initializes the client
:name: str:name of the client
:host: str:url or ip of the client
:username: str: username for authenticating with the client
:password: str: password for authentication with the client
"""
self.name = name
self.username = sickbeard.TORRENT_USERNAME if not username else username
self.password = sickbeard.TORRENT_PASSWORD if not password else password
self.host = sickbeard.TORRENT_HOST if not host else host
self.url = None
self.response = None
self.auth = None
self.last_time = time.time()
self.session = helpers.make_session()
self.session.auth = (self.username, self.password)
def _request(self, method='get', params=None, data=None, files=None, cookies=None): # pylint: disable=too-many-arguments, too-many-return-statements
"""
Makes the actual request for the client, for everything except auth
"""
if time.time() > self.last_time + 1800 or not self.auth:
self.last_time = time.time()
self._get_auth()
log_string = '{0}: Requested a {1} connection to url {2}'.format(
self.name, method.upper(), self.url)
if params:
log_string += '?{0}'.format(urlencode(params))
if data:
log_string += ' and data: {0}{1}'.format(
str(data)[0:99], '...' if len(str(data)) > 100 else '')
logger.log(log_string, logger.DEBUG)
if not self.auth:
logger.log('{0}: Authentication Failed'.format(self.name), logger.WARNING)
return False
# Dict, loop through and change all key,value pairs to bytes
if isinstance(params, dict):
for key, value in six.iteritems(params):
if isinstance(key, six.text_type):
del params[key]
key = key.encode('utf-8')
if isinstance(value, six.text_type):
value = value.encode('utf-8')
params[key] = value
if isinstance(data, dict):
for key, value in six.iteritems(data):
if isinstance(key, six.text_type):
del data[key]
key = key.encode('utf-8')
if isinstance(value, six.text_type):
value = value.encode('utf-8')
data[key] = value
# List, loop through and change all indexes to bytes
if isinstance(params, list):
for index, value in enumerate(params):
if isinstance(value, six.text_type):
params[index] = value.encode('utf-8')
if isinstance(data, list):
for index, value in enumerate(data):
if isinstance(value, six.text_type):
data[index] = value.encode('utf-8')
# Unicode, encode to bytes
if isinstance(params, six.text_type):
params = params.encode('utf-8')
if isinstance(data, six.text_type):
data = data.encode('utf-8')
try:
self.response = self.session.request(
method.upper(), self.url, params=params, data=data,
files=files, cookies=cookies, timeout=120, verify=False)
self.response.raise_for_status()
except Exception as error:
helpers.handle_requests_exception(error)
return False
logger.log('{0}: Response to the {1} request is {2}'.format
(self.name, method.upper(), self.response.text), logger.DEBUG)
return True
def _get_auth(self): # pylint:disable=no-self-use
"""
This should be overridden and should return the auth_id needed for the client
"""
return None
def _add_torrent_uri(self, result): # pylint:disable=unused-argument, no-self-use
"""
This should be overridden should return the True/False from the client
when a torrent is added via url (magnet or .torrent link)
"""
return False
def _add_torrent_file(self, result): # pylint:disable=unused-argument, no-self-use
"""
This should be overridden should return the True/False from the client
when a torrent is added via result.content (only .torrent file)
"""
| return False
def _set_torrent_label(self, result): # pylint:disable=unused-argument, no-self-use
"""
This should be overridden should return the True/False from the client
when a torrent is set with label
"""
return True
def _set_torrent_ratio(self, result): # pylint:disable=unused-argument, no-self-use
"""
This should be | overridden should return the True/False from the client
when a torrent is set with ratio
"""
return True
def _set_torrent_seed_time(self, result): # pylint:disable=unused-argument, no-self-use
"""
This should be overridden should return the True/False from the client
when a torrent is set with a seed time
"""
return True
    def _set_torrent_priority(self, result):  # pylint:disable=unused-argument, no-self-use
        """
        This should be overridden should return the True/False from the client
        when a torrent is set with result.priority (-1 = low, 0 = normal, 1 = high)
        """
        return True
def _set_torrent_path(self, torrent_path): # pylint:disable=unused-argument, no-self-use
"""
This should be overridden should return the True/False from the client
when a torrent is set with path
"""
return True
def _set_torrent_pause(self, result): # pylint:disable=unused-argument, no-self-use
"""
This should be overridden should return the True/False from the client
when a torrent is set with pause
params: :result: an instance of the searchResult class
"""
return True
@staticmethod
def _get_torrent_hash(result):
"""
Gets the torrent hash from either the magnet or torrent file content
params: :result: an instance of the searchResult class
"""
if result.url.startswith('magnet'):
result.hash = re.findall(r'urn:btih:([\w]{32,40})', result.url)[0]
if len(result.hash) == 32:
result.hash = b16encode(b32decode(result.hash)).lower()
else:
if not result.content:
logger.log('Torrent without content', logger.ERROR)
raise Exception('Torrent without content')
try:
torrent_bdecode = helpers.bdecode(result.content, True)
except (bencode.BTL.BTFailure, Exception) as error:
logger.log('Unable to bdecode torrent', logger.ERROR)
logger.log('Error is: {0}'.format(error), logger.DEBUG)
# logger.log('Torrent bencoded data: {0! |
tongxindao/shiyanlou | shiyanlou_cs869/ershoufang_info/pic.py | Python | apache-2.0 | 766 | 0.001475 | # _*_ coding: utf-8 _*_
# filename: pic.py
import csv
import numpy
import matplotlib.pyplot as plt
# 读取 house.csv 文件中价格和面积列
price, size = numpy.loadtxt('house.csv', delimiter='|' | , usecols=(1, 2), unpack=True)
print price
print size
plt.figure()
plt.subplot(211)
# plt.title("price")
plt.title("/ 10000RMB")
plt.hist(price, bins=20)
plt.subplot(212)
# plt.title("area")
plt.xlabel("/ m**2")
plt.hist(size, bins=20)
plt.figure(2)
plt.title("price")
plt.plot(price)
plt.show()
# 求价格和面积的平均值
price_mean = nump | y.mean(price)
size_mean = numpy.mean(size)
# 求价格和面积的方差
price_var = numpy.var(price)
size_var = numpy.var(size)
print "价格的方差为:", price_var
print "面积的方差为:", size_var
|
stiphyMT/plantcv | plantcv/plantcv/rectangle_mask.py | Python | mit | 2,816 | 0.002841 | # Make masking rectangle
import cv2
import numpy as np
import os
from plantcv.plantcv import fatal_error
from plantcv.plantcv import print_image
from plantcv.plantcv import plot_image
from plantcv.plantcv import params
def rectangle_mask(img, p1, p2, color="black"):
    """Takes an input image and returns a binary image masked by a rectangular
    area denoted by p1 and p2. Note that p1 = (0,0) is the top left hand corner
    and the bottom right hand corner is p2 = (max-value(x), max-value(y)).

    Inputs:
    img       = RGB or grayscale image data
    p1        = point 1 (top-left corner of the rectangle)
    p2        = point 2 (bottom-right corner of the rectangle)
    color     = black, white, or gray

    Returns:
    img1      = copy of the original image with the rectangle drawn on it
    bnk       = binary image of the rectangle
    contour   = object contour vertices
    hierarchy = contour hierarchy list

    :param img: numpy.ndarray
    :param p1: tuple
    :param p2: tuple
    :param color: str
    :return img1: numpy.ndarray
    :return bnk: numpy.ndarray
    :return contour: list
    :return hierarchy: list
    """
    params.device += 1
    # get the dimensions of the input image
    if len(np.shape(img)) == 3:
        ix, iy, iz = np.shape(img)
    else:
        ix, iy = np.shape(img)
    # create a blank image of same size
    bnk = np.zeros((ix, iy), dtype=np.uint8)
    img1 = np.copy(img)
    # draw a rectangle denoted by pt1 and pt2 on the blank image
    cv2.rectangle(img=bnk, pt1=p1, pt2=p2, color=(255, 255, 255), thickness=-1)
    ret, bnk = cv2.threshold(bnk, 127, 255, 0)
    # [-2:] keeps the last two return values, which covers both the 2- and
    # 3-value signatures of cv2.findContours across OpenCV versions
    contour, hierarchy = cv2.findContours(bnk, cv2.RETR_TREE, cv2.CHAIN_APPROX_NONE)[-2:]
    # make sure entire rectangle is within (visable within) plotting region or else it will not fill with
    # thickness = -1. Note that you should only print the first contour (contour[0]) if you want to fill with
    # thickness = -1. otherwise two rectangles will be drawn and the space between them will get filled
    if color.upper() == "WHITE":
        cv2.drawContours(bnk, contour, 0, (255, 255, 255), -1)
        cv2.drawContours(img1, contour, 0, (255, 255, 255), -1)
    elif color.upper() == "BLACK":
        # invert the blank canvas to white first so the drawn rectangle
        # reads as a black region on a white background
        bnk += 255
        cv2.drawContours(bnk, contour, 0, (0, 0, 0), -1)
        cv2.drawContours(img1, contour, 0, (0, 0, 0), -1)
    elif color.upper() == "GRAY" or color.upper() == "GREY":
        cv2.drawContours(bnk, contour, 0, (192, 192, 192), -1)
        cv2.drawContours(img1, contour, 0, (192, 192, 192), -1)
    else:
        fatal_error(str(color) + " is not a valid color, must be 'white', 'black', or 'gray'.")
    if params.debug == 'print':
        print_image(bnk, os.path.join(params.debug_outdir, str(params.device) + '_roi.png'))
    elif params.debug == 'plot':
        plot_image(img1, cmap="gray")
        plot_image(bnk, cmap="gray")
    return img1, bnk, contour, hierarchy
|
rgayon/plaso | tests/parsers/sqlite_plugins/mac_notes.py | Python | apache-2.0 | 1,683 | 0.002377 | # -*- coding: utf-8 -*-
"""Tests for mac notes plugin."""
from __future__ import unicode_literals
import unittest
from plaso.lib import definitions
from plaso.parsers.sqlite_plugins import mac_notes
from tests.parsers.sqlite_plugins import test_lib
class MacNotesTest(test_lib.SQLitePluginTestCase):
    """Tests for mac notes database plugin."""

    def testProcess(self):
        """Test the Process function on a Mac Notes file."""
        plugin_object = mac_notes.MacNotesPlugin()
        storage_writer = self._ParseDatabaseFileWithPlugin(
            ['NotesV7.storedata'], plugin_object)

        self.assertEqual(storage_writer.number_of_events, 6)
        self.assertEqual(storage_writer.number_of_warnings, 0)

        events = list(storage_writer.GetEvents())

        # Check the first note.
        event = events[0]
        # Fixed extraction garbling: 'event.time | stamp' -> 'event.timestamp'.
        self.CheckTimestamp(event.timestamp, '2014-02-11 02:38:27.097813')
        self.assertEqual(
            event.timestamp_desc, definitions.TIME_DESCRIPTION_CREATION)

        event_data = self._GetEventDataOfEvent(storage_writer, event)
        expected_title = 'building 4th brandy gibs'
        self.assertEqual(event_data.title, expected_title)
        expected_text = (
            'building 4th brandy gibs microsoft office body soul and peace '
            'example.com 3015555555: plumbing and heating claim#123456 Small '
            'business ')
        self.assertEqual(event_data.text, expected_text)

        expected_short_message = 'title:{0:s}'.format(expected_title)
        expected_message = 'title:{0:s} note_text:{1:s}'.format(
            expected_title, expected_text)
        self._TestGetMessageStrings(
            event_data, expected_message, expected_short_message)
if __name__ == '__main__':
unittest.main()
|
graingert/alembic | alembic/operations/ops.py | Python | mit | 69,224 | 0.000058 | from .. import util
from ..util import sqla_compat
from . import schemaobj
from sqlalchemy.types import NULLTYPE
from .base import Operations, BatchOperations
import re
class MigrateOperation(object):
    """base class for migration command and organization objects.

    This system is part of the operation extensibility API.

    .. versionadded:: 0.8.0

    .. seealso::

        :ref:`operation_objects`

        :ref:`operation_plugins`

        :ref:`customizing_revision`

    """

    @util.memoized_property
    def info(self):
        """A dictionary that may be used to store arbitrary information
        along with this :class:`.MigrateOperation` object.

        """
        # memoized: the same dict instance is returned on every access, so
        # mutations made by callers persist for this operation's lifetime
        return {}
class AddConstraintOp(MigrateOperation):
    """Represent an add constraint operation."""

    @property
    def constraint_type(self):
        # subclasses must report their constraint type string
        raise NotImplementedError()

    @classmethod
    def from_constraint(cls, constraint):
        """Return the concrete Create*Op for a SQLAlchemy constraint,
        dispatching on the constraint's ``__visit_name__``."""
        funcs = {
            "unique_constraint": CreateUniqueConstraintOp.from_constraint,
            "foreign_key_constraint": CreateForeignKeyOp.from_constraint,
            "primary_key_constraint": CreatePrimaryKeyOp.from_constraint,
            "check_constraint": CreateCheckConstraintOp.from_constraint,
            "column_check_constraint": CreateCheckConstraintOp.from_constraint,
        }
        return funcs[constraint.__visit_name__](constraint)

    def reverse(self):
        """Return the inverse operation: dropping the same constraint."""
        return DropConstraintOp.from_constraint(self.to_constraint())

    def to_diff_tuple(self):
        """Return the autogenerate diff-tuple form of this operation."""
        return ("add_constraint", self.to_constraint())
@Operations.register_operation("drop_constraint")
@BatchOperations.register_operation("drop_constraint", "batch_drop_constraint")
class DropConstraintOp(MigrateOperation):
    """Represent a drop constraint operation."""

    def __init__(
            self,
            constraint_name, table_name, type_=None, schema=None,
            _orig_constraint=None):
        self.constraint_name = constraint_name
        self.table_name = table_name
        self.constraint_type = type_
        self.schema = schema
        # retained so the operation can later be reversed / re-rendered
        self._orig_constraint = _orig_constraint

    def reverse(self):
        """Return the inverse operation: re-adding the original constraint."""
        if self._orig_constraint is None:
            raise ValueError(
                "operation is not reversible; "
                "original constraint is not present")
        return AddConstraintOp.from_constraint(self._orig_constraint)

    def to_diff_tuple(self):
        """Return the autogenerate diff-tuple form of this operation."""
        if self.constraint_type == "foreignkey":
            return ("remove_fk", self.to_constraint())
        else:
            return ("remove_constraint", self.to_constraint())

    @classmethod
    def from_constraint(cls, constraint):
        """Build a :class:`.DropConstraintOp` from a SQLAlchemy constraint."""
        types = {
            "unique_constraint": "unique",
            "foreign_key_constraint": "foreignkey",
            "primary_key_constraint": "primary",
            "check_constraint": "check",
            "column_check_constraint": "check",
        }

        # Fixed extraction garbling: '_table_for_co | nstraint'.
        constraint_table = sqla_compat._table_for_constraint(constraint)
        return cls(
            constraint.name,
            constraint_table.name,
            schema=constraint_table.schema,
            type_=types[constraint.__visit_name__],
            _orig_constraint=constraint
        )

    def to_constraint(self):
        """Return the original constraint object, if one was provided."""
        if self._orig_constraint is not None:
            return self._orig_constraint
        else:
            raise ValueError(
                "constraint cannot be produced; "
                "original constraint is not present")

    @classmethod
    @util._with_legacy_names([
        ("type", "type_"),
        ("name", "constraint_name"),
    ])
    def drop_constraint(
            cls, operations, constraint_name, table_name,
            type_=None, schema=None):
        """Drop a constraint of the given name, typically via DROP CONSTRAINT.

        :param constraint_name: name of the constraint.
        :param table_name: table name.
        :param ``type_``: optional, required on MySQL.  can be
         'foreignkey', 'primary', 'unique', or 'check'.
        :param schema: Optional schema name to operate within.  To control
         quoting of the schema outside of the default behavior, use
         the SQLAlchemy construct
         :class:`~sqlalchemy.sql.elements.quoted_name`.

         .. versionadded:: 0.7.0 'schema' can now accept a
            :class:`~sqlalchemy.sql.elements.quoted_name` construct.

        .. versionchanged:: 0.8.0 The following positional argument names
           have been changed:

           * name -> constraint_name

        """
        op = cls(constraint_name, table_name, type_=type_, schema=schema)
        return operations.invoke(op)

    @classmethod
    def batch_drop_constraint(cls, operations, constraint_name, type_=None):
        """Issue a "drop constraint" instruction using the
        current batch migration context.

        The batch form of this call omits the ``table_name`` and ``schema``
        arguments from the call.

        .. seealso::

            :meth:`.Operations.drop_constraint`

        .. versionchanged:: 0.8.0 The following positional argument names
           have been changed:

           * name -> constraint_name

        """
        op = cls(
            constraint_name, operations.impl.table_name,
            type_=type_, schema=operations.impl.schema
        )
        return operations.invoke(op)
@Operations.register_operation("create_primary_key")
@BatchOperations.register_operation(
"create_primary_key", "batch_create_primary_key")
class CreatePrimaryKeyOp(AddConstraintOp):
"""Represent a create primary key operation."""
constraint_type = "primarykey"
def __init__(
self, constraint_name, table_name, columns,
schema=None, _orig_constraint=None, **kw):
self.constraint_name = constraint_name
self.table_name = table_name
self.columns = columns
self.schema = schema
self._orig_constraint = _orig_constraint
self.kw = kw
@classmethod
def from_constraint(cls, constraint):
constraint_table = sqla_compat._table_for_constraint(constraint)
return cls(
constraint.name,
constraint_table.name,
constraint.columns,
schema=constraint_table.schema,
_orig_constraint=constraint
)
def to_constraint(self, migration_context=None):
if self._orig_constraint is not None:
return self._orig_constraint
schema_obj = schemaobj.SchemaObjects(migration_context)
return schema_obj.primary_key_constraint(
self.constraint_name, self.table_name,
self.columns, schema=self.schema)
@classmethod
@util._with_legacy_names([
('name', 'constraint_name'),
('cols', 'columns')
])
def create_primary_key(
cls, operations,
constraint_name, table_name, columns, schema=None):
"""Issue a "create primary key" instruction using the current
migration context.
e.g.::
from alembic import op
op.create_primary_key(
"pk_my_table", "my_table",
["id", "version"]
)
This internally generates a :class:`~sqlalchemy.schema.Table` object
containing the necessary columns, then generates a new
:class:`~sqlalchemy.schema.PrimaryKeyConstraint`
object which it then associates with the
:class:`~sqlalchemy.schema.Table`.
Any event listeners associated with this action will be fired
off normally. The :class:`~sqlalchemy.schema.AddConstraint`
construct is ultimately used to generate the ALTER statement.
:param name: Name of the primary key constraint. The name is necessary
so that an ALTER statement can be emitted. For setups that
use an automated naming scheme such as that described at
:ref:`sqla:constraint_naming_conventions`
``name`` here can be ``None``, as the event listener will
apply the name to the constraint object when it is associated
with the table.
|
p-morais/rl | rl/utils/plotting.py | Python | mit | 1,503 | 0.001331 | """This screws up visualize.py"""
"""
import numpy as np
from matplotlib import pyplot as plt
from matplotlib.lines import Line2D
from torch.autograd import Variable as Var
from torch import Tensor
class RealtimePlot():
def __init__(self, style='ggplot'):
plt.style.use(style)
plt.ion()
self.fig, self.ax = plt.subplots()
self.xlim = 0
self.yvals = []
self.line = Line2D([], [])
self. | ax.add_line(self.line)
def config(self, ylabel, xlabel):
self.ax.set_ylabel(ylabel)
self.ax.set_xlabel(xlabel)
self.fig.tight_layout()
def plot(self, y):
self.yvals.append(y)
self.line.set_data(np.arange(len(self.yvals)), self.yvals)
self.ax.relim()
self.ax.autoscale_view()
self.ax.set_xlim(0, self.xlim)
self.xlim += 1
self.fig.canvas.flush_e | vents()
def done(self):
plt.ioff()
plt.show()
def policyplot(env, policy, trj_len):
obs_dim = env.observation_space.shape[0]
action_dim = env.action_space.shape[0]
y = np.zeros((trj_len, action_dim))
X = np.zeros((trj_len, obs_dim))
obs = env.reset()
for t in range(trj_len):
X[t, :] = obs
action = policy(Var(Tensor(obs[None, :]))).data.numpy()[0]
y[t, :] = action
obs = env.step(action)[0]
fig, axes = plt.subplots(1, action_dim)
for a in range(action_dim):
axes[a].plot(np.arange(trj_len), y[:, a])
plt.show()
""" |
nieklinnenbank/FreeNOS | support/scons/autoconf.py | Python | gpl-3.0 | 3,238 | 0.005868 | #
# Copyright (C) 2010 Niek Linnenbank
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from SCons.SConf import *
from SCons.Script import *
#
# SConf helper function.
#
def TryCompileC(context):
    """Try to compile and link a minimal C program under the flag stored in
    ``context.env['TRYFLAG']``.

    Returns the (truthy/falsy) result of the link attempt, and also reports
    it back through ``context.Result``.
    """
    # Print out a debug message.
    context.Message('Checking for ' + context.env['TRYFLAG'] + ' ... ')

    # Use the simplest C program possible.
    # (Fixed extraction garbling in the string: 'int main | (' -> 'int main(')
    source_file = "int main(int argc, char **argv)" \
                  "{" \
                  "    return 0;" \
                  "}\n"

    # Try to compile and link it.
    result = context.TryLink(source_file, '.c')

    # Return the result status.
    context.Result(result)
    return result
#
# Checks if the compiler in the given environment supports
# the given command-line CFLAGS, and unsets it if not.
#
# Thanks to loonycyborg on #scons for his help!
#
def CheckCCFlags(env):
    # Candidate flags are read from env['_CCFLAGS']; each flag that links
    # successfully is appended to the real env['CCFLAGS'].  Work on a copy
    # so the candidate list is stable while we iterate.
    cflags = env['_CCFLAGS'][:]

    # Loop all CFLAGS.
    for flag in cflags:

        # Setup a temporary environment.
        conf = Configure(env.Clone(),
                         custom_tests = { 'TryCompileC' : TryCompileC })
        conf.env['CCFLAGS'].append(flag)
        conf.env.Replace(TRYFLAG = flag)

        # Try to link a dummy program.
        result = conf.TryCompileC()

        # If success, append to CFLAGS
        if result:
            env['CCFLAGS'].append(flag)

        # Done. Try next.
        conf.Finish()
#
# SConf helper function.
#
def TryCompileCXX(context):
    """Report whether the flag in ``context.env['TRYFLAG']`` compiles a
    trivial C++ translation unit, returning the compile result."""
    flag = context.env['TRYFLAG']
    context.Message('Checking for ' + flag + ' ... ')

    # Minimal translation unit: an empty main() returning success.
    program = ("int main(int argc, char **argv)"
               "{"
               "    return 0;"
               "}\n")

    status = context.TryCompile(program, '.cpp')
    context.Result(status)
    return status
#
# Checks if the compiler in the given environment supports
# the given command-line CPPFLAGS, and unsets it if not.
#
# Thanks to loonycyborg on #scons for his help!
#
def CheckCXXFlags(env):
    # Candidate flags are read from env['_CXXFLAGS']; each flag that compiles
    # successfully is appended to the real env['CXXFLAGS'].
    cppflags = env['_CXXFLAGS'][:]

    # Loop all CPPFLAGS.
    for flag in cppflags:

        # Setup a temporary environment.
        conf = Configure(env.Clone(),
                         custom_tests = { 'TryCompileCXX' : TryCompileCXX })
        conf.env['CXXFLAGS'].append(flag)
        conf.env.Replace(TRYFLAG = flag)

        # Try to link a dummy program.
        result = conf.TryCompileCXX()

        # If success, append it to CXXFLAGS
        if result:
            env['CXXFLAGS'].append(flag)

        # Done. Try next.
        conf.Finish()
|
summer1988/pythunder | pythunder/libs/dispatch/dispatcher.py | Python | lgpl-3.0 | 11,389 | 0.001317 | import sys
import threading
import warnings
import weakref
import six
if six.PY2:
from .weakref_backports import WeakMethod
else:
from weakref import WeakMethod
from six.moves import range
def _make_id(target):
if hasattr(target, '__func__'):
return (id(target.__self__), id(target.__func__))
return id(target)
# Identity key for "no sender", precomputed once for fast comparisons.
NONE_ID = _make_id(None)

# A marker for caching
# (sentinel stored in sender_receivers_cache meaning "no receivers match").
NO_RECEIVERS = object()
class Signal(object):
"""
Base class for all signals
Internal attributes:
receivers
{ receiverkey (id) : weakref(receiver) }
"""
def __init__(self, providing_args=None, use_caching=False):
    """
    Create a new signal.

    providing_args
        A list of the arguments this signal can pass along in a send() call.
    """
    # list of (lookup_key, receiver-or-weak-reference) pairs
    self.receivers = []
    if providing_args is None:
        providing_args = []
    self.providing_args = set(providing_args)
    # guards mutation of self.receivers across threads
    self.lock = threading.Lock()
    self.use_caching = use_caching
    # For convenience we create empty caches even if they are not used.
    # A note about caching: if use_caching is defined, then for each
    # distinct sender we cache the receivers that sender has in
    # 'sender_receivers_cache'. The cache is cleaned when .connect() or
    # .disconnect() is called and populated on send().
    self.sender_receivers_cache = weakref.WeakKeyDictionary() if use_caching else {}
    # set lazily when a weakly-referenced receiver dies; triggers pruning
    self._dead_receivers = False
def connect(self, receiver, sender=None, weak=True, dispatch_uid=None):
    """
    Connect receiver to sender for signal.

    Arguments:

        receiver
            A function or an instance method which is to receive signals.
            Receivers must be hashable objects.

            If weak is True, then receiver must be weak referenceable.

            Receivers must be able to accept keyword arguments.

            If a receiver is connected with a dispatch_uid argument, it
            will not be added if another receiver was already connected
            with that dispatch_uid.

        sender
            The sender to which the receiver should respond. Must either be
            of type Signal, or None to receive events from any sender.

        weak
            Whether to use weak references to the receiver. By default, the
            module will attempt to use weak references to the receiver
            objects. If this parameter is false, then strong references will
            be used.

        dispatch_uid
            An identifier used to uniquely identify a particular instance of
            a receiver. This will usually be a string, though it may be
            anything hashable.
    """
    # Receivers are deduplicated on (receiver-or-uid key, sender key).
    if dispatch_uid:
        lookup_key = (dispatch_uid, _make_id(sender))
    else:
        lookup_key = (_make_id(receiver), _make_id(sender))

    if weak:
        ref = weakref.ref
        receiver_object = receiver
        # Check for bound methods: a plain weakref to a bound method dies
        # immediately, so WeakMethod is used and the instance is watched.
        if hasattr(receiver, '__self__') and hasattr(receiver, '__func__'):
            ref = WeakMethod
            receiver_object = receiver.__self__
        if six.PY3:
            receiver = ref(receiver)
            weakref.finalize(receiver_object, self._remove_receiver)
        else:
            receiver = ref(receiver, self._remove_receiver)

    with self.lock:
        self._clear_dead_receivers()
        # only append if no receiver is registered under this lookup_key yet
        for r_key, _ in self.receivers:
            if r_key == lookup_key:
                break
        else:
            self.receivers.append((lookup_key, receiver))
        self.sender_receivers_cache.clear()
def disconnect(self, receiver=None, sender=None, weak=None, dispatch_uid=None):
    """
    Disconnect receiver from sender for signal.

    If weak references are used, disconnect need not be called. The receiver
    will be remove from dispatch automatically.

    Arguments:

        receiver
            The registered receiver to disconnect. May be none if
            dispatch_uid is specified.

        sender
            The registered sender to disconnect

        dispatch_uid
            the unique identifier of the receiver to disconnect

    Returns True if a receiver was found and removed, False otherwise.
    """
    if weak is not None:
        # 'weak' is accepted only for backwards compatibility; it is ignored.
        warnings.warn("Passing `weak` to disconnect has no effect.", stacklevel=2)
    if dispatch_uid:
        lookup_key = (dispatch_uid, _make_id(sender))
    else:
        lookup_key = (_make_id(receiver), _make_id(sender))

    disconnected = False
    with self.lock:
        self._clear_dead_receivers()
        for index in range(len(self.receivers)):
            (r_key, _) = self.receivers[index]
            if r_key == lookup_key:
                disconnected = True
                del self.receivers[index]
                break
        self.sender_receivers_cache.clear()
    return disconnected
def has_listeners(self, sender=None):
    """Return True if any live receiver is connected for *sender*."""
    return bool(self._live_receivers(sender))
def send(self, sender, **named):
    """
    Send signal from sender to all connected receivers.

    If any receiver raises an error, the error propagates back through send,
    terminating the dispatch loop, so it is quite possible to not have all
    receivers called if a raises an error.

    Arguments:

        sender
            The sender of the signal Either a specific object or None.

        named
            Named arguments which will be passed to receivers.

    Returns a list of tuple pairs [(receiver, response), ... ].
    """
    responses = []
    # Fast path: nothing connected, or cached knowledge that no receiver
    # matches this sender.
    if not self.receivers or self.sender_receivers_cache.get(sender) is NO_RECEIVERS:
        return responses

    for receiver in self._live_receivers(sender):
        response = receiver(signal=self, sender=sender, **named)
        responses.append((receiver, response))
    return responses
def send_robust(self, sender, **named):
"""
Send signal from sender to all connected receivers catching errors.
Arguments:
sender
The sender of the signal. Can be any python object (normally one
registered with a connect if you actually want something to
occur).
named
Named arguments which will be passed to receivers. These
arguments must be a subset of the argument names defined in
providing_args.
Return a list of tuple pairs [(receiver, response), ... ]. May raise
DispatcherKeyError.
If any receiver raises an error (specifically any subclass of
Exception), the error instance is returned as the result for that
receiver. The traceback is always attached to the error at
``__traceback__``.
"""
responses = []
if not self.receivers or s | elf.sender_receivers_cache.get(sender) is NO_RECEIVERS:
return responses
# Call each receiver with whatever arguments it can accept.
# Return a list of tuple pairs [(receiver, response), ... ].
for receiver in self._live_ | receivers(sender):
try:
response = receiver(signal=self, sender=sender, **named)
except Exception as err:
if not hasattr(err, '__traceback__'):
err.__traceback__ = sys.exc_info()[2]
responses.append((receiver, err))
else:
responses.append((receiver, response))
return responses
def _clear_dead_receivers(self):
# Note: caller is assumed to hold self.lock.
if self._dead_receivers:
self._dead_receivers = False
new_receivers = []
for r in self.receivers:
if isinstance(r[1], weakref.ReferenceType) and r[1]() is None:
continue
new_receivers.append(r)
|
DaveA50/lbry | lbrynet/create_network.py | Python | mit | 2,692 | 0.004086 | #!/usr/bin/env python
#
# This library is free software, distributed under the terms of
# the GNU Lesser General Public License Version 3, or any later version.
# See the COPYING file included in this archive
#
# Thanks to Paul Cannon for IP-address resolution functions (taken from aspn.activestate.com)
import argparse
import os, sys, time, signal
amount = 0
def destroyNetwork(nodes):
    # Tear down the test network: SIGTERM each spawned node process while
    # rendering a 50-character progress bar.
    # NOTE: this module is Python 2 code (print statements).
    print 'Destroying Kademlia network...'
    i = 0
    for node in nodes:
        i += 1
        hashAmount = i*50/amount
        hashbar = '#'*hashAmount
        output = '\r[%-50s] %d/%d' % (hashbar, i, amount)
        sys.stdout.write(output)
        time.sleep(0.15)
        # ask the node process to shut down
        os.kill(node, signal.SIGTERM)
    print
def main():
parser = argparse.ArgumentParser(description="Launch a network of dht nodes")
parser.add_argument("amount_of_nodes",
help="The number of nodes to create",
type=int)
parser.add_argument("--nic_ip_address",
help="The network interface on which these nodes will listen for connections "
"from each other and from other nodes. If omitted, an attempt will be "
"made | to automatically determine the system's IP address, but this may "
"result in the nodes being reachable only from this system")
args = parser.parse_args()
global amount
amount = args.amount_of_nodes
if args.nic_ip_address:
ipAddress = args.nic_ip_address
else:
import socket
ipAddress = socket.gethostbyname(socket.gethostname())
print 'Network interface IP address om | itted; using %s...' % ipAddress
startPort = 4000
port = startPort+1
nodes = []
print 'Creating Kademlia network...'
try:
nodes.append(os.spawnlp(os.P_NOWAIT, 'lbrynet-launch-node', 'lbrynet-launch-node', str(startPort)))
for i in range(amount-1):
time.sleep(0.15)
hashAmount = i*50/amount
hashbar = '#'*hashAmount
output = '\r[%-50s] %d/%d' % (hashbar, i, amount)
sys.stdout.write(output)
nodes.append(os.spawnlp(os.P_NOWAIT, 'lbrynet-launch-node', 'lbrynet-launch-node', str(port), ipAddress, str(startPort)))
port += 1
except KeyboardInterrupt:
'\nNetwork creation cancelled.'
destroyNetwork(nodes)
sys.exit(1)
print '\n\n---------------\nNetwork running\n---------------\n'
try:
while 1:
time.sleep(1)
except KeyboardInterrupt:
pass
finally:
destroyNetwork(nodes)
if __name__ == '__main__':
main() |
clovis/PhiloLogic4 | www/scripts/get_notes.py | Python | gpl-3.0 | 1,038 | 0.009634 | #!/usr/bin/env python3
import json
import os
from wsgiref.handlers import CGIHandler
from philologic.runtime.DB import DB
from philologic.runtime import generate_text_object
import sys

# Make the parent directory importable so the sibling custom_functions
# package can be found when this CGI script runs from its own directory.
# (Fixed extraction garbling: 'sys.path.app | end'.)
sys.path.append("..")
import custom_functions
try:
from custom_functions import WebConfig
except ImportError:
from philologic.runtime import WebConfig
try:
from custom_functions import WSGIHandler
except ImportError:
from philologic.runtime import WSGIHandler
def get_notes(environ, start_response):
    """WSGI application: build a PhiloLogic text object for a note and
    stream it to the client as a JSON response.

    Responds 200 with a permissive CORS header; the body is the JSON
    encoding of generate_text_object(..., note=True).
    """
    # Fixed extraction garbling: 'st | atus'.
    status = '200 OK'
    headers = [('Content-type', 'application/json; charset=UTF-8'),
               ("Access-Control-Allow-Origin", "*")]
    start_response(status, headers)
    config = WebConfig(os.path.abspath(os.path.dirname(__file__)).replace('scripts', ''))
    # NOTE(review): 'db' is never used below — confirm whether constructing
    # DB() has required side effects before removing it.
    db = DB(config.db_path + '/data/')
    request = WSGIHandler(environ, config)
    text_object = generate_text_object(request, config, note=True)
    yield json.dumps(text_object).encode('utf8')
if __name__ == "__main__":
CGIHandler().run(get_notes)
|
ghostcode/django-blog | blogproject/blog/models.py | Python | mit | 3,967 | 0.002287 | from django.db import models
from django.contrib.auth.models import User
from django.urls import reverse
class Category(models.Model):
    """Post category.

    Django requires every model to inherit from models.Model.  A category
    only needs a simple name field.  CharField is a string column whose
    max_length parameter caps how long a stored name may be; names longer
    than that cannot be saved.  Django provides many other field types
    (DateTimeField, IntegerField, ...); the full list is documented at:
    https://docs.djangoproject.com/en/1.10/ref/models/fields/#field-types
    """
    name = models.CharField(max_length=100)

    def __str__(self):
        return self.name
class Tag(models.Model):
    """Post tag — as simple as Category: a single name field.

    Remember it must inherit from models.Model!
    """
    name = models.CharField(max_length=100)

    def __str__(self):
        return self.name
class Post(models.Model):
    """Blog article — the most involved model, mainly due to its many fields."""
    # Article title.
    title = models.CharField(max_length=70)
    # View counter; PositiveIntegerField rejects negative values.
    views = models.PositiveIntegerField(default=0)
    # Article body.  Short strings use CharField, but an article body can be
    # a large block of text, so TextField is used for storage.
    body = models.TextField()
    # Creation time and last-modified time; DateTimeField stores datetimes.
    created_time = models.DateTimeField()
    modified_time = models.DateTimeField()
    # Excerpt.  An article may have no excerpt, but by default CharField
    # requires a value, so blank=True allows it to be empty.
    excerpt = models.CharField(max_length=200, blank=True)
    # Category and tags (models defined above), linked with two different
    # relation kinds: a post belongs to exactly one category while a category
    # holds many posts, hence ForeignKey (one-to-many); a post may carry
    # several tags and a tag may mark several posts, hence ManyToManyField
    # (many-to-many), with blank=True making tags optional.  See:
    # https://docs.djangoproject.com/en/1.10/topics/db/models/#relationships
    category = models.ForeignKey(
        Category,
        on_delete=models.CASCADE,
    )
    tags = models.ManyToManyField(Tag, blank=True)
    # Article author.  User is imported from django.contrib.auth.models;
    # django.contrib.auth is Django's built-in app handling user
    # registration/login, and User is its ready-made user model.  One author
    # may write many posts, so this is one-to-many like Category.
    author = models.ForeignKey(
        User,
        on_delete=models.CASCADE,
    )

    def __str__(self):
        return self.title

    def get_absolute_url(self):
        # Resolve this post's canonical URL from the URLconf.
        return reverse('blog:detail',kwargs={'pk':self.pk})

    def increase_views(self):
        # Bump the view counter; update_fields keeps the UPDATE minimal.
        self.views += 1
        self.save(update_fields=['views'])

    class Meta:
        # Newest posts first.
        ordering = ['-created_time']
|
lodemo/CATANA | src/data_collection/sblade/sblade/spiders/makerstudio.py | Python | mit | 3,250 | 0.006154 | # -*- coding: utf-8 -*-
# MIT License
#
# Copyright (c) 2017 Moritz Lode
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import scrapy
import re
import json
class MakerStudioSpider(scrapy.Spider):
name = "makerstudio"
allowed_domains = ["socialblade.com"]
file_path = '../../../data/network_channel_id_broadtv.json'
network_url = 'https://socialblade.com/youtube/network/maker'
top_url = 'http://socialblade.com/youtube/network/maker/topusers'
custom_settings = {
'DOWNLOAD_DELAY': 1.8,
}
MAX_ITERATION = 10
i = 0
channelIDs = set()
def start_requests(self):
'''
returns iterable of Requests, either list or generator, which will be begin to crawled
'''
with open(self.file_path) as IDs:
for id in json.load(IDs):
if isinstance(id, list):
for i in id:
self.channelIDs.add(i)
else:
self.channelIDs.add(id)
self.log('%s old ids read' % len(self.channelIDs))
yield scrapy.Request(self.top_url, callback=self.parseTop, dont_filter=True)
yield scrapy.Request(self.network_url, callback=self.parse, dont_filter=True)
def parse(self, response):
p = re.compile('^/ | youtube/s/\?q=(.+)$')
links = response.css('a').xpath('@href').extr | act()
for ref in links:
m = p.match(ref)
if m:
self.channelIDs.add(m.groups()[0])
if self.i <= self.MAX_ITERATION:
self.i = self.i + 1
yield scrapy.Request(response.url, callback=self.parse, dont_filter=True)
else:
with open(self.file_path, 'wb') as f:
f.write(json.dumps(list(self.channelIDs)))
self.log('Saved {} IDs to file {}'.format(len(self.channelIDs), filename))
def parseTop(self, response):
p = re.compile('^/youtube/channel/(.+)$')
links = response.css('a').xpath('@href').extract()
for ref in links:
m = p.match(ref)
if m:
print 'top:', m.groups()[0]
self.channelIDs.add(m.groups()[0]) |
vbannai/neutron | neutron/db/migration/alembic_migrations/versions/24c7ea5160d7_cisco_csr_vpnaas.py | Python | apache-2.0 | 2,012 | 0.001491 | # Copyright 2014 Cisco Systems, Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Cisco CSR VPNaaS
Revision ID: 24c7ea5160d7
Revises: 492a106273f8
Create Date: 2014-02-03 13:06:50.407601
"""
# revision identifiers, us | ed by Alembic.
revision = '24c7ea5160d7'
down_revision = '492a106273f8'
# Change to ['*'] if this migration applies to all plugins
migration_for_plugins = [
'neutron.services.vpn.plugin.VPNDriverPlugin',
]
from alembic import op
import sqlalchemy as sa
from neutron.db import migration
def upgrade(active_plugins=None, options=None):
    # Create the cisco_csr_identifier_map table, which maps an IPSec site
    # connection to the CSR-side tunnel/IKE/IPSec policy ids.  Skipped
    # unless one of migration_for_plugins is active.
    if not migration.should_run(active_plugins, migration_for_plugins):
        return

    op.create_table(
        'cisco_csr_identifier_map',
        sa.Column('tenant_id', sa.String(length=255), nullable=True),
        sa.Column('ipsec_site_conn_id', sa.String(length=64),
                  primary_key=True),
        sa.Column('csr_tunnel_id', sa.Integer(), nullable=False),
        sa.Column('csr_ike_policy_id', sa.Integer(), nullable=False),
        sa.Column('csr_ipsec_policy_id', sa.Integer(), nullable=False),
        # rows disappear automatically with their site connection
        sa.ForeignKeyConstraint(['ipsec_site_conn_id'],
                                ['ipsec_site_connections.id'],
                                ondelete='CASCADE')
    )
def downgrade(active_plugins=None, options=None):
    # Reverse of upgrade(): drop the mapping table, under the same
    # plugin-activation guard.
    if not migration.should_run(active_plugins, migration_for_plugins):
        return

    op.drop_table('cisco_csr_identifier_map')
|
aitjcize/micropython | tests/io/file1.py | Python | mit | 79 | 0 | f = open | ("io/data/file1")
# Exercise successive reads on the handle opened above: a fixed-size read,
# then the remainder of the current line, then the rest of the file.
print(f.read(5))
print(f.readline())
print(f.read())
|
openbudgets/openbudgets | openbudgets/apps/accounts/admin.py | Python | bsd-3-clause | 2,240 | 0.003125 | from django.conf import settings
from django.contrib import admin
from django.contrib.auth.models import Group
from django.contrib.sites.models import Site
from django.contrib.auth.admin import UserAdmin
from django.utils.translation import ugettext_lazy as _
from registration.models import RegistrationProfile
from openbudgets.apps.accounts import models
from openbudgets.apps.accounts import forms
class AccountAdmin(UserAdmin):
"""Defines common settings for all our UserProxy forms"""
form = forms.AccountChangeForm
add_form = forms.AccountCreationForm
fieldsets = ((_('Account credentials'), {'fields': ('password', 'email',
'first_name', 'last_name',
'is_active')}),)
class CoreTeamAccountAdmin(AccountAdmin):
"""Admin form for Core Team members"""
def queryset(self, request):
core_team_users = Group.objects.filter(id=settings.OPENBUDGETS_GROUP_ID_CORE)
qs = super(CoreTeamAccountAdmin, self).get_queryset(request)
qs = qs.filter(groups=core_team_users)
return qs
class ContentTeamAccountAdmin(AccountAdmin):
"""Admin form for Content Team members"""
def queryset(self, request):
content_team_users = Group.objects.filter(id=settings.OPENBUDGETS_GROUP_ID_CONTENT)
qs = super(ContentTeamAccountAdmin, self).get_queryset(request)
qs = qs.filter(groups=content_team_users)
return qs
class PublicAccountAdmin(Accou | ntAdmin):
"""Admin form for Public users"""
def | queryset(self, request):
public_users = Group.objects.filter(id=settings.OPENBUDGETS_GROUP_ID_PUBLIC)
qs = super(PublicAccountAdmin, self).get_queryset(request)
qs = qs.filter(groups=public_users)
return qs
# Django Auth admin config
admin.site.unregister(Group)
# Django Sites admin config
admin.site.unregister(Site)
# Registration admin config
admin.site.unregister(RegistrationProfile)
# Open Budget Accounts admin config
#admin.site.register(CoreTeamAccount, CoreTeamAccountAdmin)
#admin.site.register(ContentTeamAccount, ContentTeamAccountAdmin)
#admin.site.register(PublicAccount, PublicAccountAdmin)
|
klim-/pyplane | core/Toolbar.py | Python | gpl-3.0 | 1,723 | 0.00058 | # -*- coding: utf-8 -*-
# Copyright (C) 2013
# by Klemens Fritzsche, pyplane@leckstrom.de
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
__author__ = 'Klemens Fritzsche'
from PyQt4 import QtGui
from matplotlib.backend_bases import NavigationToolbar2 as NavigationToolbar
from matplotlib.backends.backend_qt4 import cursord
class Toolbar(NavigationToolbar):
"""
This class hides the functionality of NavigationToolbar, and only
provides the necessary functions (only zooming at the moment)
"""
def _init_toolbar(self):
pass
def draw_rubberband(self, event, x0, y0, x1, y1):
height = self.canvas.figure.bbox.height
| y1 = height - y1
y0 = height - y0
w = abs(x1 - x0)
h = abs(y1 - y0)
rect = [int(val) for val in mi | n(x0, x1), min(y0, y1), w, h]
self.canvas.drawRectangle(rect)
def set_cursor(self, cursor):
QtGui.QApplication.restoreOverrideCursor()
QtGui.QApplication.setOverrideCursor(QtGui.QCursor(cursord[cursor]))
if __package__ is None:
__package__ = "core.toolbar"
|
noemis-fr/old-custom | e3z_report_ipbox/report/report_account_invoice_layout.py | Python | agpl-3.0 | 8,491 | 0.007066 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it wi | ll be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If | not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import time
from report import report_sxw
from lxml import etree
from openerp.osv import osv,fields
from openerp.tools.translate import _
class account_invoice_1(report_sxw.rml_parse):
def __init__(self, cr, uid, name, context):
super(account_invoice_1, self).__init__(cr, uid, name, context=context)
self.localcontext.update({
'time': time,
'invoice_lines': self.invoice_lines,
'get_shipping_address': self._get_shipping_address,
})
self.context = context
self._node = None
def _add_header(self, rml_dom,header='external'):
in_rml_header = False
if header=='internal':
rml_head = self.rml_header2
elif header=='internal landscape':
rml_head = self.rml_header3
elif header=='external':
rml_head = self.rml_header
elif not header:
in_rml_header= True
else:
header_obj= self.pool.get('res.header')
rml_head_id = header_obj.search(self.cr,self.uid,[('name','=',header)])
if rml_head_id:
rml_head = header_obj.browse(self.cr, self.uid, rml_head_id[0]).rml_header
if not in_rml_header:
try:
head_dom = etree.XML(rml_head)
except:
raise osv.except_osv(_('Error in report header''s name !'), _('No proper report''s header defined for the selected report. Check that the report header defined in your report rml_parse line exist in Administration/reporting/Reporting headers.' ))
if not in_rml_header:
for tag in head_dom:
found = rml_dom.find('.//'+tag.tag)
if found is not None and len(found):
if tag.get('position'):
found.append(tag)
else :
found.getparent().replace(found,tag)
else:
head_dom = etree.XML(etree.tostring(rml_dom.find('.//pageTemplate')))
for tag in head_dom:
found = rml_dom.find('.//'+tag.tag)
if found is not None and len(found):
if tag.get('position'):
found.append(tag)
else :
found.getparent().replace(found,tag)
return True
def _get_shipping_address(self, invoice):
shipping_name = ''
shipping_address = ''
self.cr.execute('select order_id from sale_order_invoice_rel where invoice_id=%s', (invoice.id,))
ids = map(lambda x: x[0], self.cr.fetchall())
# ids = self.pool.get('sale.order').search(self.cr, self.uid, [('invoice_ids', 'child_of', invoice.id)])
if ids and len(ids) == 1:
order = self.pool.get('sale.order').browse(self.cr, self.uid, ids[0], self.context.copy())
shipping_name = ((order.partner_shipping_id and order.partner_id.title and order.partner_shipping_id.title.name) or '') + ' ' + ((order.partner_shipping_id and order.partner_shipping_id.name) or '')
shipping_address = order.partner_shipping_id and self.display_address(order.partner_shipping_id)
else:
ids = self.pool.get('stock.picking.out').search(self.cr, self.uid, [('name', 'in', invoice.origin.split(':'))])
if ids and len(ids) == 1:
picking = self.pool.get('stock.picking.out').browse(self.cr, self.uid, ids[0], self.context.copy())
shipping_name = ((picking.partner_shipping_id and picking.partner_id.title and picking.partner_shipping_id.title.name) or '') + ' ' + ((picking.partner_shipping_id and order.partner_shipping_id.name) or '')
shipping_address = picking.partner_shipping_id and self.display_address(picking.partner_shipping_id)
return shipping_name + '\n' + shipping_address
def invoice_lines(self, invoice):
result = []
sub_total = {}
info = []
res = {}
list_in_seq = {}
ids = self.pool.get('account.invoice.line').search(self.cr, self.uid, [('invoice_id', '=', invoice.id)])
ids.sort()
invoice_list = []
for id in range(0, len(ids)):
info = self.pool.get('account.invoice.line').browse(self.cr, self.uid, ids[id], self.context.copy())
invoice_list.append(info)
# Fusions des lignes
invoice_dict = {}
for invoice_browse in invoice_list:
if not invoice_dict.get(str(invoice_browse.product_id.id)+str(invoice_browse.price_unit), False):
invoice_dict.update({str(invoice_browse.product_id.id)+str(invoice_browse.price_unit): invoice_browse})
else:
invoice_cumulative = invoice_dict.get(str(invoice_browse.product_id.id)+str(invoice_browse.price_unit), False)
invoice_cumulative.quantity += invoice_browse.quantity
invoice_cumulative.price_subtotal += invoice_browse.price_subtotal
invoice_dict.update({str(invoice_browse.product_id.id)+str(invoice_browse.price_unit): invoice_cumulative})
invoices = [invoice_dict.get(i) for i in invoice_dict]
for invoice_browse in invoices:
list_in_seq[invoice_browse] = invoice_browse.sequence
i = 1
j = 0
final=sorted(list_in_seq.items(), lambda x, y: cmp(x[1], y[1]))
invoice_list = [x[0] for x in final]
sum_flag = {}
sum_flag[j] = -1
for entry in invoice_list:
res = {}
self.cr.execute('select tax_id from account_invoice_line_tax where invoice_line_id=%s', (entry.id,))
tax_ids = self.cr.fetchall()
if tax_ids == []:
res['tax_types'] = ''
else:
tax_names_dict = {}
for item in range(0, len(tax_ids)):
self.cr.execute('select name from account_tax where id=%s', (tax_ids[item][0],))
type = self.cr.fetchone()
tax_names_dict[item] = type[0]
tax_names = ','.join([tax_names_dict[x] for x in range(0, len(tax_names_dict))])
res['tax_types'] = tax_names
res['name'] = entry.name
res['default_code'] = entry.product_id.default_code
res['quantity'] = self.formatLang(entry.quantity, digits=self.get_digits(dp='Account'))
res['price_unit'] = self.formatLang(entry.price_unit, digits=self.get_digits(dp='Account'))
res['discount'] = self.formatLang(entry.discount, digits=self.get_digits(dp='Account'))
res['price_subtotal'] = self.formatLang(entry.price_subtotal, digits=self.get_digits(dp='Account'))
sub_total[i] = entry.price_subtotal
i = i + 1
res['note'] = '' #entry.note
res['currency'] = invoice.currency_id.symbol
res['type'] = 'article'
if entry.uos_id.id == False:
res['uos'] = ''
else:
uos_name = self.pool.get('product.uom').read(self.cr, self.uid, entry.uos_id.id, ['name'], self.context.copy())
res['uos'] = uos_name['name']
|
BhallaLab/moose-core | pymoose/__init__.py | Python | gpl-3.0 | 613 | 0.003263 | # -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_ | function
# Bring everything from c++ module to global namespace.
from moose._moose import *
# Bring everything from moose.py to | global namespace.
# IMP: It will overwrite any c++ function with the same name. We can override
# some C++ here.
from moose.moose import *
from moose.server import *
# SBML and NML2 support.
from moose.model_utils import *
# create a shorthand for version() call here.
__version__ = version()
# C++ core override
from moose.wrapper import *
# Import moose test.
from moose.moose_test import test
|
1ukash/horizon | horizon/dashboards/admin/networks/panel.py | Python | apache-2.0 | 938 | 0.001066 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012 NEC Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHO | UT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.utils.translation import ugettext_lazy as _
import horizon
from horizon.dashboards.admin import dashboard
class Networks(horizon.Panel):
name = _("Networks")
sl | ug = 'networks'
permissions = ('openstack.services.network',)
dashboard.Admin.register(Networks)
|
edx/edx-enterprise | integrated_channels/sap_success_factors/exporters/learner_data.py | Python | agpl-3.0 | 9,167 | 0.003382 | # -*- coding: utf-8 -*-
"""
Learner data exporter for Enterprise Integrated Channel SAP SuccessFactors.
"""
from logging import getLogger
from requests import RequestException
from django.apps import apps
from enterprise.models import EnterpriseCustomerUser, PendingEnterpriseCustomerUser
from enterprise.tpa_pipeline import get_user_from_social_auth
from integrated_channels.catalog_service_utils import get_course_id_for_enrollment, get_course_run_for_enrollment
from integrated_channels.exceptions import ClientError
from integrated_channels.integrated_channel.exporters.learner_data import LearnerExporter
from integrated_channels.sap_success_factors.client import SAPSuccessFactorsAPIClient
from integrated_channels.utils import generate_formatted_log, parse_datetime_to_epoch_millis
LOGGER = getLogger(__name__)
class SapSuccessFactorsLearnerExporter(LearnerExporter):
"""
Class to provide a SAPSF learner data transmission audit prepared for serialization.
"""
INCLUDE_GRADE_FOR_COMPLETION_AUDIT_CHECK = False
def get_learner_data_records(
self,
enterprise_enrollment,
completed_date=None,
grade=None,
course_completed=False,
**kwargs,
): # pylint: disable=arguments-differ
"""
Return a SapSuccessFactorsLearnerDataTransmissionAudit with the given enrollment and course completion data.
If no remote ID can be found, return None.
"""
completed_timestamp = None
if completed_date is not None:
completed_timestamp = parse_datetime_to_epoch_millis(completed_date)
sapsf_user_id = enterprise_enrollment.enterprise_customer_user.get_remote_id(
self.enterprise_configuration.idp_id
)
if sapsf_user_id is not None:
SapSuccessFactorsLearnerDataTransmissionAudit = apps.get_model(
'sap_success_factors',
'SapSuccessFactorsLearnerDataTransmissionAudit'
)
# We return two records here, one with the course key and one with the course run id, to account for
# uncertai | nty about the type of content (course vs. course run) that was sent to the integrated channel.
course_run = get_course_run_for_enrollment(enterprise_enrollment | )
total_hours = 0.0
if course_run and self.enterprise_configuration.transmit_total_hours:
total_hours = course_run.get("estimated_hours", 0.0)
return [
SapSuccessFactorsLearnerDataTransmissionAudit(
enterprise_course_enrollment_id=enterprise_enrollment.id,
sapsf_user_id=sapsf_user_id,
course_id=get_course_id_for_enrollment(enterprise_enrollment),
course_completed=course_completed,
completed_timestamp=completed_timestamp,
grade=grade,
total_hours=total_hours,
credit_hours=total_hours,
),
SapSuccessFactorsLearnerDataTransmissionAudit(
enterprise_course_enrollment_id=enterprise_enrollment.id,
sapsf_user_id=sapsf_user_id,
course_id=enterprise_enrollment.course_id,
course_completed=course_completed,
completed_timestamp=completed_timestamp,
grade=grade,
total_hours=total_hours,
credit_hours=total_hours,
),
]
LOGGER.info(
generate_formatted_log(
self.enterprise_configuration.channel_code(),
self.enterprise_configuration.enterprise_customer.uuid,
enterprise_enrollment.enterprise_customer_user.user_id,
enterprise_enrollment.course_id,
'[Integrated Channel] No learner data was sent for user '
f'{enterprise_enrollment.enterprise_customer_user.username} because an SAP SuccessFactors user ID '
' could not be found.'
)
)
return None
class SapSuccessFactorsLearnerManger:
"""
Class to manage SAPSF learners data and their relation with enterprise.
"""
def __init__(self, enterprise_configuration, client=SAPSuccessFactorsAPIClient):
"""
Use the ``SAPSuccessFactorsAPIClient`` for content metadata transmission to SAPSF.
Arguments:
enterprise_configuration (required): SAPSF configuration connecting an enterprise to an integrated channel.
client: The REST API client that will fetch data from integrated channel.
"""
self.enterprise_configuration = enterprise_configuration
self.client = client(enterprise_configuration) if client else None
def _get_inactive_learners(self):
""" Gets inactive learners list from the client or raises ClientError on failure. """
try:
sap_inactive_learners = self.client.get_inactive_sap_learners()
except RequestException as exc:
raise ClientError(
'SAPSuccessFactorsAPIClient request failed: {error} {message}'.format(
error=exc.__class__.__name__,
message=str(exc)
)
) from exc
return sap_inactive_learners
def _get_identity_providers(self):
""" Logic check for getting an identity provider preflight validation, split out for unit testing."""
enterprise_customer = self.enterprise_configuration.enterprise_customer
providers = enterprise_customer.identity_providers
if not enterprise_customer.has_identity_providers:
LOGGER.info(
generate_formatted_log(
self.enterprise_configuration.channel_code(),
self.enterprise_configuration.enterprise_customer.uuid,
None,
None,
f'Enterprise customer {enterprise_customer.name} has no associated identity provider'
)
)
return None
return providers
def unlink_learners(self):
"""
Iterate over each learner and unlink inactive SAP channel learners.
This method iterates over each enterprise learner and unlink learner
from the enterprise if the learner is marked inactive in the related
integrated channel.
"""
sap_inactive_learners = self._get_inactive_learners()
total_sap_inactive_learners = len(sap_inactive_learners) if sap_inactive_learners else 0
enterprise_customer = self.enterprise_configuration.enterprise_customer
LOGGER.info(
generate_formatted_log(
self.enterprise_configuration.channel_code(),
self.enterprise_configuration.enterprise_customer.uuid,
None,
None,
f'Found {total_sap_inactive_learners} SAP inactive learners for '
f'enterprise customer {enterprise_customer.name}'
)
)
if not sap_inactive_learners:
return None
providers = self._get_identity_providers()
if not providers:
return None
for sap_inactive_learner in sap_inactive_learners:
sap_student_id = sap_inactive_learner['studentID']
social_auth_user = get_user_from_social_auth(providers, sap_student_id, enterprise_customer)
if not social_auth_user:
LOGGER.info(
generate_formatted_log(
self.enterprise_configuration.channel_code(),
self.enterprise_configuration.enterprise_customer.uuid,
None,
None,
f"No social auth data found for inactive user with SAP student id {sap_student_id} "
f"of enterprise customer {enterprise_customer.name} with identity providers "
f"{', '.join(map(lambda provider: provider.provider_id, providers) |
smacfiggen/horus | revert_osiris.py | Python | unlicense | 6,871 | 0.007131 | from __future__ import print_function
import httplib2
import io
import os
import sys
import time
import dateutil.parser
from apiclient import discovery
from oauth2client import client
from oauth2client import tools
from oauth2client.file import Storage
from apiclient.http import MediaIoBaseDownload
import pprint
#Change these to the day of the osiris infestation
YEAR_OF_INFECTION=2017
MONTH_OF_INFECTION=01
DAY_OF_INFECTION=01
try:
import argparse
flags = argparse.ArgumentParser(parents=[tools.argparser]).parse_args()
except ImportError:
flags = None
SCOPES = 'https://www.googleapis.com/auth/drive'
#YOU NEED TO SET UP AN APPLICATION ON GOOGLE AND GENERATE A KEY AND CREATE THIS FILE
CLIENT_SECRET_FILE = 'revert_osiris.json'
APPLICATION_NAME = 'Revert Osiris'
#copy pasta form gdrive API help examples
def get_credentials():
"""Gets valid user credentials from storage.
If nothing has been stored, or if the stored credentials are invalid,
the OAuth2 flow is completed to obtain the new credentials.
Returns:
Credentials, the obtained credential.
"""
home_dir = os.path.expanduser('~')
credential_dir = os.path.join(home_dir, '.credentials')
if not os.path.exists(credential_dir):
os.makedirs(credential_dir)
credential_path = os.path.join(credential_dir,
'drive-python-quickstart.json')
store = Storage(credential_path)
credentials = store.get()
if not credentials or credentials.invalid:
flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)
flow.user_agent = APPLICATION_NAME
if flags:
| credentials = tools.run_flow(flow, store, flags)
else: # Needed only for compatibility with Python 2.6
credentials = tools.run(flow, store)
print('Storing credentials to ' + credential_path)
return credentials
def main():
credentials = get_credentials()
http = credentials.authorize(httplib2.Http())
service = discovery.build('drive', 'v3', http=http)
pp = pprint.PrettyPrinter()
#grab | first batch of possible infected files
results = service.files().list(pageSize=1,
fields="nextPageToken, files(id, name)").execute()
items = results.get('files', [])
next_page = results.get('nextPageToken', None)
bad_files = []
done = False
next_page = None
while True:
results = service.files().list(pageToken=next_page, pageSize=100,
fields="nextPageToken, files(id, name)").execute()
items = results.get('files', [])
if not items:
print('No files found.')
break
else:
for item in items:
#Only act on files with osiris in the name.
if 'osiris' in item['name']:
bad_files.append(item)
next_page = results.get('nextPageToken', None)
print("Found {} bad files".format(len(bad_files)))
#Download a backup of all files just in case
for bad_item in bad_files:
revisions = service.revisions().list(fileId=bad_item['id'], fields='*').execute()
assert(len(revisions['revisions']) >= 2)
dt = dateutil.parser.parse(revisions['revisions'][-1]['modifiedTime'])
if dt.day == DAY_OF_INFECTION and dt.month = MONTH_OF_INFECTION and dt.year == YEAR_OF_INFECTION:
print("Last revision dates from virus day")
else:
print("Skipping {}, datastamp on file isn't from virus day")
continue
dt = dateutil.parser.parse(revisions['revisions'][-2]['modifiedTime'])
print("Date of second to last revision is: {}".format(dt))
request = service.revisions().get_media(fileId=bad_item['id'],
revisionId=revisions['revisions'][-2]['id'])
#Filenames are not unique in gdrive so append with file ID as well
new_filename = os.path.join('backup',
revisions['revisions'][-2]['originalFilename'] + '_' + bad_item['id'])
#If we are re-running script see if we already downloaded this file
if os.path.isfile(new_filename):
print("File {} already backed up, skipping".format(new_filename))
continue
fh = io.FileIO(new_filename, 'wb')
downloader = MediaIoBaseDownload(fh, request)
done = False
while done is False:
status, done = downloader.next_chunk()
print("Download {}".format(int(status.progress() * 100)) )
count = 0
for bad_item in bad_files:
count = count + 1
#Do in batches just to be kind of safe.
if count > 50:
break
file_id = bad_item['id']
revisions = service.revisions().list(fileId=file_id, fields='*').execute()
if len(revisions['revisions']) < 2:
print("File has only 1 revision, skipping: {}".format(bad_item))
continue
file_meta = service.files().get(fileId=file_id, fields='*').execute()
dt_last = dateutil.parser.parse(revisions['revisions'][-1]['modifiedTime'])
dt_2nd_last = dateutil.parser.parse(revisions['revisions'][-2]['modifiedTime'])
if dt_last.day == DAY_OF_INFECTION and dt_last.month == MONTH_OF_INFECTION and dt_last.year == YEAR_OF_INFECTION:
print("Last revision dates from virus day")
else:
print("Skipping {}, datestamp on file isn't from virus day")
continue
orig_file_name = file_meta['originalFilename']
target_rev_name = revisions['revisions'][-2]['originalFilename']
#If the 2nd to last revision is also osiris, we can't simply revert
if 'osiris' in target_rev_name:
print("2nd to last rev filename has osiris in the name, skipping: ({})".format(target_rev_name))
#print out some debug info so we can figure out what we have multipe revisions with osiris
pp.pprint(file_meta)
print(' ')
pp.pprint(revisions)
continue
print("{}: {} revisions found".format(target_rev_name, len(revisions['revisions'])) )
#THESE ARE THE REALLY DANGEROUS STEPS, ONLY UNCOMMMENT IF YOU KNOW WHAT YOU ARE DOING!!!
rev_id_to_delete = revisions['revisions'][-1]['id']
print("service.revisions().delete(fileId={}, revisionId={}).execute()".format(file_id, rev_id_to_delete))
#del_rev = service.revisions().delete(fileId=file_id, revisionId=rev_id_to_delete).execute()
update_body = { 'name': target_rev_name }
print("service.files().update(fileId={}, body={}).execute()".format(file_id, update_body))
#update_name = service.files().update(fileId=file_id, body=update_body).execute()
if __name__ == '__main__':
main()
|
balajikris/autorest | src/generator/AutoRest.Python.Tests/Expected/AcceptanceTests/BodyComplex/autorestcomplextestservice/models/date_wrapper.py | Python | mit | 878 | 0 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class Dat | eWrapper(Model):
"""DateWrapper.
:param field:
:type field: date
:param leap:
:type leap: date
"""
_attribute_map = {
'field': {'key': 'field', 'type': 'date'},
'leap': {'key': 'leap', 'type': 'date'},
}
def | __init__(self, field=None, leap=None):
self.field = field
self.leap = leap
|
IvanMalison/okcupyd | okcupyd/profile.py | Python | mit | 14,154 | 0.001413 | import datetime
import logging
from lxml import html
import simplejson
from . import details
from . import essay
from . import helpers
from . import looking_for
from . import util
from .question import QuestionFetcher
from .xpath import xpb
log = logging.getLogger(__name__)
class Profile(object):
"""Represent the profile of an okcupid user.
Many of the attributes on this object are
:class:`~okcupyd.util.cached_property` instances which lazily load their
values, and cache them once they have been accessed. This makes it so
that this object avoids making unnecessary HTTP requests to retrieve the
same piece of information twice.
Because of this caching behavior, care must
be taken to invalidate cached attributes on the object if an up to date view
of the profile is needed. It is recommended that you call :meth:`.refresh`
to accomplish this, but it is also possible to use
:meth:`~okcupyd.util.cached_property.bust_self` to bust individual
properties if necessary.
"""
def __init__(self, session, username, **kwargs):
"""
:param session: A logged in :class:`~okcupyd.session.Session`
:param username: The username associated with the profile.
"""
self._session = session
#: The username of the user to whom this profile belongs.
self.username = username
#: A :class:`~okcupyd.util.fetchable.Fetchable` of
#: :class:`~okcupyd.question.Question` instances, each corresponding
#: to a question that has been answered by the user to whom this
#: profile belongs.
#: The fetchable consists of :class:`~okcupyd.question.UserQuestion`
#: instead when the profile belongs to the logged in user.
self.questions = self.question_fetchable()
#: A :class:`~okcupyd.details.Details` instance belonging to the same
#: user that this profile belongs to.
self.details = details.Details(self)
if kwargs:
self._set_cached_properties(kwargs)
def _set_cached_properties(self, values):
property_names = set(
name for name, _ in util.cached_property.get_cached_properties(self)
)
for key, value in values.items():
if key not in property_names:
log.warning("Unrecognized kwarg {0} with value {1} "
"passed to Profile constructor.")
self.__dict__[key] = value
def refresh(self, reload=False):
"""
:param reload: Make the request to return a new profile tree. This will
result in the caching of the profile_tree attribute. The
new profile_tree will be returned.
"""
util.cached_property.bust_caches(self, excludes=('authcode'))
self.questions = self.question_fetchable()
if reload:
return self.profile_tree
@property
def is_logged_in_user(self):
"""
:returns: `True` if this profile and the session it was created with
belong to the same user and False otherwise."""
return self._session.log_in_name.lower() == self.username.lower()
@util.cached_property
def _profile_response(self):
return self._session.okc_get(
u'profile/{0}'.format(self.username)
).content
@util.cached_property
def profile_tree(self):
"""
:returns: a :class:`lxml.etree` created from the html of the profile
page of the account associated with the username that this
profile was insantiated with.
"""
return html.fromstring(self._profile_response)
def message_request_parameters(self, content, thread_id):
return {
| 'ajax': 1,
'sendmsg': 1,
'r1': self.username,
'body': content,
'threadid': thread_id,
'authcode': self.authcode,
'reply': 1 if thread_id else 0,
'from_profile': 1
| }
@util.cached_property
def authcode(self):
return helpers.get_authcode(self.profile_tree)
_photo_info_xpb = xpb.div.with_class('photo').img.select_attribute_('src')
@util.cached_property
def photo_infos(self):
"""
:returns: list of :class:`~okcupyd.photo.Info` instances for each photo
displayed on okcupid.
"""
from . import photo
pics_request = self._session.okc_get(
u'profile/{0}/album/0'.format(self.username),
)
pics_tree = html.fromstring(u'{0}{1}{2}'.format(
u'<div>', pics_request.json()['fulls'], u'</div>'
))
return [photo.Info.from_cdn_uri(uri)
for uri in self._photo_info_xpb.apply_(pics_tree)]
@util.cached_property
def looking_for(self):
"""
:returns: A :class:`~okcupyd.looking_for.LookingFor` instance associated
with this profile.
"""
return looking_for.LookingFor(self)
_liked_xpb = xpb.button.with_class('binary_rating_button')
@property
def rating(self):
"""
Deprecated. Use :meth:`.liked` instead.
:returns: the rating that the logged in user has given this user or
0 if no rating has been given.
"""
return 5 if self.liked else 0
@util.cached_property
def liked(self):
"""
:returns: Whether or not the logged in user liked this profile
"""
if self.is_logged_in_user: return False
classes = self._liked_xpb.one_(self.profile_tree).attrib['class'].split()
return 'liked' in classes
_contacted_xpb = xpb.div.with_class('actions2015').button.\
with_classes('actions2015-chat', 'flatbutton', 'blue').\
select_attribute_('data-tooltip')
@util.cached_property
def contacted(self):
"""
:retuns: A boolean indicating whether the logged in user has contacted
the owner of this profile.
"""
try:
contacted_span = self._contacted_xpb.one_(self.profile_tree)
except:
return False
else:
timestamp = contacted_span.replace('Last contacted ', '')
return helpers.parse_date_updated(timestamp)
@util.cached_property
def responds(self):
"""
:returns: The frequency with which the user associated with this profile
responds to messages.
"""
contacted_text = self._contacted_xpb.\
get_text_(self.profile_tree).lower()
if 'contacted' not in contacted_text:
return contacted_text.strip().replace('replies ', '')
_id_xpb = xpb.button.with_class('binary_rating_button').\
select_attribute_("data-tuid")
@util.cached_property
def id(self):
"""
:returns: The id that okcupid.com associates with this profile.
"""
if self.is_logged_in_user: return self._current_user_id
return int(self._id_xpb.one_(self.profile_tree))
@util.cached_property
def _current_user_id(self):
return int(helpers.get_id(self.profile_tree))
@util.cached_property
def essays(self):
"""
:returns: A :class:`~okcupyd.essay.Essays` instance that is
associated with this profile.
"""
return essay.Essays(self)
_age_xpb = xpb.span.with_class('userinfo2015-basics-asl-age')
_user_age_xpb = xpb.span(id='ajax_age')
@util.cached_property
def age(self):
"""
:returns: The age of the user associated with this profile.
"""
if self.is_logged_in_user:
# Retrieve the logged-in user's profile age
return int(self._user_age_xpb.get_text_(self.profile_tree).strip())
else:
# Retrieve a non logged-in user's profile age
return int(self._age_xpb.get_text_(self.profile_tree))
_percentages_and_ratings_xpb = xpb.div.with_class('matchanalysis2015-graphs')
@util.cached_property
def mat |
liosha2007/temporary-groupdocs-python3-sdk | groupdocs/models/ChangePasswordResult.py | Python | apache-2.0 | 920 | 0.006522 | #!/usr/bin/env | python
"""
Copyright 2012 GroupDocs.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at |
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
class ChangePasswordResult:
    """Result payload of the GroupDocs change-password API call.

    NOTE: This class is auto generated by the swagger code generator
    program.  Do not edit the class manually.
    """

    def __init__(self):
        # Maps attribute names to their swagger type names.
        self.swaggerTypes = {'user_guid': 'str'}
        # GUID of the affected user (str); unset until populated.
        self.user_guid = None
|
SickGear/SickGear | sickbeard/metadata/generic.py | Python | gpl-3.0 | 53,307 | 0.003114 | # Author: Nic Wolfe <nic@wolfeden.ca>
# URL: http://code.google.com/p/sickbeard/
#
# This file is part of SickGear.
#
# SickGear is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SickGear is | distributed in the hope that it will be useful,
# but WITHOUT ANY WAR | RANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SickGear. If not, see <http://www.gnu.org/licenses/>.
from __future__ import with_statement
from collections import OrderedDict
import datetime
import io
import os.path
import re
from . import helpers as metadata_helpers
from .. import logger
import sg_helpers
from ..indexers import indexer_config
from ..indexers.indexer_config import TVINFO_TVDB, TVINFO_TMDB
from lib.tvinfo_base import TVInfoImage, TVInfoImageType, TVInfoImageSize
from lib.tvinfo_base.exceptions import *
import sickbeard
# noinspection PyPep8Naming
import encodingKludge as ek
from exceptions_helper import ex
from lib.fanart.core import Request as fanartRequest
import lib.fanart as fanart
from lxml_etree import etree
from _23 import filter_iter, list_keys
from six import iteritems, itervalues, string_types
# noinspection PyUnreachableCode
if False:
from typing import AnyStr, Dict, Generator, List, Optional, Tuple, Union
from lib.tvinfo_base import TVInfoShow
from ..tv import TVShow
# Translate provider-side image type names to TVInfoImageType constants.
# The "*_thumb" variants map to the same base type as their full-size
# counterparts.
map_image_types = {
    'poster': TVInfoImageType.poster,
    'banner': TVInfoImageType.banner,
    'fanart': TVInfoImageType.fanart,
    'poster_thumb': TVInfoImageType.poster,
    'banner_thumb': TVInfoImageType.banner,
}
class ShowInfosDict(OrderedDict):
    """Ordered mapping whose values may be produced lazily.

    A value may be stored as a callable; on first access it is invoked
    with the key and its result replaces the callable in the mapping, so
    subsequent lookups return the computed value directly.
    """

    def __getitem__(self, key):
        value = super(ShowInfosDict, self).__getitem__(key)
        if not callable(value):
            return value
        computed = value(key)
        super(ShowInfosDict, self).__setitem__(key, computed)
        return computed
class GenericMetadata(object):
    """
    Base class for all metadata providers. Default behavior is meant to mostly
    follow XBMC 12+ metadata standards. Has support for:
    - show metadata file
    - episode metadata file
    - episode thumbnail
    - show fanart
    - show poster
    - show banner
    - season thumbnails (poster)
    - season thumbnails (banner)
    - season all poster
    - season all banner

    Each capability is toggled by a constructor flag; the flag set can be
    (de)serialised as a pipe separated 0/1 string via get_config() and
    set_config().
    """
def __init__(self,
show_metadata=False, # type: bool
episode_metadata=False, # type: bool
use_fanart=False, # type: bool
use_poster=False, # type: bool
use_banner=False, # type: bool
episode_thumbnails=False, # type: bool
season_posters=False, # type: bool
season_banners=False, # type: bool
season_all_poster=False, # type: bool
season_all_banner=False # type: bool
):
self.name = "Generic" # type: AnyStr
self._ep_nfo_extension = "nfo" # type: AnyStr
self._show_metadata_filename = "tvshow.nfo" # type: AnyStr
self.fanart_name = "fanart.jpg" # type: AnyStr
self.poster_name = "poster.jpg" # type: AnyStr
self.banner_name = "banner.jpg" # type: AnyStr
self.season_all_poster_name = "season-all-poster.jpg" # type: AnyStr
self.season_all_banner_name = "season-all-banner.jpg" # type: AnyStr
self.show_metadata = show_metadata
self.episode_metadata = episode_metadata
self.fanart = use_fanart
self.poster = use_poster
self.banner = use_banner
self.episode_thumbnails = episode_thumbnails
self.season_posters = season_posters
self.season_banners = season_banners
self.season_all_poster = season_all_poster
self.season_all_banner = season_all_banner
def get_config(self):
# type: (...) -> AnyStr
config_list = [self.show_metadata, self.episode_metadata, self.fanart, self.poster, self.banner,
self.episode_thumbnails, self.season_posters, self.season_banners, self.season_all_poster,
self.season_all_banner]
return '|'.join([str(int(x)) for x in config_list])
    def get_id(self):
        # type: (...) -> AnyStr
        """Return a normalised identifier for this provider, derived from
        its display name via :meth:`makeID`."""
        return GenericMetadata.makeID(self.name)
@staticmethod
def makeID(name):
# type: (AnyStr) -> AnyStr
name_id = re.sub("[+]", "plus", name)
name_id = re.sub(r"[^\w\d_]", "_", name_id).lower()
return name_id
def set_config(self, string):
# type: (AnyStr) -> None
config_list = [bool(int(x)) for x in string.split('|')]
self.show_metadata = config_list[0]
self.episode_metadata = config_list[1]
self.fanart = config_list[2]
self.poster = config_list[3]
self.banner = config_list[4]
self.episode_thumbnails = config_list[5]
self.season_posters = config_list[6]
self.season_banners = config_list[7]
self.season_all_poster = config_list[8]
self.season_all_banner = config_list[9]
    def _has_show_metadata(self, show_obj):
        # type: (sickbeard.tv.TVShow) -> bool
        """Return True when the show level metadata file already exists."""
        result = ek.ek(os.path.isfile, self.get_show_file_path(show_obj))
        logger.log(u"Checking if " + self.get_show_file_path(show_obj) + " exists: " + str(result), logger.DEBUG)
        return result
    def has_episode_metadata(self, ep_obj):
        # type: (sickbeard.tv.TVEpisode) -> bool
        """Return True when the episode metadata file already exists."""
        result = ek.ek(os.path.isfile, self.get_episode_file_path(ep_obj))
        logger.log(u"Checking if " + self.get_episode_file_path(ep_obj) + " exists: " + str(result), logger.DEBUG)
        return result
    def _has_fanart(self, show_obj):
        # type: (sickbeard.tv.TVShow) -> bool
        """Return True when the show's fanart image already exists."""
        result = ek.ek(os.path.isfile, self.get_fanart_path(show_obj))
        logger.log(u"Checking if " + self.get_fanart_path(show_obj) + " exists: " + str(result), logger.DEBUG)
        return result
    def _has_poster(self, show_obj):
        # type: (sickbeard.tv.TVShow) -> bool
        """Return True when the show's poster image already exists."""
        result = ek.ek(os.path.isfile, self.get_poster_path(show_obj))
        logger.log(u"Checking if " + self.get_poster_path(show_obj) + " exists: " + str(result), logger.DEBUG)
        return result
    def _has_banner(self, show_obj):
        # type: (sickbeard.tv.TVShow) -> bool
        """Return True when the show's banner image already exists."""
        result = ek.ek(os.path.isfile, self.get_banner_path(show_obj))
        logger.log(u"Checking if " + self.get_banner_path(show_obj) + " exists: " + str(result), logger.DEBUG)
        return result
    def has_episode_thumb(self, ep_obj):
        # type: (sickbeard.tv.TVEpisode) -> bool
        """Return True when the episode thumbnail exists; False when the
        thumbnail path cannot be determined at all."""
        location = self.get_episode_thumb_path(ep_obj)
        result = None is not location and ek.ek(os.path.isfile, location)
        if location:
            logger.log(u"Checking if " + location + " exists: " + str(result), logger.DEBUG)
        return result
    def _has_season_poster(self, show_obj, season):
        # type: (sickbeard.tv.TVShow,int) -> bool
        """Return True when the poster for *season* exists."""
        location = self.get_season_poster_path(show_obj, season)
        result = None is not location and ek.ek(os.path.isfile, location)
        if location:
            logger.log(u"Checking if " + location + " exists: " + str(result), logger.DEBUG)
        return result
    def _has_season_banner(self, show_obj, season):
        # type: (sickbeard.tv.TVShow,int) -> bool
        """Return True when the banner for *season* exists."""
        location = self.get_season_banner_path(show_obj, season)
        result = None is not location and ek.ek(os.path.isfile, location)
        if location:
            logger.log(u"Checking if " + location + " exists: " + str(result), logger.DEBUG)
        return result
def _has_season_all_poster(self, show_obj):
# type: (sickbeard.tv.TVShow) -> AnyStr
result = ek.ek(os.path.isfile, self.get_season_all_poster_path(show_obj))
logger.log(u"Checking if " + self.get_season_all_poster_path(show_obj) + " exists |
ContinuumIO/multiuserblazeserver | build.py | Python | bsd-3-clause | 1,553 | 0.006439 | import os
from os.path import join, getctime, dirname
import glob
import subprocess
import argparse
def newest_file(root):
    """Return the most recently created ``*tar.bz2`` package under *root*."""
    pattern = join(root, "*tar.bz2")
    candidates = glob.iglob(pattern)
    return max(candidates, key=getctime)
def upload(pkg, user):
    """Upload *pkg* to the binstar account *user*, overwriting if present."""
    subprocess.check_call(
        ["binstar", "upload", "--force", "-u", user, pkg])
def build(recipe, build_path, pythons=[], platforms=[], binstar_user=None):
    """Conda-build *recipe* for every requested python version.

    After each build the newest package found in *build_path* is optionally
    uploaded to binstar, then converted for each extra platform in
    *platforms* (converted packages land in ./<platform>/ and are uploaded
    as well).

    NOTE(review): the original source contained stray " | " extraction
    artifacts on two lines which made it syntactically invalid; the intended
    statements are reconstructed here.  The mutable default arguments ([])
    are never mutated, so they are harmless.
    """
    print (recipe, pythons, platforms)
    for p in pythons:
        # Build the recipe against this python version.
        cmd = ["conda", "build", recipe, "--python", p]
        subprocess.check_call(cmd)
        # conda writes the package into build_path; pick the newest one.
        pkg = newest_file(build_path)
        if binstar_user:
            upload(pkg, binstar_user)
        for plat in platforms:
            # Convert the freshly built package for each extra platform.
            cmd = ["conda", "convert", "-p", plat, pkg]
            subprocess.check_call(cmd)
            if binstar_user:
                to_upload = newest_file(plat)
                upload(to_upload, binstar_user)
if __name__ == "__main__":
    p = argparse.ArgumentParser()
    p.add_argument("build_dir")
    p.add_argument("--py", action="append", default=[])
    p.add_argument("--plat", action="append", default=[])
    p.add_argument("-u", "--binstar-user", help="binstar user")
    args = p.parse_args()
    # NOTE: the original re-registered "build_dir" via p.add_argument()
    # *after* parse_args(); that call had no effect and has been removed.
    build("conda.recipe", args.build_dir, args.py, args.plat, args.binstar_user)
    #build("../into/conda.recipe", args.build_dir, args.py, args.plat, args.binstar_user)
"""
python build.py /opt/anaconda/conda-bld/linux-64 --py 27 --py 34 --plat osx-64 --plat win-64 -u hugo
"""
|
unikmhz/npui | netprofile/netprofile/alembic/env.py | Python | agpl-3.0 | 8,705 | 0 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# NetProfile: DB migrations environment for Alembic
# Copyright © 2016-2017 Alex Unigovsky
#
# This file is part of NetProfile.
# NetProfile is free software: you can redistribute it and/or
# modify it under the terms of the GNU Affero General Public
# License as published by the Free Software Foundation, either
# version 3 of the License, or (at your option) any later
# version.
#
# NetProfile is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General
# Public License along with NetProfile. If not, see
# <http://www.gnu.org/licenses/>.
from __future__ import with_statement
import decimal
from alembic import context
from alembic.operations import ops
from alembic.autogenerate import rewriter
from alembic.autogenerate.render import (
_ident,
_render_server_default,
_repr_type
)
from sqlalchemy import (
DefaultClause,
Sequence,
engine_from_config,
pool,
types
)
from sqlalchemy.sql.elements import TextClause
from netprofile.db import ddl as npd
from netprofile.db import fields as npf
from netprofile.db import migrations as npm
from netprofile.db.connection import DBMeta
from netprofile.ext.data import (
_INTEGER_SET,
_FLOAT_SET,
_DECIMAL_SET,
_DATE_SET,
_table_to_class
)
# Date strings MySQL uses to represent "no value"; treated as equivalent to
# a NULL default when comparing column defaults in _compare_default below.
_NULL_DATES = (
    '0000-00-00',
    '0000-00-00 00:00',
    '0000-00-00 00:00:00'
)
# Alembic configuration object for the current migration run.
config = context.config
# Optional module filter injected via config.attributes by the caller; when
# set, autogeneration only considers tables owned by that NetProfile module.
moddef_filter = config.attributes.get('module', None)
# Rewriter that post-processes autogenerated operations (see hooks below).
writer = rewriter.Rewriter()
@writer.rewrites(ops.CreateTableOp)
def _create_table(context, revision, op):
    """Rewriter hook for CREATE TABLE operations.

    Expands a single CreateTableOp into a train of operations that also set
    the table comment and create any triggers declared on the table through
    NetProfile's DDL extensions.  Returns the original op unchanged when
    there is nothing extra to emit.
    """
    new_rev_id = npm._get_new_rev(context)
    train = [op]
    table = op.to_table(context)
    if hasattr(table, 'comment'):
        train.append(npm.SetTableCommentOp(op.table_name, table.comment))
    if hasattr(table, 'triggers'):
        for trigger in table.triggers:
            train.append(npm.CreateTriggerOp(
                trigger.module,
                op.table_name,
                trigger.when,
                trigger.event,
                new_rev_id
            ))
    if len(train) > 1:
        return train
    return op
def _include_object(obj, name, type_, reflected, compare_to):
    """Alembic include hook.

    When a module filter is active, restrict autogeneration to
    non-reflected tables owned by that NetProfile module; everything else
    is always included.
    """
    if not moddef_filter:
        return True
    if type_ == 'table':
        if reflected:
            return False
        if _table_to_class(obj.name).__moddef__ != moddef_filter:
            return False
    return True
def _compare_default(context, insp_col, meta_col, insp_default, meta_default,
                     rendered_meta_default):
    """Custom server-default comparator for Alembic autogeneration.

    Returns False when the defaults are considered equal, True when they
    differ, or None to fall back to Alembic's stock comparison.

    NOTE(review): two identifiers in the original source were corrupted by
    stray " | " extraction artifacts ("meta_d | efault", "insp_ | default");
    they are reconstructed here as ``meta_default`` / ``insp_default``.
    """
    if isinstance(meta_col.type, _DATE_SET):
        if isinstance(insp_default, str):
            insp_default = insp_default.strip('\'')
        # MySQL's zero dates stand in for "no default".
        if meta_default is None and insp_default in _NULL_DATES:
            return False
    if isinstance(meta_default, npd.CurrentTimestampDefault):
        on_update = meta_default.on_update
        if npf._is_mysql(context.dialect):
            compare_to = 'CURRENT_TIMESTAMP'
            if on_update:
                compare_to += ' ON UPDATE CURRENT_TIMESTAMP'
            return compare_to != insp_default
        # TODO: compare for other dialects
        return False
    elif isinstance(meta_default, DefaultClause):
        meta_arg = meta_default.arg
        if isinstance(meta_arg, npf.npbool):
            # Run the inspected value through the column's result processor
            # so both sides are compared as python-level booleans.
            proc = meta_col.type.result_processor(context.dialect,
                                                 types.Unicode)
            insp_default = insp_default.strip('\'')
            if proc:
                insp_default = proc(insp_default)
            return meta_arg.val != insp_default
        elif isinstance(meta_arg, TextClause):
            meta_text = meta_arg.text
            if meta_text.upper() == 'NULL' and insp_default is None:
                return False
            meta_text = meta_text.strip('\'')
            if isinstance(insp_default, str):
                insp_default = insp_default.strip('\'')
            # Normalise numeric defaults before comparing them.
            if isinstance(meta_col.type, _INTEGER_SET):
                meta_text = int(meta_text)
                insp_default = int(insp_default) \
                    if isinstance(insp_default, str) else None
            elif isinstance(meta_col.type, _FLOAT_SET + _DECIMAL_SET):
                meta_text = decimal.Decimal(meta_text)
                insp_default = decimal.Decimal(insp_default) \
                    if isinstance(insp_default, str) else None
            return meta_text != insp_default
    return None
def render_item(type_, obj, autogen_context):
    """Custom Alembic renderer for NetProfile-specific constructs.

    Handles netprofile.db field types, columns that carry a Comment, and
    the custom server defaults; returns False to defer to Alembic's stock
    rendering for everything else.  Required imports are registered on
    ``autogen_context.imports`` as they are used.
    """
    if type_ == 'type':
        if isinstance(obj, npf.DeclEnumType):
            if obj.enum:
                return 'npf.DeclEnumType(name=%r, values=%r)' % (
                    obj.enum.__name__,
                    list(obj.enum.values()))
        if obj.__module__ == 'netprofile.db.fields':
            autogen_context.imports.add(
                'from netprofile.db import fields as npf')
            return 'npf.%r' % (obj,)
    elif type_ == 'column' and hasattr(obj, 'comment'):
        autogen_context.imports.add('from netprofile.db import ddl as npd')
        # Copied from alembic.autogenerate.render:_render_column
        opts = []
        if obj.server_default:
            rendered = _render_server_default(obj.server_default,
                                              autogen_context)
            if rendered:
                opts.append(('server_default', rendered))
        if not obj.autoincrement:
            opts.append(('autoincrement', obj.autoincrement))
        if obj.nullable is not None:
            opts.append(('nullable', obj.nullable))
        if isinstance(obj.default, Sequence):
            seq = obj.default
            seq_rendered = 'sa.Sequence(' + repr(seq.name)
            if seq.start is not None:
                seq_rendered += ', start=' + repr(seq.start)
            if seq.increment is not None:
                seq_rendered += ', increment=' + repr(seq.increment)
            seq_rendered += ')'
            opts.append(('default', seq_rendered))
        return 'sa.Column' \
            '(%(name)r, %(type)s, npd.Comment(%(comment)r), %(kw)s)' % {
                'name': _ident(obj.name),
                'type': _repr_type(obj.type, autogen_context),
                'comment': obj.comment,
                'kw': ', '.join(['%s=%s' % (kwname, val)
                                 for kwname, val in opts])
            }
    elif type_ == 'server_default':
        if isinstance(obj, npd.CurrentTimestampDefault):
            autogen_context.imports.add('from netprofile.db import ddl as npd')
            return 'npd.CurrentTimestampDefault(on_update=%r)' % (
                obj.on_update,)
        if isinstance(obj, DefaultClause):
            if isinstance(obj.arg, npf.npbool):
                autogen_context.imports.add(
                    'from netprofile.db import fields as npf')
                return 'npf.npbool(%r)' % (obj.arg.val,)
    return False
def run_migrations_offline():
    """Run migrations in 'offline' mode.

    This configures the context with just a URL
    and not an Engine, though an Engine is acceptable
    here as well.  By skipping the Engine creation
    we don't even need a DBAPI to be available.

    Calls to context.execute() here emit the given string to the
    script output.
    """
    url = config.get_main_option("sqlalchemy.url")
    context.configure(
        url=url,
        target_metadata=DBMeta,
        literal_binds=True,
        process_revision_directives=writer  # apply the rewriter hooks above
    )
    with context.begin_transaction():
        context.run_migrations()
def run_migrations_online():
"""Run migrations in 'online' mode.
In this scenario we need to create an Engine
and associate a connection with the context.
"""
connectable = engine_from_config(
config.get_section(config.config_ini_section),
prefix='sqlalchemy.',
poolclass=pool.NullPool
)
with connecta |
acfogarty/espressopp | bench/polymer_melt/espressopp/espressopp_polymer_melt.py | Python | gpl-3.0 | 5,227 | 0.006122 | #!/usr/bin/env python
# -*- coding: iso-8859-1 -*-
###########################################################################
# #
# ESPResSo++ Benchmark Python script for a polymer melt #
# #
###########################################################################
import sys
import time
import espresso
import mpi4py.MPI as MPI
import logging
from espresso import Real3D, Int3D
from espresso.tools import lammps, gromacs
from espresso.tools import decomp, timers, replicate
# simulation parameters (nvt = False is nve)
steps = 1000
rc = 1.12
skin = 0.3
nvt = True
timestep = 0.01
######################################################################
### IT SHOULD BE UNNECESSARY TO MAKE MODIFICATIONS BELOW THIS LINE ###
######################################################################
sys.stdout.write('Setting up simulation ...\n')
# Read the start configuration and (optionally) replicate the box.
bonds, angles, x, y, z, Lx, Ly, Lz = lammps.read('espressopp_polymer_melt.start')
bonds, angles, x, y, z, Lx, Ly, Lz = replicate(bonds, angles, x, y, z, Lx, Ly, Lz, xdim=1, ydim=1, zdim=1)
num_particles = len(x)
density = num_particles / (Lx * Ly * Lz)
size = (Lx, Ly, Lz)
# System, boundary conditions and domain decomposition over MPI ranks.
system = espresso.System()
system.rng = espresso.esutil.RNG()
system.bc = espresso.bc.OrthorhombicBC(system.rng, size)
system.skin = skin
comm = MPI.COMM_WORLD
nodeGrid = espresso.tools.decomp.nodeGrid(comm.size)
cellGrid = espresso.tools.decomp.cellGrid(size,nodeGrid,rc,skin)
system.storage = espresso.storage.DomainDecomposition(system, nodeGrid, cellGrid)
# add particles to the system and then decompose
# do this in chunks of 1000 particles to speed it up
props = ['id', 'type', 'mass', 'pos']
new_particles = []
for i in range(num_particles):
    part = [i + 1, 0, 1.0, Real3D(x[i], y[i], z[i])]
    new_particles.append(part)
    if i % 1000 == 0:
        system.storage.addParticles(new_particles, *props)
        system.storage.decompose()
        new_particles = []
system.storage.addParticles(new_particles, *props)
system.storage.decompose()
# Lennard-Jones with Verlet list
vl = espresso.VerletList(system, cutoff = rc + system.skin)
potLJ = espresso.interaction.LennardJones(1.0, 1.0, cutoff = rc, shift = False)
interLJ = espresso.interaction.VerletListLennardJones(vl)
interLJ.setPotential(type1 = 0, type2 = 0, potential = potLJ)
system.addInteraction(interLJ)
# FENE bonds
fpl = espresso.FixedPairList(system.storage)
fpl.addBonds(bonds)
potFENE = espresso.interaction.FENE(K=30.0, r0=0.0, rMax=1.5)
interFENE = espresso.interaction.FixedPairListFENE(system, fpl, potFENE)
system.addInteraction(interFENE)
# Cosine with FixedTriple list
ftl = espresso.FixedTripleList(system.storage)
ftl.addTriples(angles)
potCosine = espresso.interaction.Cosine(K=1.5, theta0=3.1415926)
interCosine = espresso.interaction.FixedTripleListCosine(system, ftl, potCosine)
#interCosine.setPotential(type1 = 0, type2 = 0, potential = potCosine)
system.addInteraction(interCosine)
# integrator
integrator = espresso.integrator.VelocityVerlet(system)
integrator.dt = timestep
if(nvt):
    # Langevin thermostat turns the NVE integration into NVT.
    langevin = espresso.integrator.LangevinThermostat(system)
    langevin.gamma = 1.0
    langevin.temperature = 1.0
    integrator.addExtension(langevin)
# print simulation parameters
print ''
print 'number of particles =', num_particles
print 'density = %.4f' % (density)
print 'rc =', rc
print 'dt =', integrator.dt
print 'skin =', system.skin
print 'nvt =', nvt
print 'steps =', steps
print 'NodeGrid = %s' % (nodeGrid)
print 'CellGrid = %s' % (cellGrid)
print ''
# analysis
# configurations = espresso.analysis.Configurations(system)
# configurations.gather()
temperature = espresso.analysis.Temperature(system)
pressure = espresso.analysis.Pressure(system)
pressureTensor = espresso.analysis.PressureTensor(system)
# Row format: step, T, P, Pxy, total/kinetic/pair/bond/angle energies.
fmt = '%5d %8.4f %10.5f %8.5f %12.3f %12.3f %12.3f %12.3f %12.3f\n'
T = temperature.compute()
P = pressure.compute()
Pij = pressureTensor.compute()
Ek = 0.5 * T * (3 * num_particles)
Ep = interLJ.computeEnergy()
Eb = interFENE.computeEnergy()
Ea = interCosine.computeEnergy()
Etotal = Ek + Ep + Eb + Ea
# Print the observable table header and the initial (step 0) values.
# NOTE(review): the header line and the Ek line below were corrupted by
# stray " | " extraction artifacts in the original source; reconstructed.
sys.stdout.write(' step      T          P        Pxy       etotal     ekinetic      epair       ebond      eangle\n')
sys.stdout.write(fmt % (0, T, P, Pij[3], Etotal, Ek, Ep, Eb, Ea))
# Run the MD integration and time it.
start_time = time.clock()
integrator.run(steps)
end_time = time.clock()
# Recompute all observables after the run and print the final row.
T = temperature.compute()
P = pressure.compute()
Pij = pressureTensor.compute()
Ek = 0.5 * T * (3 * num_particles)
Ep = interLJ.computeEnergy()
Eb = interFENE.computeEnergy()
Ea = interCosine.computeEnergy()
Etotal = Ek + Ep + Eb + Ea
sys.stdout.write(fmt % (steps, T, P, Pij[3], Etotal, Ek, Ep, Eb, Ea))
sys.stdout.write('\n')
# print timings and neighbor list information
timers.show(integrator.getTimers(), precision=2)
sys.stdout.write('Total # of neighbors = %d\n' % vl.totalSize())
sys.stdout.write('Ave neighs/atom = %.1f\n' % (vl.totalSize() / float(num_particles)))
sys.stdout.write('Neighbor list builds = %d\n' % vl.builds)
sys.stdout.write('Integration steps = %d\n' % integrator.step)
sys.stdout.write('CPUs = %i CPU time per CPU = %.1f\n' % (comm.size,end_time - start_time))
|
olemb/mido | extras/hid_joystick.py | Python | mit | 6,820 | 0.000733 | """Read from /dev/input/js0 and return as dictionaries | .
If yo | u have pygame it is easier and more portable to do something
like::
import pygame.joystick
from pygame.event import event_name
pygame.init()
pygame.joystick.init()
js = pygame.joystick.Joystick(0)
js.init()
while True:
for event in pygame.event.get():
if event.axis == 0:
print(event)
Init:
8 = init?
Time stamp |
(ms since boot) |
--+--+--+-- | -- Button number
f0 fb 37 09 00 00 81 00
f0 fb 37 09 00 00 81 01
f0 fb 37 09 00 00 81 02
f0 fb 37 09 00 00 81 03
f0 fb 37 09 00 00 81 04
f0 fb 37 09 00 00 81 05
f0 fb 37 09 00 00 81 06
f0 fb 37 09 00 00 81 07
f0 fb 37 09 00 00 81 08
f0 fb 37 09 00 00 81 09
f0 fb 37 09 00 00 81 0a
f0 fb 37 09 00 00 81 0b
f0 fb 37 09 00 00 82 00
f0 fb 37 09 00 00 82 01
f0 fb 37 09 00 00 82 02
f0 fb 37 09 00 00 82 03
f0 fb 37 09 00 00 82 04
f0 fb 37 09 00 00 82 05
--+-- |
| 1 = button, 2 =
|
value (little endian unsigned)
button down
|
98 f0 2f 09 01 00 01 00 1 down
08 fa 2f 09 00 00 01 00 1 up
2c 6a 31 09 01 00 01 01 2 down
04 73 31 09 00 00 01 01 2 up
48 bf 32 09 01 00 01 02 3 down
f8 c4 32 09 00 00 01 02 3 up
Logitech PS2-style gamepad:
axis 0 == left stick -left / right (left is negative)
axis 1 == left stick -up / down (up is negative)
axis 2 == right stick -left / right
axis 3 == right stick -up / down
axis 4 == plus stick -left / right (when mode is off), values min/0/max
axis 5 == plus stick -up / down (when mode is off, values min/0/max
The + stick has two modes. When the mode light is off, it sends axis
4/5. When mode is on, it sends axis 0/1. The values are -32767, 0, and 32767.
Other axis have values from -32767 to 32767 as well.
"""
import struct
import select
# Event type bits of the Linux joystick API (struct js_event.type).
JS_EVENT_BUTTON = 0x1
JS_EVENT_AXIS = 0x2
JS_EVENT_INIT = 0x80


def read_event(device):
    """Read one 8-byte joystick event from *device* and decode it.

    Returns a dict with 'time' (ms since boot), 'value', 'number',
    'init' (True for synthetic init events), 'type' ('button' or 'axis')
    and, for axis events, 'normalized_value' in -1.0..1.0.
    """
    raw = device.read(8)
    time_ms, value, ev_type, number = struct.unpack('IhBB', raw)
    event = {
        'time': time_ms,
        'value': value,
        'number': number,
        'init': bool(ev_type & JS_EVENT_INIT),
    }
    ev_type &= 0x7f  # Strip away the flag bits (JS_EVENT_INIT etc.)
    if ev_type != JS_EVENT_BUTTON:
        # Axis values span -32767..32767; scale into -1..1.
        event['normalized_value'] = float(value) / 0x7fff
    event['type'] = {1: 'button', 2: 'axis'}[ev_type]
    return event
def read_events(device_name):
    """Open *device_name* in binary mode and yield decoded joystick
    events forever (generator never terminates on its own)."""
    with open(device_name, 'rb') as device:
        while True:
            yield read_event(device)
def panic(port):
    """
    Send "All Notes Off" and "Reset All Controllers" on
    all channels.
    """
    for channel in range(16):
        # 121 = Reset All Controllers, 123 = All Notes Off.
        for control in (121, 123):
            msg = mido.Message('control_change',
                               channel=channel,
                               control=control,
                               value=0)
            print(msg)
            port.send(msg)
class Monophonic(object):
    """Wrap an output port and collapse held chords to a single voice:
    of all currently held notes, only the lowest one sounds."""
    # Todo: this assumes everything is on channel 0!
    def __init__(self, output, channel=0):
        # output: the wrapped port; only note messages on `channel` are
        # filtered, everything else passes straight through.
        self.output = output
        self.notes = set()  # notes currently held down
        self.current_note = None  # note currently sounding, if any
        self.channel = channel
    def send(self, message):
        """Forward *message*, sounding only the lowest held note."""
        # Pass through anything that is not a note event on our channel.
        if message.type not in ['note_on', 'note_off'] or \
           message.channel != self.channel:
            self.output.send(message)
            return
        # Track the set of held notes.
        if message.type == 'note_on':
            self.notes.add(message.note)
        elif message.type == 'note_off':
            if message.note in self.notes:
                self.notes.remove(message.note)
        print(self.notes)
        try:
            note = min(self.notes)
        except ValueError:
            # min() on an empty set: nothing is held.
            note = None
        if note == self.current_note:
            return  # Same note as before, no change.
        # Silence the previously sounding note before starting the new one.
        if self.current_note is not None:
            off = mido.Message('note_off',
                               note=self.current_note,
                               velocity=message.velocity)
            print(off)
            self.output.send(off)
            self.current_note = None
        if note is not None:
            on = mido.Message('note_on',
                              note=note,
                              velocity=message.velocity)
            print(on)
            self.output.send(on)
            self.current_note = note
def play_scale(dev, out):
    """Map joystick buttons to the notes of a D major scale and play
    them on MIDI output *out* until interrupted."""
    # out = Monophonic(out, channel=0)
    # Major scale (semitone offsets over two octaves).
    scale = [0, 2, 4, 5, 7, 9, 11, 12, 14, 16, 17, 19]
    # program = 16  # Organ
    program = 74
    out.send(mido.Message('program_change', program=program))
    while True:
        event = read_event(dev)
        if event['init']:
            continue  # Skip init events.
        if event['type'] == 'button':
            # Convert to D-major scale starting at middle D.
            note = 62 + 12 + scale[event['number']]
            if event['value']:
                type_ = 'note_on'
            else:
                type_ = 'note_off'
            message = mido.Message(type_, note=note, velocity=64)
            out.send(message)
        # elif event['type'] == 'axis':
        #     if event['number'] == 0:
        #         pitch_scale = mido.messages.MAX_PITCHWHEEL
        #         pitch = int(event['normalized_value'] * pitch_scale)
        #         out.send(mido.Message('pitchwheel', pitch=pitch))
def play_drums(dev, out):
    """Map joystick buttons to General MIDI percussion notes and play
    them on channel 10 (index 9) of *out* until interrupted."""
    # http://www.midi.org/techspecs/gm1sound.php
    note_mapping = {
        2: 35,  # Acoustic Bass Drum
        6: 38,  # Acoustic Snare
        1: 41,  # Low Floor Tom
        4: 47,  # Low Mid Tom
        3: 50,  # High Tom
        8: 51,  # Ride Cymbal
        5: 42,  # Closed Hi Hat
        7: 46,  # Open Hi Hat
        9: 52,  # Chinese Cymbal
        10: 55,  # Splash Cymbal
    }
    while True:
        event = read_event(dev)
        if event['init']:
            continue
        if event['type'] == 'button':
            print(event)
            button = event['number'] + 1  # Number buttons starting with 1.
            if button not in note_mapping:
                continue
            if event['value']:
                type_ = 'note_on'
            else:
                type_ = 'note_off'
            note = note_mapping[button]
            message = mido.Message(type_, channel=9, note=note, velocity=64)
            print(message)
            out.send(message)
if __name__ == '__main__':
    import sys
    import mido

    # Open the joystick device in binary mode: read_event() unpacks raw
    # bytes via struct, and read_events() likewise opens with 'rb'.  The
    # original text-mode open() broke struct.unpack on python 3.
    with open('/dev/input/js0', 'rb') as dev:
        with mido.open_output('SD-20 Part A') as out:
            try:
                # play_drums(dev, out)
                play_scale(dev, out)
            finally:
                # Always silence everything on exit.
                panic(out)
|
jeremiahyan/odoo | addons/payment_alipay/models/payment_transaction.py | Python | gpl-3.0 | 7,132 | 0.002944 | # Part of Odoo. See LICENSE file for full copyright and licensing details.
import logging
from werkzeug import urls
from odoo import _, api, models
from odoo.exceptions import ValidationError
from odoo.tools.float_utils import float_compare
from odoo.addons.payment_alipay.controllers.main import AlipayController
_logger = logging.getLogger(__name__)
class PaymentTransaction(models.Model):
    _inherit = 'payment.transaction'

    def _get_specific_rendering_values(self, processing_values):
        """ Override of payment to return Alipay-specific rendering values.

        Note: self.ensure_one() from `_get_processing_values`

        :param dict processing_values: The generic and specific processing values of the transaction
        :return: The dict of acquirer-specific processing values
        :rtype: dict
        """
        res = super()._get_specific_rendering_values(processing_values)
        if self.provider != 'alipay':
            return res

        base_url = self.acquirer_id.get_base_url()
        if self.fees:
            # Similarly to what is done in `payment::payment.transaction.create`, we need to round
            # the sum of the amount and of the fees to avoid inconsistent string representations.
            # E.g., str(1111.11 + 7.09) == '1118.1999999999998'
            total_fee = self.currency_id.round(self.amount + self.fees)
        else:
            total_fee = self.amount
        rendering_values = {
            '_input_charset': 'utf-8',
            'notify_url': urls.url_join(base_url, AlipayController._notify_url),
            'out_trade_no': self.reference,
            'partner': self.acquirer_id.alipay_merchant_partner_id,
            'return_url': urls.url_join(base_url, AlipayController._return_url),
            'subject': self.reference,
            'total_fee': total_fee,
        }
        if self.acquirer_id.alipay_payment_method == 'standard_checkout':
            # https://global.alipay.com/docs/ac/global/create_forex_trade
            rendering_values.update({
                'service': 'create_forex_trade',
                'product_code': 'NEW_OVERSEAS_SELLER',
                'currency': self.currency_id.name,
            })
        else:
            rendering_values.update({
                'service': 'create_direct_pay_by_user',
                'payment_type': 1,
                'seller_email': self.acquirer_id.alipay_seller_email,
            })
        # The signature must cover all values sent so far; sign_type/sign/
        # api_url are added afterwards on purpose.
        sign = self.acquirer_id._alipay_build_sign(rendering_values)
        rendering_values.update({
            'sign_type': 'MD5',
            'sign': sign,
            'api_url': self.acquirer_id._alipay_get_api_url(),
        })
        return rendering_values

    @api.model
    def _get_tx_from_feedback_data(self, provider, data):
        """ Override of payment to find the transaction based on Alipay data.

        :param str provider: The provider of the acquirer that handled the transaction
        :param dict data: The feedback data sent by the provider
        :return: The transaction if found
        :rtype: recordset of `payment.transaction`
        :raise: ValidationError if inconsistent data were received
        :raise: ValidationError if the data match no transaction
        """
        tx = super()._get_tx_from_feedback_data(provider, data)
        if provider != 'alipay':
            return tx

        reference = data.get('reference') or data.get('out_trade_no')
        txn_id = data.get('trade_no')
        if not reference or not txn_id:
            raise ValidationError(
                "Alipay: " + _(
                    "Received data with missing reference %(r)s or txn_id %(t)s.",
                    r=reference, t=txn_id
                )
            )

        tx = self.search([('reference', '=', reference), ('provider', '=', 'alipay')])
        if not tx:
            raise ValidationError(
                "Alipay: " + _("No transaction found matching reference %s.", reference)
            )

        # Verify signature (done here because we need the reference to get the acquirer)
        sign_check = tx.acquirer_id._alipay_build_sign(data)
        sign = data.get('sign')
        if sign != sign_check:
            # BUGFIX: the format string previously used "%(sc)" without a
            # conversion type, which made rendering the message raise
            # ValueError instead of the intended ValidationError text.
            raise ValidationError(
                "Alipay: " + _(
                    "Expected signature %(sc)s but received %(sign)s.", sc=sign_check, sign=sign
                )
            )

        return tx

    def _process_feedback_data(self, data):
        """ Override of payment to process the transaction based on Alipay data.

        Note: self.ensure_one()

        :param dict data: The feedback data sent by the provider
        :return: None
        :raise: ValidationError if inconsistent data were received
        """
        super()._process_feedback_data(data)
        if self.provider != 'alipay':
            return

        if float_compare(float(data.get('total_fee', '0.0')), (self.amount + self.fees), 2) != 0:
            # mc_gross is amount + fees
            _logger.error(
                "the paid amount (%(amount)s) does not match the total + fees (%(total)s + "
                "%(fees)s) for transaction with reference %(ref)s",
                {
                    'amount': data.get('total_fee', '0.0'),
                    'total': self.amount,
                    'fees': self.fees,
                    'ref': self.reference,
                }
            )
            raise ValidationError("Alipay: " + _("The amount does not match the total + fees."))
        if self.acquirer_id.alipay_payment_method == 'standard_checkout':
            if data.get('currency') != self.currency_id.name:
                raise ValidationError(
                    "Alipay: " + _(
                        "The currency returned by Alipay %(rc)s does not match the transaction "
                        "currency %(tc)s.", rc=data.get('currency'), tc=self.currency_id.name
                    )
                )
        elif data.get('seller_email') != self.acquirer_id.alipay_seller_email:
            _logger.error(
                "the seller email (%(email)s) does not match the configured Alipay account "
                "(%(acc_email)s) for transaction with reference %(ref)s",
                {
                    'email': data.get('seller_email'),
                    # BUGFIX: the key must match the %(acc_email)s placeholder
                    # above; it was 'acc_email:' (stray colon), which made the
                    # lazy %-formatting of this log record raise KeyError.
                    'acc_email': self.acquirer_id.alipay_seller_email,
                    'ref': self.reference,
                },
            )
            raise ValidationError(
                "Alipay: " + _("The seller email does not match the configured Alipay account.")
            )

        self.acquirer_reference = data.get('trade_no')
        status = data.get('trade_status')
        if status in ['TRADE_FINISHED', 'TRADE_SUCCESS']:
            self._set_done()
        elif status == 'TRADE_CLOSED':
            self._set_canceled()
        else:
            _logger.info(
                "received data with invalid payment status (%s) for transaction with reference %s",
                status, self.reference,
            )
            self._set_error("Alipay: " + _("received invalid transaction status: %s", status))
|
adsabs/biblib-service | biblib/tests/functional_tests/test_returned_data_epic.py | Python | mit | 14,756 | 0.000339 | """
Functional test
Returned Data Epic
Storyboard is defined within the comments of the program itself
"""
import time
import unittest
from datetime import datetime, timedelta
from flask import url_for
from biblib.tests.stubdata.stub_data import UserShop, LibraryShop
from biblib.tests.base import MockEmailService, TestCaseDatabase, \
MockSolrBigqueryService, MockEndPoint
class TestReturnedDataEpic(TestCaseDatabase):
"""
Base class used to test the Big Share Epic
"""
def test_returned_data_user_view_epic(self):
"""
Carries out the epic 'Returned Data', for the UserView GET end point
:return: no return
"""
# Stub data
user_dave = UserShop()
user_mary = UserShop()
stub_library = LibraryShop()
# Librarian Dave makes a library (no bibcodes)
url = url_for('userview')
response = self.client.post(
url,
data=stub_library.user_view_post_data_json,
headers=user_dave.headers
)
self.assertEqual(response.status_code, 200, response)
library_id_dave = response.json['id']
# Dave looks at the library from the user view page and checks some
# of the parameters displayed to him.
with MockEmailService(user_dave, end_type='uid'):
response = self.client.get(
url,
headers=user_dave.headers
)
self.assertTrue(len(response.json['libraries']) == 1)
library = response.json['libraries'][0]
self.assertTrue(library['num_documents'] == 0)
self.assertTrue(library['num_users'] == 1)
self.assertTrue(library['permission'] == 'owner')
| self.assertEqual(library['public'], False)
self.assertEqual(library['owner'], user_dave.email.split('@')[0])
date_created = datetime.strptime(library['date_created'],
'%Y-%m-%dT%H:%M:%S.%f')
date_last_modified = datetime.strptime(library['date_last_modified'],
'%Y-%m-%dT%H:%M:%S.%f')
self.assertAlmostEqual(date_created,
| date_last_modified,
delta=timedelta(seconds=1))
# Dave adds content to his library
number_of_documents = 20
for i in range(number_of_documents):
# Stub data
library = LibraryShop()
# Add document
url = url_for('documentview', library=library_id_dave)
response = self.client.post(
url,
data=library.document_view_post_data_json('add'),
headers=user_dave.headers
)
self.assertEqual(response.json['number_added'],
len(library.bibcode))
self.assertEqual(response.status_code, 200, response)
# Dave looks in the library overview and sees that his library size
# has increased
url = url_for('userview')
with MockEmailService(user_dave, end_type='uid'):
response = self.client.get(
url,
headers=user_dave.headers
)
self.assertTrue(len(response.json['libraries'])==1)
self.assertEqual(response.status_code, 200)
library = response.json['libraries'][0]
self.assertTrue(library['num_documents'] == number_of_documents)
# Dave adds mary so that she can see the library and add content
url = url_for('permissionview', library=library_id_dave)
with MockEmailService(user_mary):
response = self.client.post(
url,
data=user_mary.permission_view_post_data_json({'read': False, 'write': False, 'admin': True, 'owner': False}),
headers=user_dave.headers
)
self.assertEqual(response.status_code, 200)
# Mary sees that the number of users of the library has increased by 1
url = url_for('userview')
with MockEmailService(user_mary, end_type='uid'):
with MockEmailService(user_dave, end_type='uid'):
response = self.client.get(
url,
headers=user_mary.headers
)
library = response.json['libraries'][0]
self.assertEqual(response.status_code, 200)
self.assertTrue(library['num_users'] == 2)
self.assertTrue(library['permission'] == 'admin')
# Mary adds content to the library
number_of_documents_second = 1
for i in range(number_of_documents_second):
# Stub data
library = LibraryShop()
# Add document
url = url_for('documentview', library=library_id_dave)
response = self.client.post(
url,
data=library.document_view_post_data_json('add'),
headers=user_mary.headers
)
self.assertEqual(response.json['number_added'],
len(library.bibcode))
self.assertEqual(response.status_code, 200, response)
# Dave sees that the number of bibcodes has increased and that the
# last modified date has changed, but the created date has not
url = url_for('userview')
with MockEmailService(user_dave, end_type='uid'):
response = self.client.get(
url,
headers=user_dave.headers
)
self.assertEqual(response.status_code, 200)
self.assertTrue(len(response.json['libraries']) == 1)
self.assertTrue(
response.json['libraries'][0]['num_documents']
== (number_of_documents+number_of_documents_second)
)
# This is to artificial alter the update time
time.sleep(1)
# Dave makes the library public.
url = url_for('documentview', library=library_id_dave)
response = self.client.put(
url,
data=library.document_view_put_data_json(public=True),
headers=user_dave.headers
)
self.assertEqual(response.status_code, 200)
# Dave sees that the lock sign from his library page has dissapeared
url = url_for('userview')
with MockEmailService(user_dave, end_type='uid'):
response = self.client.get(
url,
headers=user_dave.headers
)
self.assertEqual(response.status_code, 200)
libraries = response.json['libraries']
self.assertTrue(len(libraries) == 1)
self.assertTrue(
libraries[0]['num_documents'] == number_of_documents+1
)
self.assertTrue(libraries[0]['public'])
date_created_2 = datetime.strptime(libraries[0]['date_created'],
'%Y-%m-%dT%H:%M:%S.%f')
date_last_modified_2 = \
datetime.strptime(libraries[0]['date_last_modified'],
'%Y-%m-%dT%H:%M:%S.%f')
self.assertEqual(date_created, date_created_2)
self.assertNotAlmostEqual(date_created_2,
date_last_modified_2,
delta=timedelta(seconds=1))
def test_returned_data_library_view_epic(self):
"""
Carries out the epic 'Returned Data', for the LibraryView GET end point
that should return content similar to the UserView GET end point. This
ensures the responses are as expected.
:return: no return
"""
# Stub data
user_dave = UserShop()
user_mary = UserShop()
stub_library = LibraryShop()
# Librarian Dave makes a library (no bibcodes)
url = url_for('userview')
response = self.client.post(
url,
data=stub_library.user_view_post_data_json,
headers=user_dave.headers
)
self.assertEqual(response.status_code, 200, response)
library_id_dave = response.json['id']
# Dave looks at the library from the user view page and checks some
# of the pa |
sapo/python-kyototycoon | tests/config.py | Python | bsd-3-clause | 271 | 0 | #!/usr/bin/env python
#
# Copyright 2011, Toru Maesaka
#
# Redistribution and use of this source code is licensed under
# the BSD license. See COPYING file for license description.
import os
import sys

# Make the package root (one directory above tests/) importable so the test
# suite can exercise the in-tree kyototycoon module without installing it.
sys.path.insert(0, os.path.join(os.path.split(__file__)[0], '..'))
|
rinsewester/SchemaViz | edge.py | Python | mit | 15,908 | 0.006726 | #!/usr/bin/python3
# -*- coding: utf-8 -*-
"""
Widget to display simulation data of a CSDF graph.
author: Sander Giesselink
"""
from PyQt5.QtWidgets import QGraphicsItem, QInputDialog, QMessageBox
from PyQt5.QtCore import Qt, QRectF, QPointF
from PyQt5.QtGui import QColor, QPen, QBrush, QPainterPath, QFont
import schemastyle
class Edge(QGraphicsItem):
    def __init__(self, beginPoint, endPoint, beginSide, endSide, edgeSelfLoops, pRates, cRates):
        """Edge (curved arrow) between two node points of a CSDF graph.

        :param beginPoint: QPointF where the edge leaves the source node
        :param endPoint: QPointF where the edge enters the target node
        :param beginSide: side of the source node the edge attaches to
                          ('right' or other -- see getEdgePath)
        :param endSide: side of the target node the edge attaches to
        :param edgeSelfLoops: whether this edge loops back to its own node
        :param pRates: production rates shown along the edge
        :param cRates: consumption rates shown along the edge
        """
        super().__init__()
        self.edgeSelfLoops = edgeSelfLoops
        self.penWidth = 4
        self.beginSide = beginSide
        self.endSide = endSide
        self.beginPoint = beginPoint
        self.endPoint = endPoint
        # Derives curvePoint1/curvePoint2/midPoint used by the path builders.
        self.calculateCurvePoints(beginPoint, endPoint)
        self.cRates = cRates
        self.pRates = pRates
        # Rate rectangles (cRect/pRect) depend on the rates set just above.
        self.updatePCRects()
        self.calculateEdgeColors(schemastyle.LINK_BLUE_COLOR)
        #self.setFlags(QGraphicsItem.ItemIsSelectable | QGraphicsItem.ItemIsMovable)
        self.setAcceptHoverEvents(True)
        self.hover = False
        # Toggle to paint the debug overlay (see paint()/debug()).
        self.debugOn = False
def boundingRect(self):
#Used for collision detection and repaint
path = self.getEdgePath()
path.setFillRule(Qt.WindingFill)
path.addRect(self.cRect)
path.addRect(self.pRect)
path.addPath(self.getLargerEdgePath())
return path.boundingRect()
def shape(self):
#Determines the collision area
path = self.getEdgePath()
path.setFillRule(Qt.WindingFill)
path.addRect(self.cRect)
path.addRect(self.pRect)
path.addPath(self.getLargerEdgePath())
return path
    def paint(self, painter, option, widget):
        """Paint the edge, skipping work at very low zoom levels."""
        # Level of detail: < 0.05 means the edge is too small to bother drawing.
        lod = option.levelOfDetailFromTransform(painter.worldTransform())
        if lod > 0.05:
            self.paintEdge(painter, lod)
        if lod > 0.15 and self.debugOn:
            self.debug(painter)  # debug overlay, enabled via self.debugOn
def paintEdge(self, painter, lod):
pen = QPen(Qt.black)
pen.setWidth(1)
pen.setCapStyle(Qt.RoundCap)
brush = QBrush(self.edgeColor)
if self.hover:
#pen.setColor(self.edgeColorHover)
brush.setColor(self.edgeColorHover)
if QGraphicsItem.isSelected(self):
#pen.setColor(self.edgeColorSelected)
brush.setColor(self.edgeColorSelected)
painter.setPen(pen)
painter.setBrush(brush)
edgePath = self.getEdgePath()
edgePath = edgePath.simplified()
painter.drawPath(edgePath)
    def getEdgePath(self):
        """Build the closed outline of the edge as a QPainterPath.

        The outline is built as: forward cubic curve (shifted +2 px in y),
        the arrow head at the end point, a backward cubic curve (shifted
        -2 px in y) and a small cap at the begin point, yielding a thin
        closed band that can be filled.
        """
        yTranslation = 2
        #Curve 1: forward Bezier, offset downwards by yTranslation
        beginPoint = QPointF(self.beginPoint.x(), self.beginPoint.y() + yTranslation)
        curvePoint1 = QPointF(self.curvePoint1.x(), self.curvePoint1.y() + yTranslation)
        curvePoint2 = QPointF(self.curvePoint2.x(), self.curvePoint2.y() + yTranslation)
        endPoint = QPointF(self.endPoint.x(), self.endPoint.y() + yTranslation)
        path = QPainterPath(beginPoint)
        point1 = QPointF(curvePoint1.x(), curvePoint1.y())
        point2 = QPointF(curvePoint2.x(), curvePoint2.y())
        path.cubicTo(point1, point2, endPoint)
        #Arrow head; its tip direction depends on which node side we attach to
        arrowBeginPoint = QPointF(self.endPoint.x(), self.endPoint.y() + 4)
        path.lineTo(arrowBeginPoint)
        if self.endSide == 'right':
            path.lineTo(QPointF(self.endPoint.x() - 10, self.endPoint.y()))
        else:
            path.lineTo(QPointF(self.endPoint.x() + 10, self.endPoint.y()))
        path.lineTo(QPointF(self.endPoint.x(), self.endPoint.y() - 4))
        path.lineTo(QPointF(self.endPoint.x(), self.endPoint.y() - 2))
        #Curve 2 (back): same Bezier mirrored, offset upwards; note the
        #control points are swapped because we travel in reverse.
        endPoint = QPointF(self.beginPoint.x(), self.beginPoint.y() - yTranslation)
        curvePoint2 = QPointF(self.curvePoint1.x(), self.curvePoint1.y() - yTranslation)
        curvePoint1 = QPointF(self.curvePoint2.x(), self.curvePoint2.y() - yTranslation)
        beginPoint = QPointF(self.endPoint.x(), self.endPoint.y() - yTranslation)
        point1 = QPointF(curvePoint1.x(), curvePoint1.y())
        point2 = QPointF(curvePoint2.x(), curvePoint2.y())
        path.cubicTo(point1, point2, endPoint)
        #Cap at the begin point, closing the outline
        if self.beginSide == 'right':
            path.lineTo(QPointF(self.beginPoint.x() - 10, self.beginPoint.y() - 2))
            path.lineTo(QPointF(self.beginPoint.x() - 10, self.beginPoint.y() + 2))
        else:
            path.lineTo(QPointF(self.beginPoint.x() + 10, self.beginPoint.y() - 2))
            path.lineTo(QPointF(self.beginPoint.x() + 10, self.beginPoint.y() + 2))
        path.lineTo(QPointF(self.beginPoint.x(), self.beginPoint.y() + 2))
        return path
def getLargerEdgePath(self):
#Used to fill in the small areas on the edge
#This makes it easier to select the edge
yTranslation = 2
#Curve 1
beginPoint = QPointF(self.beginPoint.x(), self.beginPoint.y() + yTranslation)
curvePoint1 = QPointF(self.curvePoint1.x()+4, se | lf.curvePoint1.y() + yTranslation)
curvePoint2 | = QPointF(self.curvePoint2.x()+4, self.curvePoint2.y() + yTranslation)
endPoint = QPointF(self.endPoint.x(), self.endPoint.y() + yTranslation)
path = QPainterPath(beginPoint)
point1 = QPointF(curvePoint1.x(), curvePoint1.y())
point2 = QPointF(curvePoint2.x(), curvePoint2.y())
path.cubicTo(point1, point2, endPoint)
#Arrow
arrowBeginPoint = QPointF(self.endPoint.x(), self.endPoint.y() + 4)
path.lineTo(arrowBeginPoint)
if self.endSide == 'right':
path.lineTo(QPointF(self.endPoint.x() - 10, self.endPoint.y()))
else:
path.lineTo(QPointF(self.endPoint.x() + 10, self.endPoint.y()))
path.lineTo(QPointF(self.endPoint.x(), self.endPoint.y() - 4))
path.lineTo(QPointF(self.endPoint.x(), self.endPoint.y() - 2))
#Curve 2 (back)
endPoint = QPointF(self.beginPoint.x(), self.beginPoint.y() - yTranslation)
curvePoint2 = QPointF(self.curvePoint1.x(), self.curvePoint1.y() - yTranslation)
curvePoint1 = QPointF(self.curvePoint2.x(), self.curvePoint2.y() - yTranslation)
beginPoint = QPointF(self.endPoint.x(), self.endPoint.y() - yTranslation)
point1 = QPointF(curvePoint1.x(), curvePoint1.y())
point2 = QPointF(curvePoint2.x(), curvePoint2.y())
path.cubicTo(point1, point2, endPoint)
if self.beginSide == 'right':
path.lineTo(QPointF(self.beginPoint.x() - 10, self.beginPoint.y() - 2))
path.lineTo(QPointF(self.beginPoint.x() - 10, self.beginPoint.y() + 2))
else:
path.lineTo(QPointF(self.beginPoint.x() + 10, self.beginPoint.y() - 2))
path.lineTo(QPointF(self.beginPoint.x() + 10, self.beginPoint.y() + 2))
path.lineTo(QPointF(self.beginPoint.x(), self.beginPoint.y() + 2))
return path
    def debug(self, painter):
        """Draw a translucent debug overlay: collision shape, bounding rect,
        Bezier control points and the edge midpoint."""
        #Paint path (collision shape) in translucent red/grey
        painter.setBrush(QBrush(QColor(0, 0, 0, 25)))
        pen = QPen(QColor(255, 0, 0, 100))
        pen.setWidth(1)
        painter.setPen(pen)
        #Curve area
        path = QPainterPath()
        path.addPath(self.shape())
        painter.drawPath(path)
        #Curve control points
        painter.drawEllipse(self.curvePoint1, 2, 2)
        painter.drawEllipse(self.curvePoint2, 2, 2)
        #Draw area (bounding rect) in translucent green
        painter.setPen(QPen(QColor(0, 255, 0, 100)))
        painter.setBrush(QBrush(QColor(0, 0, 0, 15)))
        path2 = QPainterPath()
        rect = self.boundingRect()
        path2.addRect(rect)
        painter.drawPath(path2)
        #Middle point in translucent blue
        painter.setPen(QPen(QColor(0, 0, 255, 100)))
        painter.drawEllipse(self.midPoint, 2, 2)
def calculateCurvePoints(self, beginPoint, endPoint):
x = (beginPoint.x() + endPoint.x()) / 2
y = (beginPoint.y() + endPoint.y()) / 2
#Calculate the point in the midd |
EnEff-BIM/EnEffBIM-Framework | SimModel_Python_API/simmodel_swig/Release/SimModel_PyCallBack.py | Python | mit | 9,889 | 0.005157 | # This file was automatically generated by SWIG (http://www.swig.org).
# Version 3.0.7
#
# Do not make changes to this file unless you know what you are doing--modify
# the SWIG interface file instead.
from sys import version_info
# SWIG-generated bootstrap: locate and load the compiled _SimModel_PyCallBack
# extension next to this module. Do not edit; regenerate from the .i file.
if version_info >= (2, 6, 0):
    def swig_import_helper():
        from os.path import dirname
        import imp
        fp = None
        try:
            fp, pathname, description = imp.find_module('_SimModel_PyCallBack', [dirname(__file__)])
        except ImportError:
            # Fall back to a normal import from sys.path.
            import _SimModel_PyCallBack
            return _SimModel_PyCallBack
        if fp is not None:
            try:
                _mod = imp.load_module('_SimModel_PyCallBack', fp, pathname, description)
            finally:
                fp.close()
            return _mod
    _SimModel_PyCallBack = swig_import_helper()
    del swig_import_helper
else:
    import _SimModel_PyCallBack
del version_info
try:
    _swig_property = property
except NameError:
    pass  # Python < 2.2 doesn't have 'property'.
def _swig_setattr_nondynamic(self, class_type, name, value, static=1):
    """SWIG boilerplate attribute setter honouring __swig_setmethods__.

    With static=1 (the default) setting an unknown attribute raises, which
    keeps SWIG proxies from silently growing Python-side attributes.
    """
    if (name == "thisown"):
        return self.this.own(value)
    if (name == "this"):
        if type(value).__name__ == 'SwigPyObject':
            self.__dict__[name] = value
            return
    method = class_type.__swig_setmethods__.get(name, None)
    if method:
        return method(self, value)
    if (not static):
        if _newclass:
            object.__setattr__(self, name, value)
        else:
            self.__dict__[name] = value
    else:
        raise AttributeError("You cannot add attributes to %s" % self)
def _swig_setattr(self, class_type, name, value):
    # Non-static variant: allows adding new attributes to the instance.
    return _swig_setattr_nondynamic(self, class_type, name, value, 0)
def _swig_getattr_nondynamic(self, class_type, name, static=1):
    """SWIG boilerplate attribute getter honouring __swig_getmethods__."""
    if (name == "thisown"):
        return self.this.own()
    method = class_type.__swig_getmethods__.get(name, None)
    if method:
        return method(self)
    if (not static):
        return object.__getattr__(self, name)
    else:
        raise AttributeError(name)
def _swig_getattr(self, class_type, name):
    # Non-static variant used by the proxy classes' __getattr__.
    return _swig_getattr_nondynamic(self, class_type, name, 0)
def _swig_repr(self):
    """repr() for SWIG proxies; degrades gracefully if 'this' is missing."""
    try:
        strthis = "proxy of " + self.this.__repr__()
    except:
        strthis = ""
    return "<%s.%s; %s >" % (self.__class__.__module__, self.__class__.__name__, strthis,)
# Compatibility shims for ancient Pythons: new-style class base and weakref.
try:
    _object = object
    _newclass = 1
except AttributeError:
    class _object:
        pass
    _newclass = 0
try:
    import weakref
    weakref_proxy = weakref.proxy
except:
    weakref_proxy = lambda x: x
class SwigPyIterator(_object):
    """SWIG-generated proxy for the C++ iterator; abstract on the Python
    side. Do not edit by hand -- regenerate from the SWIG interface file."""
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, SwigPyIterator, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, SwigPyIterator, name)

    def __init__(self, *args, **kwargs):
        raise AttributeError("No constructor defined - class is abstract")
    __repr__ = _swig_repr
    __swig_destroy__ = _SimModel_PyCallBack.delete_SwigPyIterator
    __del__ = lambda self: None

    def value(self):
        return _SimModel_PyCallBack.SwigPyIterator_value(self)

    def incr(self, n=1):
        return _SimModel_PyCallBack.SwigPyIterator_incr(self, n)

    def decr(self, n=1):
        return _SimModel_PyCallBack.SwigPyIterator_decr(self, n)

    def distance(self, x):
        return _SimModel_PyCallBack.SwigPyIterator_distance(self, x)

    def equal(self, x):
        return _SimModel_PyCallBack.SwigPyIterator_equal(self, x)

    def copy(self):
        return _SimModel_PyCallBack.SwigPyIterator_copy(self)

    def next(self):
        # Python 2 iterator protocol.
        return _SimModel_PyCallBack.SwigPyIterator_next(self)

    def __next__(self):
        # Python 3 iterator protocol.
        return _SimModel_PyCallBack.SwigPyIterator___next__(self)

    def previous(self):
        return _SimModel_PyCallBack.SwigPyIterator_previous(self)

    def advance(self, n):
        return _SimModel_PyCallBack.SwigPyIterator_advance(self, n)

    def __eq__(self, x):
        return _SimModel_PyCallBack.SwigPyIterator___eq__(self, x)

    def __ne__(self, x):
        return _SimModel_PyCallBack.SwigPyIterator___ne__(self, x)

    def __iadd__(self, n):
        return _SimModel_PyCallBack.SwigPyIterator___iadd__(self, n)

    def __isub__(self, n):
        return _SimModel_PyCallBack.SwigPyIterator___isub__(self, n)

    def __add__(self, n):
        return _SimModel_PyCallBack.SwigPyIterator___add__(self, n)

    def __sub__(self, *args):
        return _SimModel_PyCallBack.SwigPyIterator___sub__(self, *args)

    def __iter__(self):
        return self
# Register the proxy with the C extension's type system.
SwigPyIterator_swigregister = _SimModel_PyCallBack.SwigPyIterator_swigregister
SwigPyIterator_swigregister(SwigPyIterator)
import SimModel
import base
class SimPyCallBack(_object):
    """SWIG-generated director class: subclass in Python and override the
    load*/get* methods so C++ can call back into Python code."""
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, SimPyCallBack, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, SimPyCallBack, name)
    __repr__ = _swig_repr

    def __init__(self):
        # Director pattern: pass None when instantiated directly, self when
        # subclassed, so C++ knows whether to dispatch virtuals to Python.
        if self.__class__ == SimPyCallBack:
            _self = None
        else:
            _self = self
        this = _SimModel_PyCallBack.new_SimPyCallBack(_self, )
        try:
            self.this.append(this)
        except:
            self.this = this
    __swig_destroy__ = _SimModel_PyCallBack.delete_SimPyCallBack
    __del__ = lambda self: None

    def loadSimClassObj(self, _dataName, _simClassList):
        return _SimModel_PyCallBack.SimPyCallBack_loadSimClassObj(self, _dataName, _simClassList)

    def loadSimGeomClassObj(self, _geomDataName, _simGeomClassList):
        return _SimModel_PyCallBack.SimPyCallBack_loadSimGeomClassObj(self, _geomDataName, _simGeomClassList)

    def loadSimSysClassObj(self, _sysDataName, _simSysClassList):
        return _SimModel_PyCallBack.SimPyCallBack_loadSimSysClassObj(self, _sysDataName, _simSysClassList)

    def getSimClassObj(self, _id):
        return _SimModel_PyCallBack.SimPyCallBack_getSimClassObj(self, _id)

    def getReferenceId(self, _classId, _propertyName):
        return _SimModel_PyCallBack.SimPyCallBack_getReferenceId(self, _classId, _propertyName)

    def getIO(self, _name):
        return _SimModel_PyCallBack.SimPyCallBack_getIO(self, _name)

    def getRefValueType(self, _classId, _propertyName):
        return _SimModel_PyCallBack.SimPyCallBack_getRefValueType(self, _classId, _propertyName)

    def getRefNumberValue(self, _classId, _propertyName):
        return _SimModel_PyCallBack.SimPyCallBack_getRefNumberValue(self, _classId, _propertyName)

    def getRefStringValue(self, _classId, _propertyName):
        return _SimModel_PyCallBack.SimPyCallBack_getRefStringValue(self, _classId, _propertyName)

    def __disown__(self):
        # Transfer ownership of the underlying C++ object to C++.
        self.this.disown()
        _SimModel_PyCallBack.disown_SimPyCallBack(self)
        return weakref_proxy(self)
# Register the proxy with the C extension's type system.
SimPyCallBack_swigregister = _SimModel_PyCallBack.SimPyCallBack_swigregister
SimPyCallBack_swigregister(SimPyCallBack)
class CallBack(SimPyCallBack):
    def __init__(self):
        """Concrete callback: lazily opened models and a RefId -> object map."""
        super(CallBack, self).__init__()
        self._sim = None       # SimModel data file, opened on first load
        self._simGeom = None   # geometry data file
        self._simSys = None    # system data file
        self._dict = {}        # RefId -> SimModel object index
def loadSimClassObj(self, _dataName, _simClassList):
if self._sim == None:
self._sim = SimModel.SimModel_(_dataName)
for id in range(0, _simClassList.size()):
_objList = getattr(self._sim, _simClassList[id])()
for su | bId in range(0, _objList.sizeInt()):
self._dict[_objList.at(subId).RefId()] = _objList.at(subId)
def loadSimGeomClassObj(self, _geomDataName, _simGeomClassList):
if self._simGeom == None:
self._simGeom = SimModel.SimModel_(_geomDataName)
for id in range(0, _simGeomClassList.size()):
_objLi | st = getattr(self._simGeom, _simGeomClassList[id])()
for subId in range(0, _objList.sizeInt()):
self._dict[_objList.at(subId).RefId()] = _objList.at(subId)
def loadSimSysClassObj(self, _sysDataName, _simSysClassList):
if self._ |
SavenR/Bingether | app/admin.py | Python | artistic-2.0 | 250 | 0 | fr | om django.contrib import admi | n
from .models import personalBinge
from .models import Bingether
from .models import Comment
# Register your models here.
# Expose the binge models on the default admin site.
for _model in (personalBinge, Bingether, Comment):
    admin.site.register(_model)
|
dbrattli/RxPY | rx/linq/observable/ignoreelements.py | Python | apache-2.0 | 587 | 0 | from rx import Observable, AnonymousObservable
from rx.internal import noop
from rx.internal import extensionmethod
@extensionmethod(Observable)
def ignore_elements(self):
    """Ignores all elements in an observable sequence leaving only the
    termination messages.

    Returns an empty observable {Observable} sequence that signals
    termination, successful or exceptional, of the source sequence.
    """
    source = self

    def subscribe(observer):
        # Drop on_next (noop); forward only errors and completion.
        return source.subscribe(noop, observer.on_error, observer.on_completed)

    return AnonymousObservable(subscribe)
|
streamlink/streamlink | tests/plugins/test_sportschau.py | Python | bsd-2-clause | 411 | 0.004866 | from streamlink.plugins.sportschau import Sportschau
from tests.plugins import PluginCanHandleUrl
class TestPluginCanHandleUrlSportschau(PluginCanHandleUrl):
    """URL-matching checks for the sportschau plugin."""
    __plugin__ = Sportschau

    should_match = [
        'http://www.sportschau.de/wintersport/videostream-livestream---wintersport-im-ersten-242.html',
        'https://www.sportschau.de/weitere/allgemein/video-kite-surf-world-tour-100.html',
    ]
|
d3banjan/polyamide | webdev/lib/python2.7/site-packages/pagedown/widgets.py | Python | bsd-2-clause | 2,219 | 0.002253 | from django import forms
from django.contrib.admin import widgets as admin_widgets
from django.forms.widgets import flatatt
from django.utils.html import conditional_escape
from django.template.loader import render_to_string
from pagedown import settings as pagedown_settings
from pagedown.utils import compatible_staticpath
try:
from django.utils.encoding import force_unicode
except ImportError: #python3
# https://docs.djangoproject.com/en/1.5/topics/python3/#string-handling
from django.utils.encoding import force_text as force_unicode
from django.utils.safestring import mark_safe
class PagedownWidget(forms.Textarea):
    """Textarea widget that renders the PageDown markdown editor.

    Pops its own options (show_preview, template, css) out of kwargs before
    delegating to forms.Textarea.
    """

    def __init__(self, *args, **kwargs):
        self.show_preview = kwargs.pop('show_preview', pagedown_settings.SHOW_PREVIEW)
        self.template = kwargs.pop('template', pagedown_settings.WIDGET_TEMPLATE)
        self.css = kwargs.pop('css', pagedown_settings.WIDGET_CSS)
        super(PagedownWidget, self).__init__(*args, **kwargs)

    def _media(self):
        # Editor assets; pagedown_init.js wires the editor to the textarea.
        return forms.Media(
            css={
                'all': self.css
            },
            js=(
                compatible_staticpath('pagedown/Markdown.Converter.js'),
                compatible_staticpath('pagedown/Markdown.Sanitizer.js'),
                compatible_staticpath('pagedown/Markdown.Editor.js'),
                compatible_staticpath('pagedown_init.js'),
            ))
    media = property(_media)

    def render(self, name, value, attrs=None):
        if value is None:
            value = ''
        # attrs may legitimately be None (Django passes None by default);
        # normalise before mutating to avoid a TypeError.
        if attrs is None:
            attrs = {}
        if 'class' not in attrs:
            attrs['class'] = ""
        attrs['class'] += " wmd-input"
        final_attrs = self.build_attrs(attrs, name=name)
        return render_to_string(self.template, {
            'attrs': flatatt(final_attrs),
            'body': conditional_escape(force_unicode(value)),
            'id': final_attrs['id'],
            'show_preview': self.show_preview,
        })
class AdminPagedownWidget(PagedownWidget, admin_widgets.AdminTextareaWidget):
    """Admin flavour of the widget: same behaviour, admin-specific assets."""
    class Media:
        css = {
            'all': (compatible_staticpath('admin/css/pagedown.css'),)
        }
        js = (
            compatible_staticpath('admin/js/pagedown.js'),
        )
|
rouxcode/django-cms-plugins | cmsplugins/sliders/migrations/0008_auto_20190823_1324.py | Python | mit | 977 | 0.003071 | # Generated by Django 2.1.11 o | n 2019-08-23 11:24
from django.db import migrations, models
import django.db.models.deletion


class Migration(migrations.Migration):
    # Auto-generated migration: switch FK deletion behaviour to SET_NULL so
    # deleting the related CMS objects does not cascade into slider data.

    dependencies = [
        ('sliders', '0007_slide_name_sub'),
    ]

    operations = [
        migrations.AlterField(
            model_name='link',
            name='plugin',
            field=models.OneToOneField(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='plugin_link', to='cms.CMSPlugin'),
        ),
        migrations.AlterField(
            model_name='slide',
            name='cms_page',
            field=models.ForeignKey(editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, to='cms.Page'),
        ),
        migrations.AlterField(
            model_name='slider',
            name='cms_page',
            field=models.ForeignKey(editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, to='cms.Page'),
        ),
    ]
|
vpv11110000/pyss | other/binominal.py | Python | mit | 4,240 | 0.018462 | ##!/usr/bin/python
# -*- coding: utf-8 -*-
"""
Две модели биноминального распределения (N=2 и N=40)
Запуск:
python ./binominal.py
Пример визуализации возникновения событий в соответствии с биноминальным распределением.
Биномиальное распределение представляется как сумма исходов событий, которые следуют распределению Бернулли. Его параметры – n (в модели константа N), число испытаний, и p – вероятность «успеха».
Здесь p равно 0.5 (монета).
Каждую единицу времени моделируется n-бросков симметричной монеты. Количество решек (значение - 1) является предметом построения графиков.
Формируется 5 одинаковых моделей с 5 таблицами, собирающими факты возникновения событий.
После моделирования выполняется построение графиков возникновения событий.
"""
# pylint: disable=line-too-long,missing-docstring,bad-whitespace
import sys
import os
import random
import math
import os
DIRNAME_MODULE = os.path.dirname(os.path.dirname(os.path.dirname(os.path.dirname(os.path.realpath(sys.argv[0]))))) + os.sep
sys.path.append(DIRNAME_MODULE)
sys.path.append(DIRNAME_MODULE + "pyss" + os.sep)
from pyss import pyssobject
from pyss.pyss_model import PyssModel
from pyss.segment import Segment
from pyss import generate
from pyss.generate import Generate
from pyss.terminate import Terminate
from pyss import logger
from pyss.table import Table
from pyss.assemble import Assemble
from pyss.qtable import Qtable
from pyss.handle import Handle
from pyss.enter import Enter
from pyss.leave import Leave
from pyss.storage import Storage
from pyss.advance import Advance
from pyss.assign import Assign
from pyss.preempt import Preempt
from pyss.g_return import GReturn
from pyss.facility import Facility
from pyss.seize import Seize
from pyss.release import Release
from pyss.transfer import Transfer
from pyss.tabulate import Tabulate
from pyss.test import Test
from pyss.queue import Queue
from pyss.depart import Depart
from pyss.split import Split
from pyss.test import Test
from pyss.bprint import Bprint
from pyss.gate import Gate
from pyss.pyss_const import *
from pyss.func_discrete import FuncDiscrete
from pyss.func_exponential import Exponential
from pyss.func_normal import Normal
from pyss.plot_func import PlotFunc
from pyss.simpleobject import SimpleObject
def main(N=2):
logger.info("--- Биноминальное распределение (монета) ---")
random.seed()
CAPTION="Binominal "+str(N)
def valFunc_T_1(owner, transact):
l=[random.choice([0,1]) for x in range(N)]
print str(l)
return sum(l)
#
### MODEL ----------------------------------
m = PyssModel()
sgm = Segment(m)
#
m[OPTIONS].setAllFalse()
m[OPTIONS].printResult = True
#
MAX_TIME=20
# tables
F_1="F_1"
def argFunc_T_1(owner, transact):
return transact[TIME_CREATED]
tables = Table(m,
tableName="T_1",
argFunc=argFunc_T_1,
limitUpFirst=1,
widthInt=1,
countInt=MAX_TIME).setDisplaying(displaying=False)
#
def mf(owner, currentTime):
#бросок монеты
return 1
| #генерится см. mf()
Generate(sgm, med_value=0, modificatorFunc=mf,first_tx=0, max_amount=1000)
Tabulate(sgm, table=m.getTables()[0],valFunc=valFunc_T_1 | )
Terminate(sgm, deltaTerminate=0)
#
m.initPlotTable(title=CAPTION)
#
m.start(terminationCount=MAX_TIME, maxTime=MAX_TIME)
#
m.plotByModulesAndSave(CAPTION)
m.plotByModulesAndShow()
if __name__ == '__main__':
    # Run the demo twice: 2 trials and 40 trials per time unit.
    main(N=2)
    main(N=40)
|
kgao/MediaDrop | mediadrop/plugin/tests/events_test.py | Python | gpl-3.0 | 2,599 | 0.005387 | # This file is a part of MediaDrop (http://www.mediadrop.net),
# Copyright 2009-2014 MediaDrop contributors
# For the exact contribution history, see the git revision log.
# The source code contained in this file is licensed under the GPLv3 or
# (at your option) any later version.
# See LICENSE.txt in the main project directory, for more information.
from mediadrop.lib.test.pythonic_testcase import *
from mediadrop.plugin.events import Event, FetchFirstResultEvent, GeneratorEvent
class EventTest(PythonicTestCase):
    """Event should notify both its pre- and post-observers when fired."""

    def setUp(self):
        self.observers_called = 0
        self.event = Event()

    def probe(self):
        # Shared counter incremented once per observer notification.
        self.observers_called += 1

    def test_can_notify_all_observers(self):
        self.event.post_observers.append(self.probe)
        self.event.pre_observers.append(self.probe)
        assert_equals(0, self.observers_called)
        self.event()
        assert_equals(2, self.observers_called)
class FetchFirstResultEventTest(PythonicTestCase):
    """FetchFirstResultEvent should return the first non-None observer result
    and forward all call arguments to every observer."""

    def test_returns_first_non_null_result(self):
        event = FetchFirstResultEvent([])
        event.post_observers.append(lambda: None)
        event.post_observers.append(lambda: 1)
        event.post_observers.append(lambda: 2)
        assert_equals(1, event())

    def test_passes_all_event_parameters_to_observers(self):
        event = FetchFirstResultEvent([])
        event.post_observers.append(lambda foo, bar=None: foo)
        event.post_observers.append(lambda foo, bar=None: bar or foo)
        assert_equals(4, event(4))
        assert_equals(6, event(None, bar=6))
class GeneratorEventTest(PythonicTestCase):
    """GeneratorEvent should flatten observer results into one stream while
    passing non-iterable results through unchanged."""

    def test_can_unroll_lists(self):
        event = GeneratorEvent([])
        for result in ([1, 2, 3], ('a', 'b')):
            event.post_observers.append(lambda result=result: result)
        assert_equals([1, 2, 3, 'a', 'b'], list(event()))

    def test_can_return_non_iterable_items(self):
        event = GeneratorEvent([])
        for result in ([1, ], None, 5, 'some value'):
            event.post_observers.append(lambda result=result: result)
        assert_equals([1, None, 5, 'some value'], list(event()))
import unittest
def suite():
    """Aggregate all event test cases into a single unittest suite."""
    test_cases = (EventTest, FetchFirstResultEventTest, GeneratorEventTest)
    result = unittest.TestSuite()
    for case in test_cases:
        result.addTest(unittest.makeSuite(case))
    return result
if __name__ == '__main__':
    # Allow running this module directly; uses suite() above.
    unittest.main(defaultTest='suite')
|
alexkasko/krakatau-java | krakatau-lib/src/main/resources/Lib/Krakatau/verifier/verifier_types.py | Python | gpl-3.0 | 4,399 | 0.01273 | import collections
#Define types for Inference
nt = collections.namedtuple
fullinfo_t = nt('fullinfo_t', ['tag','dim','extra'])
#Differences from Hotspot with our tags:
#BOGUS changed to None. Array omitted as it is unused. Void omitted as unecessary. Boolean added
valid_tags = ['.' | +_x for _x in 'int float double double2 long long2 obj new init address byte short char boolean'.split()]
valid_tags = frozenset([None] + valid_tags)
def _makeinfo(tag, dim=0, extra=None):
assert(tag in valid_tags)
return fullinfo_t(tag, dim, extra)
T_INVALID = _makeinfo(None)
T_INT = _makeinfo('.int')
T_FLOAT = _makeinfo('.float')
T_DOUBLE = _ma | keinfo('.double')
T_DOUBLE2 = _makeinfo('.double2') #Hotspot only uses these in locals, but we use them on the stack too to simplify things
T_LONG = _makeinfo('.long')
T_LONG2 = _makeinfo('.long2')
T_NULL = _makeinfo('.obj')
T_UNINIT_THIS = _makeinfo('.init')
T_BYTE = _makeinfo('.byte')
T_SHORT = _makeinfo('.short')
T_CHAR = _makeinfo('.char')
T_BOOL = _makeinfo('.boolean') #Hotspot doesn't have a bool type, but we can use this elsewhere
cat2tops = {T_LONG:T_LONG2, T_DOUBLE:T_DOUBLE2}
#types with arguments
def T_ADDRESS(entry):
return _makeinfo('.address', extra=entry)
def T_OBJECT(name):
return _makeinfo('.obj', extra=name)
def T_ARRAY(baset, newDimensions=1):
assert(0 <= baset.dim <= 255-newDimensions)
return _makeinfo(baset.tag, baset.dim+newDimensions, baset.extra)
def T_UNINIT_OBJECT(origin):
return _makeinfo('.new', extra=origin)
OBJECT_INFO = T_OBJECT('java/lang/Object')
CLONE_INFO = T_OBJECT('java/lang/Cloneable')
SERIAL_INFO = T_OBJECT('java/io/Serializable')
THROWABLE_INFO = T_OBJECT('java/lang/Throwable')
def objOrArray(fi): #False on uninitialized
    # A verifier type is a reference if it has array dimensions or an object tag.
    return fi.dim > 0 or fi.tag == '.obj'
def unSynthesizeType(t):
    """Collapse the synthetic boolean/byte/char/short types down to int."""
    return T_INT if t in (T_BOOL, T_BYTE, T_CHAR, T_SHORT) else t
def decrementDim(fi):
    """Strip one array dimension from fi (e.g. int[][] -> int[]).

    T_NULL is returned unchanged. When removing the final dimension, the
    synthetic byte/short/char/boolean element types collapse to int —
    presumably modeling that array loads of those types push int (confirm
    against the verifier's callers).
    """
    if fi == T_NULL:
        return T_NULL
    assert(fi.dim)
    # Bug fix: unSynthesizeType must receive the dimension-stripped base.
    # The original passed fi itself, whose dim >= 1 can never equal the
    # dim-0 constants (T_BYTE etc.), so the conditional was dead code.
    tag = unSynthesizeType(withNoDimension(fi)).tag if fi.dim <= 1 else fi.tag
    return _makeinfo(tag, fi.dim-1, fi.extra)
def withNoDimension(fi):
    """Return fi's base element type, i.e. fi with every array dimension removed."""
    return _makeinfo(tag=fi.tag, dim=0, extra=fi.extra)
def _decToObjArray(fi):
    # Leave object arrays alone; replace a primitive array with an Object
    # array one dimension shallower (int[][] and Object[] merge alike).
    if fi.tag == '.obj':
        return fi
    return T_ARRAY(OBJECT_INFO, fi.dim-1)
def _arrbase(fi):
    """Base element type of an array (alias of withNoDimension)."""
    # Deduplicated: this function repeated withNoDimension's logic verbatim.
    return withNoDimension(fi)
def mergeTypes(env, t1, t2, forAssignment=False):
    """Compute the merge (least common supertype) of verifier types t1 and t2.

    env supplies class metadata: env.getFlags(name) is queried for
    'INTERFACE' and env.getSupers(name) for a superclass chain (root-first,
    per the assert below). When forAssignment is true the merge is
    asymmetric: t2 is treated as the assignment target, and interface
    targets accept anything. Returns T_INVALID for incompatible types.
    """
    #Note: This function is intended to have the same results as the equivalent function in Hotspot's old inference verifier
    if t1 == t2:
        return t1
    #non objects must match exactly
    if not objOrArray(t1) or not objOrArray(t2):
        return T_INVALID
    # null merges to the other reference type.
    if t1 == T_NULL:
        return t2
    elif t2 == T_NULL:
        return t1
    if t1 == OBJECT_INFO or t2 == OBJECT_INFO:
        if forAssignment and t2.dim == 0 and 'INTERFACE' in env.getFlags(t2.extra):
            return t2 #Hack for interface assignment
        return OBJECT_INFO
    if t1.dim or t2.dim:
        # Arrays: Cloneable/Serializable absorb any array type.
        for x in (t1,t2):
            if x in (CLONE_INFO,SERIAL_INFO):
                return x
        # Normalize primitive arrays to Object arrays, shallower by one dim.
        t1 = _decToObjArray(t1)
        t2 = _decToObjArray(t2)
        # Order so t1 has the fewer dimensions.
        if t1.dim > t2.dim:
            t1, t2 = t2, t1
        if t1.dim == t2.dim:
            # Equal depth: merge the base classes, keeping the dimensions.
            res = mergeTypes(env, _arrbase(t1), _arrbase(t2), forAssignment)
            return res if res == T_INVALID else _makeinfo('.obj', t1.dim, res.extra)
        else: #t1.dim < t2.dim
            return t1 if _arrbase(t1) in (CLONE_INFO,SERIAL_INFO) else T_ARRAY(OBJECT_INFO, t1.dim)
    else: #neither is array
        if 'INTERFACE' in env.getFlags(t2.extra):
            return t2 if forAssignment else OBJECT_INFO
        # Walk both superclass chains in lockstep and keep the deepest
        # common ancestor (zip truncates to the shorter chain).
        hierarchy1 = env.getSupers(t1.extra)
        hierarchy2 = env.getSupers(t2.extra)
        matches = [x for x,y in zip(hierarchy1, hierarchy2) if x==y]
        assert(matches[0] == 'java/lang/Object') #internal assertion
        return T_OBJECT(matches[-1])
def isAssignable(env, t1, t2):
    """True when a value of type t1 may be stored into a slot of type t2."""
    merged = mergeTypes(env, t1, t2, forAssignment=True)
    return merged == t2
#Make verifier types printable for easy debugging
def vt_toStr(self):
    """Render a verifier type in a compact human-readable form."""
    if self == T_INVALID:
        return '.none'
    if self == T_NULL:
        return '.null'
    suffix = '[]'*self.dim
    if self.tag == '.obj':
        return self.extra + suffix
    if self.extra is not None:
        return '{}<{}>'.format(self.tag, self.extra) + suffix
    return self.tag + suffix
# Install the debug formatter on the namedtuple type.
# Fixed: a stray trailing FIM delimiter ('|') corrupted this statement.
fullinfo_t.__str__ = fullinfo_t.__repr__ = vt_toStr
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.