repo_name stringlengths 5 100 | path stringlengths 4 231 | language stringclasses 1 value | license stringclasses 15 values | size int64 6 947k | score float64 0 0.34 | prefix stringlengths 0 8.16k | middle stringlengths 3 512 | suffix stringlengths 0 8.17k |
|---|---|---|---|---|---|---|---|---|
keenerd/namcap | Namcap/rules/fileownership.py | Python | gpl-2.0 | 1,345 | 0.015613 | #
# namcap rules - fileownership
# Copyright (C) 2003-2009 Jason Chu <jason@archlinux.org>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is di | stributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Ge | neral Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
from Namcap.ruleclass import *
class package(TarballRule):
name = "fileownership"
description = "Checks file ownership."
def analyze(self, pkginfo, tar):
for i in tar.getmembers():
if i.uname != 'root' or i.gname != 'root':
uname = ""
gname = ""
if i.uname == "":
uname = str(i.uid)
else:
uname = i.uname
if i.gname == "":
gname = str(i.gid)
else:
gname = i.gname
self.errors.append(("incorrect-owner %s (%s:%s)", (i.name, uname, gname)))
# vim: set ts=4 sw=4 noet:
|
ai-se/x-effort | Technix/sdivUtil.py | Python | mit | 2,716 | 0.039764 | from __future__ import division,print_function
import sys
sys.dont_write_bytecode = True
def sdiv(lst, tiny=2,cohen=0.3,
num1=lambda x:x[0], num2=lambda x:x[1]):
"Divide lst of (num1,num2) using variance of num2."
#----------------------------------------------
class Counts(): # Add/delete counts of numbers.
def __init__(i,inits=[]):
i.zero()
for number in inits: i + number
def zero(i): i.n = i.mu = i.m2 = 0.0
def sd(i) :
if i.n < 2: return i.mu
else:
return (max(0,i.m2)*1.0/(i.n - 1))**0.5
def __add__(i,x):
i.n += 1
delta = x - i.mu
i.mu += delta/(1.0*i.n)
| i.m2 += delta*(x - i.mu)
def __sub__(i,x):
if i.n < 2: return i.zero()
i.n -= 1
| delta = x - i.mu
i.mu -= delta/(1.0*i.n)
i.m2 -= delta*(x - i.mu)
#----------------------------------------------
def divide(this,small): #Find best divide of 'this'
lhs,rhs = Counts(), Counts(num2(x) for x in this)
n0, least, cut = 1.0*rhs.n, rhs.sd(), None
for j,x in enumerate(this):
if lhs.n > tiny and rhs.n > tiny:
maybe= lhs.n/n0*lhs.sd()+ rhs.n/n0*rhs.sd()
if maybe < least :
if abs(lhs.mu - rhs.mu) >= small:
cut,least = j,maybe
rhs - num2(x)
lhs + num2(x)
return cut,least
#----------------------------------------------
def recurse(this, small,cuts):
cut,sd = divide(this,small)
if cut:
recurse(this[:cut], small, cuts)
recurse(this[cut:], small, cuts)
else:
cuts += [(sd * len(this)/len(lst),this)]
#cuts += [(sd * len(this)/len(lst),[num2(row) for row in this])]
return cuts
#---| main |-----------------------------------
small = Counts(num2(x) for x in lst).sd()*cohen
if lst:
return recurse(sorted(lst,key=num1),small,[])
def cells(dataset, rows=None):
if rows == None:
rows = dataset._rows
rowCells = []
for row in rows:
rowCells += [row.cells]
return rowCells
def fss(d):
rank = []
maxVal, minVal = 0, sys.maxint
for i in range(len(d.indep)):
xs = sdiv(cells(d),
num1 = lambda x:x[i],
num2 = lambda x:x[len(d.indep)])
xpect = sum(map(lambda x: x[0],xs))
if xpect < minVal:
minVal = xpect
elif xpect > maxVal:
maxVal = xpect
rank += [(xpect,i)]
return normalize_weights(rank, maxVal, minVal)
def normalize_weights(rank, maxVal, minVal):
# sort based on columns
sortedRank = sorted(rank, key=lambda x: x[1])
weights = []
for value, dimension in sortedRank:
# TODO Raise to power 2 and try
normal_Wt= ((maxVal - value) / (maxVal - minVal))
weights.append(normal_Wt)
return weights;
#fss() |
wonghoifung/learning-python | spider/spider_url_producer/pb/produce_url_resp_pb2.py | Python | mit | 1,934 | 0.00879 | # Generated by the protocol buffer compiler. DO NOT EDIT!
# source: produce_url_resp.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
fr | om google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
from google.protobuf import descriptor_pb2
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor.FileDescriptor(
name='produce_url_resp.proto',
package='spider',
syntax='proto2',
serialized_pb=_b('\ | n\x16produce_url_resp.proto\x12\x06spider\"\x1f\n\x10produce_url_resp\x12\x0b\n\x03res\x18\x01 \x02(\x05')
)
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
_PRODUCE_URL_RESP = _descriptor.Descriptor(
name='produce_url_resp',
full_name='spider.produce_url_resp',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='res', full_name='spider.produce_url_resp.res', index=0,
number=1, type=5, cpp_type=1, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=34,
serialized_end=65,
)
DESCRIPTOR.message_types_by_name['produce_url_resp'] = _PRODUCE_URL_RESP
produce_url_resp = _reflection.GeneratedProtocolMessageType('produce_url_resp', (_message.Message,), dict(
DESCRIPTOR = _PRODUCE_URL_RESP,
__module__ = 'produce_url_resp_pb2'
# @@protoc_insertion_point(class_scope:spider.produce_url_resp)
))
_sym_db.RegisterMessage(produce_url_resp)
# @@protoc_insertion_point(module_scope)
|
ierror/django-js-reverse | django_js_reverse/templatetags/js_reverse.py | Python | mit | 838 | 0 | # -*- coding: utf-8 -*-
from django import template
from django.utils.safestring import mark_safe
from django_js_reverse.core import generate_js
try:
from django.urls import get_resolver
except ImportError:
from django.core.urlresolvers import get_resolver
register = template.Library()
urlconf = template.Variable('request.urlconf')
def _get_urlconf(context):
try:
return context.request.urlconf
except AttributeError:
pass
try:
return urlco | nf. | resolve(context)
except template.VariableDoesNotExist:
pass
@register.simple_tag(takes_context=True)
def js_reverse_inline(context):
"""
Outputs a string of javascript that can generate URLs via the use
of the names given to those URLs.
"""
return mark_safe(generate_js(get_resolver(_get_urlconf(context))))
|
JustinWingChungHui/electionleaflets | electionleaflets/apps/elections/admin.py | Python | mit | 312 | 0.032051 | from django.contrib import admin
from elections.models impor | t Election
class ElectionOptions(admin.ModelAdmin):
list_display = ['name', 'country', 'active']
search_fields = ['name']
ordering = ['live_da | te']
admin.site.register( Election, ElectionOptions )
|
anewmark/galaxy_dark_matter | callmeanSB.py | Python | mit | 1,365 | 0.035165 | from defcuts import *
from defflags import *
from defclump import *
import astropy.table as table
indir='/Users/amandanewmark/repositories/galaxy_dark_matter/GAH/'
outdir='/Users/amandanewmark/repositories/galaxy_dark_matter/lumprofplots/clumps/'
datatab = table.Table.read(indir+ 'LOWZ_HSCGAMA15_apmgs.fits')
band='i'
parm=['flags_pixel_saturated_center','flags_pixel_edge','flags_pixel_interpolated_center','flags_pixel_cr_center','flags_pixel_suspect_center', 'flags_pixel_clipped_any','flags_pixel_bad']
Flags=['flags_pixel_bright_object_center', 'No Flags', 'No Bright Ojbect Centers', 'Only Bright Object Centers', 'brobj_cen_flag']
#Flags=['flags_pixel_bright_object_any', 'No Flags', 'No Bright Ojbects', 'Only Bright Objects', 'brobj_any_flag']
#Flags=['blendedne | ss_flags', 'No Flags', 'Not Blended', 'Blended', 'blend']
daperture=[1.01,1.51,2.02,3.02,4.03,5.71,8.40,11.8,16.8,23.5]
aperture=[x*0.5 for x in daperture]
mincut= 17.5
maxcut=18.5
minz=0.25
maxz=0.35
colname='mag_aperture0'
cutcolname='mag_aperture05'
#cutcolname='mag_cmodel'
datazcut, rangez=z_cut(datatab, minz, maxz)
cutdata, crange=out_cut(datazcut, band, cutcolname,mincut, maxcut)
newdata=many_flags(c | utdata, parm, band) #this gets rid of multiple flags
Flag, Not,lab= TFflag(band,Flags, newdata)
TF_meanSB(Flag, Not, aperture, band, colname, lab,rangez, outdir)
|
rdio/sentry | src/sentry/models/event.py | Python | bsd-3-clause | 5,941 | 0.000673 | """
sentry.models.event
~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2010-2013 by the Sentry Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
import logging
from django.db import models
from django.utils import timezone
from django.utils.datastructures import SortedDict
from django.utils.translation import ugettext_lazy as _
from sentry.constants import LOG_LEVELS, MAX_CULPRIT_LENGTH
from sentry.db.models import (
Model, NodeField, BoundedIntegerField, BoundedPositiveIntegerField,
BaseManager, sane_repr
)
from sentry.utils.cache import memoize
from sentry.utils.imports import import_string
from sentry.utils.safe import safe_execute
from sentry.utils.strings import truncatechars, strip
class Event(Model):
"""
An individual event.
"""
group = models.ForeignKey('sentry.Group', blank=True, null=True, related_name="event_set")
event_id = models.CharField(max_length=32, null=True, db_column="message_id")
project = models.ForeignKey('sentry.Project', null=True)
logger = models.CharField(
max_length=64, blank=True, default='root', db_index=True)
level = BoundedPositiveIntegerField(
choices=LOG_LEVELS.items(), default=logging.ERROR, blank=True,
db_index=True)
message = models.TextField()
culprit = models.CharField(
max_length=MAX_CULPRIT_LENGTH, blank=True, null=True,
db_column='view')
checksum = models.CharField(max_length=32, db_index=True)
num_comments = BoundedPositiveIntegerField(default=0, null=True)
platform = models.CharField(max_length=64, null=True)
datetime = models.DateTimeField(default=timezone.now, db_index=True)
time_spent = BoundedIntegerField(null=True)
server_name = models.CharField(max_length=128, db_index=True, null=True)
site = models.CharField(max_length=128, db_index=True, null=True)
data = NodeField(blank=True, null=True)
objects = BaseManager()
class Meta:
app_label = 'sentry'
db_table = 'sentry_message'
verbose_name = _('message')
verbose_name_plural = _('messages')
unique_together = ('project', 'event_id')
__repr__ = sane_repr('project_id', 'group_id', 'checksum')
def error(self):
message = strip(self.message)
if not message:
message = '<unlabeled message>'
else:
message = truncatechars(message.splitlines()[0], 100)
return message
error.short_description = _('error')
def has_two_part_message(self):
message = strip(self.message)
return '\n' in message or len(message) > 100
def message_top(self):
culprit = strip(self.culprit)
if culprit:
return culprit
return self.error()
@property
def team(self):
return self.project.team
@memoize
def ip_address(self):
http_data = self.data.get('sentry.interfaces.Http')
if http_data and 'env' in http_data:
value = http_data['env'].get('REMOTE_ADDR')
if value:
return value
user_data = self.data.get('sentry.interfaces.User' | )
if user_data:
va | lue = user_data.get('ip_address')
if value:
return value
return None
@memoize
def user_ident(self):
"""
The identifier from a user is considered from several interfaces.
In order:
- User.id
- User.email
- User.username
- Http.env.REMOTE_ADDR
"""
user_data = self.data.get('sentry.interfaces.User')
if user_data:
ident = user_data.get('id')
if ident:
return 'id:%s' % (ident,)
ident = user_data.get('email')
if ident:
return 'email:%s' % (ident,)
ident = user_data.get('username')
if ident:
return 'username:%s' % (ident,)
ident = self.ip_address
if ident:
return 'ip:%s' % (ident,)
return None
@memoize
def interfaces(self):
result = []
for key, data in self.data.iteritems():
if '.' not in key:
continue
try:
cls = import_string(key)
except ImportError:
continue # suppress invalid interfaces
value = safe_execute(cls, **data)
if not value:
continue
result.append((key, value))
return SortedDict((k, v) for k, v in sorted(result, key=lambda x: x[1].get_score(), reverse=True))
def get_version(self):
if not self.data:
return
if '__sentry__' not in self.data:
return
if 'version' not in self.data['__sentry__']:
return
module = self.data['__sentry__'].get('module', 'ver')
return module, self.data['__sentry__']['version']
def get_tags(self):
try:
return [
(t, v) for t, v in self.data.get('tags') or ()
if not t.startswith('sentry:')
]
except ValueError:
# at one point Sentry allowed invalid tag sets such as (foo, bar)
# vs ((tag, foo), (tag, bar))
return []
def as_dict(self):
# We use a SortedDict to keep elements ordered for a potential JSON serializer
data = SortedDict()
data['id'] = self.event_id
data['checksum'] = self.checksum
data['project'] = self.project.slug
data['logger'] = self.logger
data['level'] = self.get_level_display()
data['culprit'] = self.culprit
data['datetime'] = self.datetime
data['time_spent'] = self.time_spent
for k, v in sorted(self.data.iteritems()):
data[k] = v
return data
@property
def size(self):
return len(unicode(vars(self)))
|
Chandlercjy/OnePy | OnePy/sys_module/models/signals.py | Python | mit | 4,404 | 0 | from itertools import count
from typing import Union
from dataclasses import dataclass, field
from OnePy.constants import ActionType, OrderType
from OnePy.sys_module.components.exceptions import (OrderConflictError,
PctRangeError)
from OnePy.sys_module.metabase_env import OnePyEnvBase
@dataclass
class Signal(OnePyEnvBase):
counter = count(1)
strategy_name: str
action_type: ActionType
size: int
ticker: str
takeprofit: float = None
takeprofit_pct: float = None
stoploss: float = None
stoploss_pct: float = None
trailingstop: float = None
trailingstop_pct: float = None
price: float = None
price_pct: float = None
signal_id: int = None
datetime: str = field(init=False)
def __post_init__(self):
self.datetime = self.env.sys_date
self.next_datetime = self.env.feeds[self.ticker].next_ohlc['date']
self.signal_id = next(self.counter)
self._check_all_conflict()
self._save_signals()
def _save_signals(self):
self.env.signals_normal_cur.append(self)
if self.env.is_save_original:
self.env.signals_normal.append(self)
def _check_all_conflict(self):
self._check_size()
self._check_conflict(self.price, self.price_pct, name='price')
self._check_conflict(
self.takeprofit, self.takeprofit_pct, name='takeprofit')
self._check_conflict(self.stoploss, self.stoploss_pct, name='stoploss')
self._check_conflict(
self.trailingstop, self.trailingstop_pct, name='trailingstop')
def _check_size(self):
if self.size <= 0:
raise Exception("size should be Positive")
@staticmethod
def _check_conflict(obj: float, obj_pct: float, name: str):
if obj a | nd obj_pct: |
raise OrderConflictError("$ and pct can't be set together")
if obj_pct:
if not -1 < obj_pct < 1:
raise PctRangeError("pct should be -1 < pct < 1")
if name != 'price':
if obj:
if obj <= 0:
raise ValueError(f"{name.upper()} should be Positive")
if obj_pct:
if obj_pct <= 0:
raise ValueError(f"{name.upper()} should be Positive")
def get(self, name: str):
return getattr(self, name)
def set(self, name: str, value: float):
setattr(self, name, value)
@dataclass
class SignalForPending(Signal):
price: float = None
price_pct: float = None
def _save_signals(self):
self.env.signals_pending_cur.append(self)
if self.env.is_save_original:
self.env.signals_pending.append(self)
@dataclass
class SignalByTrigger(SignalForPending):
counter = count(1)
order_type: OrderType = None
mkt_id: int = None
trigger_key: str = None
execute_price: float = None # 用来确定是否是必成单,用于挂单
first_cur_price: float = None # 记录挂单信号产生时候的价格
parent_order: str = None # 其实不是str,是一个order对象
def _save_signals(self):
self.env.signals_trigger_cur.append(self)
if self.env.is_save_original:
self.env.signals_trigger.append(self)
@dataclass
class SignalCancelBase(OnePyEnvBase):
counter = None
action_type: ActionType
strategy_name: str
ticker: str
long_or_short: str
def __post_init__(self):
self.datetime = self.env.sys_date
self.signal_id = next(self.counter)
self._check_all_conflict()
self._save_signals()
def _save_signals(self):
self.env.signals_cancel_cur.append(self)
if self.env.is_save_original:
self.env.signals_cancel.append(self)
def _check_all_conflict(self):
raise NotImplementedError
@dataclass
class SignalCancelTST(SignalCancelBase):
counter = count(1)
takeprofit: bool
stoploss: bool
trailingstop: bool
def _check_all_conflict(self):
pass
@dataclass
class SignalCancelPending(SignalCancelBase):
counter = count(1)
below_price: float = None
above_price: float = None
def _check_all_conflict(self):
if self.below_price is not None and self.above_price is not None:
raise ValueError(f"below and above price can't be set together!")
|
hb9kns/PyBitmessage | src/helper_generic.py | Python | mit | 2,946 | 0.006789 | import os
import socket
import sys
from binascii import hexlify, unhexlify
from multiprocessing import current_process
from threading import current_thread, enumerate
import traceback
import shared
from debug import logger
import queues
import shutdown
def powQueueSize():
curWorkerQueue = queues.workerQueue.qsize()
for thread in enumerate():
try:
if thread.name == "singleWorker":
curWorkerQueue += thread.busy
except: |
pass
return curWorkerQueue
def convertIntToString(n):
a = __builtins__.hex(n)
if a[-1:] == 'L':
a = a[:-1]
if (len(a) % 2) == 0:
return unhexlify(a[2:])
else:
return unhexlify('0' + a[2: | ])
def convertStringToInt(s):
return int(hexlify(s), 16)
def allThreadTraceback(frame):
id2name = dict([(th.ident, th.name) for th in enumerate()])
code = []
for threadId, stack in sys._current_frames().items():
code.append("\n# Thread: %s(%d)" % (id2name.get(threadId,""), threadId))
for filename, lineno, name, line in traceback.extract_stack(stack):
code.append('File: "%s", line %d, in %s' % (filename, lineno, name))
if line:
code.append(" %s" % (line.strip()))
print "\n".join(code)
def signal_handler(signal, frame):
logger.error("Got signal %i in %s/%s", signal, current_process().name, current_thread().name)
if current_process().name == "RegExParser":
# on Windows this isn't triggered, but it's fine, it has its own process termination thing
raise SystemExit
if "PoolWorker" in current_process().name:
raise SystemExit
if current_thread().name not in ("PyBitmessage", "MainThread"):
return
logger.error("Got signal %i", signal)
if shared.thisapp.daemon:
shutdown.doCleanShutdown()
else:
allThreadTraceback(frame)
print 'Unfortunately you cannot use Ctrl+C when running the UI because the UI captures the signal.'
def isHostInPrivateIPRange(host):
if ":" in host: #IPv6
hostAddr = socket.inet_pton(socket.AF_INET6, host)
if hostAddr == ('\x00' * 15) + '\x01':
return False
if hostAddr[0] == '\xFE' and (ord(hostAddr[1]) & 0xc0) == 0x80:
return False
if (ord(hostAddr[0]) & 0xfe) == 0xfc:
return False
pass
elif ".onion" not in host:
if host[:3] == '10.':
return True
if host[:4] == '172.':
if host[6] == '.':
if int(host[4:6]) >= 16 and int(host[4:6]) <= 31:
return True
if host[:8] == '192.168.':
return True
# Multicast
if host[:3] >= 224 and host[:3] <= 239 and host[4] == '.':
return True
return False
def addDataPadding(data, desiredMsgLength = 12, paddingChar = '\x00'):
return data + paddingChar * (desiredMsgLength - len(data))
|
gmarkall/numba | numba/cuda/tests/cudapy/test_inspect.py | Python | bsd-2-clause | 7,182 | 0.000139 | import numpy as np
from io import StringIO
from numba import cuda, float32, float64, int32, intp
from numba.core.errors import NumbaDeprecationWarning
from numba.cuda.testing import unittest, CUDATestCase
from numba.cuda.testing import (skip_on_cudasim, skip_with_nvdisasm,
skip_without_nvdisasm)
@skip_on_cudasim('Simulator does not generate code to be inspected')
class TestInspect(CUDATestCase):
@property
def cc(self):
return cuda.current_context().device.compute_capability
def test_monotyped(self):
@cuda.jit("(float32, int32)")
def foo(x, y):
pass
file = StringIO()
foo.inspect_types(file=file)
typeanno = file.getvalue()
# Function name in annotation
self.assertIn("foo", typeanno)
# Signature in annotation
self.assertIn("(float32, int32)", typeanno)
file.close()
# Function name in LLVM
self.assertIn("foo", foo.inspect_llvm())
asm = foo.inspect_asm()
# Function name in PTX
self.assertIn("foo", asm)
# NVVM inserted comments in PTX
self.assertIn("Generated by NVIDIA NVVM Compiler", asm)
def test_polytyped(self):
@cuda.jit
def foo(x, y):
pass
foo[1, 1](1, 1)
foo[1, 1](1.2, 2.4)
file = StringIO()
foo.inspect_types(file=file)
typeanno = file.getvalue()
file.close()
# Signature in annotation
self.assertIn("({0}, {0})".format(intp), typeanno)
self.assertIn("(float64, float64)", typeanno)
# Signature in LLVM dict
llvmirs = foo.inspect_llvm()
self.assertEqual(2, len(llvmirs), )
self.assertIn((intp, intp), llvmirs)
self.assertIn((float64, float64), llvmirs)
# Function name in LLVM
self.assertIn("foo", llvmirs[intp, intp])
self.assertIn("foo", llvmirs[float64, float64])
# Function name in LLVM using deprecated (cc, argtypes) pair for lookup
with self.assertWarns(NumbaDeprecationWarning) as warns:
self.assertIn("foo", llvmirs[self.cc, (intp, intp)])
self.assertIn("foo", llvmirs[self.cc, (float64, float64)])
self.assertEqual(len(warns.warnings), 2)
argtypes_only = "dicts returned by inspect functions should be keyed " \
"on argument types only"
self.assertIn(argtypes_only, str(warns.warnings[0].message))
self.assertIn(argtypes_only, str(warns.warnings[1].message))
asmdict = foo.inspect_asm()
# Signature in assembly dict
self.assertEqual(2, len(asmdict), )
self.assertIn((intp, intp), asmdict)
self.assertIn((float64, float64), asmdict)
# NVVM inserted in PTX
self.assertIn("foo", asmdict[intp, intp])
self.assertIn("foo", asmdict[float64, float64])
# NVVM inserted in PTX using deprecated (cc, argtypes) pair for lookup
with self.assertWarns(NumbaDeprecationWarning) as warns:
self.assertIn("foo", asmdict[self.cc, (intp, intp)])
self.assertIn("foo", asmdict[self.cc, (float64, float64)])
self.assertEqual(len(warns.warnings), 2)
self.assertIn(argtypes_only, str(warns.warnings[0].message))
self.assertIn(argtypes_only, str(warns.warnings[1].message))
def _test_inspect_sass(self, kernel, name, sass):
# Ensure function appears in output
seen_function = False
for line in sass.split():
if '.text' in line and name in line:
seen_function = True
self.assertTrue(seen_function)
# Some instructions common to all supported architectures that should
# appear in the output
self.assertIn('S2R', sass) # Special register to register
self.assertIn('BRA', sass) # Branch
self.assertIn('EXIT', sass) # Exit program
@skip_without_nvdisasm('nvdisasm needed for inspect_sass()')
def test_inspect_sass_eager(self):
@cuda.jit((float32[::1], int32[::1]))
def add(x, y):
i = cuda.grid(1)
if i < len(x):
x[i] += y[i]
self._test_inspect_sass(add, 'add', add.inspect_sass())
@skip_without_nvdisasm('nvdisasm needed for inspect_sass()')
def test_inspect_sass_lazy(self):
@cuda.jit
def add(x, y):
i = cuda.grid(1)
if i < len(x):
x[i] += y[i]
x = np.arange(10).astype(np.int32)
y = np.arange(10).astype(np.float32)
add[1, 10](x, y)
signature = (int32[::1], float32[::1])
self._test_inspect_sass(add, 'add', add.inspect_sass(signature))
@skip_with_nvdisasm('Missing nvdisasm exception only generated when it is '
'not present')
def test_inspect_sass_nvdisasm_missing(self):
@cuda.jit((float32[::1],))
def f(x):
x[0] = 0
with self.assertRaises(RuntimeError) as raises:
f.inspect_sass()
self.assertIn('nvdisasm is required', str(raises.exception))
def test_inspect_llvm_deprecations(self):
@cuda.jit((float32[::1],))
def f(x):
x[0] = 0
with self.assertWarns(NumbaDeprecationWarning) as warns:
f.inspect_llvm(compute_capability=self.cc)
self.assertEqual(len(warns.warnings), 2)
msg = 'compute_capability has no effect on the LLVM IR'
self.assertIn(msg, str(warns.warnings[0]))
msg = 'inspect_llvm will alw | ays return a dict in future'
self.assertIn(msg, str(warns.warnings[1]))
def test_inspect_asm_deprecations(self):
@cuda.jit((float32[::1],))
def f(x):
x[0] = 0
with self.assertWarns(NumbaDeprecationWarning) as warns:
f.inspect_asm(compute_capability=self.cc)
self.as | sertEqual(len(warns.warnings), 2)
msg = 'The compute_capability kwarg is deprecated'
self.assertIn(msg, str(warns.warnings[0]))
msg = 'inspect_asm will always return a dict in future'
self.assertIn(msg, str(warns.warnings[1]))
@skip_without_nvdisasm('nvdisasm needed for inspect_sass()')
def test_inspect_sass_deprecations(self):
@cuda.jit((float32[::1],))
def f(x):
x[0] = 0
with self.assertWarns(NumbaDeprecationWarning) as warns:
f.inspect_sass(compute_capability=self.cc)
self.assertEqual(len(warns.warnings), 2)
msg = 'passing compute_capability has no effect on the SASS code'
self.assertIn(msg, str(warns.warnings[0]))
msg = 'inspect_sass will always return a dict in future'
self.assertIn(msg, str(warns.warnings[1]))
def test_ptx_deprecations(self):
@cuda.jit((float32[::1],))
def f(x):
x[0] = 0
with self.assertWarns(NumbaDeprecationWarning) as warns:
f.ptx
self.assertEqual(len(warns.warnings), 1)
msg = 'ptx will always return a dict in future'
self.assertIn(msg, str(warns.warnings[0]))
if __name__ == '__main__':
unittest.main()
|
Panos512/invenio | modules/bibformat/lib/elements/bfe_video_platform_suggestions.py | Python | gpl-2.0 | 5,845 | 0.00633 | # -*- coding: utf-8 -*-
##
## This file is part of Invenio.
## Copyright (C) 2006, 2007, 2008, 2009, 2010, 2011 CERN.
##
## Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""BibFormat element
* Part of the video platform prototype
* Creates a list of video suggestions
* Based on word similarity ranking
* Must be done in a collection that holds video records with thumbnails, title and author
"""
from invenio.config import CFG_BASE_URL
from invenio.bibdocfile import BibRecDocs
from invenio.intbitset import intbitset
from invenio.search_engine import perform_request_search
from invenio.bibrank_record_sorter import rank_records
from invenio.search_engine_utils import get_fieldvalues
from invenio.bibencode_utils import timecode_to_seconds
import random
html_skeleton_suggestion = """
<!-- VIDEO SUGGESTION -->
<div class="video_suggestion_box">
<div class="video_suggestion_thumbnail">
<a href="%(video_record_url)s">
<img src="%(video_thumb_url)s" alt="%(video_thumb_alt)s"/>
</a>
<div class="video_suggestion_duration">
%(video_duration)s
</div>
</div>
<div class="video_suggestion_title">
%(video_title)s
</div>
<div class="video_suggestion_author">
by %(video_authors)s
</div>
</div>
"""
def format_element(bfo, collection="Videos", threshold="75", maximum="3", shuffle="True"):
""" Creates video suggestions based on ranking algorithms
@param collection: Collection to take the suggestions from
@param threshold: Value between 0 and 100. Only records ranked higher than the value are presented.
@param maximum: Maximum suggestions to show
@param shuffle: True or False, should the suggestions be shuffled?
"""
if threshold.isdigit():
threshold = int(threshold)
else:
raise ValueError("The given threshold is not a digit")
if maximum.isdigit():
maximum = int(maximum)
else:
raise ValueError("The given maximum is not a digit")
if shuffle == "True":
shuffle = True
else:
shuffle = False;
suggestions = []
recid = bfo.control_field('001')
similar_records = find_similar_videos(recid, collection, threshold, maximum, shuffle)
for sim_recid in similar_records:
thumbnail = get_video_thumbnail(sim_recid)
title = get_video_title(sim_recid)
authors = get_video_authors(sim_recid)
url = get_video_record_url(sim_recid)
duration = get_video_duration(sim_recid)
suggestion = html_skeleton_suggestion % {
'video_record_url': url,
'video_thumb_url': thumbnail[0],
'video_thumb_alt': thumbnail[1],
'video_duration': duration,
'video_title': title,
'video_authors': authors,
}
suggestions.append(suggestion)
return "\n".join(suggestions)
def find_similar_videos(recid, collection="Videos", threshold=75, maximum=3, shuffle=True):
""" Returns a list of similar video records
"""
similar_records = []
collection_recids = intbitset(perform_request_search(cc=collection))
ranking = rank_records('wrd', 0, collection_recids, ['recid:' + str(recid)])
## ([6, 7], [81, 100], ' | (', ')', '')
for list_pos, rank in enumerate(ranking[1]):
if rank >= threshold:
similar_records.append(ranking[0][list_pos])
if shuffle:
if maximum > len(similar_recor | ds):
maximum = len(similar_records)
return random.sample(similar_records, maximum)
else:
return similar_records[:maximum]
def get_video_thumbnail(recid):
""" Returns the URL and ALT text for a video thumbnail of a given record
"""
comments = get_fieldvalues(recid, '8564_z')
descriptions = get_fieldvalues(recid, '8564_y')
urls = get_fieldvalues(recid, '8564_u')
for pos, comment in enumerate(comments):
if comment in ('SUGGESTIONTHUMB', 'BIGTHUMB', 'THUMB', 'SMALLTHUMB', 'POSTER'):
return (urls[pos], descriptions[pos])
return ("", "")
def get_video_title(recid):
""" Return the Title of a video record
"""
return get_fieldvalues(recid, '245__a')[0]
def get_video_authors(recid):
""" Return the Authors of a video record
"""
return ", ".join(get_fieldvalues(recid, '100__a'))
def get_video_record_url(recid):
""" Return the URL of a video record
"""
return CFG_BASE_URL + "/record/" + str(recid)
def get_video_duration(recid):
""" Return the duration of a video
"""
duration = get_fieldvalues(recid, '950__d')
if duration:
duration = duration[0]
duration = timecode_to_seconds(duration)
return human_readable_time(duration)
else:
return ""
def human_readable_time(seconds):
""" Creates a human readable duration representation
"""
for x in ['s','m','h']:
if seconds < 60.0:
return "%.0f %s" % (seconds, x)
seconds /= seconds
def escape_values(bfo):
"""
Called by BibFormat in order to check if output of this element
should be escaped.
"""
return 0 |
amirnissim/okqa | qa/migrations/0001_initial.py | Python | bsd-3-clause | 6,711 | 0.008046 | # -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    """Initial South schema migration for the qa app.

    Creates the ``qa_question`` and ``qa_answer`` tables.  The text of the
    'content' TextField string and one comment contained extraction
    artifacts (a stray ' | ' separator) which are repaired here.
    """

    def forwards(self, orm):
        # Adding model 'Question'
        db.create_table('qa_question', (
            ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('author', self.gf('django.db.models.fields.related.ForeignKey')(related_name='questions', to=orm['auth.User'])),
            ('subject', self.gf('django.db.models.fields.CharField')(max_length=255)),
            ('content', self.gf('django.db.models.fields.TextField')(max_length=255)),
        ))
        db.send_create_signal('qa', ['Question'])
        # Adding model 'Answer'
        db.create_table('qa_answer', (
            ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('author', self.gf('django.db.models.fields.related.ForeignKey')(related_name='answers', to=orm['auth.User'])),
            ('subject', self.gf('django.db.models.fields.CharField')(max_length=255)),
            ('content', self.gf('django.db.models.fields.TextField')(max_length=255)),
            ('question', self.gf('django.db.models.fields.related.ForeignKey')(related_name='answers', to=orm['qa.Question'])),
        ))
        db.send_create_signal('qa', ['Answer'])

    def backwards(self, orm):
        # Deleting model 'Question'
        db.delete_table('qa_question')
        # Deleting model 'Answer'
        db.delete_table('qa_answer')

    models = {
        'auth.group': {
            'Meta': {'object_name': 'Group'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
            'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
        },
        'auth.permission': {
            'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
            'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        },
        'auth.user': {
            'Meta': {'object_name': 'User'},
            'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
            'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
            'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
        },
        'contenttypes.contenttype': {
            'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
            'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        },
        'qa.answer': {
            'Meta': {'object_name': 'Answer'},
            'author': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'answers'", 'to': "orm['auth.User']"}),
            'content': ('django.db.models.fields.TextField', [], {'max_length': '255'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'question': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'answers'", 'to': "orm['qa.Question']"}),
            'subject': ('django.db.models.fields.CharField', [], {'max_length': '255'})
        },
        'qa.question': {
            'Meta': {'object_name': 'Question'},
            'author': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'questions'", 'to': "orm['auth.User']"}),
            'content': ('django.db.models.fields.TextField', [], {'max_length': '255'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'subject': ('django.db.models.fields.CharField', [], {'max_length': '255'})
        },
        'taggit.tag': {
            'Meta': {'object_name': 'Tag'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '100'})
        },
        'taggit.taggeditem': {
            'Meta': {'object_name': 'TaggedItem'},
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'taggit_taggeditem_tagged_items'", 'to': "orm['contenttypes.ContentType']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'object_id': ('django.db.models.fields.IntegerField', [], {'db_index': 'True'}),
            'tag': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'taggit_taggeditem_items'", 'to': "orm['taggit.Tag']"})
        }
    }

    complete_apps = ['qa']
Bitcoin-ABC/bitcoin-abc | test/functional/feature_block.py | Python | mit | 54,112 | 0.001737 | #!/usr/bin/env python3
# Copyright (c) 2015-2017 The Bitcoin Core developers
# Copyright (c) 2019 The Bitcoin developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test block processing."""
import copy
import struct
import time
from data import invalid_txs
from test_framework.blocktools import (
create_block,
create_coinbase,
create_tx_with_script,
make_conform_to_ctor,
)
from test_framework.cdefs import LEGACY_MAX_BLOCK_SIZE
from test_framework.key import ECKey
from test_framework.messages import (
COIN,
CBlock,
COutPoint,
CTransaction,
CTxIn,
CTxOut,
uint256_from_compact,
uint256_from_str,
)
from test_framework.p2p import P2PDataStore
from test_framework.script import (
OP_ELSE,
OP_ENDIF,
OP_FALSE,
OP_IF,
OP_INVALIDOPCODE,
OP_RETURN,
OP_TRUE,
SIGHASH_ALL,
SIGHASH_FORKID,
CScript,
SignatureHashForkId,
)
from test_framework.test_framework import BitcoinTestFramework
from test_framework.txtools import pad_tx
from test_framework.util import assert_equal
# Use this class for tests that require behavior other than normal p2p behavior.
# For now, it is used to serialize a bloated varint (b64).
class CBrokenBlock(CBlock):
    """Block whose serialization deliberately violates canonical encoding.

    ``serialize`` writes the transaction count as a bloated 9-byte varint
    (0xFF marker + uint64) even when the count would fit in fewer bytes,
    letting tests exercise rejection of non-canonical encodings (b64).
    """
    def initialize(self, base_block):
        # Copy base_block's transactions and recompute the merkle root.
        self.vtx = copy.deepcopy(base_block.vtx)
        self.hashMerkleRoot = self.calc_merkle_root()
    def serialize(self):
        r = b""
        # super(CBlock, self) skips CBlock.serialize and uses its base
        # class's serialization (the header fields only).
        r += super(CBlock, self).serialize()
        # Non-canonical varint: 0xFF prefix followed by the count as uint64.
        r += struct.pack("<BQ", 255, len(self.vtx))
        for tx in self.vtx:
            r += tx.serialize()
        return r
    def normal_serialize(self):
        # Ordinary CBlock serialization, for comparison in tests.
        return super().serialize()
# Valid for block at height 120
DUPLICATE_COINBASE_SCRIPT_SIG = b'\x01\x78'
class FullBlockTest(BitcoinTestFramework):
def set_test_params(self):
self.num_nodes = 1
self.setup_clean_chain = True
# This is a consensus block test, we don't care about tx policy
self.extra_args = [['-noparkdeepreorg',
'-maxreorgdepth=-1', '-acceptnonstdtxn=1']]
def run_test(self):
node = self.nodes[0] # convenience reference to the node
self.bootstrap_p2p() # Add one p2p connection to the node
self.block_heights = {}
self.coinbase_key = ECKey()
self.coinbase_key.generate()
self.coinbase_pubkey = self.coinbase_key.get_pubkey().get_bytes()
self.tip = None
self.blocks = {}
self.genesis_hash = int(self.nodes[0].getbestblockhash(), 16)
self.block_heights[self.genesis_hash] = 0
self.spendable_outputs = []
# Create a new block
b_dup_cb = self.next_block('dup_cb')
b_dup_cb.vtx[0].vin[0].scriptSig = DUPLICATE_COINBASE_SCRIPT_SIG
b_dup_cb.vtx[0].rehash()
duplicate_tx = b_dup_cb.vtx[0]
b_dup_cb = self.update_block('dup_cb', [])
self.send_blocks([b_dup_cb])
b0 = self.next_block(0)
self.save_spendable_output()
self.send_blocks([b0])
# These constants chosen specifically to trigger an immature coinbase spend
# at a certain time below.
NUM_BUFFER_BLOCKS_TO_GENERATE = 99
NUM_OUTPUTS_TO_COLLECT = 33
# Allow the block to mature
blocks = []
for i in range(N | UM_BUFFER_BLOCKS_TO_GENERATE):
blocks.append(self.next_blo | ck("maturitybuffer.{}".format(i)))
self.save_spendable_output()
self.send_blocks(blocks)
# collect spendable outputs now to avoid cluttering the code later on
out = []
for _ in range(NUM_OUTPUTS_TO_COLLECT):
out.append(self.get_spendable_output())
# Start by building a couple of blocks on top (which output is spent is
# in parentheses):
# genesis -> b1 (0) -> b2 (1)
b1 = self.next_block(1, spend=out[0])
self.save_spendable_output()
b2 = self.next_block(2, spend=out[1])
self.save_spendable_output()
self.send_blocks([b1, b2], timeout=4)
# Select a txn with an output eligible for spending. This won't actually be spent,
# since we're testing submission of a series of blocks with invalid
# txns.
attempt_spend_tx = out[2]
# Submit blocks for rejection, each of which contains a single transaction
# (aside from coinbase) which should be considered invalid.
for TxTemplate in invalid_txs.iter_all_templates():
template = TxTemplate(spend_tx=attempt_spend_tx)
if template.valid_in_block:
continue
self.log.info(
"Reject block with invalid tx: %s",
TxTemplate.__name__)
blockname = "for_invalid.{}".format(TxTemplate.__name__)
badblock = self.next_block(blockname)
badtx = template.get_tx()
if TxTemplate != invalid_txs.InputMissing:
self.sign_tx(badtx, attempt_spend_tx)
badtx.rehash()
badblock = self.update_block(blockname, [badtx])
self.send_blocks(
[badblock], success=False,
reject_reason=(
template.block_reject_reason or template.reject_reason),
reconnect=True, timeout=2)
self.move_tip(2)
# Fork like this:
#
# genesis -> b1 (0) -> b2 (1)
# \-> b3 (1)
#
# Nothing should happen at this point. We saw b2 first so it takes
# priority.
self.log.info("Don't reorg to a chain of the same length")
self.move_tip(1)
b3 = self.next_block(3, spend=out[1])
txout_b3 = b3.vtx[1]
self.send_blocks([b3], False)
# Now we add another block to make the alternative chain longer.
#
# genesis -> b1 (0) -> b2 (1)
# \-> b3 (1) -> b4 (2)
self.log.info("Reorg to a longer chain")
b4 = self.next_block(4, spend=out[2])
self.send_blocks([b4])
# ... and back to the first chain.
# genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6 (3)
# \-> b3 (1) -> b4 (2)
self.move_tip(2)
b5 = self.next_block(5, spend=out[2])
self.save_spendable_output()
self.send_blocks([b5], False)
self.log.info("Reorg back to the original chain")
b6 = self.next_block(6, spend=out[3])
self.send_blocks([b6], True)
# Try to create a fork that double-spends
# genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6 (3)
# \-> b7 (2) -> b8 (4)
# \-> b3 (1) -> b4 (2)
self.log.info(
"Reject a chain with a double spend, even if it is longer")
self.move_tip(5)
b7 = self.next_block(7, spend=out[2])
self.send_blocks([b7], False)
b8 = self.next_block(8, spend=out[4])
self.send_blocks([b8], False, reconnect=True)
# Try to create a block that has too much fee
# genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6 (3)
# \-> b9 (4)
# \-> b3 (1) -> b4 (2)
self.log.info(
"Reject a block where the miner creates too much coinbase reward")
self.move_tip(6)
b9 = self.next_block(9, spend=out[4], additional_coinbase_value=1)
self.send_blocks([b9], success=False,
reject_reason='bad-cb-amount', reconnect=True)
# Create a fork that ends in a block with too much fee (the one that causes the reorg)
# genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6 (3)
# \-> b10 (3) -> b11 (4)
# \-> b3 (1) -> b4 (2)
self.log.info(
"Reject a chain where the miner creates too much coinbase reward, even if the chain is longer")
self.move_tip(5)
b10 = self.next_block(10, s |
moremorefor/Logpot | logpot/auth/models.py | Python | mit | 953 | 0.002099 | #-*- coding: utf-8 -*-
from logpot.ext import db
from logpot.models import TimestampMixin
from passlib.hash import pbk | df2_sha256
class User(db.Model, TimestampMixin):
    """Application user account persisted in the ``users`` table.

    Repairs an extraction artifact that had corrupted the
    ``is_anonymous`` method definition (``def | is_anonymous``).
    """
    __tablename__ = 'users'
    id = db.Column(db.Integer, primary_key=True)
    # Login name and e-mail address must both be unique.
    name = db.Column(db.String, unique=True, nullable=False)
    email = db.Column(db.String, unique=True, nullable=False)
    # Stores the PBKDF2-SHA256 hash, never the clear-text password.
    password = db.Column(db.String, nullable=False)

    # The next four methods implement the interface expected by
    # Flask-Login-style session management (presumably -- confirm caller).
    def is_authenticated(self):
        return True

    def is_active(self):
        return True

    def is_anonymous(self):
        return False

    def get_id(self):
        return self.id

    @classmethod
    def generate_password_hash(cls, password):
        """Return a PBKDF2-SHA256 hash of *password*.

        NOTE(review): passlib's ``encrypt`` is the historical name of
        ``hash``; kept as-is for compatibility with the pinned passlib.
        """
        return pbkdf2_sha256.encrypt(password)

    def check_password_hash(self, password):
        """Return True when *password* matches the stored hash."""
        return pbkdf2_sha256.verify(password, self.password)

    def __str__(self):
        return self.name

    def __repr__(self):
        return '<User %r>' % (self.name)
|
cloudify-cosmo/flask-securest | flask_securest/authorization_providers/role_based_authorization_provider.py | Python | apache-2.0 | 5,319 | 0.000188 | #########
# Copyright (c) 2015 GigaSpaces Technologies Ltd. All rights reserved
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# * See the License for the specific language governing permissions and
# * limitations under the License.
import re
import logging
import os
import yaml
from watchdog.observers import Observer
from watchdog.events import FileSystemEventHandler
from flask_securest import rest_security
from flask_securest.authorization_providers.abstract_authorization_provider\
import AbstractAuthorizationProvider
from flask_securest.constants import FLASK_SECUREST_LOGGER_NAME
ANY = '*'
class RoleBasedAuthorizationProvider(AbstractAuthorizationProvider,
                                     FileSystemEventHandler):
    """Authorize REST requests by matching the user's roles against a YAML
    permissions file, reloading the mapping whenever the file changes.

    Repairs an extraction artifact that had corrupted ``target_endpoint``
    in ``_is_allowed``.
    """

    def __init__(self, role_loader, roles_config_file_path):
        self.lgr = logging.getLogger(FLASK_SECUREST_LOGGER_NAME)
        self.role_loader = role_loader
        self.permissions_by_roles = None
        self.roles_config_file_path = os.path.abspath(roles_config_file_path)
        # Watch the directory containing the config file so that edits
        # trigger on_modified() and an automatic reload.
        self.observer = Observer()
        self.observer.schedule(self,
                               path=os.path.dirname(
                                   self.roles_config_file_path),
                               recursive=False)
        self.load_roles_config()
        self.observer.start()

    def load_roles_config(self):
        """(Re)load the role -> permissions mapping from the YAML file.

        Raises ValueError when the file is missing or unparsable.
        """
        try:
            with open(self.roles_config_file_path, 'r') as config_file:
                self.permissions_by_roles = yaml.safe_load(config_file.read())
                self.lgr.info('Loading of roles configuration ended '
                              'successfully')
        except (yaml.parser.ParserError, IOError) as e:
            err = 'Failed parsing {role_config_file} file. Error: {error}.' \
                .format(role_config_file=self.roles_config_file_path, error=e)
            self.lgr.warning(err)
            raise ValueError(err)

    def on_modified(self, event):
        # watchdog callback: reload only when our own config file changed.
        if os.path.abspath(event.src_path) == self.roles_config_file_path:
            self.load_roles_config()

    def authorize(self):
        """Return True when the request is allowed and not explicitly denied."""
        target_endpoint = rest_security.get_endpoint()
        target_method = rest_security.get_http_method()
        roles = self.role_loader.get_roles()
        return self._is_allowed(target_endpoint, target_method, roles) and \
            not self._is_denied(target_endpoint, target_method, roles)

    def _is_allowed(self, target_endpoint, target_method, user_roles):
        return self._evaluate_permission_by_type(target_endpoint,
                                                 target_method, user_roles,
                                                 'allow')

    def _is_denied(self, target_endpoint, target_method, user_roles):
        return self._evaluate_permission_by_type(target_endpoint,
                                                 target_method, user_roles,
                                                 'deny')

    def _evaluate_permission_by_type(self, target_endpoint, target_method,
                                     user_roles, permission_type):
        # True when ANY of the user's roles has a matching rule of the
        # requested type ('allow' or 'deny').
        for role in user_roles:
            role_permissions = self.permissions_by_roles.get(role,
                                                             {'allow': {},
                                                              'deny': {}})
            relevant_permissions = role_permissions.get(permission_type, {})
            if _is_permission_matching(target_endpoint, target_method,
                                       relevant_permissions):
                return True
        return False
def _is_permission_matching(target_endpoint, target_method,
                            configured_permissions):
    """Return True when any configured endpoint/method pair covers the target.

    *configured_permissions* maps endpoint patterns to lists of HTTP methods.
    """
    for endpoint, methods in configured_permissions.iteritems():
        endpoint_ok = _is_endpoint_matching(target_endpoint=target_endpoint,
                                            configured_endpoint=endpoint)
        if endpoint_ok and _is_method_matching(target_method=target_method,
                                               configured_methods=methods):
            return True
    return False
def _is_method_matching(target_method, configured_methods):
    """Case-insensitively check whether *target_method* is listed.

    A configuration of exactly ``[ANY]`` matches every HTTP method.
    """
    if configured_methods == [ANY]:
        return True
    wanted = target_method.upper()
    return any(method.upper() == wanted for method in configured_methods)
def _is_endpoint_matching(target_endpoint, configured_endpoint):
    """Return True when *target_endpoint* matches the configured pattern.

    ``ANY`` matches everything; otherwise '*' in the pattern matches any
    run of characters.  A pattern ending in '/*' additionally matches the
    bare prefix (e.g. "v2/blueprints/*" matches "v2/blueprints").
    """
    if configured_endpoint == ANY:
        return True
    # Build an anchored regular expression: escape '/' and turn the '*'
    # wildcard into '.*'.
    pattern = configured_endpoint.replace('/', '\/').replace('*', '.*') + '$'
    if re.match(pattern, target_endpoint):
        return True
    # Special case: "prefix/*" should also match plain "prefix".
    if configured_endpoint.endswith('/*'):
        return _is_endpoint_matching(target_endpoint,
                                     configured_endpoint[:-2])
    return False
|
svn2github/SVGKit | cgi-bin/convertsvg.py | Python | mit | 3,564 | 0.008698 | #!/usr/bin/python
"""
convertsvg.py 0.1
See <http://svgkit.sourceforge.net/> for documentation, downloads, license, etc.
(c) 2006-2007 Jason Gallicchio.
Licensed under the open source (GNU compatible) MIT License
"""
# Outline of code:
# Read the SVG
# Read the type to convert to
# Read the options (width, height, dpi, quality) (TODO)
# Generate a hash for caching and check to see if we've already done this (TODO)
# Convert with scripts off
# Check the output file size.
# Send the result back
import cgi
import cgitb; cgitb.enable() # Show errors to browser.
import sys
import os
import time
import md5
sys.stderr = sys.stdout
cgi.maxlen = 1024*1024
# Shared programs
pdf2ps = '/usr/bin/pdf2ps'
# Local programs
local_dir ='/home/project-web/svgkit/local'
lib_dir = os.path.join(local_dir, 'lib')
bin_dir = os.path.join(local_dir, 'bin')
src_dir = os.path.join(local_dir, 'src')
pstoedit = os.path.join(bin_dir, 'pstoedit')
texvc = os.path.join(src_dir, 'texvc')
java = os.path.join(src_dir, 'jre1.6.0_12/bin/java')
batik = os.path.join(src_dir, 'batik-1.7/batik-rasterizer.jar')
results_dir = '/home/project-web/svgkit/persistent/svgresults/'
results_url = '../svgresults/'
os.environ['PATH'] += os.pathsep+bin_dir
os.environ['LD_LIBRARY_PATH'] = os.pathsep+lib_dir
mediatypes={
'pdf': 'application/pdf',
'ps': 'application/pdf', # Gets converted after
'jpg': 'image/jpeg',
'png': 'image/png',
'tiff': 'image/tiff',
'svg': 'image/svg+xml'
}
debug = False
redirect = True
if debug:
print 'Content-type: text/plain\n\n'
print 'Debugging convert_svg.py'
def execute_cmd(cmd):
    """Run *cmd* through a shell, draining stdout/stderr until it exits.

    In debug mode the command and its captured output are echoed to the
    CGI response.  NOTE(review): os.popen3 is Python 2 only and runs the
    command through a shell -- cmd must never contain untrusted input.
    """
    (child_stdin, child_stdout, child_stderr) = os.popen3(cmd)
    str_stdout = child_stdout.read() # Read until the process quits.
    str_stderr = child_stderr.read() # Read until the process quits.
    if debug:
        print cmd+'\n'
        print 'stdout:'+str_stdout+'\n'
        print 'stderr:'+str_stderr+'\n'
#execute_cmd('chown jason users files/*.*') # Redhat disables chown
form = cgi.FieldStorage()
time.sleep(0.1) # Throttle requests
if debug:
print 'Debug mode of convert_svg.py\n'
execute_cmd('which java')
#execute_cmd('locate javac')
#execute_cmd('locate java')
#execute_cmd('ls /usr/bin/')
#execute_cmd('ls /etc/alternatives/')
execute_cmd('df')
execute_cmd('mount')
print 'form.keys(): ' + form.keys().__str__()+'\n'
for key in form.keys():
print 'form['+key+'] = '+form[key].value+'\n'
source = form['source'].value
type = form['type'].value
mime = mediatypes[type]
md5hex = md5.new(source).hexdigest()
svgname = results_dir+md5hex+'.svg'
outname = results_dir+md5hex+'.'+type
out_url = results_url+md5hex+'.'+type
# If the result do | esn't already exist in cached form, create it
if not os.path.isfile(outname) or source!=open(svgname, 'r' ).read():
svgfile = open(svgname, 'w')
svgfile.write(source)
svgfile. | close()
if type == 'svg':
outname = svgname
else:
cmd = java+' -jar ' + batik + ' -d ' + results_dir + ' -m '+mime+' '+svgname # -dpi <resolution> -q <quality>
execute_cmd(cmd)
if type=='ps':
inname = results_dir+md5hex+'.pdf'
cmd = pdf2ps+' '+inname+' '+outname
execute_cmd(cmd)
if redirect:
print 'Location: '+out_url+'\r\n\r\n'
else:
outfile = open( outname, 'rb')
image = outfile.read()
if not debug:
sys.stdout.write('Content-type: '+mime+'\r\n\r\n')
sys.stdout.write(image)
outfile.close()
|
windskyer/mvpn | mvpn/conf/opts.py | Python | gpl-2.0 | 2,699 | 0.000371 | # Copyright 2015 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
This is the single point of entry to generate the sample configuration
file for mvpn. It collects all the necessary info from the other modules
in this package. It is assumed that:
* every other module in this package has a 'list_opts' function which
return a dict where
* the keys are strings which are the group names
* the value of each key is a list of config options for that group
* the mvpn.conf package doesn't have further packages with config options
* this module is only used in the context of sample file generation
"""
import collections
import importlib
import os
import pkgutil
LIST_OPTS_FUNC_NAME = "list_opts"
def _tupleize(dct):
"""Take the dict of options and convert to the 2-tuple format."""
return [(key, val) for key, val in dct.items()]
def list_opts():
    """Collect the config options of every module in mvpn.conf.

    Returns a list of (group_name, [options]) 2-tuples, suitable for the
    sample-config generator.  Repairs an extraction artifact that had
    corrupted the ``_import_modules`` call.
    """
    opts = collections.defaultdict(list)
    module_names = _list_module_names()
    imported_modules = _import_modules(module_names)
    _append_config_options(imported_modules, opts)
    return _tupleize(opts)
def _list_module_names():
    """Return the names of the config modules in this package.

    The ``opts`` module itself and any sub-packages are skipped, per the
    package contract described in the module docstring.
    """
    module_names = []
    package_path = os.path.dirname(os.path.abspath(__file__))
    for _, modname, ispkg in pkgutil.iter_modules(path=[package_path]):
        if modname == "opts" or ispkg:
            continue
        else:
            module_names.append(modname)
    return module_names
def _import_modules(module_names):
    """Import each mvpn.conf config module and validate its interface.

    Raises Exception for any module that does not expose a ``list_opts``
    function.  Repairs an extraction artifact that had corrupted the
    function's def line.
    """
    imported_modules = []
    for modname in module_names:
        mod = importlib.import_module("mvpn.conf." + modname)
        if not hasattr(mod, LIST_OPTS_FUNC_NAME):
            msg = "The module 'mvpn.conf.%s' should have a '%s' "\
                  "function which returns the config options." % \
                  (modname, LIST_OPTS_FUNC_NAME)
            raise Exception(msg)
        else:
            imported_modules.append(mod)
    return imported_modules
def _append_config_options(imported_modules, config_options):
for mod in imported_modules:
configs = mod.list_opts()
for key, val in configs.items():
config_options[key].extend(val)
|
praekelt/rapidpro | temba/channels/templatetags/channels.py | Python | agpl-3.0 | 246 | 0 | from __future__ import un | icode_literals
from django import template
from temba.channels.views import get_channel_icon
register = template. | Library()
@register.filter
def channel_icon(channel):
return get_channel_icon(channel.channel_type)
|
wuxue/altanalyze | InteractionBuilder.py | Python | apache-2.0 | 45,237 | 0.02299 | import sys, string
import os.path
import unique
import export
import gene_associations
import traceback
import time
################# Parse directory files
def filepath(filename):
    """Resolve *filename* through the project's unique.filepath helper."""
    fn = unique.filepath(filename)
    return fn
def read_directory(sub_dir):
    """Return the data files (.txt, .sif or .tab) found in *sub_dir*.

    Entries without one of the recognised extensions (e.g. folder names)
    are filtered out.
    """
    entries = unique.read_directory(sub_dir)
    data_files = []
    for entry in entries:
        lowered = string.lower(entry)
        if '.txt' in lowered or '.sif' in lowered or '.tab' in lowered:
            data_files.append(entry)
    return data_files
################# Begin Analysis from parsing files
def getEnsemblGeneData(filename):
    """Load Ensembl/symbol mappings from *filename* into the module-level
    lookup tables (ensembl_symbol_db and symbol_ensembl_db).

    Each symbol is additionally indexed lower-cased and title-cased so that
    genes with unusual chromosomal associations and the normally annotated
    genes resolve to the same entries.
    """
    fn = filepath(filename)
    global ensembl_symbol_db
    global symbol_ensembl_db
    ensembl_symbol_db = {}
    symbol_ensembl_db = {}
    for line in open(fn, 'rU').xreadlines():
        data, null = string.split(line, '\n')
        t = string.split(data, '\t')
        ensembl = t[0]
        symbol = t[1]
        # Register the symbol under all of its casing variants.
        for variant in (symbol, string.lower(symbol), symbol.title()):
            symbol_ensembl_db.setdefault(variant, []).append(ensembl)
        ensembl_symbol_db[ensembl] = symbol
def getHMDBData(species):
    """Parse the species' HMDB metabolite file and register gene-metabolite
    interactions in the global interaction databases.

    Populates symbol_hmdb_db / hmdb_symbol_db and, for every associated
    protein with a known Ensembl gene, records a bidirectional 'Metabolic'
    interaction.  Repairs an extraction artifact in the final except
    clause.
    """
    program_type, database_dir = unique.whatProgramIsThis()
    filename = database_dir+'/'+species+'/gene/HMDB.txt'
    x = 0
    fn = filepath(filename)
    for line in open(fn, 'rU').xreadlines():
        data = cleanUpLine(line)
        if x == 0:
            x = 1  # skip the header line
        else:
            t = string.split(data, '\t')
            try:
                hmdb_id, symbol, description, secondary_id, iupac, cas_number, chebi_id, pubchem_compound_id, Pathways, ProteinNames = t
            except Exception:
                ### Bad Tab introduced from HMDB
                hmdb_id = t[0]; symbol = t[1]; ProteinNames = t[-1]
            symbol_hmdb_db[symbol] = hmdb_id
            hmdb_symbol_db[hmdb_id] = symbol
            ProteinNames = string.split(ProteinNames, ',')
            ### Add gene-metabolite interactions to databases
            for protein_name in ProteinNames:
                try:
                    for ensembl in symbol_ensembl_db[protein_name]:
                        z = InteractionInformation(hmdb_id, ensembl, 'HMDB', 'Metabolic')
                        interaction_annotation_dbase[ensembl, hmdb_id] = z ### This is the interaction direction that is appropriate
                        try: interaction_db[hmdb_id][ensembl] = 1
                        except KeyError: db = {ensembl: 1}; interaction_db[hmdb_id] = db ###weight of 1 (weights currently not-supported)
                        try: interaction_db[ensembl][hmdb_id] = 1
                        except KeyError: db = {hmdb_id: 1}; interaction_db[ensembl] = db ###weight of 1 (weights currently not-supported)
                except Exception:
                    # Protein symbol has no Ensembl mapping -- skip it.
                    pass
def verifyFile(filename):
    """Return 'found' when *filename* exists and has at least one line,
    otherwise 'not found'."""
    try:
        fn = filepath(filename)
        for line in open(fn, 'rU').xreadlines():
            return 'found'
    except Exception:
        pass
    return 'not found'
def importInteractionDatabases(interactionDirs):
""" Import multiple interaction format file types (de | signated by the user) """
exclude=[]
for file in interactionDirs:
status = verifyFile(file)
if status == 'not found':
exclude.append(file)
for i in exclude:
interactionDirs.remove(i)
for fn in interactionDirs: #loop through each file in the directory to output results
x=0; imported=0; stored=0
file = export.findFilename(fn)
print "Parsing interactions from:",file
for line in open(fn,'rU').xreadlines():
data,null = string.split(line,'\n')
t = string.split(data,'\t')
if x==0: x=1
#elif 'PAZAR' in data or 'Amadeus' in data:x+=0
else:
obligatory = False
imported+=1
proceed = True
source=''
interaction_type = 'interaction'
try:
symbol1,interaction_type, symbol2, ensembl1,ensembl2,source = t
ens_ls1=[ensembl1]; ens_ls2=[ensembl2]
if 'HMDB' in ensembl1:
ensembl1 = string.replace(ensembl1,' ','') ### HMDB ID sometimes proceeded by ' '
symbol_hmdb_db[symbol1]=ensembl1
hmdb_symbol_db[ensembl1] = symbol1
interaction_type = 'Metabolic'
if 'HMDB' in ensembl2:
ensembl2 = string.replace(ensembl2,' ','') ### HMDB ID sometimes proceeded by ' '
symbol_hmdb_db[symbol2]=ensembl2
hmdb_symbol_db[ensembl2] = symbol2
interaction_type = 'Metabolic'
except Exception:
try:
ensembl1,ensembl2,symbol1,symbol2,interaction_type=t
if ensembl1 == '':
try:
ens_ls1 = symbol_ensembl_db[symbol1]
ens_ls2 = symbol_ensembl_db[symbol2]
except Exception: None
except Exception:
proceed = False
if proceed: ### If the interaction data conformed to one of the two above types (typically two valid interacting gene IDs)
if (len(ens_ls1)>0 and len(ens_ls2)>0):
secondary_proceed = True
stored+=1
for ensembl1 in ens_ls1:
for ensembl2 in ens_ls2:
"""
if (ensembl1,ensembl2) == ('ENSG00000111704','ENSG00000152284'):
print t;sys.exit()
if (ensembl1,ensembl2) == ('ENSG00000152284','ENSG00000111704'):
print t;sys.exit()
"""
if 'WikiPathways' in file or 'KEGG' in file:
if ensembl2 != ensembl1:
if (ensembl2,ensembl1) in interaction_annotation_dbase:
del interaction_annotation_dbase[(ensembl2,ensembl1)]
### Exclude redundant entries with fewer interaction details (e.g., arrow direction BIOGRID) - overwrite with the opposite gene arrangement below
if (ensembl1,ensembl2) in interaction_annotation_dbase:
if interaction_annotation_dbase[(ensembl1,ensembl2)].InteractionType() !='physical':
secondary_proceed = False ### Don't overwrite a more informative annotation like transcriptional regulation or microRNA targeting
if 'DrugBank' in fn:
source = 'DrugBank'
interaction_type = 'drugInteraction'
obligatory=True
ensembl1, ensembl2 = ensembl2, ensembl1 ### switch the order of these (drugs reported as first ID and gene as the second)
if secondary_proceed:
z = InteractionInformation(ensembl1,ensembl2,source,interaction_type)
interaction_annotation_dbase[ensembl1,ensembl2] = z
#z = InteractionInformation(ensembl2,ensembl1,source,interaction_type)
#interaction_annotation_dbase[ensembl2,ensembl1] = z
try: inte |
oVirt/mom | mom/Collectors/GuestIoTuneOptional.py | Python | gpl-2.0 | 528 | 0 | from mom.Collectors.GuestIoTune import GuestIoTune
class GuestIoTuneOptional(GuestIoTune):
    """
    This Collector gets IoTune statistics in the same way GuestIoTune does.
    The only difference is that it reports all the fields as optional and thus
    allows the policy to be evaluated even when the balloon device unavailable.
    """
    def getFields(self):
        # No mandatory fields -- everything is reported as optional instead.
        return set()
    def getOptionalFields(self):
        # Optional fields = the parent's mandatory fields plus its optional ones.
        return GuestIoTune.getFields(self).union(
            GuestIoTune.getOptionalFields(self))
villaverde/iredadmin | controllers/decorators.py | Python | gpl-2.0 | 1,054 | 0.001898 | # Author: Zhang Huangbin <zhb@iredmail.org>
import web
session = web.config.get('_session')
def require_login(func):
    """Decorator allowing the wrapped view only for logged-in users.

    Anonymous visitors get their session killed and are redirected to the
    login page.  Repairs an extraction artifact in the 'logged' key.
    """
    def proxyfunc(self, *args, **kw):
        if session.get('logged') is True:
            return func(self, *args, **kw)
        else:
            session.kill()
            raise web.seeother('/login?msg=loginRequired')
    return proxyfunc
def require_global_admin(func):
    """Decorator allowing the wrapped view only for global domain admins.

    Logged-in non-admins are redirected to the domain list; anonymous
    visitors to the login page.  Repairs an extraction artifact in the
    ``func`` call.
    """
    def proxyfunc(self, *args, **kw):
        if session.get('domainGlobalAdmin') is True:
            return func(self, *args, **kw)
        else:
            if session.get('logged'):
                raise web.seeother('/domains?msg=PERMISSION_DENIED')
            else:
                raise web.seeother('/login?msg=PERMISSION_DENIED')
    return proxyfunc
def csrf_protected(f):
    """Decorator verifying the request's csrf_token against the session copy.

    The session token is single-use: it is popped during the comparison, so
    a replayed form submission fails the check.
    """
    def decorated(*args, **kw):
        inp = web.input()
        if 'csrf_token' in inp and \
                inp.csrf_token == session.pop('csrf_token', None):
            return f(*args, **kw)
        return web.render('error_csrf.html')
    return decorated
|
buhe/judge | result.py | Python | agpl-3.0 | 368 | 0 | class Result(object):
AC = 0
WA = 1 << 0
RTE = 1 << 1
TLE = 1 << 2
MLE = 1 << 3
IR = 1 << 4
SC = 1 << 5
OLE = 1 << 6
IE = 1 << 30
def __init__(self):
self.result_flag = 0
self.execution_time = 0
self.r_execution_time = 0
self.max_memory = 0
self.proc_out | put = ''
| self.points = 0
|
GeyerA/android_external_chromium_org | chrome/tools/build/generate_policy_source.py | Python | bsd-3-clause | 19,950 | 0.007719 | #!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
'''python %prog [options] platform chromium_os_flag template
platform specifies which platform source is being generated for
and can be one of (win, mac, linux)
chromium_os_flag should be 1 if this is a Chromium OS build
template is the path to a .json policy template file.'''
from __future__ import with_statement
import json
from optparse import OptionParser
import re
import sys
import textwrap
CHROME_POLICY_KEY = 'SOFTWARE\\\\Policies\\\\Google\\\\Chrome'
CHROMIUM_POLICY_KEY = 'SOFTWARE\\\\Policies\\\\Chromium'
class PolicyDetails:
  """Parses a policy template and caches all its details."""

  # Maps policy types to a tuple with 3 other types:
  # - the equivalent base::Value::Type or 'TYPE_EXTERNAL' if the policy
  #   references external data
  # - the equivalent Protobuf field type
  # - the name of one of the protobufs for shared policy types
  # TODO(joaodasilva): refactor the 'dict' type into a more generic 'json' type
  # that can also be used to represent lists of other JSON objects.
  TYPE_MAP = {
    'dict': ('TYPE_DICTIONARY', 'string', 'String'),
    'external': ('TYPE_EXTERNAL', 'string', 'String'),
    'int': ('TYPE_INTEGER', 'int64', 'Integer'),
    'int-enum': ('TYPE_INTEGER', 'int64', 'Integer'),
    'list': ('TYPE_LIST', 'StringList', 'StringList'),
    'main': ('TYPE_BOOLEAN', 'bool', 'Boolean'),
    'string': ('TYPE_STRING', 'string', 'String'),
    'string-enum': ('TYPE_STRING', 'string', 'String'),
  }

  class EnumItem:
    """One allowed value of an enum-typed policy."""
    def __init__(self, item):
      self.caption = PolicyDetails._RemovePlaceholders(item['caption'])
      self.value = item['value']

  def __init__(self, policy, os, is_chromium_os):
    """Caches the details of |policy| for the target platform.

    Args:
      policy: one policy dict from the JSON template.
      os: target platform name, e.g. 'win', 'mac', 'linux'.
      is_chromium_os: True when generating for Chromium OS.
    """
    self.id = policy['id']
    self.name = policy['name']
    self.is_deprecated = policy.get('deprecated', False)
    self.is_device_only = policy.get('device_only', False)

    if is_chromium_os:
      expected_platform = 'chrome_os'
    else:
      expected_platform = os.lower()

    self.platforms = []
    for platform, version in [ p.split(':') for p in policy['supported_on'] ]:
      # Only open-ended version ranges ('N-') are still supported.
      if not version.endswith('-'):
        continue

      if platform.startswith('chrome.'):
        platform_sub = platform[7:]
        if platform_sub == '*':
          self.platforms.extend(['win', 'mac', 'linux'])
        else:
          self.platforms.append(platform_sub)
      else:
        self.platforms.append(platform)
    self.platforms.sort()
    self.is_supported = expected_platform in self.platforms

    # 'in' replaces the deprecated dict.has_key() (removed in Python 3).
    if policy['type'] not in PolicyDetails.TYPE_MAP:
      raise NotImplementedError('Unknown policy type for %s: %s' %
                                (policy['name'], policy['type']))
    self.policy_type, self.protobuf_type, self.policy_protobuf_type = \
        PolicyDetails.TYPE_MAP[policy['type']]
    self.schema = policy['schema']

    self.desc = '\n'.join(
        map(str.strip,
            PolicyDetails._RemovePlaceholders(policy['desc']).splitlines()))
    self.caption = PolicyDetails._RemovePlaceholders(policy['caption'])
    self.max_size = policy.get('max_size', 0)

    items = policy.get('items')
    if items is None:
      self.items = None
    else:
      self.items = [ PolicyDetails.EnumItem(entry) for entry in items ]

  # Matches a grit <ph> placeholder, optionally containing an <ex> example.
  PH_PATTERN = re.compile('<ph[^>]*>([^<]*|[^<]*<ex>([^<]*)</ex>[^<]*)</ph>')

  # Simplistic grit placeholder stripper.
  @staticmethod
  def _RemovePlaceholders(text):
    """Replaces each <ph> placeholder with its example text (if present)."""
    result = ''
    pos = 0
    for m in PolicyDetails.PH_PATTERN.finditer(text):
      result += text[pos:m.start(0)]
      result += m.group(2) or m.group(1)
      pos = m.end(0)
    result += text[pos:]
    return result
def main():
  """Parses the command line and writes each requested generated file.

  Returns 0 on success, 2 when the three positional arguments are missing.
  Also repairs the dataset-garbled 'if'/'return' lines of the original.
  """
  parser = OptionParser(usage=__doc__)
  parser.add_option('--pch', '--policy-constants-header', dest='header_path',
                    help='generate header file of policy constants',
                    metavar='FILE')
  parser.add_option('--pcc', '--policy-constants-source', dest='source_path',
                    help='generate source file of policy constants',
                    metavar='FILE')
  parser.add_option('--cpp', '--cloud-policy-protobuf',
                    dest='cloud_policy_proto_path',
                    help='generate cloud policy protobuf file',
                    metavar='FILE')
  parser.add_option('--csp', '--chrome-settings-protobuf',
                    dest='chrome_settings_proto_path',
                    help='generate chrome settings protobuf file',
                    metavar='FILE')
  parser.add_option('--cpd', '--cloud-policy-decoder',
                    dest='cloud_policy_decoder_path',
                    help='generate C++ code decoding the cloud policy protobuf',
                    metavar='FILE')
  (opts, args) = parser.parse_args()

  if len(args) != 3:
    # print() with a single argument is valid in both Python 2 and 3.
    print('exactly platform, chromium_os flag and input file must be specified.')
    parser.print_help()
    return 2

  os = args[0]
  is_chromium_os = args[1] == '1'
  template_file_name = args[2]

  template_file_contents = _LoadJSONFile(template_file_name)
  policy_details = [ PolicyDetails(policy, os, is_chromium_os)
                     for policy in _Flatten(template_file_contents) ]
  sorted_policy_details = sorted(policy_details, key=lambda policy: policy.name)

  def GenerateFile(path, writer, sorted=False):
    # Only write an output file when its flag was passed on the command line.
    if path:
      with open(path, 'w') as f:
        _OutputGeneratedWarningHeader(f, template_file_name)
        writer(sorted and sorted_policy_details or policy_details, os, f)

  GenerateFile(opts.header_path, _WritePolicyConstantHeader, sorted=True)
  GenerateFile(opts.source_path, _WritePolicyConstantSource, sorted=True)
  GenerateFile(opts.cloud_policy_proto_path, _WriteCloudPolicyProtobuf)
  GenerateFile(opts.chrome_settings_proto_path, _WriteChromeSettingsProtobuf)
  GenerateFile(opts.cloud_policy_decoder_path, _WriteCloudPolicyDecoder)

  return 0
#------------------ shared helpers ---------------------------------#
def _OutputGeneratedWarningHeader(f, template_file_path):
  """Writes the do-not-edit banner for generated files to |f|."""
  banner = [
      '//',
      '// DO NOT MODIFY THIS FILE DIRECTLY!',
      '// IT IS GENERATED BY generate_policy_source.py',
      '// FROM ' + template_file_path,
      '//',
      '',
  ]
  f.write('\n'.join(banner) + '\n')
# Wraps comment text to 80 columns, prefixing every line with '// '.
COMMENT_WRAPPER = textwrap.TextWrapper(
    width=80,
    initial_indent='// ',
    subsequent_indent='// ',
    replace_whitespace=False)
# Writes a comment, each line prefixed by // and wrapped to 80 spaces.
def _OutputComment(f, comment):
  """Writes |comment| to |f| as //-prefixed lines wrapped at 80 columns."""
  for raw_line in comment.splitlines():
    # Empty source lines still get a bare '//' marker.
    text = '//' if not raw_line else COMMENT_WRAPPER.fill(raw_line)
    f.write(text + '\n')
# Returns an iterator over all the policies in |template_file_contents|.
def _Flatten(template_file_contents):
  """Yields every policy, expanding 'group' entries into their members."""
  for policy in template_file_contents['policy_definitions']:
    if policy['type'] != 'group':
      yield policy
    else:
      for sub_policy in policy['policies']:
        yield sub_policy
def _LoadJSONFile(json_file):
  """Parses |json_file| by evaluating its contents as a Python literal.

  NOTE(review): eval() executes arbitrary code from the template file;
  presumably used instead of json.loads because the templates contain
  non-strict JSON (comments, trailing commas) -- confirm, and never point
  this at untrusted input.
  """
  with open(json_file, 'r') as f:
    text = f.read()
  return eval(text)
#------------------ policy constants header ------------------------#
def _WritePolicyConstantHeader(policies, os, f):
f.write('#ifndef CHROME_COMMON_POLICY_CONSTANTS_H_\n'
'#define CHROME_COMMON_POLICY_CONSTANTS_H_\n'
'\n'
'#include <string>\n'
'\n'
'#include "base/basictypes.h"\n'
'#include "base/values.h"\n'
'\n'
'namespace policy {\n\n')
if os == 'win':
f.write('// The windows registry path where Chrome policy '
'configuration resides.\n'
'extern const wchar_t kRegistryChromePolicyKey[];\n')
f.write('// Lists metadata such as name, expected type and id for all\n'
'// policies. Used to initialize ConfigurationPolicyProviders and\n'
'// CloudExternalDataManagers.\n'
|
tejassp/asadmn-web | webapp/lib/logger.py | Python | unlicense | 27,000 | 0.004148 | __author__ = 'aerospike'
import copy
import ntpath
from lib import logutil
import os
from lib.logsnapshot import LogSnapshot
from lib.serverlog import ServerLog
from lib.logreader import LogReader, SHOW_RESULT_KEY, COUNT_RESULT_KEY, END_ROW_KEY, TOTAL_ROW_HEADER
from lib import terminal
import re
# strftime/strptime layouts used when parsing and printing log timestamps.
DT_FMT = "%b %d %Y %H:%M:%S"
DT_TO_MINUTE_FMT = "%b %d %Y %H:%M"
DT_TIME_FMT = "%H:%M:%S"
# Index/separator constants for splitting a "<date> <time>" string into
# its date segment ("Y-M-D") and time segment ("H:M:S") components.
DATE_SEG = 0
DATE_SEPARATOR = "-"
YEAR = 0
MONTH = 1
DATE = 2
TIME_SEG = 1
TIME_SEPARATOR = ":"
HH = 0
MM = 1
SS = 2
class Logger(object):
logInfo = {}
all_cluster_files = {}
selected_cluster_files = {}
all_server_files = {}
selected_server_files = {}
def __init__(self, log_path):
    """Collect collectinfo snapshots from *log_path* and cache the
    terminal color helpers used when highlighting log output."""
    self.log_path = log_path
    self.log_reader = LogReader()
    self.add_cluster_snapshots(path=log_path)
    # Build (color_name, color_function) pairs from every terminal.fg_* /
    # terminal.bg_* attribute, e.g. ('red', terminal.fg_red); the
    # '*_clear' variants are excluded by the "clear" filter below.
    fg_color_re = re.compile("^(fg_(.*))$")
    self.fg_colors = map(
        lambda v: (
            fg_color_re.match(v).groups()[1], getattr(
                terminal, fg_color_re.match(v).group(1))), filter(
            lambda x: fg_color_re.search(x) and "clear" not in x, dir(terminal)))
    bg_color_re = re.compile("^(bg_(.*))$")
    self.bg_colors = map(
        lambda v: (
            bg_color_re.match(v).groups()[1], getattr(
                terminal, bg_color_re.match(v).group(1))), filter(
            lambda x: bg_color_re.search(x) and "clear" not in x, dir(terminal)))
def __str__(self):
    """Render a numbered listing of all cluster snapshot files that
    contain at least one node."""
    files = self.get_list(cluster_snapshot=True, all_list=True)
    parts = []
    index = 1
    for timestamp in sorted(files.keys()):
        path = files[timestamp]
        nodes = self.log_reader.get_nodes(path)
        if not nodes:
            # Snapshots without any node data are omitted from the listing.
            continue
        parts.append("\n %d: %s (%s)" % (index, ntpath.basename(path), timestamp))
        parts.append("\n\tFound %s nodes" % (len(nodes)))
        parts.append("\n\tOnline: %s" % (", ".join(nodes)))
        parts.append("\n")
        index += 1
    return "".join(parts)
def create_log_snapshot(self, timestamp="", file=""):
    """Build a LogSnapshot for *file*, deriving the timestamp when one
    is not supplied. Returns None when no file is given or no timestamp
    can be determined."""
    if not file:
        return None
    snapshot_time = timestamp or self.log_reader.get_timestamp(file)
    if not snapshot_time:
        return None
    return LogSnapshot(timestamp=snapshot_time, cluster_file=file,
                       log_reader=self.log_reader)
def create_server_log(self, display_name="", file=""):
    """Build a ServerLog for *file*, deriving the display name from the
    file's node id when one is not supplied. Returns None when no file is
    given or no name can be derived."""
    if not file:
        return None
    name = display_name or self.log_reader.get_server_node_id(file)
    if not name:
        return None
    return ServerLog(display_name=name, server_file=file,
                     log_reader=self.log_reader)
def get_log_snapshot(self, timestamp=""):
    """Return the cached LogSnapshot for *timestamp*, or None."""
    if not timestamp:
        return None
    return self.all_cluster_files.get(timestamp)
def get_server_log(self, display_name=""):
    """Return the cached ServerLog for *display_name*, or None."""
    if not display_name:
        return None
    return self.all_server_files.get(display_name)
def get_node(self, path):
    """Return the node name whose selected server file is *path*;
    fall back to returning *path* itself when no mapping exists."""
    return next((name
                 for name, fpath in self.selected_server_files.iteritems()
                 if fpath == path),
                path)
def get_files_by_index(self, clusterMode, indices=[]):
    """Resolve 1-based *indices* into registered log files.

    In cluster mode returns {timestamp: [snapshot]}; in server mode
    returns {"cluster": [server_log, ...]}. Without indices the currently
    selected files are used instead of the full set.

    NOTE(review): the mutable default ``indices=[]`` is shared between
    calls; safe only while callers never mutate it.
    """
    if clusterMode:
        files = {}
        if indices:
            timestamps = sorted(self.all_cluster_files.keys())
            for index in indices:
                try:
                    # Out-of-range indices are silently skipped.
                    files[timestamps[index -1]] = [self.all_cluster_files[timestamps[index-1]]]
                except Exception:
                    continue
        else:
            for timestamp in self.selected_cluster_files:
                try:
                    files[timestamp] = [self.selected_cluster_files[timestamp]]
                except Exception:
                    continue
        return files
    else:
        files = []
        if indices:
            nodes = sorted(self.all_server_files.keys())
            for index in indices:
                try:
                    files.append(self.all_server_files[nodes[index - 1]])
                except Exception:
                    continue
        else:
            for node in sorted(self.selected_server_files.keys()):
                try:
                    files.append(self.selected_server_files[node])
                except Exception:
                    continue
        return {"cluster": files}
def get_files(self, clusterMode, dir_path=""):
    """List log files under *dir_path* (defaults to self.log_path).

    Returns cluster (collectinfo) files when *clusterMode* is true,
    otherwise server log files; returns [] on any error.
    """
    try:
        if not dir_path:
            dir_path = self.log_path
        files = logutil.get_all_files(dir_path)
        if clusterMode:
            cluster_files = []
            for file in files:
                try:
                    # Unreadable or unrecognized files are skipped, not fatal.
                    if self.log_reader.is_cluster_log_file(file):
                        cluster_files.append(file)
                except Exception:
                    pass
            return cluster_files
        else:
            server_files = []
            for file in files:
                try:
                    if self.log_reader.is_server_log_file(file):
                        server_files.append(file)
                except Exception:
                    pass
            return server_files
    except Exception:
        return []
def add_cluster_snapshots(self, path=""):
    """Register every collectinfo snapshot found at *path* (file or dir).

    Returns (number_of_snapshots_added, error_message). Also repairs the
    dataset-garbled assignment lines inside the directory branch.
    """
    snapshots_added = 0
    if not path:
        return snapshots_added, ">>> Wrong path <<<"
    error = ""
    if os.path.isdir(path):
        for file in self.get_files(True, path):
            timestamp = self.log_reader.get_timestamp(file)
            if timestamp:
                log_snapshot = self.create_log_snapshot(timestamp, file)
                self.selected_cluster_files[timestamp] = log_snapshot
                self.all_cluster_files[timestamp] = log_snapshot
                snapshots_added += 1
            else:
                error += ">>> Cannot add collectinfo file from asmonitor or any other log file other than collectinfo. Use the one generated by asadm (>=0.0.13). Ignoring " + file + " <<<\n"
        if snapshots_added==0:
            error += ">>> No aerospike collectinfo file available in " + path + ". <<<\n"
    elif os.path.isfile(path) and self.log_reader.is_cluster_log_file(path):
        timestamp = self.log_reader.get_timestamp(path)
        if timestamp:
            log_snapshot = self.create_log_snapshot(timestamp, path)
            self.selected_cluster_files[timestamp] = log_snapshot
            self.all_cluster_files[timestamp] = log_snapshot
            snapshots_added += 1
        else:
            error += ">>> Missing Timestamp in file. Use the collectinfo generated by asadm (>=0.0.13). <<<\n"
    else:
        error += ">>> " + path + " is incorrect path or not an aerospike collectinfo file <<<\n"
    return snapshots_added, error
def add_server_logs(self, prefix="", path=""):
server_logs_added = 0
if not path:
return server_logs_added, ">>> Wrong path <<<"
error = ""
if os.path.isdir(path):
count = 0
for file in self.get_files(False, path):
file_key = self.log_reader.get_server_node_id(file)
if not file_key:
if not prefix:
error += ">>> " + file + " is not new aerospike server log file with node id. Please provide prefix to set name for it. <<<\n"
continue
file_key = prefix + str(count)
count += 1
server_log = self.create_server_log(display_name=file_key, file=file)
self.all_server_files[file_key] = server_log
self.selected_serv |
AOtools/soapy | soapy/pyqtgraph/graphicsItems/ScatterPlotItem.py | Python | gpl-3.0 | 37,649 | 0.005179 | from itertools import starmap, repeat
try:
from itertools import imap
except ImportError:
imap = map
import numpy as np
import weakref
from ..Qt import QtGui, QtCore, USE_PYSIDE, USE_PYQT5
from ..Point import Point
from .. import functions as fn
from .GraphicsItem import GraphicsItem
from .GraphicsObject import GraphicsObject
from .. import getConfigOption
from ..pgcollections import OrderedDict
from .. import debug
from ..python2_3 import basestring
__all__ = ['ScatterPlotItem', 'SpotItem']
## Build all symbol paths
# Every symbol is a QPainterPath defined in a unit box centered on the
# origin; ScatterPlotItem scales them to the requested size when drawing.
Symbols = OrderedDict([(name, QtGui.QPainterPath()) for name in ['o', 's', 't', 't1', 't2', 't3','d', '+', 'x', 'p', 'h', 'star']])
Symbols['o'].addEllipse(QtCore.QRectF(-0.5, -0.5, 1, 1))
Symbols['s'].addRect(QtCore.QRectF(-0.5, -0.5, 1, 1))
# Vertex lists for the polygonal symbols (triangles, diamond, plus,
# pentagon, hexagon, star).
coords = {
    't': [(-0.5, -0.5), (0, 0.5), (0.5, -0.5)],
    't1': [(-0.5, 0.5), (0, -0.5), (0.5, 0.5)],
    't2': [(-0.5, -0.5), (-0.5, 0.5), (0.5, 0)],
    't3': [(0.5, 0.5), (0.5, -0.5), (-0.5, 0)],
    'd': [(0., -0.5), (-0.4, 0.), (0, 0.5), (0.4, 0)],
    '+': [
        (-0.5, -0.05), (-0.5, 0.05), (-0.05, 0.05), (-0.05, 0.5),
        (0.05, 0.5), (0.05, 0.05), (0.5, 0.05), (0.5, -0.05),
        (0.05, -0.05), (0.05, -0.5), (-0.05, -0.5), (-0.05, -0.05)
    ],
    'p': [(0, -0.5), (-0.4755, -0.1545), (-0.2939, 0.4045),
          (0.2939, 0.4045), (0.4755, -0.1545)],
    'h': [(0.433, 0.25), (0., 0.5), (-0.433, 0.25), (-0.433, -0.25),
          (0, -0.5), (0.433, -0.25)],
    'star': [(0, -0.5), (-0.1123, -0.1545), (-0.4755, -0.1545),
             (-0.1816, 0.059), (-0.2939, 0.4045), (0, 0.1910),
             (0.2939, 0.4045), (0.1816, 0.059), (0.4755, -0.1545),
             (0.1123, -0.1545)]
}
# Turn each vertex list into a closed QPainterPath.
for k, c in coords.items():
    Symbols[k].moveTo(*c[0])
    for x,y in c[1:]:
        Symbols[k].lineTo(x, y)
    Symbols[k].closeSubpath()
# 'x' is simply the '+' symbol rotated by 45 degrees.
tr = QtGui.QTransform()
tr.rotate(45)
Symbols['x'] = tr.map(Symbols['+'])
def drawSymbol(painter, symbol, size, pen, brush):
    """Paint *symbol* onto *painter* at *size* using *pen* and *brush*.

    *symbol* may be None (no-op), a key into ``Symbols``, a numeric index
    into the symbol table, or a QPainterPath.
    """
    if symbol is None:
        return
    painter.scale(size, size)
    painter.setPen(pen)
    painter.setBrush(brush)
    path = symbol
    if isinstance(path, basestring):
        path = Symbols[path]
    if np.isscalar(path):
        # Numeric symbols index (mod table size) into the symbol table.
        path = list(Symbols.values())[path % len(Symbols)]
    painter.drawPath(path)
def renderSymbol(symbol, size, pen, brush, device=None):
    """
    Render a symbol specification to QImage.
    Symbol may be either a QPainterPath or one of the keys in the Symbols dict.
    If *device* is None, a new QPixmap will be returned. Otherwise,
    the symbol will be rendered into the device specified (See QPainter documentation
    for more information).
    """
    ## Render a spot with the given parameters to a pixmap
    # Leave room around the symbol for the pen's stroke width.
    penPxWidth = max(np.ceil(pen.widthF()), 1)
    if device is None:
        # NOTE(review): despite the docstring, this branch returns a QImage
        # (not a QPixmap); makeSymbolPixmap() wraps it when needed -- confirm.
        device = QtGui.QImage(int(size+penPxWidth), int(size+penPxWidth), QtGui.QImage.Format_ARGB32)
        device.fill(0)
    p = QtGui.QPainter(device)
    try:
        p.setRenderHint(p.Antialiasing)
        # Draw centered in the device.
        p.translate(device.width()*0.5, device.height()*0.5)
        drawSymbol(p, symbol, size, pen, brush)
    finally:
        # Always release the painter, even when drawing raises.
        p.end()
    return device
def makeSymbolPixmap(size, pen, brush, symbol):
    """Deprecated: render *symbol* and wrap the result in a QPixmap."""
    ## deprecated
    return QtGui.QPixmap(renderSymbol(symbol, size, pen, brush))
class SymbolAtlas(object):
    """
    Used to efficiently construct a single QPixmap containing all rendered symbols
    for a ScatterPlotItem. This is required for fragment rendering.

    Use example:
        atlas = SymbolAtlas()
        sc1 = atlas.getSymbolCoords('o', 5, QPen(..), QBrush(..))
        sc2 = atlas.getSymbolCoords('t', 10, QPen(..), QBrush(..))
        pm = atlas.getAtlas()
    """
    def __init__(self):
        # symbol key : QRect(...) coordinates where symbol can be found in atlas.
        # note that the coordinate list will always be the same list object as
        # long as the symbol is in the atlas, but the coordinates may
        # change if the atlas is rebuilt.
        # weak value; if all external refs to this list disappear,
        # the symbol will be forgotten.
        self.symbolMap = weakref.WeakValueDictionary()

        self.atlasData = None # numpy array of atlas image
        self.atlas = None # atlas as QPixmap
        self.atlasValid = False
        self.max_width=0

    def getSymbolCoords(self, opts):
        """
        Given a list of spot records, return an object representing the coordinates of that symbol within the atlas
        """
        sourceRect = np.empty(len(opts), dtype=object)
        keyi = None
        sourceRecti = None
        for i, rec in enumerate(opts):
            # Key identifies a unique (symbol, size, pen, brush) combination.
            key = (rec[3], rec[2], id(rec[4]), id(rec[5])) # TODO: use string indexes?
            if key == keyi:
                # Same spot style as the previous record; reuse its rect.
                sourceRect[i] = sourceRecti
            else:
                try:
                    sourceRect[i] = self.symbolMap[key]
                except KeyError:
                    # Unknown symbol: register a placeholder rect and mark
                    # the atlas stale so it is rebuilt on next access.
                    newRectSrc = QtCore.QRectF()
                    newRectSrc.pen = rec['pen']
                    newRectSrc.brush = rec['brush']
                    self.symbolMap[key] = newRectSrc
                    self.atlasValid = False
                    sourceRect[i] = newRectSrc
                # NOTE(review): on a cache hit (no KeyError) newRectSrc still
                # holds the rect from an earlier miss; sourceRect[i] looks
                # like the intended value here -- confirm.
                keyi = key
                sourceRecti = newRectSrc
        return sourceRect

    def buildAtlas(self):
        # get rendered array for all symbols, keep track of avg/max width
        rendered = {}
        avgWidth = 0.0
        maxWidth = 0
        images = []
        for key, sourceRect in self.symbolMap.items():
            if sourceRect.width() == 0:
                # Placeholder rect: render the symbol for the first time.
                img = renderSymbol(key[0], key[1], sourceRect.pen, sourceRect.brush)
                images.append(img) ## we only need this to prevent the images being garbage collected immediately
                arr = fn.imageToArray(img, copy=False, transpose=False)
            else:
                # Already in the atlas: copy its pixels back out.
                (y,x,h,w) = sourceRect.getRect()
                # NOTE(review): uses y+w as the second-axis bound; rendered
                # symbols are square in practice, but y+h looks intended
                # (compare the fill loop below) -- confirm.
                arr = self.atlasData[int(x):int(x+w), int(y):int(y+w)]
            rendered[key] = arr
            w = arr.shape[0]
            avgWidth += w
            maxWidth = max(maxWidth, w)

        nSymbols = len(rendered)
        if nSymbols > 0:
            avgWidth /= nSymbols
            # Target a roughly square atlas.
            width = max(maxWidth, avgWidth * (nSymbols**0.5))
        else:
            avgWidth = 0
            width = 0

        # sort symbols by height
        symbols = sorted(rendered.keys(), key=lambda x: rendered[x].shape[1], reverse=True)

        # Shelf-pack the symbols into rows, tallest first.
        self.atlasRows = []

        x = width
        y = 0
        rowheight = 0
        for key in symbols:
            arr = rendered[key]
            w,h = arr.shape[:2]
            if x+w > width:
                # Current row full; start a new one.
                y += rowheight
                x = 0
                rowheight = h
                self.atlasRows.append([y, rowheight, 0])
            self.symbolMap[key].setRect(y, x, h, w)
            x += w
            self.atlasRows[-1][2] = x
        height = y + rowheight

        self.atlasData = np.zeros((int(width), int(height), 4), dtype=np.ubyte)
        for key in symbols:
            y, x, h, w = self.symbolMap[key].getRect()
            self.atlasData[int(x):int(x+w), int(y):int(y+h)] = rendered[key]
        self.atlas = None       # pixmap is regenerated lazily in getAtlas()
        self.atlasValid = True
        self.max_width = maxWidth

    def getAtlas(self):
        """Return the atlas QPixmap, rebuilding it first when stale."""
        if not self.atlasValid:
            self.buildAtlas()
        if self.atlas is None:
            if len(self.atlasData) == 0:
                return QtGui.QPixmap(0,0)
            img = fn.makeQImage(self.atlasData, copy=False, transpose=False)
            self.atlas = QtGui.QPixmap(img)
        return self.atlas
class ScatterPlotItem(GraphicsObject):
"""
Displays a set of x/y points. Instances of this class are created
automatically as part of PlotDataItem; these rarely need to be instantiated
directly.
The size, shape, pen, and fill brush may be set for each point individually
or for all points.
======================== ===============================================
**Signals:**
sigPlotChanged(self) Emitted when the data being plotted has changed
sigClicked(self, points) Emitted |
cbitstech/Purple-Robot-Django | formatters/features_deviceinusefeature.py | Python | gpl-3.0 | 1,013 | 0.000987 | # pylint: disable=line-too-long, unused-argument
import json
from django.template.loader import render_to_string
def format_reading(probe_name, json_payload):
    """Return a human-readable status for a device-in-use reading.

    *json_payload* is a JSON object with a DEVICE_ACTIVE member; boolean
    True/False map to "Active"/"Inactive", any other value is returned
    unchanged. Repairs the dataset-garbled 'DEVICE_ACTIVE' key.
    """
    item = json.loads(json_payload)

    status = item['DEVICE_ACTIVE']

    if item['DEVICE_ACTIVE'] is True:
        status = "Active"
    elif item['DEVICE_ACTIVE'] is False:
        status = "Inactive"

    return status
def visualize(probe_name, readings):
    """Render the device-usage chart template for *readings*.

    Each reading's JSON payload is reduced to {"x": timestamp, "y": 1|0}
    so the template can plot activity over time. Repairs the
    dataset-garbled assignment inside the loop.
    """
    report = []

    for reading in readings:
        payload = json.loads(reading.payload)

        timestamp = payload['TIMESTAMP']
        device_active = payload['DEVICE_ACTIVE']

        # Booleans become 1/0 for charting; other values pass through as-is.
        if device_active is True:
            device_active = 1
        elif device_active is False:
            device_active = 0

        report.append({"y": device_active, "x": timestamp})

    return render_to_string('visualization_device.html', {'probe_name': probe_name, 'readings': readings, 'device_report': json.dumps(report)})
|
astrobin/astrobin | nested_comments/models.py | Python | agpl-3.0 | 2,677 | 0.000374 | # Django
from django.contrib.auth.models import User
from common.utils import get_sentinel_user
from toggleproperties.models import ToggleProperty
try:
# Django < 1.10
from django.contrib.contenttypes.generic import GenericForeignKey
except ImportError:
# Django >= 1.10
from django.contrib.contenttypes.fields import GenericForeignKey
from django.contrib.contenttypes.models import ContentType
from django.core.exceptions import ValidationError
from django.db import models
class NestedComment(models.Model):
    """A threaded (soft-deletable, moderatable) comment attached to any
    model instance via a generic foreign key.

    Repairs the dataset-garbled ``blank=True`` option on ``parent``.
    """

    content_type = models.ForeignKey(
        ContentType,
        on_delete=models.CASCADE
    )

    object_id = models.PositiveIntegerField()

    content_object = GenericForeignKey(
        'content_type',
        'object_id',
    )

    author = models.ForeignKey(
        User,
        related_name="comments",
        on_delete=models.SET(get_sentinel_user),
        editable=False,
    )

    text = models.TextField()

    created = models.DateTimeField(
        auto_now_add=True,
        editable=False,
    )

    updated = models.DateTimeField(
        auto_now=True,
        editable=False,
    )

    # Threading: a top-level comment has parent=None.
    parent = models.ForeignKey(
        'self',
        null=True,
        blank=True,
        related_name='children',
        on_delete=models.SET_NULL,
    )

    # Soft-delete flag: delete() marks rows instead of removing them.
    deleted = models.BooleanField(
        default=False,
    )

    # None = no moderation required; True = awaiting moderation.
    pending_moderation = models.NullBooleanField()

    moderator = models.ForeignKey(
        User,
        related_name="moderated_comments",
        null=True,
        default=None,
        on_delete=models.SET_NULL
    )

    @property
    def depth(self):
        """1-based nesting level of this comment within its thread."""
        value = 1
        if self.parent:
            return value + self.parent.depth
        return value

    @property
    def likes(self):
        """Primary keys of the users who 'liked' this comment."""
        return ToggleProperty.objects.filter(
            object_id=self.pk,
            content_type=ContentType.objects.get_for_model(NestedComment),
            property_type='like'
        ).values_list('user__pk', flat=True)

    def __str__(self):
        return "Comment %d" % self.pk

    def get_absolute_url(self):
        """URL of the commented object plus an anchor for this comment."""
        object_url = self.content_type.get_object_for_this_type(id=self.object_id).get_absolute_url()
        return '%s#c%d' % (object_url, self.id)

    def delete(self, *args, **kwargs):
        # Soft delete: flag the row rather than removing it from the DB.
        if not self.deleted:
            self.deleted = True
            self.save()

    def clean(self, *args, **kwargs):
        # Reject new comments when the target object has comments disabled.
        obj = self.content_type.get_object_for_this_type(pk=self.object_id)
        if hasattr(obj, 'allow_comments') and obj.allow_comments is False:
            raise ValidationError('Comments are closed')

    class Meta:
        app_label = 'nested_comments'
|
stryder199/RyarkAssignments | Assignment2/ttt/archive/for_loops/for_continue.py | Python | mit | 1,067 | 0.099344 | import question_template
game_type = 'input_output'
source_language = 'C'
parameter_list = [
['$x1','int'],['$x2','int'],['$x3','int'],['$y0','int'],
]
tuple_list = [
['for_continue_',
[0,1,2,None],
[0,2,2,None],
[0,4,2,None],
[0,6,2,None],
[0,7,2,None],
[None,None,2,1],
[None,None,2,2],
[None,None,2,4],
[None,None,2,6],
[None,None,2,7],
[0,1,3,None],
[0,2,3,None],
[0,4,3,None],
[0,6,4,None],
[0,7,5,None],
[None,None,3,1],
[None,None,3,2],
[None,None,3,4],
[None,None,4,6],
[None,None,5,7],
]
]
global_code_template = '''\
d #include <stdio.h>
x #include <stdio.h>
'''
main_code_template = '''\
dx int s = $x1;
dx int i;
dx for (i = 1; i < $x2; i++) {
dx if (i % $x3 == 0)
dx continue;
dx s = s + i;
dx }
dx printf("%d\\n",s);
'''
argv_template = ''
stdin_template = ''
stdout_template = '''\
$y0
'''
question = question_template.Question_template(game_type,source_language,
p | arameter_list,tuple_li | st,global_code_template,main_code_template,
argv_template,stdin_template,stdout_template)
|
mcus/SickRage | lib/feedparser/namespaces/itunes.py | Python | gpl-3.0 | 4,131 | 0.001695 | # Support for the iTunes format
# Copyright 2010-2015 Kurt McKee <contactme@kurtmckee.org>
# Copyright 2002-2008 Mark Pilgrim
# All rights reserved.
#
# This file is a part of feedparser.
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS 'AS IS'
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
from __future__ import absolute_import, unicode_literals
from ..util import FeedParserDict
class Namespace(object):
    # Mixed into the feedparser handler class; most _start_*/_end_* hooks
    # below delegate iTunes-namespaced elements to the generic handlers
    # (_start_author, _end_category, ...) defined elsewhere in the parser.
    supported_namespaces = {
        # Canonical namespace
        'http://www.itunes.com/DTDs/PodCast-1.0.dtd': 'itunes',

        # Extra namespace
        'http://example.com/DTDs/PodCast-1.0.dtd': 'itunes',
    }

    def _start_itunes_author(self, attrsD):
        self._start_author(attrsD)

    def _end_itunes_author(self):
        self._end_author()

    def _end_itunes_category(self):
        self._end_category()

    def _start_itunes_name(self, attrsD):
        self._start_name(attrsD)

    def _end_itunes_name(self):
        self._end_name()

    def _start_itunes_email(self, attrsD):
        self._start_email(attrsD)

    def _end_itunes_email(self):
        self._end_email()

    def _start_itunes_subtitle(self, attrsD):
        self._start_subtitle(attrsD)

    def _end_itunes_subtitle(self):
        self._end_subtitle()

    def _start_itunes_summary(self, attrsD):
        self._start_summary(attrsD)

    def _end_itunes_summary(self):
        self._end_summary()

    def _start_itunes_owner(self, attrsD):
        # <itunes:owner> maps to the feed's publisher.
        self.inpublisher = 1
        self.push('publisher', 0)

    def _end_itunes_owner(self):
        self.pop('publisher')
        self.inpublisher = 0
        self._sync_author_detail('publisher')

    def _end_itunes_keywords(self):
        # Comma-separated keywords become individual tags; blanks dropped.
        for term in self.pop('itunes_keywords').split(','):
            if term.strip():
                self._addTag(term.strip(), 'http://www.itunes.com/', None)

    def _start_itunes_category(self, attrsD):
        self._addTag(attrsD.get('text'), 'http://www.itunes.com/', None)
        self.push('category', 1)

    def _start_itunes_image(self, attrsD):
        # The image URL may appear as either 'href' or 'url'.
        self.push('itunes_image', 0)
        if attrsD.get('href'):
            self._getContext()['image'] = FeedParserDict({'href': attrsD.get('href')})
        elif attrsD.get('url'):
            self._getContext()['image'] = FeedParserDict({'href': attrsD.get('url')})
    _start_itunes_link = _start_itunes_image

    def _end_itunes_block(self):
        # 'yes' -> 1, anything else -> 0.
        value = self.pop('itunes_block', 0)
        self._getContext()['itunes_block'] = (value == 'yes') and 1 or 0

    def _end_itunes_explicit(self):
        value = self.pop('itunes_explicit', 0)
        # Convert 'yes' -> True, 'clean' to False, and any other value to None
        # False and None both evaluate as False, so the difference can be ignored
        # by applications that only need to know if the content is explicit.
        self._getContext()['itunes_explicit'] = (None, False, True)[(value == 'yes' and 2) or value == 'clean' or 0]
agendaTCC/AgendaTCC | tccweb/utils/storage.py | Python | gpl-2.0 | 415 | 0.004819 | import unicodedata
from django.core.files.storage import FileSystemStorage


class ASCIIFileSystemStorage(FileSystemStorage):
    """
    Convert unicode characters in name to ASCII characters.
    """

    def get_valid_name(self, name):
        """Transliterate *name* to ASCII before the default sanitizing.

        Repairs the dataset-garbled import and signature lines, and
        replaces the Python-2-only unicode() call with an equivalent
        decode('ascii') that works on both Python 2 and 3.
        """
        # NFKD-decompose accented characters, then drop any byte that is
        # not plain ASCII before handing off to Django's own sanitizer.
        name = unicodedata.normalize('NFKD', name).encode('ascii', 'ignore')
        name = name.decode('ascii')
        return super(ASCIIFileSystemStorage, self).get_valid_name(name)
maxisacson/sml-project | Henrik_Project/ensemble_methods.py | Python | mit | 11,726 | 0.008272 | import pandas as pd
import numpy as np
from six import iteritems
from scipy import stats
#from sklearn.feature_selection import SelectKBest, f_classif
from sklearn.cross_validation import train_test_split
from sklearn.ensemble import \
RandomForestRegressor, \
ExtraTreesRegressor, \
GradientBoostingRegressor
from sklearn.tree import \
DecisionTreeRegressor
import matplotlib.pyplot as plt
from argparse import ArgumentParser
from ROOT import TH1F, TFile, TProfile
from rutil import init_atlas_style, show_hists
# Command-line interface: which regressor/sample to run and how to plot.
parser = ArgumentParser(description = 'Regression using ensemble methods')
parser.add_argument('-r',
                    '--regressor',
                    required=True,
                    help='the regressor to use',
                    metavar='<regressor>')
parser.add_argument('-d',
                    '--depth',
                    default=3,
                    type=int,
                    help='the tree depth',
                    metavar='<depth>')
parser.add_argument('-t',
                    '--trees',
                    default=100,
                    type=int,
                    help='the number of trees',
                    metavar='<trees>')
parser.add_argument('-s',
                    '--sample',
                    default='all',
                    help='the mass point of the sample to use or \'all\'',
                    metavar='<sample>')
parser.add_argument('-m',
                    '--matplotlib',
                    action='store_true',
                    help='plot using matplotlib')
# Parsed once at import time; the rest of the script reads `arguments`.
arguments = parser.parse_args()
def plot_matplotlib(sample, base_name):
    """Plot predicted/observed/target MET spectra and resolutions with
    matplotlib, saving the figure as <base_name>.pdf and showing it."""
    # Color-blind-friendly palette plus compact legend/figure settings.
    colors = ['#e41a1c', '#377eb8', '#4daf4a', '#984ea3', '#ff7f00']
    params = { 'figure.facecolor': 'white',
               'figure.subplot.bottom': 0.0,
               'font.size': 16,
               'legend.fontsize': 16,
               'legend.borderpad': 0.2,
               'legend.labelspacing': 0.2,
               'legend.handlelength': 1.5,
               'legend.handletextpad': 0.4,
               'legend.borderaxespad': 0.2,
               'lines.markeredgewidth': 2.0,
               'lines.linewidth': 2.0,
               'axes.prop_cycle': plt.cycler('color',colors)}
    plt.rcParams.update(params)
    fig, ax = plt.subplots(2)
    # Top panel: transverse-momentum spectra (GeV).
    ax[0].hist(sample[met_pred_parameter], 50, range=(0.0, 400.0), label='Pred', histtype='step')
    ax[0].hist(sample[met_reco_parameter], 50, range=(0.0, 400.0), label='Obs', histtype='step')
    # NOTE(review): met_target_parameter is not defined by the constants
    # above (they define met_truth_parameter) -- confirm it is defined
    # elsewhere in this file or this line raises NameError.
    ax[0].hist(sample[met_target_parameter], 50, range=(0.0, 400.0), label='Target', histtype='step')
    ax[0].set_xlim([0,400])
    ax[0].set_xlabel("pT (GeV)")
    ax[0].legend(loc='upper right')
    # Bottom panel: relative resolutions (%).
    ax[1].hist(sample[met_pred_resolution], 50, range=(-100.0, 100.0), label='Pred', histtype='step')
    ax[1].hist(sample[met_reco_resolution], 50, range=(-100.0, 100.0), label='Obs', histtype='step')
    ax[1].set_xlim([-100,100])
    ax[1].set_xlabel("Resolution (%)")
    ax[1].legend(loc='upper left')
    fig.tight_layout(pad=0.3)
    fig.savefig('{}.pdf'.format(base_name))
    plt.show()
def plot_root(sample, base_name):
    """Fill ROOT histograms from *sample*, write them to <base_name>.root
    and display/save comparison plots via rutil.show_hists."""
    met_pred, met_truth, met_reco, mt_pred, mt_reco = [
        TH1F(n, '', 100, 0, 500)
        for n
        in ['pt_{}'.format(arguments.regressor), 'pt_truth', 'met_reco',
            'mt_{}'.format(arguments.regressor), 'mt_reco']]
    res_pred, res_reco = [
        TH1F(n, '', 100, -100, 100)
        for n
        in ['res_{}'.format(arguments.regressor), 'res_reco']]
    profile = TProfile('profile_pred_{}'.format(arguments.regressor),
                       '', 100, 0, 500)
    # map() is eager under Python 2 (this file uses raw_input below), so
    # these calls fill the histograms element by element.
    map(met_pred.Fill, sample[met_pred_parameter])
    map(met_truth.Fill, sample[met_truth_parameter])
    map(met_reco.Fill, sample[met_reco_parameter])
    map(mt_pred.Fill, sample[mt_pred_parameter])
    map(mt_reco.Fill, sample[mt_reco_parameter])
    map(res_pred.Fill, sample[met_pred_resolution])
    map(res_reco.Fill, sample[met_reco_resolution])
    map(profile.Fill, sample['pt(mc nuH)'], sample[met_pred_ratio])
    # Persist the prediction histograms to a ROOT file.
    root_file = TFile.Open('{}.root'.format(base_name), 'RECREATE')
    root_file.cd()
    met_pred.Write()
    mt_pred.Write()
    res_pred.Write()
    profile.Write()
    root_file.ls()
    root_file.Close()
    init_atlas_style()
    # Rename for the plot legends, then draw and save each comparison.
    met_pred.SetName('Pred')
    met_reco.SetName('Reco')
    met_truth.SetName('Target')
    o1 = show_hists((met_pred, met_truth, met_reco),
                    'Missing ET',
                    '{}_met.pdf'.format(base_name))
    res_pred.SetName('Pred')
    res_reco.SetName('Reco')
    o2 = show_hists((res_pred, res_reco),
                    'Missing ET resolution',
                    '{}_res.pdf'.format(base_name))
    mt_pred.SetName('Pred')
    mt_reco.SetName('Reco')
    # NOTE(review): o3 is assigned twice and o1/o2/o3 are otherwise unused;
    # they presumably only keep the ROOT canvases alive -- confirm.
    o3 = show_hists((mt_pred, mt_reco),
                    'Transverse mass',
                    '{}_mt.pdf'.format(base_name))
    profile.SetName('Profile')
    o3 = show_hists((profile,),
                    'Profile',
                    '{}_profile.pdf'.format(base_name))
    # Keep the plots on screen until the user acknowledges.
    raw_input('Press any key...')
# All available variables in the dataset
all_predictors = [
    'et(met)', 'phi(met)', 'ntau', 'nbjet', 'njet', 'pt(reco tau1)',
    'eta(reco tau1)', 'phi(reco tau1)', 'm(reco tau1)', 'pt(reco bjet1)',
    'eta(reco bjet1)', 'phi(reco bjet1)', 'm(reco bjet1)', 'pt(reco bjet2)',
    'eta(reco bjet2)', 'phi(reco bjet2)', 'm(reco bjet2)', 'pt(reco bjet3)',
    'eta(reco bjet3)', 'phi(reco bjet3)', 'm(reco bjet3)', 'pt(reco bjet4)',
    'eta(reco bjet4)', 'phi(reco bjet4)', 'm(reco bjet4)', 'pt(reco jet1)',
    'eta(reco jet1)', 'phi(reco jet1)', 'm(reco jet1)', 'pt(reco jet2)',
    'eta(reco jet2)', 'phi(reco jet2)', 'm(reco jet2)', 'pt(reco jet3)',
    'eta(reco jet3)', 'phi(reco jet3)', 'm(reco jet3)', 'pt(reco jet4)',
    'eta(reco jet4)', 'phi(reco jet4)', 'm(reco jet4)'
]

# Predictors as selected by SelektKBest
# (commented entries were tried and dropped during feature selection).
selected_predictors = [
    'et(met)', 'phi(met)',
    'pt(reco tau1)', 'eta(reco tau1)', 'phi(reco tau1)',
    'pt(reco bjet1)', 'eta(reco bjet1)', 'phi(reco bjet1)', 'm(reco bjet1)',
    #'phi(reco bjet3)', 'm(reco bjet3)',
    #'pt(reco bjet4)', 'eta(reco bjet4)', 'phi(reco bjet4)', 'm(reco bjet4)',
    'pt(reco jet1)', 'eta(reco jet1)', 'phi(reco jet1)', 'm(reco jet1)',
    #'pt(reco jet2)', 'eta(reco jet2)', 'phi(reco jet2)', 'm(reco jet2)',
    'nbjet'
]

# Columns shown when inspecting the dataframe by eye.
selected_displays = [
    'et(met)',
    'pt(mc nuH)',
    'pt(reco tau1)',
    'pt(mc tau)',
    'mass_truth'
]
# Dataframe column names used throughout: truth-level ('mc'), reconstructed
# ('reco'/'met') and regressed ('pred') quantities, plus derived
# resolution/ratio columns. Repairs the dataset-garbled met_reco_parameter
# assignment.
nupt_truth_parameter = 'pt(mc nuH)'
met_truth_parameter = 'et(mc met)'
met_reco_parameter = 'et(met)'
met_pred_parameter = 'pt(pred nuH)'
met_reco_resolution = 'et(res met)'
met_pred_resolution = 'pt(res pred nuH)'
met_pred_ratio = 'pt(ratio nuH)'
#mt_truth_parameter = 'mt(mc)'
mt_reco_parameter = 'mt(net)'
mt_pred_parameter = 'mt(pred)'
pd.set_option('display.max_columns', 500)
sample_200 = pd.read_csv('../test/mg5pythia8_hp200.root.test3.csv')
sample_300 = pd.read_csv('../test/mg5pythia8_hp300.root.test.csv')
sample_400 = pd.read_csv('../test/mg5pythia8_hp400.root.test.csv')
# Make a combined sample
if arguments.sample == 'all':
combined_sample = pd.concat((sample_200, sample_300, sample_400))
dataset = combined_sample.sample(100000, random_state=1)
elif arguments.sample == '200':
dataset = sample_200
elif arguments.sample == '300':
dataset = sample_300
elif arguments.sample == '400':
dataset = sample_400
# Replace invalid values with NaN
dataset = dataset.where(dataset > -998.0, other=np.nan)
# Compute the H+ truth mass
dataset['mass_truth'] = (
np.sqrt( 2
* (dataset['pt(mc nuH)'])
* (dataset['pt(mc tau)'])
* ( np.cosh(dataset['eta(mc nuH)'] - dataset['eta(mc tau)'])
- np.cos(dataset['phi(mc nuH)'] - dataset['phi(mc tau)'])))
)
dataset[met_truth_parameter] = \
np.sqrt( dataset['pt(mc nuH)']**2
+ dataset['pt(mc nuTau)']**2
+ 2*dataset['pt(mc nuH)']*dataset['pt(mc nuTau)']
* np.cos( dataset['phi(mc nuH)']
- dataset['phi(mc nuTau)']))
#dataset[mt_truth_parameter] = \
#np.sqrt( 2*dataset[met_truth_parameter]*dataset['pt(mc |
NetEaseGame/git-webhook | app/config_test.py | Python | mit | 357 | 0 | # -*- coding: utf-8 -*-
# Test-environment settings module (presumably consumed by a Flask app -- confirm).
# DEBUG enables the interactive debugger/reloader; TESTING puts extensions into test mode.
DEBUG = True
TESTING = True
# Placeholder secret for tests only -- never reuse a literal secret in production.
SECRET_KEY = 'SECRET_KEY'
# Local MySQL database used by the test suite.
DATABASE_URI = 'mysql+pymysql://root:root@127.0.0.1/git_webhook'
# Celery broker/result backend and the socket message queue all share the
# same local Redis instance, database 0.
CELERY_BROKER_URL = 'redis://:@127.0.0.1:6379/0'
CELERY_RESULT_BACKEND = 'redis://:@127.0.0.1:6379/0'
SOCKET_MESSAGE_QUEUE = 'redis://:@127.0.0.1:6379/0'
# Dummy GitHub OAuth credentials for tests.
GITHUB_CLIENT_ID = '123'
GITHUB_CLIENT_SECRET = 'SECRET'
|
tdickers/mitmproxy | netlib/http/http2/connections.py | Python | mit | 14,926 | 0.001072 | from __future__ import (absolute_import, print_function, division)
import itertools
import time
import hyperframe.frame
from hpack.hpack import Encoder, Decoder
from netlib import utils
from netlib.http import url
import netlib.http.headers
import netlib.http.response
import netlib.http.request
from netlib.http.http2 import framereader
class TCPHandler(object):
    """Minimal pairing of a read file object with an (optional) write file object.

    Serves as a lightweight stand-in for a full TCP connection handler when
    only the two file-like endpoints are available.
    """

    def __init__(self, rfile, wfile=None):
        # Keep the raw endpoints; callers access .rfile / .wfile directly.
        self.rfile, self.wfile = rfile, wfile
class HTTP2Protocol(object):
ERROR_CODES = utils.BiDi(
NO_ERROR=0x0,
PROTOCOL_ERROR=0x1,
INTERNAL_ERROR=0x2,
FLOW_CONTROL_ERROR=0x3,
SETTINGS_TIMEOUT=0x4,
STREAM_CLOSED=0x5,
FRAME_SIZE_ERROR=0x6,
REFUSED_STREAM=0x7,
CANCEL=0x8,
COMPRESSION_ERROR=0x9,
CONNECT_ERROR=0xa,
ENHANCE_YOUR_CALM=0xb,
INADEQUATE_SECURITY=0xc,
HTTP_1_1_REQUIRED=0xd
)
CLIENT_CONNECTION_PREFACE = b'PRI * HTTP/2.0\r\n\r\nSM\r\n\r\n'
HTTP2_DEFAULT_SETTINGS = {
hyperframe.frame.SettingsFrame.HEADER_TABLE_SIZE: 4096,
hyperframe.frame.SettingsFrame.ENABLE_PUSH: 1,
hyperframe.frame.SettingsFrame.MAX_CONCURRENT_STREAMS: None,
hyperframe.frame.SettingsFrame.INITIAL_WINDOW_SIZE: 2 ** 16 - 1,
hyperframe.frame.SettingsFrame.MAX_FRAME_SIZE: 2 ** 14,
hyperframe.frame.Set | tingsFrame.MAX_HEADER_LIST_SIZE: None,
}
def __init__(
self,
tcp_handler=None,
rfile=None,
wfile=None,
is_server=False,
dump_frames=False,
encoder=None,
decoder=None,
unhandled_frame_cb | =None,
):
self.tcp_handler = tcp_handler or TCPHandler(rfile, wfile)
self.is_server = is_server
self.dump_frames = dump_frames
self.encoder = encoder or Encoder()
self.decoder = decoder or Decoder()
self.unhandled_frame_cb = unhandled_frame_cb
self.http2_settings = self.HTTP2_DEFAULT_SETTINGS.copy()
self.current_stream_id = None
self.connection_preface_performed = False
    def read_request(
        self,
        __rfile,
        include_body=True,
        body_size_limit=None,
        allow_empty=False,
    ):
        """Read one HTTP/2 request from the connection and return a netlib Request.

        ``__rfile`` is unused -- all reading goes through ``self.tcp_handler``;
        the parameter only mirrors the HTTP/1 reader interface.  The returned
        Request carries the HTTP/2 stream id in ``request.stream_id``.
        Raises NotImplementedError if *body_size_limit* is given.
        """
        if body_size_limit is not None:
            raise NotImplementedError()
        self.perform_connection_preface()
        # Provisional start time; refined below if the rfile tracks the
        # actual first-byte timestamp.
        timestamp_start = time.time()
        if hasattr(self.tcp_handler.rfile, "reset_timestamps"):
            self.tcp_handler.rfile.reset_timestamps()
        stream_id, headers, body = self._receive_transmission(
            include_body=include_body,
        )
        if hasattr(self.tcp_handler.rfile, "first_byte_timestamp"):
            # more accurate timestamp_start
            timestamp_start = self.tcp_handler.rfile.first_byte_timestamp
        timestamp_end = time.time()
        # Extract the HTTP/2 pseudo-headers (RFC 7540 8.1.2.3) and strip the
        # ones we consume from the header block.
        # NOTE(review): the ':authority' default is bytes while the others are
        # str -- looks inconsistent; confirm what Headers.get returns before
        # relying on the defaults.
        authority = headers.get(':authority', b'')
        method = headers.get(':method', 'GET')
        scheme = headers.get(':scheme', 'https')
        path = headers.get(':path', '/')
        headers.clear(":method")
        headers.clear(":scheme")
        headers.clear(":path")
        host = None
        port = None
        # Map onto the three HTTP/1 request-line forms.
        if path == '*' or path.startswith("/"):
            first_line_format = "relative"
        elif method == 'CONNECT':
            # CONNECT carries "host:port" (or just host) in :authority.
            first_line_format = "authority"
            if ":" in authority:
                host, port = authority.split(":", 1)
            else:
                host = authority
        else:
            first_line_format = "absolute"
            # FIXME: verify if path or :host contains what we need
            scheme, host, port, _ = url.parse(path)
            scheme = scheme.decode('ascii')
            host = host.decode('ascii')
        # Fill in defaults for anything the pseudo-headers did not provide.
        if host is None:
            host = 'localhost'
        if port is None:
            port = 80 if scheme == 'http' else 443
        port = int(port)
        request = netlib.http.request.Request(
            first_line_format,
            method.encode('ascii'),
            scheme.encode('ascii'),
            host.encode('ascii'),
            port,
            path.encode('ascii'),
            b"HTTP/2.0",
            headers,
            body,
            timestamp_start,
            timestamp_end,
        )
        request.stream_id = stream_id
        return request
    def read_response(
        self,
        __rfile,
        request_method=b'',
        body_size_limit=None,
        include_body=True,
        stream_id=None,
    ):
        """Read one HTTP/2 response from the connection and return a netlib Response.

        ``__rfile`` and ``request_method`` are unused -- reading goes through
        ``self.tcp_handler``; both parameters only mirror the HTTP/1 reader
        interface.  The returned Response carries the stream id in
        ``response.stream_id``.  Raises NotImplementedError if
        *body_size_limit* is given.
        """
        if body_size_limit is not None:
            raise NotImplementedError()
        self.perform_connection_preface()
        # Provisional start time; refined below if the rfile tracks the
        # actual first-byte timestamp.
        timestamp_start = time.time()
        if hasattr(self.tcp_handler.rfile, "reset_timestamps"):
            self.tcp_handler.rfile.reset_timestamps()
        stream_id, headers, body = self._receive_transmission(
            stream_id=stream_id,
            include_body=include_body,
        )
        if hasattr(self.tcp_handler.rfile, "first_byte_timestamp"):
            # more accurate timestamp_start
            timestamp_start = self.tcp_handler.rfile.first_byte_timestamp
        # Without the body we cannot know when the message actually ended.
        if include_body:
            timestamp_end = time.time()
        else:
            timestamp_end = None
        response = netlib.http.response.Response(
            b"HTTP/2.0",
            int(headers.get(':status', 502)),  # 502 if :status is missing
            b'',
            headers,
            body,
            timestamp_start=timestamp_start,
            timestamp_end=timestamp_end,
        )
        response.stream_id = stream_id
        return response
def assemble(self, message):
if isinstance(message, netlib.http.request.Request):
return self.assemble_request(message)
elif isinstance(message, netlib.http.response.Response):
return self.assemble_response(message)
else:
raise ValueError("HTTP message not supported.")
    def assemble_request(self, request):
        """Serialize *request* into HTTP/2 HEADERS (+ DATA) frames.

        Inserts the :method/:path/:scheme/:authority pseudo-headers and
        returns the raw frame chunks as a list of bytes.
        """
        assert isinstance(request, netlib.http.request.Request)
        # :authority is the SNI hostname when known, otherwise the peer
        # address; a non-default port is appended explicitly.
        authority = self.tcp_handler.sni if self.tcp_handler.sni else self.tcp_handler.address.host
        if self.tcp_handler.address.port != 443:
            authority += ":%d" % self.tcp_handler.address.port
        headers = request.headers.copy()
        if ':authority' not in headers:
            headers.insert(0, b':authority', authority.encode('ascii'))
        headers.insert(0, b':scheme', request.scheme.encode('ascii'))
        headers.insert(0, b':path', request.path.encode('ascii'))
        headers.insert(0, b':method', request.method.encode('ascii'))
        # Reuse the message's stream id (e.g. when replaying) or allocate
        # the next free one.
        if hasattr(request, 'stream_id'):
            stream_id = request.stream_id
        else:
            stream_id = self._next_stream_id()
        return list(itertools.chain(
            self._create_headers(headers, stream_id, end_stream=(request.body is None or len(request.body) == 0)),
            self._create_body(request.body, stream_id)))
def assemble_response(self, response):
assert isinstance(response, netlib.http.response.Response)
headers = response.headers.copy()
if ':status' not in headers:
headers.insert(0, b':status', str(response.status_code).encode('ascii'))
if hasattr(response, 'stream_id'):
stream_id = response.stream_id
else:
stream_id = self._next_stream_id()
return list(itertools.chain(
self._create_headers(headers, stream_id, end_stream=(response.body is None or len(response.body) == 0)),
self._create_body(response.body, stream_id),
))
def perform_connection_preface(self, force=False):
if force or not self.connection_preface_performed:
if self.is_server:
self.perform_server_connection_preface(force)
else:
self.perform_client_connection_preface(force)
def perform_server_connection_preface(self, force=False):
if force or not self.connection_preface_performed:
self.connection_preface_performed = True
magic_length = len(self.CLIENT_CONNECTION_PREFACE)
magic = self.tcp_handler.rfile.safe_read(m |
exa-analytics/exatomic | exatomic/qe/pw/input.py | Python | apache-2.0 | 1,703 | 0.006459 | # -*- coding: utf-8 -*-
# Copyright (c) 2015-2022, Exa Analytics Development Team
# Distributed under the terms of the Apache License 2.0
#'''
#PW Module Input File Editor
#====================================
#
#'''
#import numpy as np
#from exatomic.exa import DataFrame
#from exatomic import Length, Frame, Atom
#from exqe.input import QEInput
#from exqe.types import lengths
#
#
#class PWInput(QEInput):
# '''
# Editor representation of QE's pw.x input file.
# '''
# def parse_frame(self):
# '''
# Parse the :class:`~atomic.frame.Frame` dataframe and store
# it locally, accessible through the ".frame" property.
# '''
# nat = len(self.atom)
# df = DataFrame.from_dict({'atom_count': [nat]})
# self._frame = Frame(df)
#
# | def parse_cell(s | elf):
# '''
# Determine the type of unit cell being used.
# '''
# ibrav = int(list(self.find('ibrav').values())[0].split('=')[1])
# if ibrav == 1:
# a = np.float64(list(self.find('celldm(1)').values())[0].split('=')[1])
# frame = self.frame
# frame['xi'] = a
# frame['xj'] = 0.0
# frame['xk'] = 0.0
# frame['yi'] = 0.0
# frame['yj'] = a
# frame['yk'] = 0.0
# frame['zi'] = 0.0
# frame['zj'] = 0.0
# frame['zk'] = a
# frame['ox'] = 0.0
# frame['oy'] = 0.0
# frame['oz'] = 0.0
# self._frame = frame
# else:
# raise NotImplementedError()
#
# def __init__(self, *args, **kwargs):
# super().__init__(*args, **kwargs)
# self._atom = None
# self._frame = None
#
|
BLavery/PiBlynk | PiBlynk-py/32-bridge-in.py | Python | mit | 1,523 | 0.013132 | # This example is designed to be paired with example file 31-bridge-out.py
# Run the two with DIFFERENT DEVICE TOKENS.
# (They can be either in same "project" or separate projects as set at phone. Just use different tokens.)
# This "in" bridge receives data directly from other RPi.
# Our display shows incoming messages.
# Our LED on gpio 21 is controlled by button at other end.
import gpiozero as GPIO
from PiBlynk import Blynk
from mytoken import *
blynk = Blynk(token2) # <<<<<<<<<<<<<<<<<<<< USE A DIFFERENT TOKEN FROM THE OTHER END !!!
#-----------------------------------------------
# gpio (incoming) write
def gpioOut_h(val, pin, gpioObj):
    """Blynk hardware-write callback: drive *gpioObj* to *val* and log the event.

    *gpioObj* is whatever output object was registered for the pin (here a
    gpiozero LED); *pin* is only used for the log line.
    """
    gpioObj.value = val  # mirror the remote command on the local output
    print("Incoming GPIO OUT command:", pin, val)
# set up the RPi LED or other outputs and connect to generic gpioOut function above
ledR = GPIO.LED(21) # gpiozero LED object on BCM pin 21
# Register digital pin 21: no read callback (None), gpioOut_h handles writes,
# and ledR is passed through to the callback as its gpioObj argument.
blynk.add_digital_hw_pin(21, None, gpioOut_h, ledR)
#-----------------------------------------
# Listen for anything coming in V61. Just print it
def virt_in_h(val, pin, st):
    """Virtual-pin write handler: log whatever value arrives on the pin."""
    print("Incoming on VP:", pin, val)
blynk.add_virtual_pin(61, write=virt_in_h) # listen for incoming writes on virtual pin V61
def cnct_cb():
print ("Connected: ")
print("Waiting for in | coming messages ...")
blynk.on_connect(cnct_cb)
#################################################################################### | ##
blynk.run()
######################################################################################
#At APP:
# Nothing
|
nortikin/sverchok | nodes/object_nodes/getsetprop.py | Python | gpl-3.0 | 9,277 | 0.003342 | # ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
import ast
import traceback
import bpy
from bpy.props import StringProperty, BoolProperty, IntProperty, FloatProperty, FloatVectorProperty
from bpy.types import bpy_prop_array
import mathutils
from mathutils import Matrix, Vector, Euler, Quaternion, Color
from sverchok.node_tree import SverchCustomTreeNode
from sverchok.utils.nodes_mixins.sv_animatable_nodes import SvAnimatableNode
from sverchok.data_structure import Matrix_generate, updateNode, node_id
def parse_to_path(p):
    '''
    Convert an ast expression node into an easily traversable path.

    Return a list of (op, value) tuples, where op is one of:
    name - global name to look up (always the first element)
    attr - attribute to get using getattr(obj, attr)
    key  - int or str key for accessing via obj[key]

    Raises NameError for anything that is not a plain
    name / attribute / constant-subscript chain.
    '''
    if isinstance(p, ast.Attribute):
        return parse_to_path(p.value) + [("attr", p.attr)]
    elif isinstance(p, ast.Subscript):
        # Python < 3.9 wraps the subscript in an ast.Index node;
        # Python >= 3.9 stores the expression directly in p.slice.
        node = p.slice if isinstance(p.slice, ast.expr) else p.slice.value
        key = None
        if isinstance(node, ast.Constant):          # Python >= 3.8 parser
            key = node.value
        elif isinstance(node, getattr(ast, "Num", ())):   # Python < 3.8
            key = node.n
        elif isinstance(node, getattr(ast, "Str", ())):   # Python < 3.8
            key = node.s
        # Only plain int/str literal keys are supported (bool is an int
        # subclass but was never a valid key, so it is rejected too).
        if isinstance(key, (int, str)) and not isinstance(key, bool):
            return parse_to_path(p.value) + [("key", key)]
        raise NameError
    elif isinstance(p, ast.Name):
        return [("name", p.id)]
    else:
        raise NameError
def get_object(path):
    '''
    Walk *path* (as produced by parse_to_path) and return the object it
    refers to.  The first element must name a module-level global; the
    remaining elements are applied as attribute or key lookups in order.
    Fails with a lookup error if the path is invalid.
    '''
    obj = globals()[path[0][1]]
    for op, value in path[1:]:
        if op == "attr":
            obj = getattr(obj, value)
        elif op == "key":
            obj = obj[value]
    return obj
def apply_alias(eval_str):
    '''
    Expand a leading shortcut alias (see the module-level ``aliases``
    dict) into its full bpy path.  The first alias in dict order whose
    prefix matches wins.  Raises NameError when the result still does
    not start with "bpy." -- i.e. it isn't a bpy path at all.
    '''
    if not eval_str.startswith("bpy."):
        for short, full in aliases.items():
            if eval_str.startswith(short):
                eval_str = full + eval_str[len(short):]
                break
    if not eval_str.startswith("bpy."):
        raise NameError
    return eval_str
def wrap_output_data(tvar):
    '''
    Wrap a single value into the nested-list layout expected by
    sverchok sockets (from the ek node).  Matrix-like values are
    flattened to a list of row tuples; unknown types pass through.
    '''
    if isinstance(tvar, (Vector, Color)):
        return [[tvar[:]]]
    if isinstance(tvar, Matrix):
        return [[row[:] for row in tvar[:]]]
    if isinstance(tvar, (Euler, Quaternion)):
        mat = tvar.to_matrix().to_4x4()
        return [[row[:] for row in mat[:]]]
    if isinstance(tvar, list):
        return [tvar]
    if isinstance(tvar, (int, float)):
        return [[tvar]]
    return tvar
def assign_data(obj, data):
    '''
    Copy sverchok socket *data* (nested lists) into *obj* in place.

    obj is mutated through slice assignment so the change is visible to
    the bpy property it came from.  data follows the usual socket
    layout: data[0][0] is the first value of the first sub-list.
    '''
    if isinstance(obj, (int, float)):
        # doesn't work: rebinding the local name cannot mutate the
        # caller's immutable int/float in place
        obj = data[0][0]
    elif isinstance(obj, (Vector, Color)):
        obj[:] = data[0][0]
    elif isinstance(obj, (Matrix, Euler, Quaternion)):
        # Sockets transport rotations as matrices; convert back to the
        # target's own representation before assigning.
        mats = Matrix_generate(data)
        mat = mats[0]
        if isinstance(obj, Euler):
            # preserve the Euler's rotation order when converting
            eul = mat.to_euler(obj.order)
            obj[:] = eul
        elif isinstance(obj, Quaternion):
            quat = mat.to_quaternion()
            obj[:] = quat
        else: #isinstance(obj, Matrix)
            obj[:] = mat
    else: # super optimistic guess
        obj[:] = type(obj)(data[0][0])
# Shortcut prefixes users may type instead of a full bpy path; expanded by
# apply_alias() -- the first matching prefix in this dict order wins.
aliases = {
    "c": "bpy.context",
    "C" : "bpy.context",
    "scene": "bpy.context.scene",
    "data": "bpy.data",
    "D": "bpy.data",
    "objs": "bpy.data.objects",
    "mats": "bpy.data.materials",
    "M": "bpy.data.materials",
    "meshes": "bpy.data.meshes",
    "texts": "bpy.data.texts"
}
# Maps a property's Python/mathutils type to the sverchok socket bl_idname
# used to transport it between nodes.
types = {
    int: "SvStringsSocket",
    float: "SvStringsSocket",
    str: "SvStringsSocket", # I WANT A PROPER TEXT SOCKET!!!
    mathutils.Vector: "SvVerticesSocket",
    mathutils.Color: "SvVerticesSocket",
    mathutils.Matrix: "SvMatrixSocket",
    mathutils.Euler: "SvMatrixSocket",
    mathutils.Quaternion: "SvMatrixSocket"
}
def secondary_type_assesment(item):
    """
    Finer-grained socket-type detection for values whose plain Python
    type is ambiguous.  Currently only recognizes bpy color arrays
    (property path ending in 'color') and maps them to SvColorSocket;
    everything else yields None.
    """
    if isinstance(item, bpy_prop_array):
        if hasattr(item, "path_from_id") and item.path_from_id().endswith('color'):
            return "SvColorSocket"
    return None
class SvGetPropNode(bpy.types.Node, SverchCustomTreeNode, SvAnimatableNode):
    ''' Get property

    Evaluates the user-typed bpy path in ``prop_name`` and forwards the
    referenced value to the output socket, choosing the socket type from
    the value's Python type.
    '''
    bl_idname = 'SvGetPropNode'
    bl_label = 'Get property'
    bl_icon = 'FORCE_VORTEX'
    sv_icon = 'SV_PROP_GET'
    # True when the last evaluation of prop_name raised; tints the UI red.
    bad_prop: BoolProperty(default=False)
    def verify_prop(self, context):
        # update callback for prop_name: re-validate the path and adapt the
        # output socket type; log-and-flag failures instead of raising
        # inside the UI update.
        try:
            obj = self.obj
        except:
            traceback.print_exc()
            self.bad_prop = True
            return
        self.bad_prop = False
        # Pick the socket type from the value's type; fall back to the
        # finer-grained check (e.g. bpy color arrays).
        s_type = types.get(type(self.obj))
        if not s_type:
            s_type = secondary_type_assesment(self.obj)
        outputs = self.outputs
        if s_type and outputs:
            outputs[0].replace_socket(s_type)
        elif s_type:
            outputs.new(s_type, "Data")
        updateNode(self, context)
    # The bpy path (or alias) to read, e.g. "objs['Cube'].location".
    prop_name: StringProperty(name='', update=verify_prop)
    @property
    def obj(self):
        # Resolve prop_name to the Python object it points at.
        eval_str = apply_alias(self.prop_name)
        ast_path = ast.parse(eval_str)
        path = parse_to_path(ast_path.body[0].value)
        return get_object(path)
    def draw_buttons(self, context, layout):
        layout.alert = self.bad_prop  # highlight invalid paths
        if len(self.outputs) > 0:
            self.draw_animatable_buttons(layout, icon_only=True)
        layout.prop(self, "prop_name", text="")
    def process(self):
        # print(">> Get process is called")
        self.outputs[0].sv_set(wrap_output_data(self.obj))
class SvSetPropNode(bpy.types.Node, SverchCustomTreeNode):
''' Set property '''
bl_idname = 'SvSetPropNode'
bl_label = 'Set property'
bl_icon = 'FORCE_VORTEX'
sv_icon = 'SV_PROP_SET'
ok_prop: BoolProperty(default=False)
bad_prop: BoolProperty(default=False)
@property
def obj(self):
eval_str = apply_alias(self.prop_name)
ast_path = ast.parse(eval_str)
path = parse_to_path(ast_path.body[0].value)
return get_object(path)
def verify_prop(self, context):
# test first
try:
obj = self.obj
except:
traceback.print_exc()
self.bad_prop = True
return
# execute second
self.bad_prop = False
s_type = types.get(type(self.obj))
if not s_type:
s_type = secondary_type_assesment(self.obj)
p_name = {
float: "float_prop",
int: "int_prop",
bpy_prop_array: "color_prop"
}.get(type(self.obj),"")
inputs = self.inputs
if inputs and s_type:
socket = inputs[0].replace_socket(s_type)
socket.prop_name = p_name
elif s_type:
inputs.new(s_type, "Data").prop_name = p_name
if s_type == "SvVerticesSocket":
inputs[0].use_prop = True
updateNode(self, context)
def local_updateNode(self, context):
# no further interaction with the nodetree is required.
self.process()
prop_name: StringProperty(name='', update=verify_prop)
float_prop: FloatProperty(update=updateNode, name="x")
int_prop: IntProperty(update=updateNode, name="x")
color_ |
claudep/pootle | pootle/core/markup/filters.py | Python | gpl-3.0 | 5,520 | 0 | # -*- coding: utf-8 -*-
#
# Copyright (C) Pootle contributors.
#
# This file is a part of the Pootle project. It is distributed under the GPL3
# or later license. See the LICENSE file for a copy of the license and the
# AUTHORS file for copyright and authorship information.
from django.conf import settings
from django.core.exceptions import ObjectDoesNotExist
from ..utils.html import rewrite_links
__all__ = (
'get_markup_filter_name', 'get_markup_filter_display_name',
'get_markup_filter', 'apply_markup_filter',
)
def rewrite_internal_link(link):
"""Converts `link` into an internal link.
Any active static pages defined for a site can be linked by pointing
to its virtual path by starting the anchors with the `#/` sequence
(e.g. `#/the/virtual/path`).
Links pointing to non-existent pages will return `#`.
Links not starting with `#/` will be omitted.
"""
if not link.startswith('#/'):
return link
from staticpages.models import AbstractPage
virtual_path = link[2:]
url = u'#'
for page_model in AbstractPage.__subclasses__():
try:
page = page_model.objects.live().get(virtual_path=virtual_path,)
url = page.get_absolute_url()
except ObjectDoesNotExist:
pass
return url
def get_markup_filter_name():
"""Returns the cur | rent markup filter's name."""
name = get_markup_filter()[0]
return 'html' if name is None else name
def get_markup_filter_display_name():
"""Returns a nice version for the current markup filter's name."""
name = get_markup_filter_name()
return {
'textile': u'Textile',
'markdown': u'Markdown',
'restructuredtext': | u'reStructuredText',
}.get(name, u'HTML')
def get_markup_filter():
"""Returns the configured filter as a tuple with name and args.
If there is any problem it returns (None, '').
"""
try:
markup_filter, markup_kwargs = settings.POOTLE_MARKUP_FILTER
if markup_filter is None:
return (None, "unset")
elif markup_filter == 'textile':
import textile # noqa
elif markup_filter == 'markdown':
import markdown # noqa
elif markup_filter == 'restructuredtext':
import docutils # noqa
else:
return (None, '')
except Exception:
return (None, '')
return (markup_filter, markup_kwargs)
def apply_markup_filter(text):
"""Applies a text-to-HTML conversion function to a piece of text and
returns the generated HTML.
The function to use is derived from the value of the setting
``POOTLE_MARKUP_FILTER``, which should be a 2-tuple:
* The first element should be the name of a markup filter --
e.g., "markdown" -- to apply. If no markup filter is desired,
set this to None.
* The second element should be a dictionary of keyword
arguments which will be passed to the markup function. If no
extra arguments are desired, set this to an empty
dictionary; some arguments may still be inferred as needed,
however.
So, for example, to use Markdown with bleach cleaning turned on (cleaning
removes non-whitelisted HTML), put this in your settings file::
POOTLE_MARKUP_FILTER = ('markdown', {})
Currently supports Textile, Markdown and reStructuredText, using
names identical to the template filters found in
``django.contrib.markup``.
Borrowed from http://djangosnippets.org/snippets/104/
"""
markup_filter_name, markup_kwargs = get_markup_filter()
if not text.strip():
return text
html = text
if markup_filter_name is not None:
if markup_filter_name == 'textile':
import textile
if 'encoding' not in markup_kwargs:
markup_kwargs.update(encoding=settings.DEFAULT_CHARSET)
if 'output' not in markup_kwargs:
markup_kwargs.update(output=settings.DEFAULT_CHARSET)
html = textile.textile(text, **markup_kwargs)
elif markup_filter_name == 'markdown':
import bleach
import markdown
# See ALLOWED_TAGS in
# https://github.com/mozilla/bleach/blob/master/bleach/__init__.py
tags = bleach.ALLOWED_TAGS + [
u'h1', u'h2', u'h3', u'h4', u'h5',
u'p', u'pre',
u'img',
u'hr',
]
tags_provided = ('clean' in markup_kwargs
and 'extra_tags' in markup_kwargs['clean'])
if tags_provided:
tags += markup_kwargs['clean']['extra_tags']
html = bleach.clean(markdown.markdown(text, **markup_kwargs),
tags=tags)
elif markup_filter_name == 'restructuredtext':
from docutils import core
if 'settings_overrides' not in markup_kwargs:
markup_kwargs.update(
settings_overrides=getattr(
settings,
"RESTRUCTUREDTEXT_FILTER_SETTINGS",
{},
)
)
if 'writer_name' not in markup_kwargs:
markup_kwargs.update(writer_name='html4css1')
parts = core.publish_parts(source=text, **markup_kwargs)
html = parts['html_body']
return rewrite_links(html, rewrite_internal_link)
|
anhstudios/swganh | data/scripts/templates/object/building/player/shared_player_house_naboo_medium_style_01.py | Python | mit | 479 | 0.045929 | #### NOTICE: THIS FILE IS AUTOGENERATED
#### MODIFICATIONS MAY BE LOST IF DONE IMPROPERLY
#### PLEASE SEE THE ONLINE DOCUMENTATION FOR EXAMPLES
from swgpy.object import *
def create(kernel):
result = Building()
result.template = "object/building/player/shared_player_house_naboo_medium_style_01.iff"
result.attribu | te_template_id = -1
result.stfName("building_name","housing_naboo_medium")
#### BEGIN MODIFICATIONS ####
#### EN | D MODIFICATIONS ####
return result |
JT5D/scikit-learn | setup.py | Python | bsd-3-clause | 5,576 | 0.002331 | #! /usr/bin/env python
#
# Copyright (C) 2007-2009 Cournapeau David <cournape@gmail.com>
# 2010 Fabian Pedregosa <fabian.pedregosa@inria.fr>
descr = """A set of python modules for machine learning and data mining"""
import sys
import os
import shutil
from distutils.command.clean import clean as Clean
if sys.version_info[0] < 3:
import __builtin__ as builtins
else:
import builtins
# This is a bit (!) hackish: we are setting a global variable so that the main
# sklearn __init__ can detect if it is being loaded by the setup routine, to
# avoid attempting to load components that aren't built yet.
builtins.__SKLEARN_SETUP__ = True
DISTNAME = 'scikit-learn'
DESCRIPTION = 'A set of python modules for machine learning and data mining'
LONG_DESCRIPTION = open('README.rst').read()
MAINTAINER = 'Andreas Mueller'
MAINTAINER_EMAIL = 'amueller@ais.uni-bonn.de'
URL = 'http://scikit-learn.org'
LICENSE = 'new BSD'
DOWNLOAD_URL = 'http://sourceforge.net/projects/scikit-learn/files/'
# We can actually import a restricted version of sklearn that
# does not need the compiled code
import sklearn
VERSION = sklearn.__version__
###############################################################################
# Optional setuptools features
# We need to import setuptools early, if we want setuptools features,
# as it monkey-patches the 'setup' function
# For some commands, use setuptools
if len(set(('develop', 'release', 'bdist_egg', 'bdist_rpm',
'bdist_wininst', 'install_egg_info', 'build_sphinx',
'egg_info', 'easy_install', 'upload',
'--single-version-externally-managed',
)).intersection(sys.argv)) > 0:
import setuptools
extra_setuptools_args = dict(
zip_safe=False, # the package can run out of an .egg file
include_package_data=True,
)
else:
extra_setuptools_args = dict()
###############################################################################
class CleanCommand(Clean):
description = "Remove build directories, and compiled file in the source tree"
def run(self):
Clean.run(self)
if os.path.exists('build'):
shutil.rmtree('build')
for dirpath, dirnames, filenames in os.walk('sklearn'):
for filename in filenames:
if (filename.endswith('.so') or filename.endswith('.pyd')
or filename.endswith('.dll')
or filename.endswith('.pyc')):
os.unlink(os.path.join(dirpath, filename))
###############################################################################
def configuration(parent_package='', top_path=None):
if os.path.exists('MANIFEST'):
os.remove('MANIFEST')
from numpy.distutils.misc_util import Configuration
config = Configuration(None, parent_package, top_path)
# Avoid non-useful msg:
# "Ignoring attempt to set 'name' (from ... "
config.set_options(ignore_setup_xxx_py=True,
assume_default_configuration=True,
delegate_options_to_subpackages=True,
quiet=True)
config.add_subpackage('sklearn')
return config
def setup_package():
metadata = dict(name=DISTNAME,
maintainer=MAINTAINER,
maintainer_email=MAINTAINER_EMAIL,
description=DESCRIPTION,
license=LICENSE,
url=URL,
version=VERSION,
download_url=DOWNLOAD_URL,
long_description=LONG_DESCRIPTION,
classifiers=['Intended Audience :: Science/Research',
'Intended Audience :: Developers',
'License :: OSI Approved',
'Programming Language :: C',
'Programming Language :: Python',
'Topic :: Software Development',
'Topic :: Scientific/Engineering',
'Operating System :: Microsoft :: Windows',
'Operating System :: POSIX',
'Operating System :: Unix',
'Operating System :: MacOS',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
],
cmdclass={'clean': CleanCommand},
**extra_setuptools_args)
if (len(sys.argv) >= | 2
and ('--help' in sys.argv[1:] or sys.argv[1]
in ('--help-commands', 'egg_info', '--version', 'clean'))):
# For these actions, NumPy is not required.
#
# They are required to succeed without Numpy for example when
# pip is used to install Scikit when Numpy is not yet present in
# the system.
try:
from setuptools imp | ort setup
except ImportError:
from distutils.core import setup
metadata['version'] = VERSION
else:
from numpy.distutils.core import setup
metadata['configuration'] = configuration
setup(**metadata)
if __name__ == "__main__":
setup_package()
|
Chasego/codirit | leetcode/074-Search-a-2D-Matrix/Searcha2DMatrix_001.py | Python | mit | 608 | 0.009868 | class Solution:
# @param {integer[][]} matrix
# @param {integer} target
| # @return {boolean}
def se | archMatrix(self, matrix, target):
if len(matrix) == 0 or len(matrix[0]) == 0:
return False
m , n = len(matrix), len(matrix[0])
l, r = 0, m * n - 1
while l <= r:
mid = (l + r) / 2
i, j = mid / n, mid % n
if matrix[i][j] == target:
return True
elif matrix[i][j] < target:
l += 1
else:
r -= 1
return False
|
FiloSottile/Griffith-mirror | lib/plugins/export/PluginExportCSV.py | Python | gpl-2.0 | 3,429 | 0.005833 | # -*- coding: UTF-8 -*-
__revision__ = '$Id$'
# Copyright (c) 2005-2007 Vasco Nunes
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Library General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA
# You may use and distribute this software under the terms of the
# GNU General Public License, version 2 or later
import csv
import gtk
import os
import gutils
import db
from plugins.export import Base
class ExportPlugin(Base):
name = "CSV"
description = _("Full CSV list export plugin")
author = "Vasco Nunes"
email = "<vasco.m.nunes@gmail.com>"
version = "0.3"
fields_to_export = ('number', 'o_title', 'title', 'director', 'year', 'classification', 'country',
'genre', 'rating', 'runtime', 'studio', 'seen', 'loaned', 'o_site', 'site', 'trailer',
'plot', 'cast', 'notes', 'image', 'volumes.name', 'collections.name', 'media.name',
'screenplay', 'cameraman', 'barcode', 'color', 'cond', | 'layers', 'region',
'media_num', 'vcodecs.name') |
def run(self):
basedir = None
if self.config is not None:
basedir = self.config.get('export_dir', None, section='export-csv')
if not basedir:
filename = gutils.file_chooser(_("Export a %s document")%"CSV", action=gtk.FILE_CHOOSER_ACTION_SAVE, \
buttons=(gtk.STOCK_CANCEL, gtk.RESPONSE_CANCEL, gtk.STOCK_SAVE, gtk.RESPONSE_OK), name='griffith_list.csv')
else:
filename = gutils.file_chooser(_("Export a %s document")%"CSV", action=gtk.FILE_CHOOSER_ACTION_SAVE, \
buttons=(gtk.STOCK_CANCEL, gtk.RESPONSE_CANCEL, gtk.STOCK_SAVE,gtk.RESPONSE_OK), name='griffith_list.csv', folder=basedir)
if filename and filename[0]:
if self.config is not None and filename[1]:
self.config.set('export_dir', filename[1], section='export-csv')
self.config.save()
overwrite = None
if os.path.isfile(filename[0]):
if gutils.question(_("File exists. Do you want to overwrite it?"), self.parent_window):
overwrite = True
else:
overwrite = False
if overwrite or overwrite is None:
movies = self.get_query().execute()
writer = csv.writer(file(filename[0], 'w'), dialect=csv.excel)
# write column header row
writer.writerow(self.fields_to_export)
# write data rows
for movie in movies:
t = []
for s in self.exported_columns:
t.append(movie[s])
writer.writerow(t)
gutils.info(_("%s file has been created.") % "CSV", self.parent_window)
|
apatriciu/OpenStackOpenCL | computeOpenCL/nova/nova/tests/OpenCL/testOpenCLInterfaceMemoryObjects.py | Python | apache-2.0 | 4,684 | 0.00491 | from nova.OpenCL import OpenCLContextsAPI
from nova.OpenCL import OpenCLClientException
from nova.OpenCL import OpenCLBuffersAPI
import unittest
import sys
class LaptopResources:
listDevicesIDs = [0]
dictProperties = {}
memID = 0
invalidMemID = 1
device_type = "GPU"
class TestMems(unittest.TestCase):
    """Exercises buffer (memory object) management through the OpenCL buffers API.

    Fix: removed the stray ``print "OK1"`` debug statements (Python-2-only
    syntax, a SyntaxError under Python 3) and the redundant trailing ``pass``
    statements in setUp/tearDown.
    """

    # Expected device/resource layout of the test machine.
    testResources = LaptopResources()
    contexts_interface = OpenCLContextsAPI.API()
    buffers_interface = OpenCLBuffersAPI.API()

    def setUp(self):
        """Creates a new context."""
        self.contextID, retErr = self.contexts_interface.CreateContext(self.testResources.listDevicesIDs, self.testResources.dictProperties)
        self.assertEqual(retErr, 0)

    def tearDown(self):
        """Releases the context created in setUp."""
        retErr = self.contexts_interface.ReleaseContext(self.contextID)
        self.assertEqual(retErr, 0)

    def testCreateMemBuffer(self):
        """A created buffer is listed, reports its properties and can be released."""
        bufferSize = 512
        bufferCreateFlags = []
        bufferID, retErr = self.buffers_interface.CreateBuffer(self.contextID, bufferSize, bufferCreateFlags)
        self.assertEqual(retErr, 0)
        listBuffers = self.buffers_interface.ListBuffers()
        self.assertEqual(listBuffers, [bufferID])
        bufferProperty, retErr = self.buffers_interface.GetBufferProperties(bufferID)
        self.assertEqual(bufferProperty['id'], bufferID)
        self.assertEqual(bufferProperty['Size'], bufferSize)
        self.assertEqual(bufferProperty['Context'], self.contextID)
        retErr = self.buffers_interface.ReleaseBuffer(bufferID)
        self.assertEqual(retErr, 0)
        listBuffers = self.buffers_interface.ListBuffers()
        self.assertEqual(listBuffers, [])

    def testGetUnknownContextProperties(self):
        """Tries to retrieve the properties of an inexistent buffer."""
        bufferID = 0
        self.assertRaises(OpenCLClientException.OpenCLClientException, self.buffers_interface.GetBufferProperties, bufferID)

    def testRetainAndRelease(self):
        """Releasing a buffer twice (or retaining a released one) must fail."""
        bufferSize = 512
        bufferAttribs = []
        bufferID, retErr = self.buffers_interface.CreateBuffer(self.contextID, bufferSize, bufferAttribs)
        self.assertEqual(retErr, 0)
        listBuffers = self.buffers_interface.ListBuffers()
        self.assertEqual(listBuffers, [bufferID])
        retErr = self.buffers_interface.ReleaseBuffer(bufferID)
        self.assertEqual(retErr, 0)
        listBuffers = self.buffers_interface.ListBuffers()
        self.assertEqual(listBuffers, [])
        # A second release / a retain of a released buffer must raise.
        self.assertRaises(OpenCLClientException.OpenCLClientException, self.buffers_interface.ReleaseBuffer, bufferID)
        self.assertRaises(OpenCLClientException.OpenCLClientException, self.buffers_interface.RetainBuffer, bufferID)

    def testMultipleBuffers(self):
        """Creates multiple buffers and checks they are tracked independently."""
        buffer1Size = 512
        bufferAttribs = []
        buffer1ID, retErr = self.buffers_interface.CreateBuffer(self.contextID, buffer1Size, bufferAttribs)
        self.assertEqual(retErr, 0)
        listBuffers = self.buffers_interface.ListBuffers()
        self.assertEqual(listBuffers, [buffer1ID])
        buffer2Size = 1024
        bufferAttribs = []
        buffer2ID, retErr = self.buffers_interface.CreateBuffer(self.contextID, buffer2Size, bufferAttribs)
        self.assertEqual(retErr, 0)
        listBuffers = self.buffers_interface.ListBuffers()
        self.assertEqual(listBuffers, [buffer1ID, buffer2ID])
        buffer1Property, retErr = self.buffers_interface.GetBufferProperties(buffer1ID)
        self.assertEqual(buffer1Property['id'], buffer1ID)
        self.assertEqual(buffer1Property['Size'], buffer1Size)
        self.assertEqual(buffer1Property['Context'], self.contextID)
        buffer2Property, retErr = self.buffers_interface.GetBufferProperties(buffer2ID)
        self.assertEqual(buffer2Property['id'], buffer2ID)
        self.assertEqual(buffer2Property['Size'], buffer2Size)
        self.assertEqual(buffer2Property['Context'], self.contextID)
        retErr = self.buffers_interface.ReleaseBuffer(buffer1ID)
        self.assertEqual(retErr, 0)
        listBuffers = self.buffers_interface.ListBuffers()
        self.assertEqual(listBuffers, [buffer2ID])
        retErr = self.buffers_interface.ReleaseBuffer(buffer2ID)
        self.assertEqual(retErr, 0)
        listBuffers = self.buffers_interface.ListBuffers()
        self.assertEqual(listBuffers, [])
if __name__ == "__main__":
unittest.main()
|
Kryz/sentry | tests/sentry/plugins/interfaces/test_releasehook.py | Python | bsd-3-clause | 1,132 | 0 | """
sentry.plugins.base.structs
~~~~~~~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2010-2013 by the Sentry Team, see AUTHORS for more details.
:license: BSD, see LIC | ENSE for more details.
"""
from __future__ import absolute_import, print_function
__all__ = ['ReleaseHook']
from sentry.models import Release
from sentry.plugins import ReleaseHook
from sentry.testutils import TestCase
class StartReleaseTest(TestCase):
    """start_release() must persist a Release carrying a start timestamp."""

    def test_minimal(self):
        version = 'bbee5b51f84611e4b14834363b8514c2'
        project = self.create_project()
        ReleaseHook(project).start_release(version)
        # The hook must have created exactly this (project, version) release.
        stored = Release.objects.get(project=project, version=version)
        assert stored.date_started
class FinishReleaseTest(TestCase):
    """finish_release() must persist a Release carrying a release timestamp."""

    def test_minimal(self):
        version = 'bbee5b51f84611e4b14834363b8514c2'
        project = self.create_project()
        ReleaseHook(project).finish_release(version)
        # The hook must have created exactly this (project, version) release.
        stored = Release.objects.get(project=project, version=version)
        assert stored.date_released
|
SciTools/cartopy | examples/lines_and_polygons/always_circular_stereo.py | Python | lgpl-3.0 | 1,610 | 0 | """
Custom Boundary Shape
---------------------
This example demonstrates how a custom shape geometry may be used
instead of the projection's default boundary.
In this instance, we define the boundary as a circle in axes coordinates.
This means that no matter the extent of the map itself, the boundary will
always be a circle.
"""
import matplotlib.path as mpath
import matplotlib.pyplot as plt
import numpy as np
import cartopy.crs as ccrs
import cartopy.feature as cfeature
def main():
    """Draw two south-polar stereographic maps; clip the right one to a circle."""
    fig = plt.figure(figsize=[10, 5])
    ax1 = fig.add_subplot(1, 2, 1, projection=ccrs.SouthPolarStereo())
    # Share limits so panning/zooming one axes moves the other too.
    ax2 = fig.add_subplot(1, 2, 2, projection=ccrs.SouthPolarStereo(),
                          sharex=ax1, sharey=ax1)
    fig.subplots_adjust(bottom=0.05, top=0.95,
                        left=0.04, right=0.95, wspace=0.02)

    # Limit the map to -60 degrees latitude and below.
    ax1.set_extent([-180, 180, -90, -60], ccrs.PlateCarree())

    ax1.add_feature(cfeature.LAND)
    ax1.add_feature(cfeature.OCEAN)

    ax1.gridlines()
    ax2.gridlines()

    ax2.add_feature(cfeature.LAND)
    ax2.add_feature(cfeature.OCEAN)

    # Compute a circle in axes coordinates, which we can use as a boundary
    # for the map. We can pan/zoom as much as we like - the boundary will be
    # permanently circular.
    theta = np.linspace(0, 2 * np.pi, 100)
    center, radius = [0.5, 0.5], 0.5
    verts = np.vstack([np.sin(theta), np.cos(theta)]).T
    circle = mpath.Path(verts * radius + center)

    ax2.set_boundary(circle, transform=ax2.transAxes)

    plt.show()


if __name__ == '__main__':
    main()
|
alexlo03/ansible | lib/ansible/plugins/cliconf/enos.py | Python | gpl-3.0 | 2,594 | 0.000771 | # (C) 2017 Red Hat Inc.
# Copyright (C) 2017 Lenovo.
#
# GNU General Public License v3.0+
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHAN | TABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
# Contains CLIConf Plugin methods for ENOS Modules
# Lenovo Networking
#
| from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import re
import json
from itertools import chain
from ansible.module_utils._text import to_bytes, to_text
from ansible.module_utils.network.common.utils import to_list
from ansible.plugins.cliconf import CliconfBase, enable_mode
class Cliconf(CliconfBase):
    """CLI connection plugin methods for Lenovo ENOS network devices."""

    def get_device_info(self):
        """Parse ``show version`` output into os/version/model/hostname facts."""
        device_info = {}
        device_info['network_os'] = 'enos'
        reply = self.get(b'show version')
        data = to_text(reply, errors='surrogate_or_strict').strip()
        match = re.search(r'^Software Version (.*?) ', data, re.M | re.I)
        if match:
            device_info['network_os_version'] = match.group(1)
        match = re.search(r'^Lenovo RackSwitch (\S+)', data, re.M | re.I)
        if match:
            device_info['network_os_model'] = match.group(1)
        match = re.search(r'^(.+) uptime', data, re.M)
        if match:
            device_info['network_os_hostname'] = match.group(1)
        else:
            # Hostname line absent from the output; report a placeholder.
            device_info['network_os_hostname'] = "NA"
        return device_info

    @enable_mode
    def get_config(self, source='running', format='text'):
        """Fetch the running or startup configuration from the device."""
        if source not in ('running', 'startup'):
            msg = "fetching configuration from %s is not supported"
            return self.invalid_params(msg % source)
        if source == 'running':
            cmd = b'show running-config'
        else:
            cmd = b'show startup-config'
        return self.send_command(cmd)

    @enable_mode
    def edit_config(self, command):
        """Apply configuration commands inside a configure/end session."""
        for cmd in chain([b'configure terminal'], to_list(command), [b'end']):
            self.send_command(cmd)

    def get(self, command, prompt=None, answer=None, sendonly=False, check_all=False):
        """Send an arbitrary command to the device and return the raw response."""
        return self.send_command(command=command, prompt=prompt, answer=answer, sendonly=sendonly, check_all=check_all)

    def get_capabilities(self):
        """Return a JSON document describing supported RPCs and device facts."""
        result = {}
        result['rpc'] = self.get_base_rpc()
        result['network_api'] = 'cliconf'
        result['device_info'] = self.get_device_info()
        return json.dumps(result)
|
xuru/pyvisdk | pyvisdk/do/virtual_usbxhci_controller_option.py | Python | mit | 1,380 | 0.01087 |
import logging
from pyvisdk.exceptions import InvalidArgumentError
########################################
# Automatically generated, do not edit.
########################################
log = logging.getLogger(__name__)
def VirtualUSBXHCIControllerOption(vim, *args, **kwargs):
    '''The VirtualUSBXHCIControllerOption data object type contains the options for a
    virtual USB Extensible Host Controller Interface (USB 3.0).

    :param vim: service connection whose factory builds the SOAP object.
    :raises IndexError: when fewer than the 7 required attributes are supplied.
    :raises InvalidArgumentError: when a keyword does not name a known attribute.
    '''
    obj = vim.client.factory.create('ns0:VirtualUSBXHCIControllerOption')

    # Validate that every required attribute was supplied, positionally or by
    # name. Fix: the old message claimed "at least 8" while the check was < 7,
    # and it reported only len(args), ignoring keyword arguments.
    total_given = len(args) + len(kwargs)
    if total_given < 7:
        raise IndexError('Expected at least 7 arguments got: %d' % total_given)

    required = [ 'autoConnectDevices', 'supportedSpeeds', 'devices', 'deprecated',
        'hotRemoveSupported', 'plugAndPlay', 'type' ]
    optional = [ 'supportedDevice', 'autoAssignController', 'backingOption', 'connectOption',
        'controllerType', 'defaultBackingOptionIndex', 'licensingLimit',
        'dynamicProperty', 'dynamicType' ]

    # Positional arguments fill the required slots first, then the optional ones.
    for name, arg in zip(required + optional, args):
        setattr(obj, name, arg)

    # Keyword arguments may target any known attribute; anything else is rejected.
    for name, value in kwargs.items():
        if name in required + optional:
            setattr(obj, name, value)
        else:
            raise InvalidArgumentError("Invalid argument: %s. Expected one of %s" % (name, ", ".join(required + optional)))

    return obj
|
bitmazk/django-document-library | document_library/templatetags/document_library_tags.py | Python | mit | 1,256 | 0 | """Templatetags for the ``document_library`` app."""
from django import template
from ..models import Document
register = template.Library()
@register.assignment_tag
def get_files_for_document(document):
    """Return the files attached to *document* across all of its translations.

    A file already collected from another translation is not added twice; each
    returned file is tagged with the language code of the translation it came
    from.
    """
    collected = []
    for translation in document.translations.all():
        filer_file = translation.filer_file
        if filer_file is None or filer_file in collected:
            continue
        filer_file.language = translation.language_code
        collected.append(filer_file)
    return collected
@register.assignment_tag(takes_context=True)
def get_frontpage_documents(context):
    """Return the published documents flagged for display on the front page."""
    request = context.get('request')
    return Document.objects.published(request).filter(is_on_front_page=True)
@register.assignment_tag(takes_context=True)
def get_latest_documents(context, count=5):
    """Return the most recent published documents.

    :param count: Number of documents to be returned. Defaults to 5.
    """
    request = context.get('request')
    return Document.objects.published(request)[:count]
|
grantjenks/pyannote-core | pyannote/core/__init__.py | Python | mit | 1,810 | 0.00387 | #!/usr/bin/env python
# encoding: utf-8
# The MIT License (MIT)
# Copyright (c) 2014 CNRS
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEAL | INGS IN THE
# SOFTWARE.
# AUTHORS
# Hervé BREDIN - http://herve.niderb.fr
from ._version import get_versions
__version__ = get_versions()['version']
del get_versions
PYANNOTE_URI = 'uri'
PYANNOTE_MODALITY = ' | modality'
PYANNOTE_SEGMENT = 'segment'
PYANNOTE_TRACK = 'track'
PYANNOTE_LABEL = 'label'
PYANNOTE_SCORE = 'score'
PYANNOTE_IDENTITY = 'identity'
from .time import T, TStart, TEnd
from .segment import Segment, SlidingWindow
from .timeline import Timeline
from .annotation import Annotation
from .transcription import Transcription
from .scores import Scores
from .feature import SlidingWindowFeature
try:
from .notebook import notebook
except ImportError as e:
pass
|
foursquare/pants | tests/python/pants_test/backend/codegen/ragel/java/test_ragel_gen.py | Python | apache-2.0 | 2,263 | 0.006186 | # coding=utf-8
# Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import absolute_import, division, print_function, unicode_literals
import os
from textwrap import dedent
from pants.backend.codegen.ragel.jav | a.java_ragel_library import JavaRagelLibrary
from pants.backend.codegen.ragel.java.ragel_gen import RagelGen, calculate_genfile
from pants.util.contextu | til import temporary_file
from pants.util.dirutil import safe_mkdtemp
from pants_test.task_test_base import TaskTestBase
ragel_file_contents = dedent("""
package com.example.atoi;
%%{
machine parser;
action minus {
negative = true;
}
action digit {
val *= 10;
val += fc - '0';
}
main := ( '-'@minus )? ( [0-9] @digit ) + '\0';
}%%
public class Parser {
%% write data;
public static int parse(CharSequence input) {
StringBuilder builder = new StringBuilder(input);
builder.append('\0');
char[] data = builder.toString().toCharArray();
int p = 0;
int pe = data.length;
int eof = pe;
int cs;
boolean negative = false;
int val = 0;
%% write init;
%% write exec;
if (negative)
return -val;
else
return val;
}
}
""")
class RagelGenTest(TaskTestBase):
    """Integration tests for the RagelGen code-generation task."""

    @classmethod
    def task_type(cls):
        # Task class under test.
        return RagelGen

    def test_ragel_gen(self):
        """Generating from a .rl source yields the Java parser at its package path."""
        self.create_file(relpath='test_ragel_gen/atoi.rl', contents=ragel_file_contents)
        target = self.make_target(spec='test_ragel_gen:atoi',
                                  target_type=JavaRagelLibrary,
                                  sources=['atoi.rl'])
        task = self.create_task(self.context(target_roots=[target]))
        target_workdir = safe_mkdtemp(dir=self.test_workdir)
        task.execute_codegen(target, target_workdir)
        # Collect every generated file, relative to the workdir.
        generated_files = []
        for root, _, files in os.walk(target_workdir):
            generated_files.extend(os.path.relpath(os.path.join(root, f), target_workdir) for f in files)
        self.assertEqual(['com/example/atoi/Parser.java'], generated_files)

    def test_smoke(self):
        """calculate_genfile derives the output path from package and class name."""
        with temporary_file() as fp:
            fp.write(ragel_file_contents)
            fp.flush()
            self.assertEquals(calculate_genfile(fp.name), 'com/example/atoi/Parser.java')
|
OTL/ros_book_programs | ros_start/scritps/joy_twist.py | Python | bsd-2-clause | 644 | 0.003106 | import rospy
from sensor_msgs.msg import Joy
from geometry_msgs.msg import Twist
class JoyTwist(object):
    """Teleop node: maps joystick axes to velocity commands while button 0 is held."""

    def __init__(self):
        # Subscribe to joystick state; publish velocity commands for the base.
        self._joy_sub = rospy.Subscriber('joy', Joy, self.joy_callback, queue_size=1)
        self._twist_pub = rospy.Publisher('cmd_vel', Twist, queue_size=1)

    def joy_callback(self, joy_msg):
        """Publish a Twist built from the stick axes while the deadman button is pressed."""
        if joy_msg.buttons[0] == 1:
            twist = Twist()
            # Axis 1 scaled to 0.5 drives forward speed; axis 0 drives turn rate.
            twist.linear.x = joy_msg.axes[1] * 0.5
            twist.angular.z = joy_msg.axes[0] * 1.0
            self._twist_pub.publish(twist)
if __name__ == '__main__':
rospy.init_node('joy_twist')
joy_twist = JoyTwist()
rospy.spin()
|
rowhit/tagfs | src/test/tagfs_test_small/test_freebase_support_query.py | Python | gpl-3.0 | 1,239 | 0.003228 | #
# Copyright 2012 Markus Pielmeier
#
# This file is part of tagfs.
#
# tagfs is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# tagfs is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with tagfs. If not, see <http://www.gnu.org/licenses/>.
import unittest
import tagfs.freebase_support as freebase_support
class WhenQueryWithOneFilerAndOneSelector(unittest.TestCase):
    """Tests for freebase_support.Query built from one filter and one selector."""

    def setUp(self):
        super(WhenQueryWithOneFilerAndOneSelector, self).setUp()
        # A key mapped to None acts as a selector; any other value is a filter.
        self.query = freebase_support.Query({'filter': 'filterValue', 'selector': None, })

    def testThenSelectedKeysIsSelector(self):
        self.assertEqual(list(self.query.selectedKeys), ['selector',])

    def testThenQueryStringIs(self):
        # Selectors serialize as empty lists in the MQL query string.
        self.assertEqual(self.query.queryString, '{"filter":"filterValue","selector":[]}')
|
Comunitea/CMNT_00098_2017_JIM_addons | picking_company_sequence/__manifest__.py | Python | agpl-3.0 | 632 | 0.00317 | # -*- coding: utf-8 -*-
# © 2016 Comunitea Servicios Tecnologicos (<http://www.comunitea.com>)
# License AGPL-3 - See http://www.gnu.org/licenses/agpl-3.0.html
{
    'name': 'Stock picking company sequence',
    'version': '10.0',
    'author': 'Comunitea, ',
    "category": "",
    'license': 'AGPL-3',
    'description': 'Allow different sequence per company',
    'depends': [
        'stock'
    ],
    'contributors': [
        "Comunitea",
        "Kiko Sanchez<kiko@comunitea.com>"]
    ,
    "data": [
        # Adds the per-company sequence configuration to the picking type view.
        'views/stock_picking_type.xml'
    ],
    "demo": [
    ],
    'test': [
    ],
    'installable': True
}
|
DevynCJohnson/Pybooster | pylib/neuralnet.py | Python | lgpl-3.0 | 13,720 | 0.002041 | #!/usr/bin/env python3
# -*- coding: utf-8; Mode: Python; indent-tabs-mode: nil; tab-width: 4 -*-
# vim: set fileencoding=utf-8 filetype=python syntax=python.doxygen fileformat=unix tabstop=4 expandtab :
# kate: encoding utf-8; bom off; syntax python; indent-mode python; eol unix; replace-tabs off; indent-width 4; tab-width 4; remove-trailing-space on;
"""@brief Lightweight pure-Python neural network library.
@file neuralnet.py
@package pybooster.neuralnet
@version 2019.12.23
@author Devyn Collier Johnson <DevynCJohnson@Gmail.com>
@copyright LGPLv3
@section DESCRIPTION
@code{.py}
from pybooster.neuralnet import NeuroCode
data = [ # The input and output of an XOR gate
([0, 0], [0]), # The first list in the tuple represents the input(s)
([0, 1], [1]), # The last list in the tuple represents the output(s)
([1, 0], [1]),
([1, 1], [0])
] # Provide sample input and expected output
net = NeuroCode(
data, # The data table created above
layers = [4, 3], # Number of nodes in each hidden layers (between input and output)
iterations = 40000, # Maximum training iterations
rate = 0.1 # Learning rate
)
net.train() # Returns (correctness, iterations)
output = net.run([1, 0]) # Execute neuralnet
net.writedump(r'xor_code.py') # Save the generated code
net.neurocode2cfile(r'neural_xor.c', r'neural_xor') # Save the generated code as plain C code
net.neurocode2javafile(r'neural_xor.java', r'neural_xor') # Save the generated code as plain Java code
net.neurocode2pythonfile(r'neural_xor.py', r'neural_xor') # Save the generated code as plain Python code
@endcode
@section LICENSE
GNU Lesser General Public License v3
Copyright (c) Devyn Collier Johnson, All rights reserved.
This software is free software: you can redistribute it and/or modify
it under the terms of the GNU Lesser General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This software is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public License
along with this software.
"""
# pylint: disable=C0103
from base64 import b64decode, b64encode
from math import exp, floor
from pickle import dumps, loads # nosec
from random import Random
from typing import Any, Dict, Generator, List, Tuple
from zlib import compress, decompress
__all__: list = [
r'flatten',
r'NeuroCode'
]
def flatten(_lst: list) -> Generator[list, None, None]:
    """Yield the items of *_lst* with arbitrarily nested lists flattened out."""
    for item in _lst:
        if isinstance(item, list):
            # Recurse into sub-lists and re-emit their flattened items.
            yield from flatten(item)
        else:
            yield item
def _indent(txt: str, chars: int) -> str:
"""Indent the given code."""
result: str = r''
d: str = r' ' * chars
for line in txt.split('\n'):
result += (d + line + '\n')
return result
class NeuroCode: # pylint: disable=C0200,R0902
"""Neurocode class."""
def __init__(self, data: list, layers: list, iterations: int = 40000, rate: float = 0.2) -> None:
"""Initialize Neurocode-learning.
@param[in] data A list of lists of the input data
@param[in] layers Specify the number of hidden layers in the network and the size of each layer. For example, `layers = [3, 4]` makes two hidden layers, the first with 3 nodes and the second with 4 nodes. By default, one hidden layer is used with a size proportionate to the size of the input array
@param[in] iterations Number of times to run the training
@param[in] rate Learning rate (float less than 1.0)
"""
# Setup input data
input_size: int = len(data[0][0])
output_size: int = len(data[0][1])
# Settings
self.hidden_layers = [max(3, int(floor(input_size / 2)))] if not layers else layers
self.sizes: List[Any] = list(flatten([input_size, self.hidden_layers, output_size]))
self.iterations: int = iterations
self.rate: float = rate if rate < 1.0 else 0.4
self.io_rules: list = data
self.io_rules_len: int = len(data)
self.outputlayer: int = len(self.sizes) - 1
self.error_threshold: float = 0.0001
neural_rand = Random()
# Training State
self.deltas: List[Any] = [[]] * (self.outputlayer + 1)
self.changes: List[Any] = [[]] * (self.outputlayer + 1)
self.errors: List[Any] = [[]] * (self.outputlayer + 1)
self.outputs: List[Any] = [[]] * (self.outputlayer + 1)
self.biases: List[Any] = [[]] * (self.outputlayer + 1)
self.weights: List[Any] = [[]] * (self.outputlayer + 1)
for layer in range(self.outputlayer + 1):
_size = self.sizes[layer]
self.deltas[layer] = [0] * _size
self.errors[layer] = [0] * _size
self.outputs[layer] = [0] * _size
if layer > 0:
self.biases[layer] = [(neural_rand.random() * 0.4) - 0.2 for i in range(_size)]
self.weights[layer] = [0] * _size
self.changes[layer] = self.weights[layer]
for node in range(_size):
_prev_size = self.sizes[layer - 1]
self.weights[layer][node] = [(neural_rand.random() * 0.4) - 0.2 for j in range(_prev_size)]
self.changes[layer][node] = [0] * _prev_size
    def train(self) -> Tuple[float, int]:
        """Neurocode training (core function).

        Runs forward + backward propagation over every rule per iteration,
        adjusting weights and biases with a momentum term, until the mean
        squared error drops below ``error_threshold`` or the iteration budget
        is exhausted.

        @return Tuple of (final mean squared error, iterations actually used)
        """
        error: float = 1.0
        used_iterations: int = 0
        for i in range(self.iterations):
            used_iterations = i
            if error <= self.error_threshold:  # Error Threshold
                break
            _sum = 0.0
            for d in self.io_rules:
                # Forward pass stores per-layer activations in self.outputs.
                self.run(d[0])
                # Backward pass fills self.deltas / self.errors from the target.
                self._calculate_deltas(d[1])
                # Adjust Weights
                for _layer in range(1, self.outputlayer + 1):
                    incoming = self.outputs[_layer - 1]
                    for _node in range(self.sizes[_layer]):
                        delta = self.deltas[_layer][_node]
                        for k in range(len(incoming)):
                            change = (self.rate * delta * incoming[k]) + (0.1 * self.changes[_layer][_node][k])  # 0.1 = momentum
                            self.changes[_layer][_node][k] = change
                            self.weights[_layer][_node][k] = change + self.weights[_layer][_node][k]
                        self.biases[_layer][_node] = self.biases[_layer][_node] + (self.rate * delta)
                # Accumulate the mean squared error of the output layer.
                _errsum = 0.0
                for err in self.errors[self.outputlayer]:
                    _errsum += err ** 2.0
                _sum += _errsum / len(self.errors[self.outputlayer])
            error = _sum / self.io_rules_len
        return (error, used_iterations)
def run(self, _input: List[Any]) -> list:
"""Forward Propagation; Execute neuralnet."""
output = self.outputs[0] = _input # Set output state of input layer
for _layer in range(1, self.outputlayer + 1):
for _node in range(self.sizes[_layer]):
weights = self.weights[_layer][_node]
_sum = self.biases[_layer][_node]
for k in range(len(weights)):
_sum += weights[k] * _input[k]
self.outputs[_layer][_node] = 1.0 / (1.0 + exp(-_sum))
_input = self.outputs[_layer]
output = _input
return output
def _calculate_deltas(self, target: list) -> None:
"""Backward Propagation."""
layer: int = self.outputlayer
while layer >= 0:
for node in range(self.sizes[layer]):
output = self.outputs[layer][node]
if layer == self.outputlayer:
error = target[node] - output
else:
deltas = self.deltas[layer + 1]
error = 0.0 |
JioCloud/oslo.config | tests/test_cfgfilter.py | Python | apache-2.0 | 9,272 | 0 | # Copyright 2014 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslotest import base as test_base
from oslo.config import cfg
from oslo.config import cfgfilter
class BaseTestCase(test_base.BaseTestCase):
    """Common fixture: builds a ConfigOpts and wraps it in a ConfigFilter."""

    def setUp(self, conf=None):
        super(BaseTestCase, self).setUp()
        if conf is None:
            self.conf = cfg.ConfigOpts()
        else:
            self.conf = conf
        # The filter under test proxies self.conf without mutating it.
        self.fconf = cfgfilter.ConfigFilter(self.conf)
class RegisterTestCase(BaseTestCase):
    def test_register_opt_default(self):
        """An opt registered on the filter is visible there but not on the parent."""
        self.fconf.register_opt(cfg.StrOpt('foo', default='bar'))
        # Visible through the filter, with its default value.
        self.assertEqual('bar', self.fconf.foo)
        self.assertEqual('bar', self.fconf['foo'])
        self.assertIn('foo', self.fconf)
        self.assertEqual(['foo'], list(self.fconf))
        self.assertEqual(1, len(self.fconf))
        # The underlying ConfigOpts must remain untouched.
        self.assertNotIn('foo', self.conf)
        self.assertEqual(0, len(self.conf))
        self.assertRaises(cfg.NoSuchOptError, getattr, self.conf, 'foo')
def test_register_opt_none_default(self):
self.fconf.register_opt(cfg.StrOpt('foo'))
self.assertIsNone(self.fconf.foo)
self.assertIsNone(self.fconf['foo'])
self.assertIn('foo', self.fconf)
self.assertEqual(['foo'], list(self.fconf))
self.assertEqual(1, len(self.fconf))
self.assertNotIn('foo', self.conf)
self.assertEqual(0, len(self.conf))
self.assertRaises(cfg.NoSuchOptError, getattr, self.conf, 'foo')
def test_register_grouped_opt_default(self):
self.fconf.register_opt(cfg.StrOpt('foo', default='bar'),
group='blaa')
self.assertEqual('bar', self.fconf.blaa.foo)
self.assertEqual('bar', self.fconf['blaa']['foo'])
self.assertIn('blaa', self.fconf)
self.assertIn('foo', self.fconf.blaa)
self.assertEqual(['blaa'], list(self.fconf))
self.assertEqual(['foo'], list(self.fconf.blaa))
self.assertEqual(1, len(self.fconf))
self.assertEqual(1, len(self.fconf.blaa))
self.assertNotIn('blaa', self.conf)
self.assertEqual(0, len(self.conf))
self.assertRaises(cfg.NoSuchOptError, getattr, self.conf, 'blaa')
def test_register_grouped_opt_none_default(self):
self.fconf.register_opt(cfg.StrOpt('foo'), group='blaa')
self.assertIsNone(self.fconf.blaa.foo)
self.assertIsNone(self.fconf['blaa']['foo'])
self.assertIn('blaa', self.fconf)
self.assertIn('foo', self.fconf.blaa)
self.assertEqual(['blaa'], list(self.fconf))
self.assertEqual(['foo'], list(self.fconf.blaa))
self.assertEqual(1, len(self.fconf))
self.assertEqual(1, len(self.fconf.blaa))
self.assertNotIn('blaa', self.conf)
self.assertEqual(0, len(self.conf))
self.assertRaises(cfg.NoSuchOptError, getattr, self.conf, 'blaa')
def test_register_group(self):
group = cfg.OptGroup('blaa')
self.fconf.register_group(group)
self.fconf.register_opt(cfg.StrOpt('foo'), group=group)
self.assertIsNone(self.fconf.blaa.foo)
self.assertIsNone(self.fconf['blaa']['foo'])
self.assertIn('blaa', self.fconf)
self.assertIn('foo', self.fconf.blaa)
self.assertEqual(['blaa'], list(self.fconf))
self.assertEqual(['foo'], list(self.fconf.blaa))
self.assertEqual(1, len(self.fconf))
self.assertEqual(1, len(self.fconf.blaa))
self.assertNotIn('blaa', self.conf)
self.assertEqual(0, len(self.conf))
self.assertRaises(cfg.NoSuchOptError, getattr, self.conf, 'blaa')
def test_register_opts(self):
self.fconf.register_opts([cfg.StrOpt('foo'),
cfg.StrOpt('bar')])
self.assertIn('foo', self.fconf)
self.assertIn('bar', self.fconf)
self.assertNotIn('foo', self.conf)
self.assertNotIn('bar', self.conf)
def test_register_cli_opt(self):
self.fconf.register_cli_opt(cfg.StrOpt('foo'))
self.assertIn('foo', self.fconf)
self.assertNotIn('foo', self.conf)
def test_register_cli_opts(self):
self.fconf.register_cli_opts([cfg.StrOpt('foo'), cfg.StrOpt('bar')])
self.assertIn('foo', self.fconf)
self.assertIn('bar', self.fconf)
self.assertNotIn('foo', self.conf)
self.assertNotIn('bar', self.conf)
def test_register_opts_grouped(self):
self.fconf.register_opts([cfg.StrOpt('foo'), cfg.StrOpt('bar')],
group='blaa')
self.assertIn('foo', self.fconf.b | laa)
self.assertIn('bar', self.fconf.blaa)
self.assertNotIn('blaa', self.conf)
def test_register_cli_opt_grouped(self):
self.fconf.register_cli_opt(cfg.StrOpt('foo'), group= | 'blaa')
self.assertIn('foo', self.fconf.blaa)
self.assertNotIn('blaa', self.conf)
def test_register_cli_opts_grouped(self):
self.fconf.register_cli_opts([cfg.StrOpt('foo'), cfg.StrOpt('bar')],
group='blaa')
self.assertIn('foo', self.fconf.blaa)
self.assertIn('bar', self.fconf.blaa)
self.assertNotIn('blaa', self.conf)
def test_unknown_opt(self):
self.assertNotIn('foo', self.fconf)
self.assertEqual(0, len(self.fconf))
self.assertRaises(cfg.NoSuchOptError, getattr, self.fconf, 'foo')
self.assertNotIn('blaa', self.conf)
    def test_blocked_opt(self):
        """An opt registered only on the parent is hidden from the filter."""
        self.conf.register_opt(cfg.StrOpt('foo'))
        self.assertIn('foo', self.conf)
        self.assertEqual(1, len(self.conf))
        self.assertIsNone(self.conf.foo)
        # The filter does not expose opts it did not register itself.
        self.assertNotIn('foo', self.fconf)
        self.assertEqual(0, len(self.fconf))
        self.assertRaises(cfg.NoSuchOptError, getattr, self.fconf, 'foo')
def test_already_registered_opt(self):
self.conf.register_opt(cfg.StrOpt('foo'))
self.fconf.register_opt(cfg.StrOpt('foo'))
self.assertIn('foo', self.conf)
self.assertEqual(1, len(self.conf))
self.assertIsNone(self.conf.foo)
self.assertIn('foo', self.fconf)
self.assertEqual(1, len(self.fconf))
self.assertIsNone(self.fconf.foo)
self.conf.set_override('foo', 'bar')
self.assertEqual('bar', self.conf.foo)
self.assertEqual('bar', self.fconf.foo)
def test_already_registered_opts(self):
self.conf.register_opts([cfg.StrOpt('foo'),
cfg.StrOpt('fu')])
self.fconf.register_opts([cfg.StrOpt('foo'),
cfg.StrOpt('bu')])
self.assertIn('foo', self.conf)
self.assertIn('fu', self.conf)
self.assertNotIn('bu', self.conf)
self.assertEqual(2, len(self.conf))
self.assertIsNone(self.conf.foo)
self.assertIsNone(self.conf.fu)
self.assertIn('foo', self.fconf)
self.assertIn('bu', self.fconf)
self.assertNotIn('fu', self.fconf)
self.assertEqual(2, len(self.fconf))
self.assertIsNone(self.fconf.foo)
self.assertIsNone(self.fconf.bu)
self.conf.set_override('foo', 'bar')
self.assertEqual('bar', self.conf.foo)
self.assertEqual('bar', self.fconf.foo)
def test_already_registered_cli_opt(self):
self.conf.register_cli_opt(cfg.StrOpt('foo'))
self.fconf.register_cli_opt(cfg.StrOpt('foo'))
self.assertIn('foo', self.conf)
self.assertEqual(1, len(self.conf))
self.assertIsNone(self.conf.foo)
self.assertIn('foo', self.fconf)
self.assertEqual(1, len(self.fconf))
|
ludusrusso/arduino-compiler-web | manage.py | Python | gpl-2.0 | 725 | 0.01931 | #!/usr/bin/env python
import os
from app import create_app # , db
# from app.models import User, Role
from flask.ext.script import Manager, Shell
# from flask.ext.migrate import Migrate, MigrateCommand
app = create_app(os.getenv('FLASK_CONFIG') or 'default')
manager = Mana | ger(app)
# migrate = Migrate(app, db)
def make_shell_context():
    """Return the objects injected into the interactive ``shell`` command namespace."""
    return dict(app=app)  # , db=db, User=User, Role=Role)
manager.add_command("shell", Shell(make_context=make_shel | l_context))
# manager.add_command('db', MigrateCommand)
@manager.command
def test():
    """Run the unit tests."""
    import unittest
    # Discover every test module under tests/ and run the suite verbosely.
    tests = unittest.TestLoader().discover('tests')
    unittest.TextTestRunner(verbosity=2).run(tests)
if __name__ == '__main__':
manager.run() |
open-mmlab/mmdetection | configs/cascade_rcnn/cascade_mask_rcnn_x101_32x8d_fpn_mstrain_3x_coco.py | Python | apache-2.0 | 1,878 | 0 | _base_ = './cascade_mask_rcnn_r50_fpn_mstrain_3x_coco.py'
model = di | ct(
backbone=dict(
type='ResNeXt',
depth=101,
groups=32,
base_width=8,
num_stages=4,
out_indices=(0, 1, 2, 3),
frozen_stages=1,
norm_cfg=dict(type='BN', requires_grad=False),
style='pytorch',
init_cfg=dict(
type='Pretrained',
checkpoint='open-mmlab://detectron | 2/resnext101_32x8d')))
# ResNeXt-101-32x8d model trained with Caffe2 at FB,
# so the mean and std need to be changed.
img_norm_cfg = dict(
mean=[103.530, 116.280, 123.675],
std=[57.375, 57.120, 58.395],
to_rgb=False)
# In mstrain 3x config, img_scale=[(1333, 640), (1333, 800)],
# multiscale_mode='range'
train_pipeline = [
dict(type='LoadImageFromFile'),
dict(type='LoadAnnotations', with_bbox=True, with_mask=True),
dict(
type='Resize',
img_scale=[(1333, 640), (1333, 800)],
multiscale_mode='range',
keep_ratio=True),
dict(type='RandomFlip', flip_ratio=0.5),
dict(type='Normalize', **img_norm_cfg),
dict(type='Pad', size_divisor=32),
dict(type='DefaultFormatBundle'),
dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks']),
]
test_pipeline = [
dict(type='LoadImageFromFile'),
dict(
type='MultiScaleFlipAug',
img_scale=(1333, 800),
flip=False,
transforms=[
dict(type='Resize', keep_ratio=True),
dict(type='RandomFlip'),
dict(type='Normalize', **img_norm_cfg),
dict(type='Pad', size_divisor=32),
dict(type='ImageToTensor', keys=['img']),
dict(type='Collect', keys=['img']),
])
]
data = dict(
train=dict(dataset=dict(pipeline=train_pipeline)),
val=dict(pipeline=test_pipeline),
test=dict(pipeline=test_pipeline))
|
rcbops/melange-buildpackage | melange/ipv6/rfc2462_generator.py | Python | apache-2.0 | 1,519 | 0 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2011 OpenStack LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import netaddr
class RFC2462IpV6Generator(object):
required_params = ["mac_address"]
def __init__(self, cidr, **kwargs):
self._cidr = cidr
self._mac_address = netaddr.EUI(kwargs['mac_address'])
def nex | t_ip(self):
address = self._deduce_ip_address()
next_mac = int(self._mac_address) + 1
self._mac_address = netaddr.EUI(next_mac)
return address
def _deduce_ip_address(self):
variable_segment = self._variable_segment()
| network = netaddr.IPNetwork(self._cidr)
return str(variable_segment & network.hostmask | network.cidr.ip)
def _variable_segment(self):
mac64 = self._mac_address.eui64().words
int_addr = int(''.join(['%02x' % i for i in mac64]), 16)
return netaddr.IPAddress(int_addr) ^ netaddr.IPAddress("::0200:0:0:0")
|
svn2github/pyopt | pyOpt/pyOpt_history.py | Python | gpl-3.0 | 7,012 | 0.056047 | #!/usr/bin/env python
'''
pyOpt_history
Holds the Python Design Optimization History Class.
Copyright (c) 2008-2014 by pyOpt Developers
All rights reserved.
Revision: 1.0 $Date: 11/12/2009 21:00$
Developers:
-----------
- Dr. Ruben E. Perez (RP)
- Mr. Peter W. Jansen (PJ)
History
-------
v. 1.0 - Initial Class Creation (PJ,RP 2009)
'''
__version__ = '$Revision: $'
'''
To Do:
- shevling of optimizer
'''
# =============================================================================
# Standard Python modules
# =============================================================================
import os, sys
import pdb
import array as ARRAY
import shelve
# =============================================================================
# External Python modules
# =============================================================================
import numpy
# =============================================================================
# Extension modules
# =============================================================================
#import extension
# =============================================================================
# Misc Definitions
# =============================================================================
# =============================================================================
# History Class
# =============================================================================
class History(object):
'''
Abstract Class for Optimizer History Object
'''
def __init__(self, filename, mode, optimizer=None, opt_prob=None, *args, **kwargs):
'''
Optimizer History Class Initialization
**Arguments:**
- filename -> STR: Name for .bin and .cue file
- mode -> STR: Either read ('r') or | write ('w') mode
**Keyword arguments:**
- optimizer -> INST: Opimizer class instance, *Default* = None
- opt_prob -> STR: Optimization Problem Name, *Default* = None
Documentation last updated: April. | 14, 2010 - Peter W. Jansen
'''
#
self.filename = filename
self.mode = mode
#
bin_name = filename + '.bin'
cue_name = filename + '.cue'
#opt_name = filename + '.opt'
#
if self.mode == 'w':
#
if os.path.isfile(bin_name):
os.remove(bin_name)
#end
if os.path.isfile(cue_name):
os.remove(cue_name)
#end
#if os.path.isfile(opt_name):
# os.remove(opt_name)
##end
else:
if not os.path.isfile(bin_name):
raise NameError('Error: filename %s.bin does not exist'%(filename))
#end
if not os.path.isfile(cue_name):
raise NameError('Error: filename %s.cue does not exist'%(filename))
#end
#if not os.path.isfile(opt_name):
# raise NameError('Error: filename %s.opt does not exist'%(filename))
##end
#end
#
self.bin_file = open(bin_name,mode+'b')
self.cue_file = open(cue_name,mode)
#self.opt_file = shelve.open(opt_name)
#
if self.mode == 'w':
#
if (optimizer == None):
optname = 'None'
else:
optname = optimizer.name
#end
header = 'History for %s solving %s\n' %(optname,opt_prob)
self.cue_file.write(header)
#self.opt_file['optimizer'] = optimizer
#self.opt_file.close()
elif self.mode == 'r':
#
self.cues = {}
self.icount = {}
# if (self.opt_file['optimizer'].__dict__ != optimizer.__dict__):
# print 'Warning Optimizer Instance and stored Optimizer do not match -- hot start aborted'
# self.cue_file.close()
# self.opt_file.close()
# self.s_count = 0
# return
# #end
lines = self.cue_file.readlines()
for line in lines[1:]:
if len(line) < 3:
break
else:
#read in positions
tline = line.split()
if self.cues.has_key(tline[2]):
self.cues[tline[2]].append([int(tline[0]),int(tline[1])])
else:
self.cues[tline[2]] = [[int(tline[0]),int(tline[1])]]
self.icount[tline[2]] = 0
#end
#end
#end
self.cue_file.close()
#end
#
self.s_count = 0
def close(self):
'''
Close Optimizer History Files
Documentation last updated: December. 11, 2009 - Ruben E. Perez
'''
self.bin_file.close()
if self.mode == 'w':
self.cue_file.close()
#end
def read(self, index=[], ident=['obj']):
'''
Read Data from Optimizer History Files
**Keyword arguments:**
- index -> LIST,SCALAR: Index (list), [0,-1] for all, [] internal count, -1 for last, *Default* = []
- ident -> STR: Indentifier, *Default* = 'obj'
Documentation last updated: April. 14, 2010 - Peter W. Jansen
'''
# index = [0,-1]
bdata = {}
hist_end = False
for id in ident:
bdata[id] = []
if id in self.cues.keys():
if isinstance(index,int):
if (index == -1):
index = len(self.cues[id])-1
#end
index = [index, index+1]
elif isinstance(index,list):
if (index == []):
index = [self.icount[id], self.icount[id]+1]
self.icount[id] += 1
elif (index == [0,-1]):
index = [0, len(self.cues[id])]
#end
else:
raise ValueError('Index type not understood - must be either int or list')
#end
else:
hist_end = True
return (bdata,hist_end)
#end
for i in xrange(index[0],index[1]):
#
if (i >= len(self.cues[id])):
hist_end = True
return (bdata,hist_end)
#end
tvals = ARRAY.array('d')
self.bin_file.seek(self.cues[id][i][0]*8,0)
tvals.fromfile(self.bin_file,self.cues[id][i][1])
bdata[id].append(numpy.array(tvals))
#end
#end
return (bdata, hist_end)
def write(self,bin_data,cue_data):
'''
Write Data to Optimizer History Files
**Arguments:**
- bin_data -> LIST/ARRAY: Data to be written to binary file
- cue_data -> STR: Variable identifier for cue file
Documentation last updated: Feb. 07, 2011 - Peter W. Jansen
'''
#
bin_data = numpy.array(bin_data)
tdata = ARRAY.array('d',bin_data.flatten())
tdata.tofile(self.bin_file)
self.bin_file.flush()
#
self.cue_file.write('%d %d %s\n'%(self.s_count,len(bin_data.flatten()), cue_data))
self.cue_file.flush()
#
self.s_count += len(bin_data.flatten())
return
def overwrite(self,bin_data,index):
'''
Overwrite Data on Optimizer History Files
**Arguments:**
- bin_data -> ARRAY: Data to overwrite old data
- index -> INT: Starting index of old data
Documentation last updated: Feb. 03, 2011 - Peter W. Jansen
'''
#
bin_data = numpy.array(bin_data)
tdata = ARRAY.array('d',bin_data.flatten())
self.bin_file.seek(index*8,0)
tdata.tofile(self.bin_file)
self.bin_file.flush()
self.bin_file.seek(0,2)
return
#==============================================================================
# Optimizer History Test
#==============================================================================
if __name__ == '__main__':
# Test Optimizer History
print 'Testing Optimizer History...'
hst = History()
|
berrange/nova | nova/tests/console/test_websocketproxy.py | Python | apache-2.0 | 4,576 | 0 | # All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Tests for nova websocketproxy."""
import mock
from nova.console import websocketproxy
from nova import exception
from nova import test
class NovaProxyRequestHandlerBaseTestCase(test.TestCase):
def setUp(self):
super(NovaProxyRequestHandlerBaseTestCase, self).setUp()
self.wh = websocketproxy.NovaProxyRequestHandlerBase()
self.wh.socket = mock.MagicMock()
self.wh.msg = mock.MagicMock()
self.wh.do_proxy = mock.MagicMock()
self.wh.headers = mock.MagicMock()
@mock.patch('nova.consoleauth.rpcapi.ConsoleAuthAPI.check_token')
def test_new_websocket_client(self, check_token):
check_token.return_value = {
'host': 'node1',
'port': '10000'
}
self.wh.socket.return_value = '<socket>'
self.wh.path = "ws://127.0.0.1/?token=123-456-789"
self.wh.new_websocket_client()
check_token.assert_called_with(mock.ANY, token="123-456-789")
self.wh.socket.assert_called_with('node1', 10000, connect=True)
self.wh.do_proxy.assert_called_with('<socket>')
@mock.patch('nova.consoleauth.rpcapi.ConsoleAuthAPI.check_token')
def test_new_websocket_client_token_invalid(self, check_token):
check_token.return_value = False
self.wh.path = "ws://127.0.0.1/?token=XXX"
self.assertRaises(exception.InvalidToken,
self.wh.new_websocket_clie | nt)
check_token.assert_called_with(mock.ANY, token="XXX")
@mock.patch('nova.consoleauth.rpcapi.ConsoleAuthAPI.check_token')
def test_new_websocket_client_novnc(self, check_token):
check_token.return_value = {
'host': 'node1',
'port': '10000'
}
self.wh.socket.return_value = '<socket>'
self.wh.path = "http://127.0.0.1/"
self.wh.headers.getheader | .return_value = "token=123-456-789"
self.wh.new_websocket_client()
check_token.assert_called_with(mock.ANY, token="123-456-789")
self.wh.socket.assert_called_with('node1', 10000, connect=True)
self.wh.do_proxy.assert_called_with('<socket>')
@mock.patch('nova.consoleauth.rpcapi.ConsoleAuthAPI.check_token')
def test_new_websocket_client_novnc_token_invalid(self, check_token):
check_token.return_value = False
self.wh.path = "http://127.0.0.1/"
self.wh.headers.getheader.return_value = "token=XXX"
self.assertRaises(exception.InvalidToken,
self.wh.new_websocket_client)
check_token.assert_called_with(mock.ANY, token="XXX")
@mock.patch('nova.consoleauth.rpcapi.ConsoleAuthAPI.check_token')
def test_new_websocket_client_internal_access_path(self, check_token):
check_token.return_value = {
'host': 'node1',
'port': '10000',
'internal_access_path': 'vmid'
}
tsock = mock.MagicMock()
tsock.recv.return_value = "HTTP/1.1 200 OK\r\n\r\n"
self.wh.socket.return_value = tsock
self.wh.path = "ws://127.0.0.1/?token=123-456-789"
self.wh.new_websocket_client()
check_token.assert_called_with(mock.ANY, token="123-456-789")
self.wh.socket.assert_called_with('node1', 10000, connect=True)
self.wh.do_proxy.assert_called_with(tsock)
@mock.patch('nova.consoleauth.rpcapi.ConsoleAuthAPI.check_token')
def test_new_websocket_client_internal_access_path_err(self, check_token):
check_token.return_value = {
'host': 'node1',
'port': '10000',
'internal_access_path': 'xxx'
}
tsock = mock.MagicMock()
tsock.recv.return_value = "HTTP/1.1 500 Internal Server Error\r\n\r\n"
self.wh.socket.return_value = tsock
self.wh.path = "ws://127.0.0.1/?token=123-456-789"
self.assertRaises(Exception, self.wh.new_websocket_client) # noqa
check_token.assert_called_with(mock.ANY, token="123-456-789")
|
guerinj/expenses-api | api.py | Python | apache-2.0 | 1,733 | 0.003462 | from flask import Flask
from flask import jsonify, request
from flask.ext import restful
from flask.ext.pymongo import PyMongo, ObjectId
import ipdb;
app = Flask(__name__)
app.config['MONGO_DBNAME'] = "expenses"
mongo = PyMongo(app)
api = restful.Api(app)
class Expense(restful.Resource):
def get(self, expense_id=None):
if expense_id is None:
expenses = mongo.db.expenses.find()
exp_list = list()
for exp in expenses :
exp_list.append({ "value" : exp["value"], "date" : exp["_id"].generation_time, "_id" : str(exp["_id"])})
return jsonify(expenses = exp_list)
else :
if not ObjectId.is_valid(expense_id):
return ("Bad Request", 400)
expenses = mongo.db.expenses.find({"_id" : ObjectId(expense_id)})
if expenses.count() != 1:
return ("Not Found", 404)
else:
exp = expenses[0]
return jsonify(value = exp[ | "value"], _id=str(exp["_id"]), date=exp["_id"].generation_time)
def post(self):
ipdb.set_trace()
if not 'value' in request.json or type(request.json["value"]) not in [float, int, long]:
return "Bad Request", 400
expense_id = mongo.db.expenses.insert({"value": request.json["value"]})
return "OK", 200
def delete(self, e | xpense_id = None):
if expense_id is None:
return "Bad Request", 400
mongo.db.expenses.remove({"_id": ObjectId(expense_id)})
return "OK", 200
api.add_resource(Expense, '/expenses/<string:expense_id>', '/expenses')
if __name__ == '__main__':
app.run(debug=True, host= '0.0.0.0')
|
Flexget/Flexget | flexget/plugins/clients/pyload.py | Python | mit | 9,436 | 0.001166 | from urllib.parse import quote
from loguru import logger
from requests.exceptions import RequestException
from flexget import plugin
from flexget.config_schema import one_or_more
from flexget.event import event
from flexget.utils import json
from flexget.utils.template import RenderError
logger = logger.bind(name='pyload')
class PyloadApi:
def __init__(self, requests, url):
self.requests = requests
self.url = url
def get_session(self, config):
# Login
data = {'username': config['username'], 'password': config['password']}
result = self.post('login', data=data)
response = result.json()
if not response:
raise plugin.PluginError('Login failed', logger)
if isinstance(response, str):
return response.replace('"', '')
else:
return response
def get(self, method):
try:
return self.requests.get(self.url.rstrip("/") + "/" + method.strip("/"))
except RequestException as e:
if e.response and e.response.status_code == 500:
raise plugin.PluginError(
'Internal API Error: <%s> <%s>' % (method, self.url), logger
)
raise
def post(self, method, data):
try:
return self.requests.post(self.url.rstrip("/") + "/" + method.strip("/"), data=data)
except RequestException as e:
if e.response and e.response.status_code == 500:
raise plugin.PluginError(
'Internal API Error: <%s> <%s> <%s>' % (method, self.url, data), logger
)
raise
class PluginPyLoad:
"""
Parse task content or url for hoster links and adds them to pyLoad.
Example::
pyload:
api: http://localhost:8000/api
queue: yes
username: my_username
password: my_password
folder: desired_folder
package: desired_package_name (jinja2 supported)
package_password: desired_package_password
hoster:
- YoutubeCom
parse_url: no
multiple_hoster: yes
enabled: yes
Default values for the config elements::
pyload:
api: http://localhost:8000/api
queue: no
hoster: ALL
parse_url: no
multiple_hoster: yes
enabled: yes
"""
__author__ = 'http://pyload.org'
__version__ = '0.5'
DEFAULT_API = 'http://localhost:8000/api'
DEFAULT_QUEUE = False
DEFAULT_FOLDER = ''
DEFAULT_HOSTER = []
DEFAULT_PARSE_URL = False
DEFAULT_MULTIPLE_HOSTER = True
DEFAULT_PREFERRED_HOSTER_ONLY = False
DEFAULT_HANDLE_NO_URL_AS_FAILURE = False
schema = {
'type': 'object',
'properties': {
'api': {'type': 'string'},
'username': {'type': 'string'},
'password': {'type': 'string'},
'folder': {'type': 'string'},
'package': {'type': 'string'},
'package_password': {'type': 'string'},
'queue': {'type': 'boolean'},
'parse_url': {'type': 'boolean'},
'multiple_hoster': {'type': 'boolean'},
'hoster': one_or_more({'type': 'string'}),
'preferred_hoster_only': {'type': 'boolean'},
'handle_no_url_as_failure': {'type': 'boolean'},
'enabled': {'type': 'boolean'},
},
'required': ['username', 'password'],
'additionalProperties': False,
}
def on_task_output(self, task, config):
if not config.get('enabled', True):
return
if not task.accepted:
return
self.add_entries(task, config)
def add_entries(self, task, config):
"""Adds accepted entries"""
apiurl = config.get('api', self.DEFAULT_API)
api = PyloadApi(task.requests, apiurl)
try:
session = api.get_session(config)
except OSError:
raise plugin.PluginError('pyLoad not reachable', logger)
except plugin.PluginError:
raise
except Exception as e:
raise plugin.PluginError('Unknown error: %s' % str(e), logger)
# old pyload (stable)
is_pyload_ng = False
parse_urls_command = 'parseURLs'
add_package_command = 'addPackage'
set_package_data_command = 'setPackageData'
# pyload-ng is returning dict instead of session string on login
if isinstance(session, dict):
is_pyload_ng = True
parse_urls_command = 'parse_urls'
add_package_command = 'add_package'
set_package_data_command = 'set_package_date'
hoster = config.get('hoster', self.DEFAULT_HOSTER)
for entry in task.accepted:
# bunch of urls now going to check
content = entry.get('description', '') + ' ' + quote(entry['url'])
content = json.dumps(content)
if is_pyload_ng:
url = entry['url'] if config.get('parse_url', self.DEFAULT_PARSE_URL) else ''
else:
url = (
json.dumps(entry['url'])
if config.get('parse_url', self.DEFAULT_PARSE_URL)
else "''"
)
logger.debug('Parsing url {}', url)
data = {'html': content, 'url': url}
if not is_pyload_ng:
data['session'] = session
result = api.post(parse_urls_command, data=data)
| parsed = result.json()
urls = entry.get('urls', [])
# check for preferred hoster
for name in hoster:
if name in parsed:
urls.extend(parsed[name])
if not config.get('multiple_hoster', self.DEFAULT_MULTIPLE_HOSTER):
break
| # no preferred hoster and not preferred hoster only - add all recognized plugins
if not urls and not config.get(
'preferred_hoster_only', self.DEFAULT_PREFERRED_HOSTER_ONLY
):
for name, purls in parsed.items():
if name != 'BasePlugin':
urls.extend(purls)
if task.options.test:
logger.info('Would add `{}` to pyload', urls)
continue
# no urls found
if not urls:
if config.get('handle_no_url_as_failure', self.DEFAULT_HANDLE_NO_URL_AS_FAILURE):
entry.fail('No suited urls in entry %s' % entry['title'])
else:
logger.info('No suited urls in entry {}', entry['title'])
continue
logger.debug('Add {} urls to pyLoad', len(urls))
try:
dest = 1 if config.get('queue', self.DEFAULT_QUEUE) else 0 # Destination.Queue = 1
# Use the title of the entry, if no naming schema for the package is defined.
name = config.get('package', entry['title'])
# If name has jinja template, render it
try:
name = entry.render(name)
except RenderError as e:
name = entry['title']
logger.error('Error rendering jinja event: {}', e)
if is_pyload_ng:
data = {
'name': name.encode('ascii', 'ignore').decode(),
'links': urls,
'dest': dest,
}
else:
data = {
'name': json.dumps(name.encode('ascii', 'ignore').decode()),
'links': json.dumps(urls),
'dest': json.dumps(dest),
'session': session,
}
pid = api.post(add_package_command, data=data).text
logger.debug('added package pid: {}', pid)
# Set Folder
folder = config.get('folder', self.DEFAULT_FOLDER)
folder = entry.get('path', folder)
if folder:
|
ikn/brjaf | brjaf/ext/evthandler.py | Python | gpl-3.0 | 12,404 | 0.002338 | """Pygame event handler by J.
This module consists of the EventHandler class, which is used to assign
callbacks to events and keypresses in Pygame.
Release: 12.
Licensed under the GNU General Public License, version 3; if this was not
included, you can find it here:
http://www.gnu.org/licenses/gpl-3.0.txt
"""
# TODO:
# - match keys by event.unicode
# - ability to remove event/key/default handlers
# - joystick stuff
import sys
import pygame
MODE_HELD = 0
MODE_ONPRESS = 1
MODE_ONPRESS_REPEAT = 2
MODE_ONDOWN = 3
MODE_ONDOWN_REPEAT = 4
def quit (event):
pygame.quit()
sys.exit()
class EventHandler:
"""Assign callbacks to events and keypresses.
EventHandler(event_handlers = {}, key_handlers = [], suppress_quit = False,
quit_handler = evthandler.quit[, default_cbs],
ignore_locks = True)
event_handlers: (event.type: callbacks) dict.
key_handlers: list of (keys, callbacks, mode) tuples, where:
- keys is a list of (key_ID, mods, exact) tuples or key_ID ints, where:
- key_ID is as used in pygame.
- mods is a modifier bitmask or list of modifier bitmasks to match as
well. 'Matching' a bitmask is having any key it 'contains' pressed;
passing a list does an AND-type comparison, where we check for a
match against every bitmask in the list.
- exact is a bool, determining whether to match the modifiers exactly
(otherwise, it's a match if other modifiers are held as well).
Passing a key_ID is like passing (key_ID, 0, False).
- mode is one of those defined in this module. *_REPEAT modes require two
more arguments in each tuple, both integers greater than 0:
- initial_delay, the number of frames the key must be held down for
until it starts repeating.
- repeat_delay, the number of frames between repeats.
suppress_quit: don't exit (call quit_handler) on a pygame.QUIT event.
quit_handler: handler to attach to pygame.QUIT events; the default function
calls pygame.quit and sys.exit. This is ignored if suppress_quit
is True.
default_cbs: callbacks to call for events with no registered event handlers.
ignore_locks: whether to ignore num lock and caps lock when matching modifiers
for key handlers with exact = True.
In all cases, callbacks is a list of (callback, args) tuples, where args is a
list of arguments to pass to the callback (after any compulsory arguments).
(callback, args) can be reduced to callback if args is empty, and the whole
list can be reduced to just a callback if there's only one and its args list is
empty.
Event callbacks (includes those in default_cbs) take the event as an argument.
Key callbacks take three arguments:
- key_ID or the (key_ID, mods, exact) tuple as passed.
- the type of key event: -1 if the key is being held down, 0 if it was
pressed, 1 if released, 2 if this is a repeat call (simulated keypress).
(This means that for some modes, this argument is always the same.)
- the key modifiers being held at the time of the keypress/release/currently.
(This is a bitmask that can be compared to the pygame.KMOD_* constants.)
The available modes and the extra arguments needed in the key_handler entry are
as follows:
MODE_HELD: the key is currently being held down.
MODE_ONPRESS: the key was pressed or released since the last check.
MODE_ONPRESS_REPEAT: as MODE_ONPRESS, but call the callback repeatedly when
held down for some time.
MODE_ONDOWN: the key was pressed since the last check.
MODE_ONDOWN_REPEAT: as MODE_ONDOWN, but call the callback repeatedly when
held down for some time.
Frames, here, are the number of calls to EventHandler.update.
Note that the callbacks associated with any given key are not called more than
once per frame, even if the key is pressed more than once in the last frame
(could happen with a mode other than MODE_HELD).
METHODS
add_event_handlers
add_key_handlers
update
ATTRIBUTES
event_handlers: (event.type: callbacks) dict of registered event handlers.
default_cbs: callbacks for unhandled events.
key_handlers: (keycode: data) dict of registered key handlers, where data is a
(key_data: callbacks) dict and key_data is keycode or
(keycode, mods, exact) as given.
keys_down: keys pressed between the last two calls to update.
keys_up: keys released between the last two calls to update.
keys_pressed: keys held down at the time of the last call to update.
key_mods: the return value from pygame.key.get_mods at the time of the last
call to update.
events_active: whether event handlers are called.
keys_active: whether key handlers are called.
defaults_active: whether default handlers are called.
"""
def __init__ (self, event_handlers = {}, key_handlers = [],
suppress_quit = False, quit_handler = quit,
default_cbs = None, ignore_locks = True):
self.event_handlers = {}
self.add_event_handlers(event_handlers)
self.key_handlers = {}
self._keys_handled = [set(), set(), set(), set(), set()]
self.add_key_handlers(key_handlers)
self.default_cbs = []
if default_cbs is not None:
self.add_default_cbs(default_cbs)
if not suppress_quit:
self.add_event_handlers({pygame.QUIT: quit_handler})
self._ignore_locks = ignore_locks
self.keys_down = set()
self.keys_up = set()
self.keys_pressed = set()
self.key_mods = 0
self.repeat_count = {}
self.events_active = self.keys_active = self.defaults_active = True
def _clean_cbs (self, cbs):
# expand shorthand callback arguments
if hasattr(cbs, '__call__'):
cbs = [cbs]
return [(cb, ()) if hasattr(cb, '__call__') else cb for cb in cbs]
def _call_cbs (self, cbs, *args):
# call callbacks in list of accepted format
args = tuple(args)
for cb, extra_args in cbs:
extra_args = tuple(extra_args)
cb(*(args + extra_args))
def _call_key_cbs (self, cbs, key_data, press_type, current_mods):
# call key callbacks in list of accepted format if modifiers match
if isinstance(key_data, int):
# just got a key ID
key, mods, exact = (key_data, 0, False)
else:
# got (key_ID, mods, exact)
key, mods, exact = key_data
# check mods match
if isinstance(mods, int):
mods = (mods,)
mods = set(mods)
# check all wanted mods are currently pressed
match = all(mod == 0 or mod & current_mods for mod in mods)
if exact and match:
# 'subtracting' mods from current_mods gives 0 if current_mods
# 'contains' no other mods
subtract = list(mods)
if self._ignore_locks:
subtract += [pygame.KMOD_CAPS, pygame.KMOD_NUM]
match = current_mods & reduce(int.__or__, subtract)
match = (current_mods - match) == 0
if match:
self._call_cbs(cbs, key_data, press_type, current_mods)
def _call_all_cbs (self, key, press_type, modes, mods):
# call all callbacks for a key
for key_data, cb_data_sets in self.key_handlers[key].iteritems():
for cb_data in cb_data_sets:
if cb_data[1] in modes:
| self._call_key_cbs(cb_data[0], key_data, press_type, mods)
def add_event | _handlers (self, event_handlers):
"""Add more event handlers.
Takes an event_handlers argument in the same form as expected by the
constructor.
"""
for e, cbs in event_handlers.iteritems():
cbs = self._clean_cbs(cbs)
try:
self.event_handlers[e] += cbs
except KeyError:
self.event_handlers[e] = cbs
def add_key_handlers (self, key_handlers):
"""Add more key handlers.
Takes a key_handlers argument in the same form as expected by the constructor.
"""
for x in key_handlers:
keys, cbs, mode = |
ATNF/askapsdp | 3rdParty/casacore/casacore-1.6.0a/build.py | Python | gpl-2.0 | 1,801 | 0.004442 | import os.path
from askapdev.rbuild.builders import CMake as Builder
import askapdev.rbuild.utils as utils
# CMake doesn't know about ROOT_DIR for blas and lapack, so need to
# explicitly name them. Want to use the dynamic libraries in order
# to avoid link problems with missing FORTRAN symbols.
platform = utils.get_platform()
libblas = "libblas.so"
liblapack = "liblapack.so"
if platform['system'] == 'Darwin':
libblas = libblas.replace(".so", ".dylib")
liblapack = liblapack.replace(".so", ".dylib")
builder = Builder()
builder.remote_archive = "casacore-1.6.0a.tar.bz2"
cfitsio = builder.dep.get_install_path("cfitsio")
wcslib = builder.dep.get_install_path("wcslib")
blas = builder.dep.get_install_path("blas")
lapack = builder.dep.get_install_path("lapack")
fftw3 = builder.dep.get_install_path("fftw3")
# CMake doesn't know about ROOT_DIR for these packages, so be explicit
builder.add_option("-DBLAS_LIBRARIES=%s" % os.path.join(blas, 'lib', libblas))
builder.add_ | option("-DLAPACK_LIBRARIES=%s" % os.path.join(lapack, 'lib', liblapack))
# these work
builder.add_option("-DCFITSIO_ROOT_DIR=%s" % cfitsio)
builder.add_option("-DWCSLIB_ROOT_DIR=%s" % wcslib)
# but FFTW3_ROOT_DIR don't for the include part
builder.add_option("-DFFTW3_DISABLE_THREADS=ON")
builder.add_option("-DFFTW3_ROOT_DIR=%s" % fft | w3)
builder.add_option("-DFFTW3_INCLUDE_DIRS=%s/include" % fftw3)
builder.add_option("-DUSE_FFTW3=ON")
# save some time
builder.add_option("-DBUILD_TESTING=OFF")
builder.nowarnings = True
# Force use of raw GNU compilers. This is due to bug #5798 soon on the Cray XC30.
# Builds using the newer cmake (2.8.12) fail when cmake uses the Cray compiler
# wrappers
builder.add_option("-DCMAKE_C_COMPILER=gcc")
builder.add_option("-DCMAKE_CXX_COMPILER=g++")
builder.build()
|
lberruti/ansible | test/units/parsing/test_unquote.py | Python | gpl-3.0 | 2,073 | 0 | # coding: utf-8
# (c) 2015, Toshio Kuratomi <tkuratomi@ansible.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MER | CHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from nose import tools
from ansible.compat.tests import unittest
from ansible.parsing.splitter import unquote
# Tests using nose's | test generators cannot use unittest base class.
# http://nose.readthedocs.org/en/latest/writing_tests.html#test-generators
class TestUnquote:
    """Generator-style nose tests for ansible.parsing.splitter.unquote."""

    # Pairs of (raw input, expected unquoted result).
    UNQUOTE_DATA = (
        (u'1', u'1'),
        (u'\'1\'', u'1'),
        (u'"1"', u'1'),
        (u'"1 \'2\'"', u'1 \'2\''),
        (u'\'1 "2"\'', u'1 "2"'),
        (u'\'1 \'2\'\'', u'1 \'2\''),
        (u'"1\\"', u'"1\\"'),
        (u'\'1\\\'', u'\'1\\\''),
        (u'"1 \\"2\\" 3"', u'1 \\"2\\" 3'),
        (u'\'1 \\\'2\\\' 3\'', u'1 \\\'2\\\' 3'),
        (u'"', u'"'),
        (u'\'', u'\''),
        # Not entirely sure these are good but they match the current
        # behaviour
        (u'"1""2"', u'1""2'),
        (u'\'1\'\'2\'', u'1\'\'2'),
        (u'"1" 2 "3"', u'1" 2 "3'),
        (u'"1"\'2\'"3"', u'1"\'2\'"3'),
    )

    def check_unquote(self, quoted, expected):
        # Shared assertion helper so nose reports each datapoint separately.
        tools.eq_(unquote(quoted), expected)

    def test_unquote(self):
        # Nose test generator: yield one check per datapoint.
        for quoted, expected in self.UNQUOTE_DATA:
            yield self.check_unquote, quoted, expected
|
OpenLinkedSocialData/gmane3 | gmane-org-user-groups-linux-brazil-slackware/scripts/testTriplify2.py | Python | cc0-1.0 | 2,417 | 0.023583 | import importlib, os
import multiprocessing as mp
from IPython.lib.deepreload import reload as dreload
import gmane as g, percolation as P
# Short aliases used throughout this interactive script.
G=g
c=P.utils.check
# Force-reload the working modules so in-session edits are picked up.
importlib.reload(g.listDataStructures)
importlib.reload(g.loadMessages)
importlib.reload(g.triplifyList)
importlib.reload(P.rdf)
importlib.reload(P.utils)
importlib.reload(g.utils)
dreload(g,exclude="pytz")
#lm=g.LoadMessages("gmane.ietf.rfc822",10,basedir="~/.gmane2/")
#ds=g.ListDataStructures(lm)
#
#dl=g.DownloadGmaneData(dpath)
#dl.downloadedStats() # might take a while
# Candidate data directories tried over time; only the last assignment wins.
dpath='/home/r/.gmane4/'
dpath='/home/r/.gmane/'
dpath='/disco/.gmane/'
load_msgs=[]
data_structs=[]
scriptpath=os.path.realpath(__file__)
fpath="./publishing/"
umbrella_dir="gmane2/"
#for list_stat in dl.lists:
#    list_id=list_stat[0]
#for list_id in ['gmane.comp.gcc.libstdc++.devel']:
#for list_id in ['gmane.comp.java.hadoop.hive.user']:
#for list_id in ['gmane.politics.organizations.metareciclagem', 'gmane.comp.gcc.libstdc++.devel', 'gmane.linux.audio.devel', 'gmane.linux.audio.users']:
#for list_id in ['gmane.comp.web.egroupware.user', 'gmane.culture.language.basque.eibartarrak','gmane.org.operators.nznog', 'gmane.science.nmr.relax.scm',"gmane.linux.fbdev.devel",]:
# Process each mailing list: load its messages, build the data structures and
# publish a linked-data repository for it.  [4:] skips already-processed lists.
for list_id in ['gmane.politics.marxism.marxmail', 'gmane.mail.spam.spamassassin.devel','gmane.comp.audio.supercollider.devel',
                'gmane.linux.ubuntu.devel.kernel.general',"gmane.comp.video.ffmpeg.user","gmane.comp.mathematics.maxima.general",
                "gmane.politics.activism.neurogreen","gmane.comp.encryption.openssl.user","gmane.org.user-groups.linux.brazil.slackware",
                "gmane.comp.apache.user","gmane.comp.python.pygame",'gmane.science.linguistics.wikipedia.deutsch',
                'gmane.politics.election-methods','gmane.linux.redhat.rpm.general','gmane.comp.db.postgresql.brasil.user'][4:]:
    c(list_id)
#    lm=g.LoadMessages(list_id,basedir=dpath,n_messages=20000)
#    lm=g.LoadMessages(list_id,basedir=dpath,n_messages=200)
    lm=g.LoadMessages(list_id,basedir=dpath)
    ds=g.ListDataStructures(lm)
    foo=G.triplifyList.makeRepo(ds,fpath,dpath+list_id,"Linked data of the email list with Gmane id: {}".format(list_id),scriptpath=scriptpath,umbrella_dir=umbrella_dir)
    mm= ds.messages
    ids=ds.message_ids
    # Sanity check: show timestamps of the first and last message loaded.
    print("first: ", mm[ids[0]][2], "last:", mm[ids[-1]][2])
def hardClean(text):
    # Keep only alphanumeric characters plus an extra allowed set.
    # NOTE(review): 'allowed' is not defined anywhere in this module, so
    # calling hardClean raises NameError -- define the allowed-character set
    # (or pass it as a parameter) before using this function.
    return "".join(c for c in text if c.isalnum() or c in allowed)
|
dmlc/tvm | python/tvm/relay/transform/fake_quantization_to_integer.py | Python | apache-2.0 | 15,868 | 0.001765 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Relay functions for rewriting fake quantized ops."""
import numpy as np
import tvm
from tvm import relay
from tvm.ir import TensorAffineType, TupleAffineType
# import to register canonicalization funcs for fq2i
# pylint: disable=unused-import
from tvm.relay.qnn.op import canonicalizations
from tvm.tir import bijective_layout
from ..op import register_fake_quantization_to_integer
def fold_constant(expr):
    """Constant-fold *expr* inside a throwaway IRModule and return the result."""
    return relay.transform.FoldConstantExpr(expr, tvm.IRModule())
def get_zeros(scale):
    """Return a folded int32 zeros tensor shaped like *scale* (zero point of 0)."""
    return fold_constant(relay.op.cast(relay.op.zeros_like(scale), "int32"))
def infer_shape(expr):
    """Run type inference on *expr* and return the shape of its checked type."""
    return relay.transform.InferType()(tvm.IRModule.from_expr(expr))["main"].body.checked_type.shape
def approx_equal(x, y):
    """Return True when the constant-folded forms of *x* and *y* match.

    Two relay Constants are compared numerically via np.allclose; any other
    pair falls back to structural equality of the folded expressions.
    """
    folded_x = fold_constant(x)
    folded_y = fold_constant(y)
    if isinstance(folded_x, relay.Constant) and isinstance(folded_y, relay.Constant):
        return np.allclose(folded_x.data.asnumpy(), folded_y.data.asnumpy())
    return tvm.ir.structural_equal(folded_x, folded_y)
@register_fake_quantization_to_integer("qnn.dequantize")
def dequantize(expr, type_map):
    """Remove dequantize op"""
    # Pass the still-quantized input straight through, tagged with the affine
    # type that was recorded for the dequantize expression itself.
    out = expr.args[0]
    t = type_map[expr]
    return [out, t]
@register_fake_quantization_to_integer("qnn.quantize")
def quantize(expr, type_map):
    """Turn a quantize op into requantize or remove it"""
    out = expr.args[0]
    t = type_map[out]
    in_scale = fold_constant(t.scale)
    in_zero_point = fold_constant(t.zero_point)
    # Only insert a requantize when the input's affine parameters differ from
    # what this quantize op requests; otherwise the op is a no-op and dropped.
    if not (
        approx_equal(in_scale, expr.args[1])
        and approx_equal(in_zero_point, expr.args[2])
        and tvm.ir.structural_equal(t.dtype, expr.attrs.out_dtype)
    ):
        out = relay.qnn.op.requantize(
            out,
            in_scale,
            in_zero_point,
            expr.args[1],
            expr.args[2],
            out_dtype=expr.attrs.out_dtype,
            axis=t.axis,
        )
    # The result carries the affine type requested by the quantize op.
    return [
        out,
        TensorAffineType(expr.args[1], expr.args[2], expr.attrs.out_dtype, expr.attrs.axis),
    ]
def register_unary_identity(op_name):
    """Register *op_name* as quantization-transparent.

    The op is kept as-is and its output inherits the affine type of its
    single input, since the operation does not change the quantization.
    """
    def identity(expr, type_map):
        assert len(expr.args) == 1
        arg = expr.args[0]
        t = type_map[arg]
        return [expr, t]
    return register_fake_quantization_to_integer(op_name, identity)

# Ops that only move/select values (or are monotonic, like max/min) and so
# leave the quantization parameters of their input unchanged.
register_unary_identity("reshape")
register_unary_identity("squeeze")
register_unary_identity("strided_slice")
register_unary_identity("transpose")
register_unary_identity("expand_dims")
register_unary_identity("nn.max_pool2d")
register_unary_identity("nn.batch_flatten")
register_unary_identity("nn.depth_to_space")
register_unary_identity("max")
register_unary_identity("min")
@register_fake_quantization_to_integer("nn.avg_pool2d")
def avgpool2d(expr, type_map):
    """Rewrite a fake-quantized avg_pool2d to run on the quantized values.

    The pooling is performed in int32 and the result is cast back to the
    input dtype; the affine type is unchanged.
    """
    data = expr.args[0]
    affine = type_map[data]
    pooled = relay.op.nn.avg_pool2d(relay.op.cast(data, "int32"), **expr.attrs)
    return [relay.op.cast(pooled, affine.dtype), affine]
@register_fake_quantization_to_integer("nn.global_avg_pool2d")
def global_avgpool2d(expr, type_map):
    """Rewrite a fake-quantized global_avg_pool2d to run on quantized values.

    Averaging is done in int32, then cast back; the affine type is unchanged.
    """
    data = expr.args[0]
    affine = type_map[data]
    pooled = relay.op.nn.global_avg_pool2d(relay.op.cast(data, "int32"))
    return [relay.op.cast(pooled, affine.dtype), affine]
@register_fake_quantization_to_integer("broadcast_to")
def broadcast_to(expr, type_map):
    """Rewrite a fake-quantized broadcast_to.

    Broadcasting replicates values without changing them, so the output
    inherits the input's affine type unchanged.
    """
    data = expr.args[0]
    affine = type_map[data]
    return [relay.op.broadcast_to(data, expr.attrs.shape), affine]
@register_fake_quantization_to_integer("nn.bias_add")
def bias_add(expr, type_map):
    """Rewrite a bias_add op"""
    x, b = expr.args
    x_t = type_map[x]
    b_t = type_map[b]
    in_scale = fold_constant(x_t.scale)
    in_zero_point = fold_constant(x_t.zero_point)
    # The bias can only be added directly if it shares the data's affine
    # parameters; otherwise requantize the bias into the data's space first.
    if not (
        approx_equal(x_t.scale, b_t.scale)
        and approx_equal(x_t.zero_point, b_t.zero_point)
        and tvm.ir.structural_equal(x_t.dtype, b_t.dtype)
    ):
        b = relay.qnn.op.requantize(
            b,
            b_t.scale,
            b_t.zero_point,
            in_scale,
            in_zero_point,
            out_dtype=x_t.dtype,
            axis=0,
        )
    out = relay.op.nn.bias_add(x, b, **expr.attrs)
    # Output keeps the data's affine type.
    return [out, x_t]
@register_fake_quantization_to_integer("nn.conv2d")
def conv2d(expr, type_map):
    """Rewrite a conv2d op"""
    attrs = {**expr.attrs}
    attrs.pop("out_dtype")
    x, weight = expr.args
    x_t = type_map[x]
    w_t = type_map[weight]
    # The accumulator's scale is the product of the input scales, with a
    # zero zero-point.
    conv_scale = fold_constant(x_t.scale * w_t.scale)
    conv_zp = get_zeros(conv_scale)
    out = relay.qnn.op.conv2d(
        x, weight, x_t.zero_point, w_t.zero_point, x_t.scale, w_t.scale, **attrs
    )
    # Per-channel quantization axis = the channel ("C") axis of the output layout.
    out_layout = attrs["out_layout"] if attrs["out_layout"] != "" else attrs["data_layout"]
    out_axis = bijective_layout(out_layout, "NCHW").backward_index(list(range(4)))[1]
    return [out, TensorAffineType(conv_scale, conv_zp, out.attrs.out_dtype, out_axis.value)]
@register_fake_quantization_to_integer("nn.conv2d_transpose")
def conv2d_transpose(expr, type_map):
    """Rewrite a conv2d_transpose op"""
    attrs = {**expr.attrs}
    attrs.pop("out_dtype")
    x, weight = expr.args
    x_t = type_map[x]
    w_t = type_map[weight]
    # Same scale composition as conv2d: product of input scales, zero zero-point.
    conv_scale = fold_constant(x_t.scale * w_t.scale)
    conv_zp = get_zeros(conv_scale)
    out = relay.qnn.op.conv2d_transpose(
        x, weight, x_t.zero_point, w_t.zero_point, x_t.scale, w_t.scale, **attrs
    )
    # Per-channel quantization axis = the channel ("C") axis of the output layout.
    out_layout = attrs["out_layout"] if attrs["out_layout"] != "" else attrs["data_layout"]
    out_axis = bijective_layout(out_layout, "NCHW").backward_index(list(range(4)))[1]
    return [out, TensorAffineType(conv_scale, conv_zp, out.attrs.out_dtype, out_axis.value)]
@register_fake_quantization_to_integer("nn.dense")
def dense(expr, type_map):
    """Rewrite a dense op"""
    attrs = {**expr.attrs}
    attrs.pop("out_dtype")
    x, weight = expr.args
    x_t = type_map[x]
    w_t = type_map[weight]
    # Accumulator scale = product of input scales, with a zero zero-point.
    dense_scale = fold_constant(x_t.scale * w_t.scale)
    dense_zp = get_zeros(dense_scale)
    out = relay.qnn.op.dense(
        x, weight, x_t.zero_point, w_t.zero_point, x_t.scale, w_t.scale, **attrs
    )
    # The output-units dimension of dense is axis 1.
    return [out, TensorAffineType(dense_scale, dense_zp, out.attrs.out_dtype, 1)]
@register_fake_quantization_to_integer("nn.batch_matmul")
def batch_matmul(expr, type_map):
    """Rewrite a batch_matmul op"""
    x, y = expr.args
    x_t = type_map[x]
    y_t = type_map[y]
    # Accumulator scale = product of operand scales, with a scalar zero zero-point.
    matmul_scale = fold_constant(x_t.scale * y_t.scale)
    matmul_zp = relay.const(0)
    out = relay.qnn.op.batch_matmul(x, y, x_t.zero_point, y_t.zero_point, x_t.scale, y_t.scale)
    return [out, TensorAffineType(matmul_scale, matmul_zp, out.attrs.out_dtype, x_t.axis)]
@register_fake_quantization_to_integer("concatenate")
def concat(expr, type_map):
    """Rewrite a concat op"""
    # Gather the per-input scales and zero points from the tuple's affine type;
    # qnn.concatenate requantizes each input into the common output space.
    scales = []
    zps = []
    tuple_type = type_map[expr.args[0]]
    for t in tuple_type.types:
        scales.append(t.scale)
        zps.append(t.zero_point)
    out_type = type_map[expr]
    out = relay.qnn.op.concatenate(
        expr.args[0],
        relay.Tuple(scales),
        relay.Tuple(zps),
        out_type.scale,
        out_type.zero_point,
        **expr.attrs,
    )
    return [out, out_type]
@register_fake_quantization_to_integer("topk")
def topk(expr, type_map):
"""Rewrite a topk o |
prmtl/fuel-web | shotgun/shotgun/logger.py | Python | apache-2.0 | 1,285 | 0 | # Copyright 2013 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
| # a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the Lic | ense is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import logging
from shotgun.settings import LOG_FILE
def configure_logger():
    """Attach DEBUG-level console and file handlers to the 'shotgun' logger.

    Both handlers share the same timestamped format; the file handler
    writes to LOG_FILE from shotgun.settings.
    """
    logger = logging.getLogger('shotgun')
    logger.setLevel(logging.DEBUG)
    formatter = logging.Formatter(
        '%(asctime)s %(levelname)s %(process)d (%(module)s) %(message)s',
        "%Y-%m-%d %H:%M:%S")
    # Console handler first, then the log file -- identical level and format.
    for handler in (logging.StreamHandler(), logging.FileHandler(LOG_FILE)):
        handler.setLevel(logging.DEBUG)
        handler.setFormatter(formatter)
        logger.addHandler(handler)
|
mbarylsk/goldbach-partition | goldbach-fast_conf-prim_check-distr.py | Python | gpl-3.0 | 3,474 | 0.012953 | #
# Copyright (c) 2016 - 2017, Marcin Barylski
# All rights reserved.
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
# INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA,
# OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY
# OF SUCH DAMAGE.
#
import math
import unittest
import os
import time
import numpy as np
import goldbach
import sys
sys.path.insert(0, '..\\primes\\')
import primes
import dataprocessing
#############################################################
# Settings - configuration
#############################################################
# Minimal even number checked against Goldbach conjecture
minimum_n = 6
# Maximum even number checked against Goldbach conjecture
maximum_n = 10000000
# Chunk size for distributed computation
max_chunk_size = 100000
# Caching previous primality results
# o True - auxilary sets of primes and composite numbers will grow
# it will speed up further primality tests but more RAM will
# be occupied
# o False - do not cache new primality test results
caching_primality_results = False
# Helper files
# o file_input_primes - contains prime numbers
# o file_input_nonprimes - contains composite numbers
file_input_primes = '..\\primes\\t_prime_numbers.txt'
file_input_nonprimes = '..\\primes\\t_nonprime_numbers.txt'
#############################################################
# Main
#############################################################
print ("Initialize objects...")
p = primes.Primes(caching_primality_results)
gp = goldbach.GoldbachPartition (p)
dp = dataprocessing.DataProcessing()
print ("DONE")
print ("Loading helper sets...")
# Pre-load known primes (True) and known composites (False) to speed up tests.
p.init_set(file_input_primes, True)
p.init_set(file_input_nonprimes, False)
print ("DONE")
print ("Sorting primes...")
p.sort_prime_set()
print ("DONE")
print ("Verification for all even numbers from", minimum_n, "to", maximum_n, "started ...")
i = 1
n_of_chunks = (maximum_n - minimum_n) / 2 / max_chunk_size
chunks = dp.divide_list_into_chunks (range(minimum_n, maximum_n, 2), max_chunk_size)
for chunk in chunks:
    for n in chunk:
        # Search for a Goldbach partition p1 + p2 = n starting from the
        # smallest odd prime.
        p1 = 3
        p2 = n - p1
        # NOTE(review): the lambda parameter 'i' shadows the chunk counter 'i'
        # above; harmless as written, but rename if the outer i is ever needed.
        (p1, p2, d, iters) = gp.search_for_partition (p1, p2, lambda i: gp.delta_prime(i))
    # Progress report after each chunk.
    perc = int (100 * i / n_of_chunks)
    print ("    Chunk #", i, ":", chunk, "verified (", perc, "% completed )")
    i+= 1
print ("DONE")
|
yarikoptic/NiPy-OLD | nipy/neurospin/datasets/setup.py | Python | bsd-3-clause | 390 | 0.005128 | def configuration(parent_package='',to | p_path=None):
    # Declare the 'datasets' subpackage and its children for numpy.distutils.
    from numpy.distutils.misc_util import Configuration
    config = Configuration('datasets', parent_package, top_path)
    config.add_subpackage('volumes')
    config.add_subpackage('transforms')
    return config
if __name__ == '__main__':
    # Allow building this subpackage standalone.
    from numpy.distutils.core import setup
    setup(**configuration(top_path='').todict())
|
kwikiel/bounce | models.py | Python | mit | 427 | 0 | from app import db
class Alternative(db.Model):
    """One copy variant of an A/B-test experiment."""
    # NOTE(review): 'id' and 'copy' shadow Python builtins, but renaming them
    # would change the DB schema and call sites, so they are kept as-is.
    id = db.Column(db.Integer, primary_key=True)
    # Experiment name; unique, so there is one row per experiment.
    experiment = db.Column(db.String(500), unique=True)
    # The copy (text) shown for this alternative.
    copy = db.Column(db.String(2500))
    def __init__(self, id, experiment, copy):
        self.id = id
        self.experiment = experiment
        self.copy = copy
    def __repr__(self):
        return "<Alt {0} {1} {2}>".format(self.id, self.experiment, self.copy)
|
thesave/SublimeLinter-contrib-lacheck | linter.py | Python | mit | 650 | 0.010769 | #
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Saverio Giallorenzo
# Copyright (c) 2018 Saverio Giallorenzo
#
# License: MIT
#
from SublimeLinte | r.lint import Linter, util # or NodeLinter, PythonLinter, ComposerLinter, RubyLinter
|
class Lacheck(Linter):
    """SublimeLinter plugin that runs the 'lacheck' LaTeX checker."""
    cmd = 'lacheck ${file}'
    # lacheck writes diagnostics to both stdout and stderr.
    error_stream = util.STREAM_BOTH
    # The first alternative intentionally has no 'message' group: it swallows
    # lacheck's noisy 'possible unwanted space at "{"' warning so that
    # warning is never reported.
    regex = (
        r'.+, line (?P<line>\d+): (?:(possible unwanted space at "{")|(?P<message>.+))'
    )
    defaults = {
        'selector': 'text.tex.latex - meta.block.parameters.knitr - source.r.embedded.knitr',
    }
    # Match the regex across the whole output rather than line by line.
    multiline = True
    line_col_base = (1, 1)
s0r00t/ISN-DTMF | main.py | Python | gpl-3.0 | 2,967 | 0.032501 | #!/usr/bin/env python3
from tkinter import *
# ttk = themed tkinter (except on Linux, apparently)
from tkinter.ttk import *
from dtmf import convertDTMF
# NOTE(review): this shadows the builtin open() with wave.open for the rest of
# the module -- intentional here, but confirm before adding plain-file I/O.
from wave import open
dialWindow = Tk()
dialWindow.title("DTMF Dialer")
# NOTE: the requested number is a string because, per the DTMF standard, it
# may also contain the letters A through D.
number = StringVar()
# Style definitions for the various widgets.
# The dialing buttons.
Style(dialWindow).configure("Dial.TButton", padding=5)
# The number about to be "called".
Style(dialWindow).configure("Nummern.TLabel", font="serif 20")
# The call button.
Style(dialWindow).configure("Call.TButton", background="white", font="serif 30", width=3, foreground="green")
# The hang-up button.
Style(dialWindow).configure("Hang.TButton", background="white", font="serif 30", width=3, foreground="red")
def appendNumber(digit):
    """
    Called whenever a dial button is pressed; appends the digit (max 10).
    """
    global number
    if len(number.get()) < 10:
        number.set(number.get()+digit)
def dialNumber():
    """
    Convert the entered number into DTMF tones using the helper functions
    defined in dtmf.py, writing them to '<number>.wav'.
    """
    nb = number.get()
    if nb == '': return # avoid creating an empty file when there is no number
    finalList, spl = convertDTMF(nb, f, duree, amp)
    # NOTE: why use a list to store the signals?
    # Because that way writeframes is called only once, which greatly
    # speeds up processing.
    with open(nb+".wav", "w") as snd:
        # channel count, sample width, frequency, number of samples
        # (the last two parameters disable compression)
        snd.setparams((1,1,f,spl,"NONE","not compressed"))
        snd.writeframes(b''.join(finalList))
    number.set('') # reset the number being dialed
# The number currently being dialed.
Label(dialWindow, textvariable=number, style="Nummern.TLabel").grid(row=0, column=0, columnspan=10)
# The keypad keys.
DTMFKey = ['1','2','3','4','5','6','7','8','9','*','0','#']
# This variable lets us consume DTMFKey in groups of 3.
start = 0
# For each row...
for i in range(1, 5):
    # ...and each column
    for j in range(3):
        digit = DTMFKey[start+j]
        # The lambda lets us both name the callback and fix its argument;
        # digit=digit binds the button's own digit as the default argument.
        digitBut = Button(dialWindow, text=digit, width=10, style="Dial.TButton", command=lambda digit=digit: appendNumber(digit))
        digitBut.grid(row=i,column=j)
    start = start+3
duree = 0.1 # seconds per digit of the number
# note: the pause between two tones lasts duree/2
f = 8000 # sample frequency
amp = 127.5 # 255/2
Button(dialWindow, text='✆', style="Call.TButton", command=dialNumber).grid(row=5,column=0)
Button(dialWindow, text='☎', style="Hang.TButton", command=lambda: number.set('')).grid(row=5,column=2)
dialWindow.mainloop()
|
elbeardmorez/quodlibet | quodlibet/quodlibet/formats/_serialize.py | Python | gpl-2.0 | 6,347 | 0.000788 | # -*- coding: utf-8 -*-
# Copyright 2016 Christoph Reiter
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
"""Code for serializing AudioFile instances"""
import pickle
from senf import bytes2fsn, fsn2bytes, fsnative
from quodlibet.util.picklehelper import pickle_loads, pickle_dumps
from quodlibet.util import is_windows
from quodlibet.compat import PY3, text_type
from ._audio import AudioFile
class SerializationError(Exception):
    """Raised when an AudioFile item list cannot be pickled or unpickled."""
    pass
def _py2_to_py3(items):
    """Mutate Python-2-pickled song dicts into Python-3 form, in place.

    Keys become text; path values ("~filename"/"~mountpoint") become
    fsnative; other bytes values are decoded, and lone surrogates are
    stripped so everything round-trips through UTF-8.
    Raises SerializationError if an entry is not dict-like.
    """
    assert PY3
    for i in items:
        try:
            l = list(i.items())
        except AttributeError:
            raise SerializationError
        # Rebuild each dict from scratch with normalized keys/values.
        i.clear()
        for k, v in l:
            if isinstance(k, bytes):
                k = k.decode("utf-8", "replace")
            else:
                # strip surrogates
                try:
                    k.encode("utf-8")
                except UnicodeEncodeError:
                    k = k.encode("utf-8", "replace").decode("utf-8")
            if k == "~filename" or k == "~mountpoint":
                if isinstance(v, bytes):
                    try:
                        v = bytes2fsn(v, "utf-8")
                    except ValueError:
                        # just in case, only on Windows
                        assert is_windows()
                        v = v.decode("utf-8", "replace")
            elif isinstance(v, bytes):
                v = v.decode("utf-8", "replace")
            elif isinstance(v, text_type):
                # strip surrogates
                try:
                    v.encode("utf-8")
                except UnicodeEncodeError:
                    v = v.encode("utf-8", "replace").decode("utf-8")
            i[k] = v
    return items
def _py3_to_py2(items):
    """Convert Python-3 song dicts into a form Python 2 can unpickle.

    Path values are encoded to bytes (except on Windows) and ASCII keys
    become bytes.  New instances are built via dict.__new__ so any
    __setitem__ validation on the item class is bypassed.  Returns a new
    list; the input is left untouched.
    """
    assert PY3
    is_win = is_windows()
    new_list = []
    for i in items:
        inst = dict.__new__(i.__class__)
        for key, value in i.items():
            if key in ("~filename", "~mountpoint") and not is_win:
                value = fsn2bytes(value, None)
            try:
                key = key.encode("ascii")
            except UnicodeEncodeError:
                # Non-ASCII keys stay as text.
                pass
            dict.__setitem__(inst, key, value)
        new_list.append(inst)
    return new_list
def _py2_to_py2(items):
    """Normalize value types of Python-2 song dicts, in place.

    AudioFile.__setitem__ wasn't always strict about types, so old pickles
    may contain wrong-typed values.  Per key, this coerces:

    * "~filename"/"~mountpoint" -> fsnative (utf-8, best effort)
    * "~#..." numeric keys      -> int, then float, else 0
    * everything else           -> text

    Mutates and returns *items*.  Raises SerializationError if an entry is
    not dict-like (has no iteritems).
    """
    assert not PY3

    fsn_type = type(fsnative())
    fixups = []
    for i in items:
        try:
            it = i.iteritems()
        except AttributeError:
            raise SerializationError
        for k, v in it:
            if k in ("~filename", "~mountpoint"):
                if not isinstance(v, fsn_type):
                    # use utf-8 here since we can't be sure that the environ
                    # is the same as before
                    if isinstance(v, text_type):
                        v = v.encode("utf-8", "replace")
                    else:
                        v = v.decode("utf-8", "replace")
                    fixups.append((i, k, v))
            elif k[:2] == "~#":
                try:
                    v + 0
                except Exception:
                    # Best-effort numeric coercion; never let one odd value
                    # abort loading.  'except Exception' (not a bare except)
                    # so KeyboardInterrupt/SystemExit still propagate.
                    try:
                        fixups.append((i, k, int(v)))
                    except Exception:
                        try:
                            fixups.append((i, k, float(v)))
                        except Exception:
                            fixups.append((i, k, 0))
            elif not isinstance(v, text_type):
                if isinstance(v, bytes):
                    fixups.append((i, k, v.decode("utf-8", "replace")))
                else:
                    fixups.append((i, k, text_type(v)))
    # Apply after iterating so the dicts aren't mutated mid-iteration.
    for item, key, value in fixups:
        item[key] = value
    return items
def load_audio_files(data, process=True):
    """unpickles the item list and if some class isn't found unpickle
    as a dict and filter them out afterwards.
    In case everything gets filtered out will raise SerializationError
    (because then likely something larger went wrong)
    Args:
        data (bytes)
        process (bool): if the dict key/value types should be converted,
            either to be usable from py3 or to convert to newer types
    Returns:
        List[AudioFile]
    Raises:
        SerializationError
    """
    # Placeholder class used for items whose real class can't be imported;
    # such items are filtered out below.
    dummy = type("dummy", (dict,), {})
    error_occured = []
    temp_type_cache = {}
    def lookup_func(base, module, name):
        # Resolve pickled class references; substitute 'dummy' on failure.
        try:
            real_type = base(module, name)
        except (ImportError, AttributeError):
            error_occured.append(True)
            return dummy
        if module.split(".")[0] not in ("quodlibet", "tests"):
            return real_type
        # return a straight dict subclass so that unpickle doesn't call
        # our __setitem__. Further down we simply change the __class__
        # to our real type.
        if not real_type in temp_type_cache:
            new_type = type(name, (dict,), {"real_type": real_type})
            temp_type_cache[real_type] = new_type
        return temp_type_cache[real_type]
    try:
        items = pickle_loads(data, lookup_func)
    except pickle.UnpicklingError as e:
        raise SerializationError(e)
    if error_occured:
        items = [i for i in items if not isinstance(i, dummy)]
        if not items:
            raise SerializationError(
                "all class lookups failed. something is wrong")
    if process:
        if PY3:
            items = _py2_to_py3(items)
        else:
            items = _py2_to_py2(items)
    # Swap each temporary dict subclass for the real project class.
    try:
        for i in items:
            i.__class__ = i.real_type
    except AttributeError as e:
        raise SerializationError(e)
    return items
def dump_audio_files(item_list, process=True):
    """Pickles a list of AudioFiles
    Args:
        item_list (List[AudioFile])
        process (bool): convert values to a Python-2-compatible form first
    Returns:
        bytes
    Raises:
        SerializationError
    """
    assert isinstance(item_list, list)
    assert not item_list or isinstance(item_list[0], AudioFile)
    if PY3 and process:
        item_list = _py3_to_py2(item_list)
    try:
        # Protocol 2 keeps the output loadable by Python 2.
        return pickle_dumps(item_list, 2)
    except pickle.PicklingError as e:
        raise SerializationError(e)
|
vgaonkar/GaveltonLibrary-Python | GaveltonLibrary/main.py | Python | gpl-3.0 | 2,980 | 0.007047 | #main.py
#Gaonkar, Vijay
#vrgaonkar
from __future__ import print_function
from SearchEngine import SearchEngine
search_engine = SearchEngine()
results = []
choice = 0
# NOTE(review): this is a Python 2 script (raw_input, and input() evaluates
# its argument as an expression here -- menu choices are compared as ints).
# Main menu loop: repeat until the user picks option 5 (Quit).
while choice != 5:
    print("\n\t\t\t ############################### GAVELTON LIBRARY ############################### \n"
          "\n Welcome! Looking for something? I can help!")
    search_str = raw_input("\n Enter a word or a phrase to get started: ")
    print("\n How do you wanna search?\n"
          " 1. Search by call number\n"
          " 2. Search by title\n"
          " 3. Search by subject\n"
          " 4. Search by other\n"
          " 5. Quit\n")
    choice = input(" Your Choice: ")
    # Dispatch to the chosen search and display any results.
    if choice == 1:
        results = search_engine.search_by_call_no(search_str)
        if len(results) > 0:
            print("\n\t\t\t ************************** Search Results ************************** ")
            for item in results:
                item.display()
        else:
            print("\n Sorry no results found with <" + search_str + "> in call number")
    elif choice == 2:
        results = search_engine.search_by_title(search_str)
        if len(results) > 0:
            print("\t\t\t ************************** Search Results ************************** ")
            for item in results:
                item.display()
        else:
            print("\n Sorry no results found with <" + search_str + "> in title")
    elif choice == 3:
        results = search_engine.search_by_subject(search_str)
        if len(results) > 0:
            print("\t\t\t ************************** Search Results ************************** ")
            for item in results:
                item.display()
        else:
            print("\n Sorry no results found with <" + search_str + "> in subject")
    elif choice == 4:
        results = search_engine.search_by_other(search_str)
        if len(results) > 0:
            print("\t\t\t ************************** Search Results ************************** ")
            for item in results:
                item.display()
        else:
            print("\n Sorry no results found with <" + search_str + ">")
    elif choice == 5:
        exit()
    else:
        # Invalid choice: re-show the menu and ask once more.
        print("\n How do you wanna search?\n" +
              " 1. Search by call number\n" +
              " 2. Search by title\n" +
              " 3. Search by subject\n" +
              " 4. Search by other\n" +
              " 5. Quit\n")
        choice = input(" Your Choice: ")
    print("\n Found what you were looking for?\n" +
          " 1. Yes, done searching\n" +
          " 2. No, start a new search\n")
    end_choice = input(" Your Choice: ")
    # NOTE(review): end_choice is never 3, so this is effectively an infinite
    # loop exited only via exit() (choice 1) or break (choice 2).
    while end_choice != 3:
        if end_choice == 1:
            exit()
        elif end_choice == 2:
            break
        else:
            print("\n Found what you were looking for?\n" +
                  " 1. Yes, done searching\n" +
                  " 2. No, start a new search\n")
            end_choice = input(" Your Choice: ")
mrknow/filmkodi | plugin.video.mrknow/mylib/tests_pydevd_mainloop/gui-gtk.py | Python | apache-2.0 | 840 | 0.002381 | #!/usr/bin/env python
"""Simple GTK example to manually test event loop integration.
To run this:
1) Enable the PyDev GUI event loop integration for gtk
2) do an execfile on this script
3) ensure you have a working GUI simultaneously with an
interactive console
"""
if __name__ == '__main__':
    import pygtk
    pygtk.require('2.0')
    import gtk
    # Fix: the original named this parameter 'wigdet' (typo for 'widget').
    def hello_world(widget, data=None):
        # "clicked" handler for the button.
        print("Hello World")
    def delete_event(widget, event, data=None):
        # Returning False lets GTK proceed to emit "destroy".
        return False
    def destroy(widget, data=None):
        # Quit the GTK main loop once the window is destroyed.
        gtk.main_quit()
    window = gtk.Window(gtk.WINDOW_TOPLEVEL)
    window.connect("delete_event", delete_event)
    window.connect("destroy", destroy)
    button = gtk.Button("Hello World")
    button.connect("clicked", hello_world, None)
    window.add(button)
    button.show()
    window.show()
|
vkosuri/ChatterBot | tests/logic/test_mathematical_evaluation.py | Python | bsd-3-clause | 4,970 | 0.000402 | from tests.base_case import ChatBotTestCase
from chatterbot.logic import MathematicalEvaluation
from chatterbot.conversation import Statement
class MathematicalEvaluationTests(ChatBotTestCase):
    """Tests for the MathematicalEvaluation logic adapter.

    Each test feeds a natural-language math question through the adapter
    and checks the exact '<expression> = <result>' response text plus a
    confidence of 1.
    """
    def setUp(self):
        super().setUp()
        self.adapter = MathematicalEvaluation(self.chatbot)
    def test_can_process(self):
        statement = Statement(text='What is 10 + 10 + 10?')
        self.assertTrue(self.adapter.can_process(statement))
    def test_can_not_process(self):
        statement = Statement(text='What is your favorite song?')
        self.assertFalse(self.adapter.can_process(statement))
    def test_addition_operator(self):
        statement = Statement(text='What is 100 + 54?')
        response = self.adapter.process(statement)
        self.assertEqual(response.text, '100 + 54 = 154')
        self.assertEqual(response.confidence, 1)
    def test_subtraction_operator(self):
        statement = Statement(text='What is 100 - 58?')
        response = self.adapter.process(statement)
        self.assertEqual(response.text, '100 - 58 = 42')
        self.assertEqual(response.confidence, 1)
    def test_multiplication_operator(self):
        statement = Statement(text='What is 100 * 20')
        response = self.adapter.process(statement)
        self.assertEqual(response.text, '100 * 20 = 2000')
        self.assertEqual(response.confidence, 1)
    def test_division_operator(self):
        statement = Statement(text='What is 100 / 20')
        response = self.adapter.process(statement)
        self.assertEqual(response.text, '100 / 20 = 5')
        self.assertEqual(response.confidence, 1)
    def test_exponent_operator(self):
        statement = Statement(text='What is 2 ^ 10')
        response = self.adapter.process(statement)
        self.assertEqual(response.text, '2 ^ 10 = 1024')
        self.assertEqual(response.confidence, 1)
    def test_parenthesized_multiplication_and_addition(self):
        statement = Statement(text='What is 100 + ( 1000 * 2 )?')
        response = self.adapter.process(statement)
        self.assertEqual(response.text, '100 + ( 1000 * 2 ) = 2100')
        self.assertEqual(response.confidence, 1)
    def test_parenthesized_with_words(self):
        statement = Statement(text='What is four plus 100 + ( 100 * 2 )?')
        response = self.adapter.process(statement)
        self.assertEqual(response.text, 'four plus 100 + ( 100 * 2 ) = 304')
        self.assertEqual(response.confidence, 1)
    def test_word_numbers_addition(self):
        statement = Statement(text='What is one hundred + four hundred?')
        response = self.adapter.process(statement)
        self.assertEqual(response.text, 'one hundred + four hundred = 500')
        self.assertEqual(response.confidence, 1)
    def test_word_division_operator(self):
        statement = Statement(text='What is 100 divided by 100?')
        response = self.adapter.process(statement)
        self.assertEqual(response.text, '100 divided by 100 = 1')
        self.assertEqual(response.confidence, 1)
    def test_large_word_division_operator(self):
        statement = Statement(text='What is one thousand two hundred four divided by one hundred?')
        response = self.adapter.process(statement)
        self.assertEqual(response.text, 'one thousand two hundred four divided by one hundred = 12.04')
        self.assertEqual(response.confidence, 1)
    def test_negative_multiplication(self):
        statement = Statement(text='What is -105 * 5')
        response = self.adapter.process(statement)
        self.assertEqual(response.text, '-105 * 5 = -525')
        self.assertEqual(response.confidence, 1)
    def test_negative_decimal_multiplication(self):
        statement = Statement(text='What is -100.5 * 20?')
        response = self.adapter.process(statement)
        self.assertEqual(response.text, '-100.5 * 20 = -2010.0')
        self.assertEqual(response.confidence, 1)
    def test_pi_constant(self):
        statement = Statement(text='What is pi plus one ?')
        response = self.adapter.process(statement)
        self.assertEqual(response.text, 'pi plus one = 4.141693')
        self.assertEqual(response.confidence, 1)
    def test_e_constant(self):
        statement = Statement(text='What is e plus one ?')
        response = self.adapter.process(statement)
        self.assertEqual(response.text, 'e plus one = 3.718281')
        self.assertEqual(response.confidence, 1)
    def test_log_function(self):
        statement = Statement(text='What is log 100 ?')
        response = self.adapter.process(statement)
        self.assertEqual(response.text, 'log 100 = 2.0')
        self.assertEqual(response.confidence, 1)
    def test_square_root_function(self):
        statement = Statement(text='What is the sqrt 144 ?')
        response = self.adapter.process(statement)
        self.assertEqual(response.text, 'sqrt 144 = 12.0')
        self.assertEqual(response.confidence, 1)
|
orende/intro-till-python | ex5d.py | Python | mit | 173 | 0.011561 | def fizzbuzz(x):
    # Multiples of 15 -> 'fizzbuzz', of 3 -> 'fizz', of 5 -> 'buzz',
    # anything else -> the number itself.
    return 'fizzbuzz' if x % 15 == 0 else 'fizz' if x % 3 == 0 else 'buzz' if x % 5 == 0 else x
# Print the FizzBuzz sequence for 1..100 (Python 2 print statement).
for y in [fizzbuzz(x) for x in range(1, 101)]:
    print y
|
pyblish/pyblish-nukestudio | pyblish_nukestudio/__init__.py | Python | lgpl-3.0 | 108 | 0 | from .version | import *
from .lib import (
show,
setup,
register_plugins | ,
add_to_filemenu,
)
|
drufat/vispy | examples/benchmark/simple_glut.py | Python | bsd-3-clause | 1,363 | 0 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# vispy: testskip
# -----------------------------------------------------------------------------
# Copyright (c) 2015, Vispy Development Team.
# Distributed under the (new) BSD License. See LICENSE.txt for more info.
# -----------------------------------------------------------------------------
from vispy.gloo import gl
def on_display():
    # Minimal draw callback: clear and swap only, so the measured FPS
    # reflects event-loop overhead rather than rendering cost.
    gl.glClear(gl.GL_COLOR_BUFFER_BIT | gl.GL_DEPTH_BUFFER_BIT)
    glut.glutSwapBuffers()
def on_keyboard(key, x, y):
    """Exit the benchmark on the escape key; ignore every other key."""
    if key != '\033':
        return
    sys.exit()
def on_idle():
    # Count frames and print the average FPS every 2.5 s of wall time,
    # then reset the window.  Uses module globals t, t0 and frames.
    global t, t0, frames
    t = glut.glutGet(glut.GLUT_ELAPSED_TIME)  # milliseconds since glutInit
    frames = frames + 1
    elapsed = (t - t0) / 1000.0
    if elapsed > 2.5:
        print("FPS : %.2f (%d frames in %.2f second)"
              % (frames / elapsed, frames, elapsed))
        t0, frames = t, 0
    glut.glutPostRedisplay()
if __name__ == '__main__':
    # Benchmark entry point: create a double-buffered RGB+depth window and
    # run the GLUT main loop with the idle-driven FPS counter above.
    import sys
    import OpenGL.GLUT as glut
    glut.glutInit(sys.argv)
    glu | t.glutInitDisplayMode(
        glut.GLUT_DOUBLE | glut.GLUT_RGB | | glut.GLUT_DEPTH)
    glut.glutInitWindowSize(512, 512)
    glut.glutCreateWindow("Do nothing benchmark (GLUT)")
    glut.glutDisplayFunc(on_display)
    glut.glutKeyboardFunc(on_keyboard)
    t0, frames, t = glut.glutGet(glut.GLUT_ELAPSED_TIME), 0, 0
    glut.glutIdleFunc(on_idle)
    glut.glutMainLoop()
|
chen940303/Diaosier_home | app/decorators.py | Python | mit | 698 | 0.013289 | #-*-coding:utf-8-*-
from functools import wraps
from flask import abort
from flask.ext.login import current_user
from models import Permission
def permission_required(permission): # checks a permission, just like checking a login
    """Decorator factory: abort with HTTP 403 unless ``current_user.can``
    grants *permission* to the current user."""
    def decorator(f):
        @wraps(f)
        def decorated_function(*arg | s, **kwargs):
            if not current_user.can(permission):
                abort(403) # the request is interrupted here and jumps to the 403 handler
            return f(*args, **kwargs)
        return decorated_function
    return decorator
# Closure-based decorator factory taking an argument: two nested returns.
def admin_required(f):
    """Shortcut decorator requiring the ADMINISTER permission."""
    return permission_required(Permission.ADMINISTER)(f)
# checked ADMINISTER
MaximumRoot/MaxRootWeb | webMaker/maxrootweb.py | Python | mit | 718 | 0.01532 |
import os
import shutil
def pre_read(dir):
    # Recursively scan `dir` for '*_main.maxroot' files, skipping the
    # 'webMager' and '_site' directories.
    # NOTE(review): os.listdir() returns bare names, so os.path.isdir(file)
    # and the recursive call are resolved against the current working
    # directory; there is no matching os.chdir(file) before recursing even
    # though os.chdir('..') runs on the way out - confirm intent.
    try:
        filelist = os.listdir(dir)
        for file in filelist:
            if os.path.isdir(file):
                # exception directory
                | if str(file) == 'webMager' or str(file) == '_site':
                    continue
                | pre_read(file)
            else:
                # read *_main.maxroot file
                ext = str(file)
                if ext.endswith('_main.maxroot'):
                    # load...
                    a=1
    except PermissionError:
        pass
    os.chdir('..')
# Module-level driver: remember the project root and the '_site' output dir.
root_path = os.path.abspath(os.curdir)
result_path = os.path.join(root_path, '_site')
# NOTE(review): the bare expression 't' below raises NameError at import
# time - it looks like truncated code; confirm what was intended here.
t
|
springmerchant/pybbm | pybb/models.py | Python | bsd-2-clause | 17,800 | 0.002303 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.core.urlresolvers import reverse
from django.db import models, transaction, DatabaseError
from django.template.defaultfilters import slugify
from django.utils.encoding import python_2_unicode_compatible
from django.utils.functional import cached_property
from django.utils.html import strip_tags
from django.utils.translation import ugettext_lazy as _
from django.utils.timezone import now as tznow
from pybb.compat import get_user_model_path, get_username_field, get_atomic_func
from pybb import defaults
from pybb.profiles import PybbProfile
from pybb.util import unescape, FilePathGenerator, _get_markup_formatter
from annoying.fields import AutoOneToOneField
try:
from south.modelsinspector import add_introspection_rules
add_introspection_rules([], ["^annoying\.fields\.JSONField"])
add_introspection_rules([], ["^annoying\.fields\.AutoOneToOneField"])
except ImportError:
pass
@python_2_unicode_compatible
class Category(models.Model):
    # Top-level container for forums; `hidden` restricts visibility to staff.
    name = models.CharField(_('Name'), max_length=80)
    position = models.IntegerField(_('Position'), blank=True, default=0)
    hidden = models.BooleanField(_('Hidden'), blank=False, null=False, default=False,
                                 help_text=_('If checked, this category will be visible only for staff'))
    slug = models.SlugField(_("Slug"), max_length=100, unique=True)
    class Meta(object):
        ordering = ['position']
        verbose_name = _('Category')
        verbose_name_plural = _('Categories')
    def __str__(self):
        return self.name
    def forum_count(self):
        # Number of forums attached via Forum.category's related_name.
        return self.forums.all().count()
    def get_absolute_url(self):
        # Slug-based URL when PYBB_NICE_URL is enabled, pk-based otherwise.
        if defaults.PYBB_NICE_URL:
            return reverse('pybb:category', kwargs={'slug': self.slug, })
        return reverse('pybb:category', kwargs={'pk': self.id})
    @property
    def topics(self):
        # All topics anywhere under this category.
        return Topic.objects.filter(forum__category=self).select_related()
    @property
    def posts(self):
        # All posts anywhere under this category.
        return Post.objects.filter(topic__forum__category=self).select_related()
return Post.objects.filter(topic__forum__category=self).select_related()
@python_2_unicode_compatible
class Forum(models.Model):
    # A forum inside a Category; may be nested via `parent`.
    # post_count/topic_count/updated are denormalized caches maintained by
    # update_counters().
    category = models.ForeignKey(Category, related_name='forums', verbose_name=_('Category'))
    parent = models.ForeignKey('self', related_name='child_forums', verbose_name=_('Parent forum'),
                               blank=True, null=True)
    name = models.CharField(_('Name'), max_length=80)
    position = models.IntegerField(_('Position'), blank=True, default=0)
    description = models.TextField(_('Description'), blank=True)
    moderators = models.ManyToManyField(get_user_model_path(), blank=True, null=True, verbose_name=_('Moderators'))
    updated = models.DateTimeField(_('Updated'), blank=True, null=True)
    post_count = models.IntegerField(_('Post count'), blank=True, default=0)
    topic_count = models.IntegerField(_('Topic count'), blank=True, default=0)
    hidden = models.BooleanField(_('Hidden'), blank=False, null=False, default=False)
    readed_by = models.ManyToManyField(get_user_model_path(), through='ForumReadTracker', related_name='readed_forums')
    headline = models.TextField(_('Headline'), blank=True, null=True)
    slug = models.SlugField(verbose_name=_("Slug"), max_length=100)
    class Meta(object):
        ordering = ['position']
        verbose_name = _('Forum')
        verbose_name_plural = _('Forums')
        unique_together = ('category', 'slug')
    def __str__(self):
        return self.name
    def update_counters(self):
        # Recompute the denormalized topic/post counts and last-activity
        # timestamp from the actual rows, then persist this forum.
        self.topic_count = Topic.objects.filter(forum=self).count()
        if self.topic_count:
            posts = Post.objects.filter(topic__forum_id=self.id)
            self.post_count = posts.count()
            if self.post_count:
                try:
                    last_post = posts.order_by('-created', '-id')[0]
                    self.updated = last_post.updated or last_post.created
                except IndexError:
                    pass
        else:
            self.post_count = 0
        self.save()
    def get_absolute_url(self):
        # Slug-based URL when PYBB_NICE_URL is enabled, pk-based otherwise.
        if defaults.PYBB_NICE_URL:
            return reverse('pybb:forum', kwargs={'slug': self.slug, 'category_slug': self.category.slug})
        return reverse('pybb:forum', kwargs={'pk': self.id})
    @property
    def posts(self):
        return Post.objects.filter(topic__forum=self).select_related()
    @cached_property
    def last_post(self):
        # Most recent post, or None for an empty forum (cached per instance).
        try:
            return self.posts.order_by('-created', '-id')[0]
        except IndexError:
            return None
    def get_parents(self):
        """
        Used in templates for breadcrumb building
        """
        parents = [self.category]
        parent = self.parent
        while parent is not None:
            parents.insert(1, parent)
            parent = parent.parent
        return parents
return parents
@python_2_unicode_compatible
class Topic(models.Model):
POLL_TYPE_NONE = 0
POLL_TYPE_SINGLE = 1
POLL_TYPE_MULTIPLE = 2
POLL_TYPE_CHOICES = (
(POLL_TYPE_NONE, _('None')),
(POLL_TYPE_SINGLE, _('Single answer')),
(POLL_TYPE_MULTIPLE, _('Multiple answers')),
)
forum = models.ForeignKey(Forum, related_name='topics', verbose_name=_('Forum'))
name = models.CharField(_('Subject'), max_length=255)
created = models.DateTimeField(_('Created'), null=True)
updated = models.DateTimeField(_('Updated'), null=True)
user = models.ForeignKey(get_user_model_path(), verbose_name=_('User'))
views = models.IntegerField(_('Views count'), blank=True, default=0)
sticky = models.BooleanField(_('Sticky'), blank=True, default=False)
closed = models.BooleanField(_('Closed'), blank=True, default=False)
subscribers = models.ManyToManyField(get_user_model_path(), related_name='subscriptions',
verbose_name=_('Subscribers' | ), b | lank=True)
post_count = models.IntegerField(_('Post count'), blank=True, default=0)
readed_by = models.ManyToManyField(get_user_model_path(), through='TopicReadTracker', related_name='readed_topics')
on_moderation = models.BooleanField(_('On moderation'), default=False)
poll_type = models.IntegerField(_('Poll type'), choices=POLL_TYPE_CHOICES, default=POLL_TYPE_NONE)
poll_question = models.TextField(_('Poll question'), blank=True, null=True)
slug = models.SlugField(verbose_name=_("Slug"), max_length=100)
class Meta(object):
ordering = ['-created']
verbose_name = _('Topic')
verbose_name_plural = _('Topics')
unique_together = ('forum', 'slug')
def __str__(self):
return self.name
@cached_property
def head(self):
try:
return self.posts.all().order_by('created', 'id')[0]
except IndexError:
return None
@cached_property
def last_post(self):
try:
return self.posts.order_by('-created', '-id').select_related('user')[0]
except IndexError:
return None
def get_absolute_url(self):
if defaults.PYBB_NICE_URL:
return reverse('pybb:topic', kwargs={'slug': self.slug, 'forum_slug': self.forum.slug, 'category_slug': self.forum.category.slug})
return reverse('pybb:topic', kwargs={'pk': self.id})
def save(self, *args, **kwargs):
if self.id is None:
self.created = self.updated = tznow()
forum_changed = False
old_topic = None
if self.id is not None:
old_topic = Topic.objects.get(id=self.id)
if self.forum != old_topic.forum:
forum_changed = True
super(Topic, self).save(*args, **kwargs)
if forum_changed:
old_topic.forum.update_counters()
self.forum.update_counters()
def delete(self, using=None):
super(Topic, self).delete(using)
self.forum.update_counters()
def update_counters(self):
self.post_count = self.posts.count()
# force cache overwrite to get the real latest updated post
if hasattr(self, 'last_post'):
del self.last_post
if sel |
GetStream/stream-python | setup.py | Python | bsd-3-clause | 2,165 | 0.000462 | #!/usr/bin/env python
from setuptools import setup, find_packages
from setuptools.command.test import test as TestCommand
from stream import __version__, __maintainer__, __email__, __license__
import sys
# Dependency sets consumed by the setup() call below.
tests_require = ["pytest", "unittest2", "pytest-cov", "python-dateutil"]
ci_require = ["black", "flake8", "pytest-cov"]

# Read the README inside a context manager so the file handle is closed
# deterministically (the previous bare open().read() leaked the handle
# until garbage collection).
with open("README.md", "r") as readme_file:
    long_description = readme_file.read()

install_requires = [
    "pycryptodomex>=3.8.1,<4",
    "requests>=2.3.0,<3",
    "pyjwt>=2.0.0,<3",
    "pytz>=2019.3",
]
class PyTest(TestCommand):
    # `python setup.py test` shim: delegates to pytest with coverage enabled.
    def finalize_option | s(self):
        TestCommand.finalize_options(self)
        self.test_args = []
        self.test_suite = True
    def run_tests(self):
        # import here, cause outside the eggs aren't loaded
        import pytest
        errno = pytest.main(["-v", "--cov=./"])
        sys.exit(errno)
# Package metadata; extras: `pip install stream-python[test]` / `[ci]`.
setup(
    name="stream-python",
    version=__version__,
    author=__maintainer__,
    author_email=__email__,
    url="http://github.com/GetStream/stream-python",
    | description="Client for getstream.io. Build scalable newsfeeds & activity streams in a few hours instead of weeks.",
    long_description=long_description,
    long_description_content_type="text/markdown",
    license=__license__,
    packages=find_packages(),
    zip_safe=False,
    install_requires=install_requires,
    extras_require={"test": tests_require, "ci": ci_require},
    cmdclass={"test": PyTest},
    tests_require=tests_require,
    include_package_data=True,
    classifiers=[
        "Intended Audience :: Developers",
        "Intended Audience :: System Administrators",
        "Operating System :: OS Independent",
        "Topic :: Software Development",
        "Development Status :: 5 - Production/Stable",
        "License :: OSI Approved :: BSD License",
        "Natural Language :: English",
        "Programming Language :: Python :: 3",
        "Programming Language :: Python :: 3.6",
        "Programming Language :: Python :: 3.7",
        "Programming Language :: Python :: 3.8",
        "Programming Language :: Python :: 3.9",
        "Topic :: Software Development :: Libraries :: Python Modules",
    ],
)
|
librallu/cohorte-herald | python/herald/transports/bluetooth/__init__.py | Python | apache-2.0 | 1,915 | 0.000522 | #!/usr/bin/python
# -- Content-Encoding: UTF-8 --
"""
Herald Bluetooth transport implementation
:author: Luc Libralesso
:copyright: Copyright 2014, isandlaTech
:license: Apache License 2.0
:version: 0.0.3
:status: Alpha
..
Copyright 2014 isandlaTech
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in complianc | e with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS | IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
# Documentation strings format
__docformat__ = "restructuredtext en"
# ------------------------------------------------------------------------------
CONTENT_TYPE_JSON = "application/json"
""" MIME type: JSON data """
# ------------------------------------------------------------------------------
ACCESS_ID = "bluetooth"
"""
Access ID used by the Bluetooth transport implementation
"""
# ------------------------------------------------------------------------------
SERVICE_BLUETOOTH_DIRECTORY = "herald.bluetooth.directory"
"""
Specification of the Bluetooth transport directory
"""
SERVICE_BLUETOOTH_RECEIVER = "herald.bluetooth.receiver"
"""
Specification of the Bluetooth transport servlet (reception side)
"""
SERVICE_BLUETOOTH_TRANSPORT = "herald.bluetooth.transport"
"""
Specification of the Bluetooth transport implementation (sending side)
"""
BLUETOOTH_DISCOVERY_SERVICE = "herald.transports.bluetooth.discovery"
"""
Service discovery for bluetooth
"""
BLUETOOTH_MANAGER_SERVICE = "herald.transports.bluetooth.manager"
"""
Service for managing bluetooth connections
""" |
cernops/python-neutronclient | neutronclient/tests/unit/test_cli20_port.py | Python | apache-2.0 | 21,891 | 0 | # Copyright 2012 OpenStack Foundation.
# All Rights Reserved
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
import itertools
import sys
from mox3 import mox
from neutronclient.neutron.v2_0 import port
from neutronclient import shell
from neutronclient.tests.unit import test_cli20
class CLITestV20PortJSON(test_cli20.CLITestV20Base):
    def setUp(self):
        # 'tags' does not pluralize regularly; register the 'tags' -> 'tag'
        # mapping for the base-class helpers.
        super(CLITestV20PortJSON, self).setUp(plurals={'tags': 'tag'})
def test_create_port(self):
"""Create port: netid."""
resource = 'port'
cmd = port.CreatePort(test_cli20.MyApp(sys.stdout), None)
name = 'myname'
myid = 'myid'
netid = 'netid'
args = [netid]
position_names = ['network_id']
position_values = []
position_values.extend([netid])
self._test_create_resource(resource, cmd, name, myid, args,
position_names, position_values)
    def test_create_port_extra_dhcp_opts_args(self):
        """Create port: netid --extra_dhcp_opt."""
        resource = 'port'
        cmd = port.CreatePort(test_cli20.MyApp(sys.stdout), None)
        name = 'myname'
        myid = 'myid'
        netid = 'netid'
        extra_dhcp_opts = [{'opt_name': 'bootfile-name',
                            'opt_value': 'pxelinux.0'},
                           {'opt_name': 'tftp | -server',
                            'opt_value': '123.123.123.123'},
                           {'opt_name': 'server-ip-address',
                            'opt_valu | e': '123.123.123.45'}]
        args = [netid]
        for dhcp_opt in extra_dhcp_opts:
            args += ['--extra-dhcp-opt',
                     ('opt_name=%(opt_name)s,opt_value=%(opt_value)s' %
                      dhcp_opt)]
        position_names = ['network_id', 'extra_dhcp_opts']
        position_values = [netid, extra_dhcp_opts]
        # NOTE(review): netid is already first in position_values; extending
        # with it again looks accidental - confirm against
        # _test_create_resource before removing.
        position_values.extend([netid])
        self._test_create_resource(resource, cmd, name, myid, args,
                                   position_names, position_values)
    def test_create_port_extra_dhcp_opts_args_ip_version(self):
        """Create port: netid --extra_dhcp_opt."""
        # Same as the test above, but each option also carries an explicit
        # ip_version (4 or 6).
        resource = 'port'
        cmd = port.CreatePort(test_cli20.MyApp(sys.stdout), None)
        name = 'myname'
        myid = 'myid'
        netid = 'netid'
        extra_dhcp_opts = [{'opt_name': 'bootfile-name',
                            'opt_value': 'pxelinux.0',
                            'ip_version': "4"},
                           {'opt_name': 'tftp-server',
                            'opt_value': '2001:192:168::1',
                            'ip_version': "6"},
                           {'opt_name': 'server-ip-address',
                            'opt_value': '123.123.123.45',
                            'ip_version': "4"}]
        args = [netid]
        for dhcp_opt in extra_dhcp_opts:
            args += ['--extra-dhcp-opt',
                     ('opt_name=%(opt_name)s,opt_value=%(opt_value)s,'
                      'ip_version=%(ip_version)s' %
                      dhcp_opt)]
        position_names = ['network_id', 'extra_dhcp_opts']
        position_values = [netid, extra_dhcp_opts]
        # NOTE(review): duplicate netid appended, mirroring the sibling test
        # above - confirm whether intentional.
        position_values.extend([netid])
        self._test_create_resource(resource, cmd, name, myid, args,
                                   position_names, position_values)
def test_create_port_full(self):
"""Create port: --mac_address mac --device_id deviceid netid."""
resource = 'port'
cmd = port.CreatePort(test_cli20.MyApp(sys.stdout), None)
name = 'myname'
myid = 'myid'
netid = 'netid'
args = ['--mac_address', 'mac', '--device_id', 'deviceid', netid]
position_names = ['network_id', 'mac_address', 'device_id']
position_values = [netid, 'mac', 'deviceid']
self._test_create_resource(resource, cmd, name, myid, args,
position_names, position_values)
# Test dashed options
args = ['--mac-address', 'mac', '--device-id', 'deviceid', netid]
position_names = ['network_id', 'mac_address', 'device_id']
self._test_create_resource(resource, cmd, name, myid, args,
position_names, position_values)
def test_create_port_tenant(self):
"""Create port: --tenant_id tenantid netid."""
resource = 'port'
cmd = port.CreatePort(test_cli20.MyApp(sys.stdout), None)
name = 'myname'
myid = 'myid'
netid = 'netid'
args = ['--tenant_id', 'tenantid', netid, ]
position_names = ['network_id']
position_values = []
position_values.extend([netid])
self._test_create_resource(resource, cmd, name, myid, args,
position_names, position_values,
tenant_id='tenantid')
# Test dashed options
args = ['--tenant-id', 'tenantid', netid, ]
self._test_create_resource(resource, cmd, name, myid, args,
position_names, position_values,
tenant_id='tenantid')
def test_create_port_tags(self):
"""Create port: netid mac_address device_id --tags a b."""
resource = 'port'
cmd = port.CreatePort(test_cli20.MyApp(sys.stdout), None)
name = 'myname'
myid = 'myid'
netid = 'netid'
args = [netid, '--tags', 'a', 'b']
position_names = ['network_id']
position_values = []
position_values.extend([netid])
self._test_create_resource(resource, cmd, name, myid, args,
position_names, position_values,
tags=['a', 'b'])
def test_create_port_secgroup(self):
"""Create port: --security-group sg1_id netid."""
resource = 'port'
cmd = port.CreatePort(test_cli20.MyApp(sys.stdout), None)
name = 'myname'
myid = 'myid'
netid = 'netid'
args = ['--security-group', 'sg1_id', netid]
position_names = ['network_id', 'security_groups']
position_values = [netid, ['sg1_id']]
self._test_create_resource(resource, cmd, name, myid, args,
position_names, position_values)
def test_create_port_secgroups(self):
"""Create port: <security_groups> netid
The <security_groups> are
--security-group sg1_id --security-group sg2_id
"""
resource = 'port'
cmd = port.CreatePort(test_cli20.MyApp(sys.stdout), None)
name = 'myname'
myid = 'myid'
netid = 'netid'
args = ['--security-group', 'sg1_id',
'--security-group', 'sg2_id',
netid]
position_names = ['network_id', 'security_groups']
position_values = [netid, ['sg1_id', 'sg2_id']]
self._test_create_resource(resource, cmd, name, myid, args,
position_names, position_values)
def test_create_port_secgroup_off(self):
resource = 'port'
cmd = port.CreatePort(test_cli20.MyApp(sys.stdout), None)
name = 'myname'
myid = 'myid'
netid = 'netid'
args = ['--no-security-group', netid]
position_names = ['network_id', 'security_groups']
position_values = [netid, []]
self._test_create_resource(resource, cmd, name, myid, args,
position_names, position_values)
def test_create_port_secgroups_list(self):
"""Create port: netid <security_groups>
The <security_grou |
bloodstalker/mutator | bruiser/wasm/dwasm.py | Python | gpl-3.0 | 855 | 0.005848 | #!/usr/bin/python3
import argparse
import code
import readline
import signal
import sys
from parse import Argparser, premain, SigHandler_SIGINT,PythonInterpreter
| from utils import ParseFlags
def getWASMModule():
module_path = sys.argv[1]
interpreter = PythonInterpreter()
module = interpreter.parse(module_path)
def main():
signal.signal(signal.SIGINT, SigHandler_SIGINT)
argparser = Argparser()
if argparser.args.dbg:
try:
premain(argparser)
except Exception as e:
print(e.__doc__)
if e.message: print(e.message)
variables = globals().copy()
variables.update(locals())
shell = code.InteractiveConsole(variables)
shell.interact(banner="DEVIWASM REPL")
else:
premain(argparser)
if __name__ == "__main__":
main()
| |
aseli1/dm_excel_form_builder | setup.py | Python | mit | 487 | 0 | from setupt | ools import setup
setup(name='excel_form_builder',
version='0.1.1',
description='Convert Word/PDF documents to Excel Workbooks',
url='https://github.com/aseli1/dm_excel_form_builder',
author='Anthony Seliga',
author_email='anthony.seliga@gmail.com',
license='MIT',
packages=['excel_form_builder'],
install_requires=[
'openpyxl' | ,
'colorama',
],
scripts=['bin/create_form'],
zip_safe=False)
|
luoyetx/mxnet | example/image-classification/common/fit.py | Python | apache-2.0 | 12,877 | 0.001165 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
""" example train fit utility """
import logging
import os
import time
import re
import math
import mxnet as mx
def _get_lr_scheduler(args, kv):
    """Build (base_lr, scheduler) from the parsed CLI args.

    Returns (args.lr, None) when no decay is configured.  Otherwise builds a
    PolyScheduler when --lr-step-epochs starts with 'pow', or a
    MultiFactorScheduler for a comma-separated epoch list, pre-decaying lr
    for runs resumed past earlier steps.
    """
    if 'lr_factor' not in args or args.lr_factor >= 1:
        return (args.lr, None)
    # decay points are given in epochs; convert to update counts (per worker
    # in distributed mode)
    epoch_size = args.num_examples / args.batch_size
    if 'dist' in args.kv_store:
        epoch_size /= kv.num_workers
    begin_epoch = args.load_epoch if args.load_epoch else 0
    if 'pow' in args.lr_step_epochs:
        lr = args.lr
        max_up = args.num_epochs * epoch_size
        pwr = float(re.sub('pow[- ]*', '', args.lr_step_epochs))
        poly_sched = mx.lr_scheduler.PolyScheduler(max_up, lr, pwr)
        return (lr, poly_sched)
    step_epochs = [int(l) for l in args.lr_step_epochs.split(',')]
    lr = args.lr
    # when resuming, apply the decay for steps that have already passed
    for s in step_epochs:
        if begin_epoch >= s:
            lr *= args.lr_factor
    if lr != args.lr:
        logging.info('Adjust learning rate to %e for epoch %d',
                     lr, begin_epoch)
    steps = [epoch_size * (x - begin_epoch)
             for x in step_epochs if x - begin_epoch > 0]
    retu | rn (lr, mx.lr_scheduler.MultiFactorScheduler(step=steps, factor=args.lr_factor))
def _load_model(args, rank=0):
    """Load the checkpoint selected by --model-prefix/--load-epoch.

    Returns (None, None, None) when no epoch was requested.  Worker ranks
    above 0 load their '<prefix>-<rank>' checkpoint when one exists.
    """
    if 'load_epoch' not in args or args.load_epoch is None:
        return (None, None, None)
    a | ssert args.model_prefix is not None
    model_prefix = args.model_prefix
    if rank > 0 and os.path.exists("%s-%d-symbol.json" % (model_prefix, rank)):
        model_prefix += "-%d" % (rank)
    sym, arg_params, aux_params = mx.model.load_checkpoint(
        model_prefix, args.load_epoch)
    logging.info('Loaded model %s_%04d.params', model_prefix, args.load_epoch)
    return (sym, arg_params, aux_params)
def _save_model(args, rank=0):
if args.model_prefix is None:
return None
dst_dir = os.path.dirname(args.model_prefix)
if not os.path.isdir(dst_dir):
os.mkdir(dst_dir)
return mx.callback.do_checkpoint(args.model_prefix if rank == 0 else "%s-%d" % (
args.model_prefix, rank))
def add_fit_args(parser):
    """
    parser : argparse.ArgumentParser
    return a parser added with args required by fit

    All options are registered on a single 'Training' argument group; the
    parsed values land in the parser's normal namespace.
    """
    train = parser.add_argument_group('Training', 'model training')
    train.add_argument('--network', type=str,
                       help='the neural network to use')
    train.add_argument('--num-layers', type=int,
                       help='number of layers in the neural network, \
                             required by some networks such as resnet')
    train.add_argument('--gpus', type=str,
                       help='list of gpus to run, e.g. 0 or 0,2,5. empty means using cpu')
    train.add_argument('--kv-store', type=str, default='device',
                       help='key-value store type')
    train.add_argument('--num-epochs', type=int, default=100,
                       help='max num of epochs')
    train.add_argument('--lr', type=float, default=0.1,
                       help='initial learning rate')
    train.add_argument('--lr-factor', type=float, default=0.1,
                       help='the ratio to reduce lr on each step')
    train.add_argument('--lr-step-epochs', type=str,
                       help='the epochs to reduce the lr, e.g. 30,60')
    train.add_argument('--initializer', type=str, default='default',
                       help='the initializer type')
    train.add_argument('--optimizer', type=str, default='sgd',
                       help='the optimizer type')
    train.add_argument('--mom', type=float, default=0.9,
                       help='momentum for sgd')
    train.add_argument('--wd', type=float, default=0.0001,
                       help='weight decay for sgd')
    train.add_argument('--batch-size', type=int, default=128,
                       help='the batch size')
    train.add_argument('--disp-batches', type=int, default=20,
                       help='show progress for every n batches')
    train.add_argument('--model-prefix', type=str,
                       help='model prefix')
    # registered on `train` (was `parser`) so every training option lives in
    # the same help group; the parsed namespace is unchanged
    train.add_argument('--monitor', dest='monitor', type=int, default=0,
                       help='log network parameters every N iters if larger than 0')
    train.add_argument('--load-epoch', type=int,
                       help='load the model on an epoch using the model-load-prefix')
    train.add_argument('--top-k', type=int, default=0,
                       help='report the top-k accuracy. 0 means no report.')
    train.add_argument('--loss', type=str, default='',
                       help='show the cross-entropy or nll loss. ce strands for cross-entropy, nll-loss stands for likelihood loss')
    train.add_argument('--test-io', type=int, default=0,
                       help='1 means test reading speed without training')
    train.add_argument('--dtype', type=str, default='float32',
                       help='precision: float32 or float16')
    train.add_argument('--gc-type', type=str, default='none',
                       help='type of gradient compression to use, \
                             takes `2bit` or `none` for now')
    train.add_argument('--gc-threshold', type=float, default=0.5,
                       help='threshold for 2bit gradient compression')
    # additional parameters for large batch sgd
    train.add_argument('--macrobatch-size', type=int, default=0,
                       help='distributed effective batch size')
    train.add_argument('--warmup-epochs', type=int, default=5,
                       help='the epochs to ramp-up lr to scaled large-batch value')
    train.add_argument('--warmup-strategy', type=str, default='linear',
                       help='the ramping-up strategy for large batch sgd')
    return train
def fit(args, network, data_loader, **kwargs):
"""
train a model
args : argparse returns
network : the symbol definition of the nerual network
data_loader : function that returns the train and val data iterators
"""
# kvstore
kv = mx.kvstore.create(args.kv_store)
if args.gc_type != 'none':
kv.set_gradient_compression({'type': args.gc_type,
'threshold': args.gc_threshold})
# logging
head = '%(asctime)-15s Node[' + str(kv.rank) + '] %(message)s'
logging.basicConfig(level=logging.DEBUG, format=head)
logging.info('start with arguments %s', args)
# data iterators
(train, val) = data_loader(args, kv)
if args.test_io:
tic = time.time()
for i, batch in enumerate(train):
for j in batch.data:
j.wait_to_read()
if (i + 1) % args.disp_batches == 0:
logging.info('Batch [%d]\tSpeed: %.2f samples/sec', i,
args.disp_batches * args.batch_size / (time.time() - tic))
tic = time.time()
return
# load model
if 'arg_params' in kwargs and 'aux_params' in kwargs:
arg_params = kwargs['arg_params']
aux_params = kwargs['aux_params']
else:
sym, arg_params, aux_params = _load_model(args, kv.rank)
if sym is not None:
assert sym.tojson() == network.tojson()
# save model
checkpoint = _save_model(args, kv.rank)
# devices for training
devs = mx.cpu() if args.gpus is None or args.gpus == "" else [
mx.gpu(int(i)) for i in args.gpus.split(',')]
# learning rate
|
peterbartha/ImmunoMod | res_mods/mods/packages/xvm_hangar/python/svcmsg.py | Python | mit | 675 | 0.004458 | import traceback |
import BigWorld
from gui.Scaleform.daapi.view.lobby.messengerBar.NotificationListButton import NotificationListButton
from xfw import *
import xvm_main.python.config as config
from xvm_main.python.logger import *
###
@overrideMethod(NotificationListButton, 'as_setStateS')
def _NotificationListButton_as_setStateS(base, self, isBlinking, counterValue):
notificationsButtonType = config.get('hangar | /notificationsButtonType', 'full').lower()
if notificationsButtonType == 'none':
isBlinking = False
counterValue = ''
elif notificationsButtonType == 'blink':
counterValue = ''
base(self, isBlinking, counterValue)
|
icandigitbaby/openchange | python/openchange/provision.py | Python | gpl-3.0 | 31,424 | 0.002228 | # OpenChange provisioning
# Copyright (C) Jelmer Vernooij <jelmer@openchange.org> 2008-2009
# Copyright (C) Julien Kerihuel <j.kerihuel@openchange.org> 2009
#
# This program is free s | oftware; you can redistribute it and/or | modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from base64 import b64encode
import os
import re
from openchange import mailbox
from samba import Ldb, dsdb
from samba.samdb import SamDB
import ldb
from ldb import LdbError, SCOPE_SUBTREE, SCOPE_BASE
from samba import read_and_sub_file
from samba.auth import system_session
from samba.provision import (setup_add_ldif, setup_modify_ldif)
from samba.net import Net
from samba.dcerpc import nbt
from openchange.urlutils import openchangedb_url
__docformat__ = 'restructuredText'
DEFAULTSITE = "Default-First-Site-Name"
class NotProvisionedError(Exception):
"""Raised when an action expects the server to be provisioned and it's not."""
class ServerInUseError(Exception):
"""Raised when a server is still in use when requested to be removed."""
# Deliberate hack: report the *calling* method's name as unimplemented.
def abstract():
    """Raise NotImplementedError naming the function that invoked us."""
    import inspect
    caller = inspect.stack()[1][3]
    raise NotImplementedError(caller + ' must be implemented in subclass')
class AbstractProgressReporter(object):
    """Base class for provisioning progress reporting.

    Subclasses implement doReporting(); callers invoke reportNextStep()
    once per provisioning step.
    """
    def __init__(self):
        self.currentStep = 0

    def reportNextStep(self, stepName):
        """Advance the step counter, then emit *stepName* via doReporting()."""
        self.currentStep += 1
        self.doReporting(stepName)

    def doReporting(self, stepName):
        """Subclass hook that actually presents the step to the user."""
        abstract()
class TextProgressReporter(AbstractProgressReporter):
    """A concrete example of a progress reporter - just provides text output
    for each new step."""

    def doReporting(self, stepName):
        # print as a function call: identical output under Python 2 for a
        # single argument, and no longer a SyntaxError under Python 3 (the
        # old form was a bare `print "..."` statement).
        print("[+] Step %d: %s" % (self.currentStep, stepName))
class ProvisionNames(object):
    """Mutable bag of DNs and names computed while provisioning OpenChange."""

    def __init__(self):
        # Every attribute starts unset; guess_names_from_smbconf() and
        # friends fill them in afterwards.
        for attr in ('rootdn', 'domaindn', 'configdn', 'schemadn',
                     'dnsdomain', 'netbiosname', 'hostname', 'serverrole',
                     'firstorg', 'firstou', 'firstorgdn',
                     # OpenChange dispatcher database specific
                     'ocfirstorgdn', 'ocserverdn', '_domain'):
            setattr(self, attr, None)

    @property
    def domain(self):
        """Explicitly-set domain, or one derived from the server DN."""
        if self._domain:
            return self._domain
        if not self.ocserverdn:
            return None
        parts = self.ocserverdn.split(',')
        # The 'DC=' prefixes are kept, matching the original join of the
        # last two DN components.
        return '%s.%s' % (parts[-2], parts[-1])

    @domain.setter
    def domain(self, value):
        self._domain = value
def guess_names_from_smbconf(lp, creds=None, firstorg=None, firstou=None):
    """Guess configuration settings to use from smb.conf.

    :param lp: Loadparm context.
    :param creds: credentials used to open the Samba database.
    :param firstorg: OpenChange Organization Name
    :param firstou: OpenChange Administrative Group
    :return: a populated ProvisionNames instance.
    """
    netbiosname = lp.get("netbios name")
    hostname = netbiosname.lower()
    dnsdomain = lp.get("realm")
    dnsdomain = dnsdomain.lower()
    serverrole = lp.get("server role")
    # Note: "server role" can have many forms, even for the same function:
    # "member server", "domain controller", "active directory domain
    # controller"...
    if "domain controller" in serverrole or serverrole == "member server":
        domain = lp.get("workgroup")
        domaindn = "DC=" + dnsdomain.replace(".", ",DC=")
    else:
        domain = netbiosname
        domaindn = "CN=" + netbiosname
    rootdn = domaindn
    configdn = "CN=Configuration," + rootdn
    schemadn = "CN=Schema," + configdn
    sitename = DEFAULTSITE
    names = ProvisionNames()
    names.serverrole = serverrole
    names.rootdn = rootdn
    names.domaindn = domaindn
    names.configdn = configdn
    names.schemadn = schemadn
    names.dnsdomain = dnsdomain
    names.domain = domain
    names.netbiosname = netbiosname
    names.hostname = hostname
    names.sitename = sitename
    # NOTE(review): Ldb and get_ldb_url are not imported in this chunk;
    # presumably provided elsewhere in this module -- verify.
    db = Ldb(url=get_ldb_url(lp, creds, names), session_info=system_session(),
             credentials=creds, lp=lp)
    exchangedn = 'CN=Microsoft Exchange,CN=Services,%s' % configdn
    # Look up the Exchange organization container when not supplied.
    if not firstorg:
        firstorg = db.searchone(
            'name', exchangedn, '(objectclass=msExchOrganizationContainer)',
            ldb.SCOPE_SUBTREE)
    assert(firstorg)
    firstorgdn = "CN=%s,%s" % (firstorg, exchangedn)
    # Look up the default administrative group when not supplied.
    if not firstou:
        firstou = db.searchone(
            'name', firstorgdn,
            '(&(objectclass=msExchAdminGroup)(msExchDefaultAdminGroup=TRUE))',
            ldb.SCOPE_SUBTREE)
    assert(firstou)
    names.firstorg = firstorg
    names.firstou = firstou
    names.firstorgdn = firstorgdn
    names.serverdn = "CN=%s,CN=Servers,CN=%s,CN=Sites,%s" % (netbiosname, sitename, configdn)
    # OpenChange dispatcher DB names
    names.ocserverdn = "CN=%s,%s" % (names.netbiosname, names.domaindn)
    names.ocfirstorg = firstorg
    names.ocfirstorgdn = "CN=%s,CN=%s,%s" % (firstou, names.ocfirstorg, names.ocserverdn)
    return names
def provision_schema(setup_path, names, lp, creds, reporter, ldif, msg, modify_mode=False):
    """Provision or modify schema entries from an LDIF file.

    :param setup_path: Path to the setup directory.
    :param names: provision names object.
    :param lp: Loadparm context
    :param creds: Credentials Context
    :param reporter: A progress reporter instance (subclass of AbstractProgressReporter)
    :param ldif: path to the LDIF file
    :param msg: reporter message
    :param modify_mode: whether entries are added or modified
    """
    session_info = system_session()
    db = SamDB(url=get_ldb_url(lp, creds, names), session_info=session_info,
               credentials=creds, lp=lp)
    db.transaction_start()
    try:
        reporter.reportNextStep(msg)
        ldif_function = setup_modify_ldif if modify_mode else setup_add_ldif
        ldif_function(db, setup_path(ldif), {
            "FIRSTORG": names.firstorg,
            "FIRSTORGDN": names.firstorgdn,
            "FIRSTOU": names.firstou,
            "CONFIGDN": names.configdn,
            "SCHEMADN": names.schemadn,
            "DOMAINDN": names.domaindn,
            "DOMAIN": names.domain,
            "DNSDOMAIN": names.dnsdomain,
            "NETBIOSNAME": names.netbiosname,
            "HOSTNAME": names.hostname,
        })
    except:
        # Roll the transaction back, then propagate the original error.
        db.transaction_cancel()
        raise
    db.transaction_commit()
def modify_schema(setup_path, names, lp, creds, reporter, ldif, msg):
    """Modify schema using the given LDIF file.

    Thin wrapper around provision_schema() with modify mode enabled.

    :param setup_path: Path to the setup directory.
    :param names: provision names object.
    :param lp: Loadparm context
    :param creds: Credentials Context
    :param reporter: A progress reporter instance (subclass of AbstractProgressReporter)
    :param ldif: path to the LDIF file
    :param msg: reporter message
    """
    return provision_schema(setup_path, names, lp, creds, reporter, ldif,
                            msg, modify_mode=True)
def deprovision_schema(setup_path, names, lp, creds, reporter, ldif, msg, modify_mode=False):
"""Deprovision/unmodify schema using LDIF specified file, by reverting the
modifications contained therein.
:param setup_path: Path to the setup direc |
Akrog/cinder | cinder/zonemanager/drivers/brocade/brcd_fabric_opts.py | Python | apache-2.0 | 2,207 | 0 | # (c) Copyright 2014 Brocade Communications Systems Inc.
# All Rights Reserved.
#
# Copyright 2014 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
from oslo_config import cfg
from cinder.openstack.common import log as logging
from cinder.volume import configuration
# Per-fabric configuration options.  Registered once under the sample
# section below; load_fabric_configurations() wraps them again for each
# real fabric section name.
brcd_zone_opts = [
    cfg.StrOpt('fc_fabric_address',
               default='',
               help='Management IP of fabric'),
    cfg.StrOpt('fc_fabric_user',
               default='',
               help='Fabric user ID'),
    cfg.StrOpt('fc_fabric_password',
               default='',
               help='Password for user',
               secret=True),
    cfg.IntOpt('fc_fabric_port',
               default=22,
               help='Connecting port'),
    cfg.StrOpt('zoning_policy',
               default='initiator-target',
               help='overridden zoning policy'),
    cfg.BoolOpt('zone_activate',
                default=True,
                help='overridden zoning activation state'),
    cfg.StrOpt('zone_name_prefix',
               default=None,
               help='overridden zone name prefix'),
    cfg.StrOpt('principal_switch_wwn',
               default=None,
               help='Principal switch WWN of the fabric'),
]
CONF = cfg.CONF
# Sample registration so the options show up in generated config docs.
CONF.register_opts(brcd_zone_opts, 'BRCD_FABRIC_EXAMPLE')
LOG = logging.getLogger(__name__)
def load_fabric_configurations(fabric_names):
    """Build a per-fabric Configuration object for each named FC fabric.

    :param fabric_names: iterable of fabric (config section) names
    :returns: dict mapping fabric name -> Configuration wrapping
              brcd_zone_opts for that section
    """
    fabric_configs = {}
    for fabric_name in fabric_names:
        config = configuration.Configuration(brcd_zone_opts, fabric_name)
        # Lazy %-style logging args: the message is only formatted when
        # debug logging is actually enabled.
        LOG.debug("Loaded FC fabric config %s", fabric_name)
        fabric_configs[fabric_name] = config
    return fabric_configs
|
PalouseRobosub/robosub | src/rqt/rqt_control/src/rqt_control/control.py | Python | gpl-3.0 | 5,890 | 0.00034 | import os
import rospy
import rospkg
from python_qt_binding import QT_BINDING
from rqt_gui_py.plugin import Plugin
from python_qt_binding import loadUi
from python_qt_binding.QtCore import QTimer
# Attempt to load QWidget from pyqt4
try:
from python_qt_binding.QtGui import QWidget
# if not load from pyqt5
except ImportError:
from python_qt_binding.QtWidgets import QWidget
from robosub_msgs.msg import control, control_status
# Maps the numeric *_state fields of control messages to the
# human-readable labels shown in the UI.
state_types = {
    0: "NONE",
    1: "ABSOLUTE",
    2: "RELATIVE",
    3: "ERROR"
}
class Control(Plugin):
    """rqt plugin mirroring the 'control' and 'control_status' topics.

    Each topic has a one-second QTimer watchdog; when no message arrives
    in time the corresponding "stale" indicator replaces "active".
    """
    def __init__(self, context):
        super(Control, self).__init__(context)
        # Give QObjects reasonable names
        self.setObjectName('Control')
        # Create QWidget
        self._widget = QWidget()
        # Get path to UI file which should be in the "resource" folder of
        # this package
        ui_file = os.path.join(rospkg.RosPack().get_path('robosub'),
                               'src/rqt/rqt_control/resource', 'Control.ui')
        # Extend the widget with all attributes and children from UI file
        loadUi(ui_file, self._widget)
        # Watchdog timers: fire when a topic goes quiet for one second.
        self.control_timer = QTimer(self)
        self.control_timer.timeout.connect(self.control_missed)
        self.control_timer.start(1000)
        self.control_status_timer = QTimer(self)
        self.control_status_timer.timeout.connect(self.control_status_missed)
        self.control_status_timer.start(1000)
        # Give QObjects reasonable names
        self._widget.setObjectName('Control')
        if context.serial_number() > 1:
            self._widget.setWindowTitle(self._widget.windowTitle() +
                                        (' (%d)' % context.serial_number()))
        # Add widget to the user interface
        context.add_widget(self._widget)
        self._widget.statusActive.hide()
        self._widget.controlActive.hide()
        self.con_sub = rospy.Subscriber('control', control,
                                        self.control_callback, queue_size=1)
        self.cs_sub = rospy.Subscriber('control_status', control_status,
                                       self.control_status_callback,
                                       queue_size=1)
        img_file = os.path.join(rospkg.RosPack().get_path('robosub'),
                                'src/rqt/resource/robosub_logo.png')
        self._widget.setStyleSheet(".QWidget {background-image: url(" +
                                   img_file +
                                   "); background-repeat: no-repeat;" +
                                   "background-position:bottom right}")
    def control_missed(self):
        """Watchdog slot: no 'control' message within 1 s -> show stale."""
        if not self._widget.controlStale.isVisible():
            self._widget.controlStale.show()
            self._widget.controlActive.hide()
    def control_status_missed(self):
        """Watchdog slot: no 'control_status' message within 1 s."""
        if not self._widget.statusStale.isVisible():
            self._widget.statusStale.show()
            self._widget.statusActive.hide()
    def control_status_callback(self, m):
        """Mirror one control_status message into the UI; re-arm watchdog."""
        try:
            self.control_status_timer.stop()
        except RuntimeError:
            # NOTE(review): stop() apparently can raise RuntimeError when
            # the underlying timer object is already gone (e.g. during
            # shutdown) -- confirm.
            pass
        if self._widget.statusStale.isVisible():
            self._widget.statusStale.setVisible(False)
            self._widget.statusActive.setVisible(True)
        # Set the states
        self._widget.forwardStatusState.setText(m.forward_state)
        self._widget.strafeStatusState.setText(m.strafe_left_state)
        self._widget.diveStatusState.setText(m.dive_state)
        self._widget.rollStatusState.setText(m.roll_right_state)
        self._widget.pitchStatusState.setText(m.pitch_down_state)
        self._widget.yawStatusState.setText(m.yaw_left_state)
        self._widget.forwardGoal.setText("{:.4f}".format(m.forward_goal))
        self._widget.strafeGoal.setText("{:.4f}".format(m.strafe_left_goal))
        self._widget.diveGoal.setText("{:.4f}".format(m.dive_goal))
        self._widget.rollGoal.setText("{:.4f}".format(m.roll_right_goal))
        self._widget.pitchGoal.setText("{:.4f}".format(m.pitch_down_goal))
        self._widget.yawGoal.setText("{:.4f}".format(m.yaw_left_goal))
        self.control_status_timer.start(1000)
    def control_callback(self, m):
        """Mirror one control message into the UI; re-arm watchdog."""
        try:
            self.control_timer.stop()
        except RuntimeError:
            pass
        if self._widget.controlStale.isVisible():
            self._widget.controlStale.hide()
            self._widget.controlActive.show()
        # Set the states (numeric enums mapped through state_types)
        self._widget.forwardState.setText(state_types[m.forward_state])
        self._widget.strafeState.setText(state_types[m.strafe_state])
        self._widget.diveState.setText(state_types[m.dive_state])
        self._widget.rollState.setText(state_types[m.roll_state])
        self._widget.pitchState.setText(state_types[m.pitch_state])
        self._widget.yawState.setText(state_types[m.yaw_state])
        self._widget.forwardValue.setText("{:.4f}".format(m.forward))
        self._widget.strafeValue.setText("{:.4f}".format(m.strafe_left))
        self._widget.diveValue.setText("{:.4f}".format(m.dive))
        self._widget.rollValue.setText("{:.4f}".format(m.roll_right))
        self._widget.pitchValue.setText("{:.4f}".format(m.pitch_down))
        self._widget.yawValue.setText("{:.4f}".format(m.yaw_left))
        self.control_timer.start(1000)
    def shutdown_plugin(self):
        """Unregister subscribers and stop both watchdog timers."""
        self.cs_sub.unregister()
        self.con_sub.unregister()
        self.control_timer.stop()
        self.control_status_timer.stop()
    def save_settings(self, plugin_settings, instance_settings):
        # TODO save intrinsic configuration, usually using:
        # instance_settings.set_value(k, v)
        pass
    def restore_settings(self, plugin_settings, instance_settings):
        # TODO restore intrinsic configuration, usually using:
        # v = instance_settings.value(k)
        pass
|
crateio/crate.pypi | crate/pypi/processor.py | Python | bsd-2-clause | 23,954 | 0.003381 | import base64
import hashlib
import logging
import re
import urllib
import urlparse
import xmlrpclib
import redis
import requests
import lxml.html
from django.conf import settings
from django.core.exceptions import ValidationError
from django.core.files.base import ContentFile
from django.db import transaction
from django.utils.timezone import utc
from crate.web.history.models import Event
from crate.web.packages.models import Package, Release, TroveClassifier
from crate.web.packages.models import ReleaseRequire, ReleaseProvide, ReleaseObsolete, ReleaseURI, ReleaseFile
from crate.pypi.exceptions import PackageHashMismatch
from crate.pypi.models import PyPIMirrorPage, PyPIServerSigPage
from crate.pypi.utils.serversigs import load_key, verify
logger = logging.getLogger(__name__)
# PyPI endpoints used for XML-RPC metadata, the simple index, server
# signatures, and the server signing key (cached in redis).
INDEX_URL = "http://pypi.python.org/pypi"
SIMPLE_URL = "http://pypi.python.org/simple/"
SERVERSIG_URL = "http://pypi.python.org/serversig/"
SERVERKEY_URL = "http://pypi.python.org/serverkey"
SERVERKEY_KEY = "crate:pypi:serverkey"
# Distutils2-style requirement: a name optionally followed by "(version)".
_disutils2_version_capture = re.compile("^(.*?)(?:\(([^()]+)\))?$")
# PyPI download URL carrying an "#md5=<digest>" fragment.
_md5_re = re.compile(r"(https?://pypi\.python\.org/packages/.+)#md5=([a-f0-9]+)")
def get_helper(data, key, default=None):
    """Return ``data[key]`` unless it is falsy or the literal "UNKNOWN";
    otherwise return ``default`` ("" when no default is supplied)."""
    value = data.get(key)
    if value and value != "UNKNOWN":
        return value
    return default if default is not None else ""
def split_meta(meta):
    """Split a distutils2-style requirement string into its name, version
    and environment-marker components."""
    requirement, _, environment = meta.partition(";")
    name, version = re.search(
        r"^(.*?)(?:\(([^()]+)\))?$", requirement.strip()).groups()
    return {
        "name": name,
        "version": version if version is not None else "",
        "environment": environment.strip(),
    }
class PyPIPackage(object):
def __init__(self, name, version=None):
self.name = name
self.version = version
self.stored = False
self.pypi = xmlrpclib.ServerProxy(INDEX_URL, use_datetime=True)
self.datastore = redis.StrictRedis(**dict([(x.lower(), y) for x, y in settings.REDIS[settings.PYPI_DATASTORE].items()]))
    def process(self, bulk=False, download=True, skip_modified=True):
        """Run the full pipeline: fetch metadata from PyPI, build the local
        representation, store it in a single transaction and, optionally,
        download the release files."""
        self.bulk = bulk
        self.skip_modified = skip_modified
        self.fetch()
        self.build()
        with transaction.commit_on_success():
            self.store()
        if download:
            self.download()
    def delete(self):
        """Remove this package (no version bound) or hide one release.

        Runs inside a single transaction after verifying the mirror pages.
        """
        with transaction.commit_on_success():
            self.verify_and_sync_pages()
            if self.version is None:
                # Delete the entire package
                # NOTE(review): 'releases' is locked via select_for_update
                # but otherwise unused here -- presumably locked so the
                # cascade delete below cannot race; verify.
                packages = Package.objects.filter(name=self.name).select_for_update()
                releases = Release.objects.filter(package__in=packages).select_for_update()
                for package in packages:
                    package.delete()
            else:
                # Delete only this release
                try:
                    package = Package.objects.get(name=self.name)
                except Package.DoesNotExist:
                    return
                releases = Release.objects.filter(package=package, version=self.version).select_for_update()
                for release in releases:
                    # Releases are hidden rather than deleted.
                    release.hidden = True
                    release.save()
def remove_files(self, *files):
self.verify_and_sync_pages()
packages = Package.objects.filter(name=self.name)
releases = Release.objects.filter(package__in=packages)
for rf in ReleaseFile.objects.filter(release__in=releases, filename__in=files):
rf.hidden = True
rf.save()
    def fetch(self):
        """Pull release metadata for this package from PyPI.

        Populates ``self.releases``, ``self.release_data`` and
        ``self.release_url_data``, which ``build()`` consumes.
        """
        logger.debug("[FETCH] %s%s" % (self.name, " %s" % self.version if self.version else ""))
        # Fetch meta data for this release
        self.releases = self.get_releases()
        self.release_data = self.get_release_data()
        self.release_url_data = self.get_release_urls()
def build(self):
logger.debug("[BUILD] %s%s" % (self.name, " %s" % self.version if self.version else ""))
# Check to Make sure fetch has been ran
if not hasattr(self, "releases") or not hasattr(self, "release_data") or not hasattr(self, "release_url_data"):
raise Exception("fetch must be called prior to running build") # @@@ Make a Custom Exception
# Construct our representation of the releases
self.data = {}
for release in self.releases:
data = {}
data["package"] = self.name
data["version"] = release
data["author"] = get_helper(self.release_data[release], "author")
data["author_email"] = get_helper(self.release_data[release], "author_email")
data["maintainer"] = get_helper(self.release_data[release], "maintainer")
data["maintainer_email"] = get_helper(self.release_data[release], "maintainer_email")
data["summary"] = get_helper(self.release_data[release], "summary")
data["description"] = get_helper(self.release_data[release], "description")
data["license"] = get_helper(self.release_data[release], "license")
data["keywords"] = get_helper(self.release_data[release], "keywords") # @@@ Switch This to a List
data["platform"] = get_helper(self.release_data[release], "platform")
data["download_uri"] = get_helper(self.release_data[release], "download_url") # @@@ Should This Go Under URI?
data["requires_python"] = get_helper(self.release_data[release], "required_python")
data["stable_version"] = get_helper(self.release_data[release], "stable_version") # @@@ What Is This?
data["classifiers"] = get_helper(self.release_data[release], "classifiers", [])
# Construct the URIs
data["uris"] = {}
if get_helper(self.release_data[release], "home_page"):
data["uris"]["Home Page"] = get_helper(self.release_data[release], "home_page")
if get_helper(self.release_data[release], "bugtrack_url"):
data["uris"]["Bug Tracker"] = get_helper(self.release_data[release], "bugtrack_url")
for label, url in [x.split(",", 1) for x in get_helper(self.release_data[release], "project_url", [])]:
data["uris"][label] = url
# Construct Requires
data["requires"] = []
for kind in ["requires", "requires_dist", "requires_external"]:
for require in get_helper(self.release_data[release], kind, []):
req = {"kind": kind if kind is not "requires_external" else "external"}
req.update(split_meta(require))
data["requires"].append(req)
# Construct Provides
data["provides"] = []
for kind in ["provides", "provides_dist"]:
for provides in get_helper(self.release_data[release], kind, []):
req = {"kind": kind}
req.update(split_meta(provides))
data["provides"].append(req)
# Construct Obsoletes
data["obsoletes"] = []
for kind in ["obsoletes", "obsoletes_dist"]:
for provides in get_helper(self.release_data[release], kind, []):
req = {"kind": kind}
req.update(split_meta(provides))
data["obsoletes"].append(req)
# Construct Files
data["files"] = []
for url_data in self.release_url_data[release]:
data["files"].append({
"comment": get_helper(url_data, "comment_text"),
"downloads": get_helper(url_data, "downloads", 0),
"file": get_helper(url_data, "url"),
"filename": get_helper(url_data, "filename"),
"python_version": get_helper(url_data, "python_version"),
"type": get_helper(url_data, "packagetype"),
"digests": {
"md5": url_data["md5_digest"].lower(),
}
})
if u |
CARocha/ciat_plataforma | analysis/configuration/migrations/0003_auto__del_sector__add_sector_en.py | Python | mit | 6,487 | 0.006937 | # -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    """South schema migration: drops the ``Sector`` model/table and adds
    ``Sector_en`` (same single ``nombre`` field, new table name).

    NOTE: South migration files are generated; the ``models`` dict below
    is a frozen snapshot of the app state and should not be hand-edited.
    """
    def forwards(self, orm):
        # Deleting model 'Sector'
        db.delete_table(u'configuration_sector')
        # Adding model 'Sector_en'
        db.create_table(u'configuration_sector_en', (
            (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('nombre', self.gf('django.db.models.fields.CharField')(max_length=200)),
        ))
        db.send_create_signal(u'configuration', ['Sector_en'])
    def backwards(self, orm):
        # Adding model 'Sector'
        db.create_table(u'configuration_sector', (
            ('nombre', self.gf('django.db.models.fields.CharField')(max_length=200)),
            (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
        ))
        db.send_create_signal(u'configuration', ['Sector'])
        # Deleting model 'Sector_en'
        db.delete_table(u'configuration_sector_en')
    # Frozen ORM state South uses to build the fake `orm` argument above.
    models = {
        u'configuration.areaaccion': {
            'Meta': {'object_name': 'AreaAccion'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'nombre': ('django.db.models.fields.CharField', [], {'max_length': '250'})
        },
        u'configuration.categoria': {
            'Meta': {'object_name': 'Categoria'},
            'categoria': ('django.db.models.fields.IntegerField', [], {}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'nombre': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        },
        u'configuration.categoria_conocimiento': {
            'Meta': {'object_name': 'Categoria_Conocimiento'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'nombre': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        },
        u'configuration.categoria_fuente': {
            'Meta': {'object_name': 'Categoria_Fuente'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'nombre': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        },
        u'configuration.categoria_innovacion': {
            'Meta': {'object_name': 'Categoria_Innovacion'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'nombre': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        },
        u'configuration.grupo': {
            'Meta': {'object_name': 'Grupo'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'nombre': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        },
        u'configuration.grupo_beneficiario': {
            'Meta': {'object_name': 'Grupo_Beneficiario'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'nombre': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        },
        u'configuration.papel': {
            'Meta': {'object_name': 'Papel'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'nombre': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        },
        u'configuration.plataforma': {
            'Meta': {'object_name': 'Plataforma'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'nombre': ('django.db.models.fields.CharField', [], {'max_length': '250'}),
            'sitio_accion': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['configuration.SitioAccion']"})
        },
        u'configuration.sector_en': {
            'Meta': {'object_name': 'Sector_en'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'nombre': ('django.db.models.fields.CharField', [], {'max_length': '200'})
        },
        u'configuration.seleccion_7a': {
            'Meta': {'object_name': 'Seleccion_7a'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'nombre': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        },
        u'configuration.seleccion_7b': {
            'Meta': {'object_name': 'Seleccion_7b'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'nombre': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        },
        u'configuration.sitioaccion': {
            'Meta': {'object_name': 'SitioAccion'},
            'area_accion': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['configuration.AreaAccion']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'nombre': ('django.db.models.fields.CharField', [], {'max_length': '250'})
        },
        u'configuration.status_legal': {
            'Meta': {'object_name': 'Status_Legal'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'nombre': ('django.db.models.fields.CharField', [], {'max_length': '200'})
        },
        u'configuration.tema': {
            'Meta': {'object_name': 'Tema'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'tema': ('django.db.models.fields.CharField', [], {'max_length': '200'})
        },
        u'configuration.tema_relacion': {
            'Meta': {'object_name': 'Tema_Relacion'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'nombre': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        },
        u'configuration.tipo_estudio': {
            'Meta': {'object_name': 'Tipo_Estudio'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'nombre': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        },
        u'configuration.ubicacion': {
            'Meta': {'object_name': 'Ubicacion'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'ubicacion': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        }
    }
    complete_apps = ['configuration']
hill1303/CCGParaphraseGenerator | novel_disambiguation/utilities/rewrite_utilities.py | Python | lgpl-2.1 | 17,853 | 0.000224 | __author__ = 'Ethan A. Hill'
import copy
import string
import re
import itertools
import logging
from xml.etree import cElementTree as ElementTree
from ..constants import ccg_values
from ..models.rewrite import Rewrite
from ..utilities import reversal_utilities
__logger = logging.getLogger(__name__)
def find_parse_tree_node(node_index, parse):
    """Return the <node> element of ``parse`` whose numeric id equals
    ``node_index``, or None if no such node exists (not expected)."""
    for candidate in parse.xml_lf.findall('.//node[@id]'):
        # Ids look like e.g. "w12": one prefix character, then digits.
        match = re.match('.([0-9]+).*', candidate.attrib.get('id'))
        if int(match.group(1)) == node_index:
            return candidate
    return None
def parent_verb_node(node, parse):
    """Walk up the parse tree from ``node`` and return the nearest ancestor
    <node> whose word is POS-tagged as a verb ('VB*'), or None.

    :param node: starting element inside ``parse.xml_lf``.
    :param parse: object exposing ``xml_lf`` (an ElementTree element) and
        ``pos_tag_of_word_at_index``.
    """
    # Map every element to its parent so we can walk upwards.
    child_parent_tree_map = {c: p for p in parse.xml_lf.iter() for c in p}
    while node is not None and node.tag != 'lf':
        node = child_parent_tree_map.get(node)
        if node is None:
            # Reached an element with no recorded parent (e.g. the root).
            # Without this guard the attribute access below raised
            # AttributeError instead of falling through to the warning.
            break
        if 'id' in node.attrib:
            # Ids are prefixed with a single character, e.g. "w12".
            node_index = node.attrib.get('id')
            node_index_match = re.match('.([0-9]+).*', node_index)
            node_index = int(node_index_match.group(1))
            tag = parse.pos_tag_of_word_at_index(node_index)
            if 'VB' in tag:
                return node
    # This should probably throw an exception too but maybe not
    __logger.warning('Parent verb was not found for parse %s', parse)
    return None
def has_node_as_child(node, other_node):
    """Return True if ``other_node`` (matched by its ``id`` attribute)
    occurs anywhere in the subtree rooted at ``node``."""
    other_node_index = other_node.attrib.get('id')
    # find() yields the element or None; return the boolean directly
    # instead of the redundant if/else True/False ladder.
    return node.find('.//node[@id="%s"]' % other_node_index) is not None
def ambiguous_parent_verb_index(node_index, parse_pair):
    """For a node shared by both parses, return the id of whichever parent
    verb sits higher in its tree, or None when either parse has no parent
    verb for the node."""
    first_parse, second_parse = parse_pair
    # Locate the parent verb of the node in each parse.
    first_verb = parent_verb_node(
        find_parse_tree_node(node_index, first_parse), first_parse)
    second_verb = parent_verb_node(
        find_parse_tree_node(node_index, second_parse), second_parse)
    if first_verb is None or second_verb is None:
        return None
    # Prefer the verb that dominates the other in the tree.
    if has_node_as_child(first_verb, second_verb):
        return first_verb.attrib.get('id')
    return second_verb.attrib.get('id')
def append_rewrite_to_xml_file(rewrite_xml, xml_file_path):
    """Append ``rewrite_xml`` as a child of the root element of the XML
    document at ``xml_file_path`` and write the document back in place."""
    tree = ElementTree.parse(xml_file_path)
    tree.getroot().append(rewrite_xml)
    tree.write(xml_file_path)
def rewrite_validation_map(parse_lf, rewrite_lf):
    """Map every node id in ``parse_lf`` to the xpath of the matching node
    in ``rewrite_lf`` (node ids are shared between the two trees before
    realization)."""
    parent_child_map = {c: p for p in rewrite_lf.iter() for c in p}
    verification_map = {}
    for source_node in parse_lf.findall('.//node[@id]'):
        node_id = source_node.attrib.get('id')
        matching_node = rewrite_lf.find('.//node[@id="%s"]' % node_id)
        verification_map[node_id] = reversal_utilities.build_xpath_to_node(
            matching_node, parent_child_map)
    return verification_map
def attempt_cleft_rewrite(verb_index, parse, xml_path):
    """Try to rewrite the parse as a cleft construction.

    Operates on a deep copy of ``parse.xml_lf`` and only applies when the
    verb at ``verb_index`` has both an Arg0 (subject) and Arg1 (object).
    On success the rewritten LF is appended to the XML file at ``xml_path``
    and a Rewrite record is added to ``parse.rewrites``.
    """
    xml_lf_copy = copy.deepcopy(parse.xml_lf)
    # Use the copy to make the changes
    verb_node = xml_lf_copy.find('.//node[@id="%s"]' % verb_index)
    subject_node = verb_node.find('rel[@name="Arg0"]/node[@id]')
    object_node = verb_node.find('rel[@name="Arg1"]/node[@id]')
    # If these nodes don't exist, then we can't apply this rewrite
    if subject_node is not None and object_node is not None:
        xml_lf_copy.attrib['info'] += '#cleft'
        verb_parent = xml_lf_copy.find('.//node[@id="%s"]/..' % verb_index)
        # Create a be verb above the verb parent
        be_node = ElementTree.SubElement(
            verb_parent, 'node', {'id': 'w0b', 'pred': 'be'})
        # Gather attributes from verb and transplant into be node
        for attribute, val in verb_node.items():
            if attribute not in ['id', 'pred']:
                be_node.attrib[attribute] = val
        if 'mood' in verb_node.attrib:
            verb_node.attrib.pop('mood')
        # Find the object under the verb
        object_parent = verb_node.find("./rel[@name='Arg1']/node/..")
        # Add object to 'be' verb, and remove object from verb
        object_parent_be = ElementTree.SubElement(
            be_node, 'rel', {'name': 'Arg0'})
        object_parent_be.append(object_node)
        verb_node.remove(object_parent)
        # Create long arg chain for verb node to fall under
        be_verb_child = ElementTree.SubElement(
            be_node, 'rel', {'name': 'Arg1'})
        x1_node = ElementTree.SubElement(
            be_verb_child, 'node', {'id': 'x1'})
        gen_rel = ElementTree.SubElement(
            x1_node, 'rel', {'name': 'GenRel'})
        gen_rel.append(verb_node)
        # Append an x2 node to the verb
        verb_node_child = ElementTree.SubElement(
            verb_node, 'rel', {'name': 'Arg1'})
        ElementTree.SubElement(verb_node_child, 'node', {'idref': 'x2'})
        # Remove verb from old parent
        verb_parent.remove(verb_node)
        # Append the newly created tree to the list of parse lfs
        append_rewrite_to_xml_file(xml_lf_copy, xml_path)
        # Add the rewrite to the parse
        rewrite_id = xml_lf_copy.attrib.get('info')
        validation_map = rewrite_validation_map(parse.xml_lf, xml_lf_copy)
        parse.rewrites.append(Rewrite(rewrite_id, validation_map))
def attempt_passive_rewrite(verb_index, parse, xml_path):
xml_lf_copy = copy.deepcopy(parse.xml_lf)
# Use the copy to make the changes
verb_node = xml_lf_copy.find('.//node[@id="%s"]' % verb_index)
subject_node = verb_node.find('rel[@name="Arg0"]/node[@id]')
object_node = verb_node.find('rel[@name="Arg1"]/node[@id]')
# If these nodes don't exist, then we can't apply this rewrite
if subject_node is not None and object_node is not None:
# Create a copy of this node and set up its attributes
xml_lf_copy.attrib['info'] += '#passive'
verb_parent = xml_lf_copy.find('.//node[@id="%s"]/..' % verb_index)
passive_node = ElementTree.SubElement(
verb_parent, 'node', {'id': 'w0pass', 'pred': 'PASS'})
# Gather attributes from verb and transplant into passive node
for attribute, value in verb_node.items():
if attribute not in ['id', 'pred']:
verb_node.attrib.pop(attribute)
passive_node.attrib[attribute] = value
verb_node.attrib['partic'] = 'pass'
# Move the object node from the verb node to the passive node
object_parent = verb_node.find('rel[@name="Arg1"]/node/..')
object_parent.set('name', 'Arg0')
passive_node.append(object_parent)
verb_node.remove(object_parent)
# Add a reference to the object to the parent
object_reference_parent = ElementTree.SubElement(
verb_node, 'rel', {'name': 'Arg1'})
object_reference_id = object_node.attrib.get(
'id', object_node.attrib.get('idref'))
ElementTree.SubElement(
object_reference_parent, 'node', {'idref': object_reference_id})
# Create a 'by' node
by_node_parent = ElementTree.SubElement(
verb_node, 'rel', {'name': 'Arg0'})
by_node = ElementTree.SubElement(
by_node_parent, 'node', {'id': 'w0by', 'pred': 'by'})
# Move the subject over to the passive node, under the by node
subject_parent = verb_node.find('rel[@name="Arg0"]/node/..')
subject_parent.set('name', 'Arg1')
# Change the subject pred if it is a pronoun
if subject_node.attrib.get('pred') in ccg_values.OBJECT_PRONOUNS:
pronoun_to_change = subject_node.attrib.get('pred')
new_pronoun = ccg_values.OBJECT_PRONOUNS.get(pronoun_to_change)
subject_node.set('pred', new_pronoun)
by_node.append(subject_parent)
|
harikishen/addons-server | src/olympia/amo/decorators.py | Python | bsd-3-clause | 7,584 | 0 | import datetime
import functools
import json
from django import http
from django.conf import settings
from django.core.exceptions import PermissionDenied
from django.db import connection, transaction
import olympia.core.logger
from olympia import core
from olympia.accounts.utils import redirect_for_login
from olympia.users.utils import get_task_user
from . import models as context
from .utils import AMOJSONEncoder
task_log = olympia.core.logger.getLogger('z.task')
def login_required(f=None, redirect=True):
    """
    Like Django's login_required, but with to= instead of next=.
    If redirect=False then we return 401 instead of redirecting to the
    login page. That's nice for ajax views.
    """
    def decorator(func):
        @functools.wraps(func)
        def wrapper(request, *args, **kw):
            if request.user.is_authenticated():
                return func(request, *args, **kw)
            if redirect:
                return redirect_for_login(request)
            return http.HttpResponse(status=401)
        return wrapper
    # Support both bare @login_required and @login_required(redirect=...).
    return decorator(f) if f else decorator
def post_required(f):
    """View decorator rejecting any non-POST request with a 405."""
    @functools.wraps(f)
    def wrapper(request, *args, **kw):
        if request.method == 'POST':
            return f(request, *args, **kw)
        return http.HttpResponseNotAllowed(['POST'])
    return wrapper
def permission_required(permission):
    """Decorator factory: require login plus ``permission``, else 403."""
    def decorator(f):
        @functools.wraps(f)
        @login_required
        def wrapper(request, *args, **kw):
            from olympia.access import acl
            if not acl.action_allowed(request, permission):
                raise PermissionDenied
            return f(request, *args, **kw)
        return wrapper
    return decorator
def any_permission_required(permissions):
    """
    If any permission passes, call the function. Otherwise raise 403.
    """
    def decorator(f):
        @functools.wraps(f)
        @login_required
        def wrapper(request, *args, **kw):
            from olympia.access import acl
            # Short-circuits on the first permission that is allowed.
            if any(acl.action_allowed(request, permission)
                   for permission in permissions):
                return f(request, *args, **kw)
            raise PermissionDenied
        return wrapper
    return decorator
def json_response(response, has_trans=False, status_code=200):
    """
    Return a response as JSON. If you are just wrapping a view,
    then use the json_view decorator.
    """
    # AMOJSONEncoder knows how to serialize translated strings; cls=None
    # falls back to the stock json encoder.
    encoder = AMOJSONEncoder if has_trans else None
    body = json.dumps(response, cls=encoder)
    return http.HttpResponse(body,
                             content_type='application/json',
                             status=status_code)
def json_view(f=None, has_trans=False, status_code=200):
    """Decorator serializing a view's return value to a JSON response.

    A view that already returns an HttpResponse is passed through untouched.
    """
    def decorator(func):
        @functools.wraps(func)
        def wrapper(*args, **kw):
            result = func(*args, **kw)
            if isinstance(result, http.HttpResponse):
                return result
            return json_response(result, has_trans=has_trans,
                                 status_code=status_code)
        return wrapper
    # Support both bare @json_view and @json_view(...).
    return decorator(f) if f else decorator


# Shortcut for a JSON-encoded 400 response.
json_view.error = lambda s: http.HttpResponseBadRequest(
    json.dumps(s), content_type='application/json')
class skip_cache(object):
    """Decorator (usable on functions and methods) that runs the wrapped
    callable inside the `context.skip_cache()` context."""

    def __init__(self, f):
        functools.update_wrapper(self, f)
        # Assign after `update_wrapper`, which would otherwise clobber our
        # attribute with any `f` key found in f.__dict__.
        self.f = f

    def __call__(self, *args, **kw):
        with context.skip_cache():
            return self.f(*args, **kw)

    def __repr__(self):
        return '<SkipCache %r>' % (self.f,)

    def __get__(self, obj, typ=None):
        # Descriptor protocol: rebind so decorated methods work too.
        return skip_cache(self.f.__get__(obj, typ))
def use_master(f):
    """Decorator pinning all queries made inside `f` to the master DB."""
    @functools.wraps(f)
    def inner(*args, **kw):
        with context.use_master():
            return f(*args, **kw)
    return inner
def write(f):
    # Combined decorator for code that writes to the database: skip the
    # cache machinery and pin all queries inside `f` to the master DB.
    return use_master(skip_cache(f))
def set_modified_on(f):
    """
    Will update the modified timestamp on the provided objects
    when the wrapped function exits successfully (returns True).
    Looks up objects defined in the set_modified_on kwarg.
    """
    # Imported here rather than at module level, presumably to avoid a
    # circular import with the tasks module -- TODO confirm.
    from olympia.amo.tasks import set_modified_on_object

    @functools.wraps(f)
    def wrapper(*args, **kw):
        # Pop our private kwarg so the wrapped function never sees it.
        objs = kw.pop('set_modified_on', None)
        result = f(*args, **kw)
        # Only schedule the timestamp updates when the wrapped call
        # reports success (truthy return value).
        if objs and result:
            for obj in objs:
                task_log.info('Delaying setting modified on object: %s, %s' %
                              (obj.__class__.__name__, obj.pk))
                # The eta delays the task by settings.NFS_LAG_DELAY seconds,
                # presumably to wait out NFS replication lag -- TODO confirm.
                set_modified_on_object.apply_async(
                    args=[obj], kwargs=None,
                    eta=(datetime.datetime.now() +
                         datetime.timedelta(seconds=settings.NFS_LAG_DELAY)))
        return result
    return wrapper
def allow_cross_site_request(f):
    """Allow other sites to access this resource, see
    https://developer.mozilla.org/en/HTTP_access_control."""
    @functools.wraps(f)
    def wrapper(request, *args, **kw):
        response = f(request, *args, **kw)
        # If Access-Control-Allow-Credentials isn't set, the browser won't
        # return data required cookies to see. This is a good thing, let's
        # keep it that way. (This used to be a bare triple-quoted string,
        # i.e. a no-op expression statement, not a comment.)
        response['Access-Control-Allow-Origin'] = '*'
        response['Access-Control-Allow-Methods'] = 'GET'
        return response
    return wrapper
def set_task_user(f):
    """Sets the user to be the task user, then unsets it."""
    @functools.wraps(f)
    def wrapper(*args, **kw):
        previous = core.get_user()
        core.set_user(get_task_user())
        try:
            return f(*args, **kw)
        finally:
            # Always restore the previous user, even when `f` raises.
            core.set_user(previous)
    return wrapper
def allow_mine(f):
    """Resolve the special author name `mine` to the logged-in user."""
    @functools.wraps(f)
    def wrapper(request, username, *args, **kw):
        if username != 'mine':
            return f(request, username, *args, **kw)
        # `mine` means the current user's collection (or something);
        # anonymous users get bounced to the login page.
        if not request.user.is_authenticated():
            return redirect_for_login(request)
        return f(request, request.user.username, *args, **kw)
    return wrapper
def atomic(fn):
    """Set the transaction isolation level to SERIALIZABLE and then delegate
    to transaction.atomic to run the specified code atomically. The
    SERIALIZABLE level will run SELECTs in LOCK IN SHARE MODE when used in
    conjunction with transaction.atomic.

    Docs: https://dev.mysql.com/doc/refman/5.6/en/set-transaction.html.
    """
    # TODO: Make this the default for all transactions.
    @functools.wraps(fn)
    @write
    def inner(*args, **kwargs):
        # The isolation level is set with a raw cursor *before* entering
        # atomic(): per the MySQL docs linked above, SET TRANSACTION
        # applies to the next transaction.
        cursor = connection.cursor()
        cursor.execute('SET TRANSACTION ISOLATION LEVEL SERIALIZABLE')
        with transaction.atomic():
            return fn(*args, **kwargs)

    # The non_atomic version is essentially just a non-decorated version of the
    # function. This is just here to handle the fact that django's tests are
    # run in a transaction and setting this will make mysql blow up. You can
    # mock your function to the non-atomic version to make it run in a test.
    #
    #     with mock.patch('module.func', module.func.non_atomic):
    #         test_something()
    inner.non_atomic = fn
    return inner
| |
freephys/python_ase | ase/md/npt.py | Python | gpl-3.0 | 27,683 | 0.003757 | '''Constant pressure/stress and temperature dynamics.
Combined Nose-Hoover and Parrinello-Rahman dynamics, creating an NPT
(or N,stress,T) ensemble.
The method is the one proposed by Melchionna et al. [1] and later
modified by Melchionna [2]. The differential equations are integrated
using a centered difference method [3].
1. S. Melchionna, G. Ciccotti and B. L. Holian, "Hoover NPT dynamics
for systems varying in shape and size", Molecular Physics 78, p. 533
(1993).
2. S. Melchionna, "Constrained systems and statistical distribution",
Physical Review E, 61, p. 6165 (2000).
3. B. L. Holian, A. J. De Groot, W. G. Hoover, and C. G. Hoover,
"Time-reversible equilibrium and nonequilibrium isothermal-isobaric
simulations with centered-difference Stoermer algorithms.", Physical
Review A, 41, p. 4552 (1990).
'''
__docformat__ = 'reStructuredText'
from numpy import *
import sys
import time
import weakref
from ase.md import MolecularDynamics
#from ASE.Trajectories.NetCDFTrajectory import NetCDFTrajectory
# Delayed imports: If the trajectory object is reading a special ASAP version
# of HooverNPT, that class is imported from Asap.Dynamics.NPTDynamics.
class NPT(MolecularDynamics):
'''Constant pressure/stress and temperature dynamics.
Combined Nose-Hoover and Parrinello-Rahman dynamics, creating an
NPT (or N,stress,T) ensemble.
The method is the one proposed by Melchionna et al. [1] and later
modified by Melchionna [2]. The differential equations are integrated
using a centered difference method [3]. See also NPTdynamics.tex
The dynamics object is called with the following parameters:
atoms
The list of atoms.
dt
The timestep in units matching eV, A, u.
temperature
The desired temperature in eV.
externalstress
The external stress in eV/A^3. Either a symmetric
3x3 tensor, a 6-vector representing the same, or a
scalar representing the pressure. Note that the
stress is positive in tension whereas the pressure is
positive in compression: giving a scalar p is
equivalent to giving the tensor (-p, -p, -p, 0, 0, 0).
ttime
Characteristic timescale of the thermostat.
Set to None to disable the thermostat.
pfactor
A constant in the barostat differential equation. If
a characteristic barostat timescale of ptime is
desired, set pfactor to ptime^2 * B (where B is the
Bulk Modulus). Set to None to disable the barostat.
Typical metallic bulk moduli are of the order of
100 GPa or 0.6 eV/A^3.
mask=None
Optional argument. A tuple of three integers (0 or 1),
indicating if the system can change size along the
three Cartesian axes. Set to (1,1,1) or None to allow
a fully flexible computational box. Set to (1,1,0)
to disallow elongations along the z-axis etc.
Useful parameter values:
* The same timestep can be used as in Verlet dynamics, i.e. 5 fs is fine
for bulk copper.
* The ttime and pfactor are quite critical[4], too small values may
cause instabilites and/or wrong fluctuations in T / p. Too
large values cause an oscillation which is slow to die. Good
values for the characteristic times seem to be 25 fs for ttime,
and 75 fs for ptime (used to calculate pfactor), at least for
bulk copper with 15000-200000 atoms. But this is not well
tested, it is IMPORTANT to monitor the temperature and
stress/pressure fluctuations.
It has the following methods:
__call__(n)
Perform n timesteps.
initialize()
Estimates the dynamic variables for time=-1 to start
the algorithm. This is automatically called before
the first timestep.
set_stress()
Set the external stress. Use with care. It is
preferable to set the right value when creating the
object.
set_mask()
Change the mask. Use with care, as you may "freeze"
a fluctuation in the strain rate.
get_gibbs_free_energy()
Gibbs free energy is supposed to be preserved by this
dynamics. This is mainly intended as a diagnostic
tool.
References:
1) S. Melchionna, G. Ciccotti and B. L. Holian, Molecular
Physics 78, p. 533 (1993).
2) S. Melchionna, Physical
Review E 61, p. 6165 (2000).
3) B. L. Holian, A. J. De Groot, W. G. Hoover, and C. G. Hoover,
Physical Review A 41, p. 4552 (1990).
4) F. D. Di Tolla and M. Ronchetti, Physical
Review E 48, p. 1726 (1993).
'''
classname = "NPT" # Used by the trajectory.
    def __init__(self, atoms,
                 timestep, temperature, externalstress, ttime, pfactor,
                 mask=None, trajectory=None):
        # See the class docstring for the meaning and units of the
        # parameters (temperature in eV, stress in eV/A^3, ...).
        MolecularDynamics.__init__(self, atoms, timestep, trajectory)
        #self.atoms = atoms
        #self.timestep = timestep
        self.zero_center_of_mass_momentum(verbose=1)
        self.temperature = temperature
        self.set_stress(externalstress)
        self.set_mask(mask)
        # eta is the strain rate, stored as an upper-triangular 3x3 matrix
        # (see get_strain_rate/set_strain_rate); zeta is presumably the
        # Nose-Hoover thermostat variable -- TODO confirm.
        self.eta = zeros((3,3), float)
        self.zeta = 0.0
        self.zeta_integrated = 0.0
        # Dynamic variables for t=-1 are estimated lazily by initialize()
        # before the first timestep (see class docstring).
        self.initialized = 0
        self.ttime = ttime
        self.pfactor_given = pfactor
        self._calculateconstants()
        self.timeelapsed = 0.0
        # Fraction of the traceless part of the force on eta that is kept
        # (see set_fraction_traceless).
        self.frac_traceless = 1
    def set_temperature(self, temperature):
        """Set the desired temperature (in eV) and update derived constants."""
        self.temperature = temperature
        self._calculateconstants()

    def set_stress(self, stress):
        """Set the applied stress.

        Must be a symmetric 3x3 tensor, a 6-vector representing a symmetric
        3x3 tensor, or a number representing the pressure.
        """
        if type(stress) == type(1.0) or type(stress) == type(1):
            # A scalar pressure p corresponds to the stress
            # (-p, -p, -p, 0, 0, 0): positive pressure is compressive,
            # positive stress is tensile (see class docstring).
            stress = array((-stress, -stress, -stress, 0.0, 0.0, 0.0))
        elif stress.shape == (3,3):
            if not self._issymmetric(stress):
                raise ValueError, "The external stress must be a symmetric tensor."
            # Collapse the symmetric tensor to Voigt order
            # (xx, yy, zz, yz, xz, xy).
            stress = array((stress[0,0], stress[1,1], stress[2,2], stress[1,2],
                            stress[0,2], stress[0,1]))
        elif stress.shape != (6,):
            raise ValueError, "The external stress has the wrong shape."
        self.externalstress = stress
def set_mask(self, mask):
"""Set the mask indicating dynamic elements of the computational box.
If set to None, all elements may change. If set to a 3-vector
of ones and zeros, elements which are zero specify directions
along which the size of the computational box cannot change.
For example, if mask = {1,1,0} the length of the system along
the z-axis cannot change, although xz and yz shear is still
possible. To disable shear globally, set the mode to diagonal
(not yet implemented).
"""
if mask is None:
mask = ones((3,))
if not hasattr(mask, "shape"):
mask = array(mask)
if mask.shape != (3,) and mask.shape != (3,3):
raise "The mask has the wrong shape (must be a 3-vector or 3x3 matrix)"
else:
mask = not_equal(mask, 0) # Make sure it is 0/1
if mask.shape == (3,):
self.mask = outer(mask, mask)
else:
self.mask = mask
    def set_fraction_traceless(self, fracTraceless):
        """set what fraction of the traceless part of the force
        on eta is kept.

        By setting this to zero, the volume may change but the shape may not.
        """
        self.frac_traceless = fracTraceless

    def get_strain_rate(self):
        "Get the strain rate as an upper-triangular 3x3 matrix"
        # Return a copy so callers cannot mutate the internal state.
        return array(self.eta, copy=1)
def set_strain_rate(self, rate):
"Set the strain rate. Must be an upper triangular 3x3 matrix."
if not (rate.shape == (3,3) and self._isuppertriangular(rate)):
raise ValueError, "Strain rate must be an upper triangular matrix."
self.eta = rate
if self.initialized:
# Rec |
tmm1/pygments.rb | vendor/pygments-main/pygments/styles/colorful.py | Python | mit | 2,778 | 0 | # -*- coding: utf-8 -*-
"""
pygments.styles.colorful
~~~~~~~~~~~~~~~~~~~~~~~~
A colorful style, inspired by CodeRay.
:copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
from pygments.style import Style
from pygments.token import Keyword, Name, Comment, String, Error, \
Number, Operator, Generic, Whitespace
class ColorfulStyle(Style):
    """
    A colorful style, inspired by CodeRay.
    """
    # NOTE: the Operator and Name.Builtin entries carried stray extraction
    # artifacts; restored to the upstream Pygments values.

    default_style = ""

    styles = {
        Whitespace:                "#bbbbbb",

        Comment:                   "#888",
        Comment.Preproc:           "#579",
        Comment.Special:           "bold #cc0000",

        Keyword:                   "bold #080",
        Keyword.Pseudo:            "#038",
        Keyword.Type:              "#339",

        Operator:                  "#333",
        Operator.Word:             "bold #000",

        Name.Builtin:              "#007020",
        Name.Function:             "bold #06B",
        Name.Class:                "bold #B06",
        Name.Namespace:            "bold #0e84b5",
        Name.Exception:            "bold #F00",
        Name.Variable:             "#963",
        Name.Variable.Instance:    "#33B",
        Name.Variable.Class:       "#369",
        Name.Variable.Global:      "bold #d70",
        Name.Constant:             "bold #036",
        Name.Label:                "bold #970",
        Name.Entity:               "bold #800",
        Name.Attribute:            "#00C",
        Name.Tag:                  "#070",
        Name.Decorator:            "bold #555",

        String:                    "bg:#fff0f0",
        String.Char:               "#04D bg:",
        String.Doc:                "#D42 bg:",
        String.Interpol:           "bg:#eee",
        String.Escape:             "bold #666",
        String.Regex:              "bg:#fff0ff #000",
        String.Symbol:             "#A60 bg:",
        String.Other:              "#D20",

        Number:                    "bold #60E",
        Number.Integer:            "bold #00D",
        Number.Float:              "bold #60E",
        Number.Hex:                "bold #058",
        Number.Oct:                "bold #40E",

        Generic.Heading:           "bold #000080",
        Generic.Subheading:        "bold #800080",
        Generic.Deleted:           "#A00000",
        Generic.Inserted:          "#00A000",
        Generic.Error:             "#FF0000",
        Generic.Emph:              "italic",
        Generic.Strong:            "bold",
        Generic.Prompt:            "bold #c65d09",
        Generic.Output:            "#888",
        Generic.Traceback:         "#04D",

        Error:                     "#F00 bg:#FAA"
    }
|
denismakogon/cloudvalidation-dashboard | cloudvalidation/ostf_tests/tables.py | Python | apache-2.0 | 2,739 | 0 | from django import shortcuts
from django import http
from django.utils.translation import ugettext_lazy as _
from django.utils.translation import ungettext_lazy
from horizon import tables
from horizon import messages
from cloudvalidation.api import cloudv
class CreateJob(tables.BatchAction):
    """Batch action that redirects to the job-creation page with the
    selected test ids stashed in the session."""
    name = "create"
    verbose_name = "Create Job"
    classes = ("btn-launch",)
    icon = "plus"

    @staticmethod
    def action_present(count):
        return ungettext_lazy(
            u"Create job",
            u"Create jobs",
            count
        )

    @staticmethod
    def action_past(count):
        # Past tense for both forms (the singular previously read
        # "Create job", inconsistent with the plural "Created jobs").
        return ungettext_lazy(
            u"Created job",
            u"Created jobs",
            count
        )

    def handle(self, table, request, obj_ids):
        # Stash the selected test ids so the creation view can read them.
        request.session['tests'] = obj_ids
        return shortcuts.redirect('/cloudvalidation_portal/jobs/create')
class ExecuteTest(tables.BatchAction):
    """Batch action that runs the selected OSTF tests and returns the
    collected reports as a downloadable attachment."""
    name = "execute"
    classes = ('btn-launch',)
    help_text = _("Execute test.")

    @staticmethod
    def action_present(count):
        return ungettext_lazy(
            u"Execute test",
            u"Execute tests",
            count
        )

    @staticmethod
    def action_past(count):
        return ungettext_lazy(
            u"Executed test",
            u"Executed tests",
            count
        )

    def action(self, request, datum_id):
        # Run a single test through the OSTF client; the client returns a
        # list and we only need the first report.
        report = (cloudv.cloudvalidation_ostf_client().
                  tests.run(datum_id, "fuel_health"))[0]
        return report

    def handle(self, table, request, obj_ids):
        reports = []
        for test_id in obj_ids:
            # Bug fix: pass `request` (not the `reports` list) as the
            # request argument expected by action(); also avoid shadowing
            # the `id` builtin.
            report = self.action(request, test_id)
            self.update(request, test_id)
            _test = ("Test %(test)s.\n"
                     "Duration: %(duration)s.\n"
                     "Result: %(result)s.\n"
                     "Report: %(report)s.\n" % report)
            reports.append(_test)
        response = http.HttpResponse(status=200, reason="OK")
        response['Content-Disposition'] = 'attachment; filename="reports"'
        response['Content-Type'] = 'application/octet-stream'
        view = ('Executed tests:'
                '\n%(tests)s\n'
                '\n%(reports)s\n')
        response.write(view % {"tests": "\n".join(obj_ids),
                               "reports": "\n".join(reports)})
        response.close()
        return response
class OSTFTable(tables.DataTable):
    # Single column: the test name.
    test = tables.Column("test", verbose_name=_("Test"))

    def get_object_id(self, datum):
        # The test name doubles as the row id (no numeric pk available).
        return datum.test

    class Meta(object):
        name = "OSTF tests"
        verbose_name = _("OSTF tests")
        table_actions = (ExecuteTest, CreateJob)
        row_actions = (ExecuteTest, )
italomaia/turtle-linux | games/DigbyMarshmallow/lib/effects.py | Python | gpl-3.0 | 472 | 0.012712 | import random
import actor
from vector import Vector as v
class SmokePuff(actor.Actor):
    """Decorative, non-colliding puff that drifts off with a random
    impulse and disappears once it has grown past radius 20.

    (This block carried stray `|` extraction artifacts in the class header
    and __init__ signature; restored.)
    """
    collides = False

    def __init__(self, world, pos):
        super(SmokePuff, self).__init__(world, pos=pos, radius=10,
                                        image_file="images/all/star.svgz")
        # Random 2D kick so each puff drifts in its own direction.
        self.apply_impulse(v((random.gauss(0, 2), random.gauss(0, 2))))

    def tick(self):
        super(SmokePuff, self).tick()
        # Grow each frame; die once too large.
        self.radius += .5
        if self.radius > 20:
            self.dead = True
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.