repo_name (stringlengths 5–100) | path (stringlengths 4–231) | language (stringclasses, 1 value) | license (stringclasses, 15 values) | size (int64 6–947k) | score (float64 0–0.34) | prefix (stringlengths 0–8.16k) | middle (stringlengths 3–512) | suffix (stringlengths 0–8.17k)
raphaelvalentin/Utils | spectre/syntax/smu.py | Python | gpl-2.0 | 4,809 | 0.021418
from spectre.syntax import *
class SMU2P(Netlist):
__name__ = "SMU2P"
__type__ = "netlist"
def __init__(self, name='SMU2P', nodes=('1', '2'), V1=0, V2=0):
Netlist.__init__(self)
self.name = name
self.nodes = nodes
self.V1 = V1; self.V2 = V2
save = dict(V1=nodes[0], V2=nodes[1])
self.append( Vsource(name='V1', nodes=(nodes[0], '0'), dc=V1, type='dc') )
save['I1'] = 'V1:2'
self.append( Vsource(name='V2', nodes=(nodes[1], '0'), dc=V2, type='dc') )
save['I2'] = 'V2:2'
self.append( Save(**save) )
class SMU4P(Netlist):
__name__ = "SMU4P"
__type__ = "netlist"
def __init__(self, name='SMU4P', nodes=('1', '2', '3', '4'), V1=0, V2=0, V3=0, V4=0):
Netlist.__init__(self)
self.name = name
self.nodes = nodes
self.V1 = V1; self.V2 = V2; self.V3 = V3; self.V4 = V4
self.append( Vsource(name='V1', nodes=(nodes[0], '0'), dc=V1, type='dc') )
self.append( Vsource(name='V2', nodes=(nodes[1], '0'), dc=V2, type='dc') )
self.append( Vsource(name='V3', nodes=(nodes[2], '0'), dc=V3, type='dc') )
self.append( Vsource(name='V4', nodes=(nodes[3], '0'), dc=V4, type='dc'))
self.append( Save(I1='V1:2', I2="V2:2", I3="V3:2", I4="V4:2", V1=nodes[0], V2=nodes[1], V3=nodes[2], V4=nodes[3]) )
class SMU5P(Netlist):
__name__ = "SMU5P"
__type__ = "netlist"
def __init__(self, name='SMU5P', nodes=('1', '2', '3', '4', '5'), V1=0, V2=0, V3=0, V4=0, V5=0):
Netlist.__init__(self)
self.name = name
self.nodes = nodes
self.V1 = V1; self.V2 = V2; self.V3 = V3; self.V4 = V4; self.V5 = V5
self.append( Vsource(name='V1', nodes=(nodes[0], '0'), dc=V1, type='dc') )
self.append( Vsource(name='V2', nodes=(nodes[1], '0'), dc=V2, type='dc') )
self.append( Vsource(name='V3', nodes=(nodes[2], '0'), dc=V3, type='dc') )
self.append( Vsource(name='V4', nodes=(nodes[3], '0'), dc=V4, type='dc'))
self.append( Vsource(name='V5', nodes=(nodes[4], '0'), dc=V5, type='dc'))
self.append( Save(I1='V1:2', I2="V2:2", I3="V3:2", I4="V4:2", I5="V5:2", V1=nodes[0], V2=nodes[1], V3=nodes[2], V4=nodes[3], V5=nodes[4]) )
class SMU3P(Netlist):
__name__ = "SMU3P"
__type__ = "netlist"
def __init__(self, name='SMU3P', nodes=('1', '2', '3'), V1=0, V2=0, V3=0, I1=None, I2=None, I3=None):
Netlist.__init__(self)
self.name = name
self.nodes = nodes
        self.V1 = V1; self.V2 = V2; self.V3 = V3
        save = { 'V1':nodes[0], 'V2':nodes[1], 'V3':nodes[2] }
        if I1 is None:
self.append( Vsource(name='V1', nodes=(nodes[0], '0'), dc=V1, type='dc') )
save['I1'] = 'V1:2'
else:
self.append( Isource(name='I1', nodes=(nodes[0], '0'), dc=I1, type='dc') )
save['I1'] = 'I1:2'
        if I2 is None:
self.append( Vsource(name='V2', nodes=(nodes[1], '0'), dc=V2, type='dc') )
save['I2'] = 'V2:2'
else:
self.append( Isource(name='I2', nodes=(nodes[1], '0'), dc=I2, type='dc') )
save['I2'] = 'I2:2'
        if I3 is None:
self.append( Vsource(name='V3', nodes=(nodes[2], '0'), dc=V3, type='dc') )
save['I3'] = 'V3:2'
else:
self.append( Isource(name='I3', nodes=(nodes[2], '0'), dc=I3, type='dc') )
save['I3'] = 'I3:2'
self.append( Save(**save) )
class SMU(Netlist):
__name__ = "SMU"
__type__ = "netlist"
    def __init__(self, name='SMU', nodes=(), **parameters):
Netlist.__init__(self)
self.name = name
self.nodes = nodes
save = {}
for node in nodes:
if 'V{node}'.format(node=node) in parameters:
dc = parameters['V{node}'.format(node=node)]
self.append( Vsource(name='V{node}'.format(node=node), nodes=(node, '0'), dc=dc, type='dc') )
save['I{node}'.format(node=node)] = 'V{node}:2'.format(node=node)
save['V{node}'.format(node=node)] = node
elif 'I{node}'.format(node=node) in parameters:
dc = parameters['I{node}'.format(node=node)]
self.append( Isource(name='I{node}'.format(node=node), nodes=(node, '0'), dc=dc, type='dc') )
save['I{node}'.format(node=node)] = 'I{node}:2'.format(node=node)
save['V{node}'.format(node=node)] = node
else:
self.append( Vsource(name='V{node}'.format(node=node), nodes=(node, '0'), dc=0, type='dc') )
save['I{node}'.format(node=node)] = 'V{node}:2'.format(node=node)
save['V{node}'.format(node=node)] = node
if len(save):
self.append( Save(**save) )
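# --- Editor's illustrative sketch (not part of the original file) ---
# Hypothetical usage of the SMU helpers above; node names and values are
# placeholders, assuming the spectre.syntax Netlist machinery serializes
# these objects into a Spectre netlist:
#
#   bias = SMU(name='bias', nodes=('d', 'g'), Vd=1.2, Ig=1e-6)
#   smu2p = SMU2P(V1=0.8, V2=0.0)
#
# Each node listed in `nodes` gets a DC voltage source (0 V by default)
# unless a matching I<node> parameter selects a current source instead, and
# every branch current and node voltage is registered via Save.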
pkimber/compose | compose/tests/test_header.py | Python | apache-2.0 | 291 | 0
# -*- encoding: utf-8 -*-
# from django.test import TestCase
# from block.tests.helper import check_content
# from compose.tests.factories import HeaderFactory
# class TestHeader(TestCase):
#
#     def test_content_methods(self):
#         c = HeaderFactory()
#         check_content(c)
Shadow5523/zabbix_api | lib/auth.py | Python | gpl-2.0 | 669 | 0.014948
# -*- coding: utf-8 -*-
import json
import urllib2
def get_authkey(zabbix_server, zabbix_user, zabbix_pass, head):
    url = "http://" + zabbix_server + "/zabbix/api_jsonrpc.php"
    pdata = json.dumps({"jsonrpc" : "2.0",
                        "method" : "user.login",
"params" : {
"user" : zabbix_user,
"password" : zabbix_pass},
"auth" : None,
"id" : 1})
result = urllib2.urlopen(urllib2.Request(url, pdata, head)).read()
    try:
        return json.loads(result)['result']
    except (ValueError, KeyError):  # malformed JSON, or no 'result' on a failed login
        return 1
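# --- Editor's illustrative sketch (not part of the original file) ---
# Hypothetical call; host, credentials and the header value are placeholders,
# an assumption about what the caller passes in:
#
#   headers = {"Content-Type": "application/json-rpc"}
#   auth = get_authkey("zabbix.example.com", "Admin", "zabbix", headers)
#
# On success `auth` holds the API token string; on a malformed response or
# failed login the function returns 1.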
commaai/openpilot | selfdrive/car/subaru/values.py | Python | mit | 11,405 | 0.001841
from selfdrive.car import dbc_dict
from cereal import car
Ecu = car.CarParams.Ecu
class CarControllerParams:
def __init__(self, CP):
if CP.carFingerprint == CAR.IMPREZA_2020:
self.STEER_MAX = 1439
else:
self.STEER_MAX = 2047
self.STEER_STEP = 2 # how often we update the steer cmd
self.STEER_DELTA_UP = 50 # torque increase per refresh, 0.8s to max
self.STEER_DELTA_DOWN = 70 # torque decrease per refresh
self.STEER_DRIVER_ALLOWANCE = 60 # allowed driver torque before start limiting
self.STEER_DRIVER_MULTIPLIER = 10 # weight driver torque heavily
self.STEER_DRIVER_FACTOR = 1 # from dbc
class CAR:
ASCENT = "SUBARU ASCENT LIMITED 2019"
IMPREZA = "SUBARU IMPREZA LIMITED 2019"
IMPREZA_2020 = "SUBARU IMPREZA SPORT 2020"
FORESTER = "SUBARU FORESTER 2019"
FORESTER_PREGLOBAL = "SUBARU FORESTER 2017 - 2018"
LEGACY_PREGLOBAL = "SUBARU LEGACY 2015 - 2018"
OUTBACK_PREGLOBAL = "SUBARU OUTBACK 2015 - 2017"
OUTBACK_PREGLOBAL_2018 = "SUBARU OUTBACK 2018 - 2019"
FINGERPRINTS = {
CAR.IMPREZA_2020: [{
    2: 8, 64: 8, 65: 8, 72: 8, 73: 8, 280: 8, 281: 8, 282: 8, 290: 8, 312: 8, 313: 8, 314: 8, 315: 8, 316: 8, 326: 8, 372: 8, 544: 8, 545: 8, 546: 8, 552: 8, 554: 8, 557: 8, 576: 8, 577: 8, 722: 8, 801: 8, 802: 8, 803: 8, 805: 8, 808: 8, 816: 8, 826: 8, 837: 8, 838: 8, 839: 8, 842: 8, 912: 8, 915: 8, 940: 8, 1617: 8, 1632: 8, 1650: 8, 1677: 8, 1697: 8, 1722: 8, 1743: 8, 1759: 8, 1786: 5, 1787: 5, 1788: 8, 1809: 8, 1813: 8, 1817: 8, 1821: 8, 1840: 8, 1848: 8, 1924: 8, 1932: 8, 1952: 8, 1960: 8, 1968: 8, 1976: 8, 2015: 8, 2016: 8, 2024: 8
},
{
2: 8, 64: 8, 65: 8, 72: 8, 73: 8, 280: 8, 281: 8, 282: 8, 290: 8, 312: 8, 313: 8, 314: 8, 315: 8, 316: 8, 326: 8, 544: 8, 545: 8, 546: 8, 554: 8, 557: 8, 576: 8, 577: 8, 801: 8, 802: 8, 803: 8, 805: 8, 808: 8, 816: 8, 826: 8, 837: 8, 838: 8, 839: 8, 842: 8, 912: 8, 915: 8, 940: 8, 1614: 8, 1617: 8, 1632: 8, 1657: 8, 1658: 8, 1677: 8, 1697: 8, 1743: 8, 1759: 8, 1786: 5, 1787: 5, 1788: 8, 1809: 8, 1813: 8, 1817: 8, 1821: 8, 1840: 8, 1848: 8, 1924: 8, 1932: 8, 1952: 8, 1960: 8
}],
CAR.FORESTER: [{
2: 8, 64: 8, 65: 8, 72: 8, 73: 8, 280: 8, 281: 8, 282: 8, 290: 8, 312: 8, 313: 8, 314: 8, 315: 8, 316: 8, 326: 8, 372: 8, 544: 8, 545: 8, 546: 8, 552: 8, 554: 8, 557: 8, 576: 8, 577: 8, 722: 8, 801: 8, 802: 8, 803: 8, 805: 8, 808: 8, 811: 8, 816: 8, 826: 8, 837: 8, 838: 8, 839: 8, 842: 8, 912: 8, 915: 8, 940: 8, 961: 8, 984: 8, 1614: 8, 1617: 8, 1632: 8, 1650: 8, 1651: 8, 1657: 8, 1658: 8, 1677: 8, 1697: 8, 1698: 8, 1722: 8, 1743: 8, 1759: 8, 1787: 5, 1788: 8, 1809: 8, 1813: 8, 1817: 8, 1821: 8, 1840: 8, 1848: 8, 1924: 8, 1932: 8, 1952: 8, 1960: 8
}],
}
FW_VERSIONS = {
CAR.ASCENT: {
(Ecu.esp, 0x7b0, None): [
b'\xa5 \x19\x02\x00',
b'\xa5 !\002\000',
b'\xf1\x82\xa5 \x19\x02\x00',
],
(Ecu.eps, 0x746, None): [
b'\x85\xc0\xd0\x00',
b'\005\xc0\xd0\000',
b'\x95\xc0\xd0\x00',
],
(Ecu.fwdCamera, 0x787, None): [
b'\x00\x00d\xb9\x1f@ \x10',
b'\000\000e~\037@ \'',
b'\x00\x00e@\x1f@ $',
b'\x00\x00d\xb9\x00\x00\x00\x00',
],
(Ecu.engine, 0x7e0, None): [
b'\xbb,\xa0t\a',
b'\xf1\x82\xbb,\xa0t\x87',
b'\xf1\x82\xbb,\xa0t\a',
b'\xf1\x82\xd9,\xa0@\a',
b'\xf1\x82\xd1,\xa0q\x07',
],
(Ecu.transmission, 0x7e1, None): [
b'\x00\xfe\xf7\x00\x00',
b'\001\xfe\xf9\000\000',
b'\x01\xfe\xf7\x00\x00',
b'\xf1\x00\xa4\x10@',
],
},
CAR.IMPREZA: {
(Ecu.esp, 0x7b0, None): [
b'\x7a\x94\x3f\x90\x00',
b'\xa2 \x185\x00',
b'\xa2 \x193\x00',
b'z\x94.\x90\x00',
b'z\x94\b\x90\x01',
b'\xa2 \x19`\x00',
b'z\x94\f\x90\001',
b'z\x9c\x19\x80\x01',
b'z\x94\x08\x90\x00',
b'z\x84\x19\x90\x00',
],
(Ecu.eps, 0x746, None): [
b'\x7a\xc0\x0c\x00',
b'z\xc0\b\x00',
b'\x8a\xc0\x00\x00',
b'z\xc0\x04\x00',
b'z\xc0\x00\x00',
b'\x8a\xc0\x10\x00',
],
(Ecu.fwdCamera, 0x787, None): [
b'\x00\x00\x64\xb5\x1f\x40\x20\x0e',
b'\x00\x00d\xdc\x1f@ \x0e',
b'\x00\x00e\x1c\x1f@ \x14',
b'\x00\x00d)\x1f@ \a',
b'\x00\x00e+\x1f@ \x14',
b'\000\000e+\000\000\000\000',
b'\000\000dd\037@ \016',
b'\000\000e\002\037@ \024',
b'\x00\x00d)\x00\x00\x00\x00',
b'\x00\x00c\xf4\x00\x00\x00\x00',
],
(Ecu.engine, 0x7e0, None): [
b'\xaa\x61\x66\x73\x07',
b'\xbeacr\a',
b'\xc5!`r\a',
b'\xaa!ds\a',
b'\xaa!`u\a',
b'\xaa!dq\a',
b'\xaa!dt\a',
b'\xf1\x00\xa2\x10\t',
b'\xc5!ar\a',
b'\xbe!as\a',
b'\xc5!ds\a',
b'\xc5!`s\a',
b'\xaa!au\a',
b'\xbe!at\a',
b'\xaa\x00Bu\x07',
b'\xc5!dr\x07',
b'\xaa!aw\x07',
],
(Ecu.transmission, 0x7e1, None): [
b'\xe3\xe5\x46\x31\x00',
b'\xe4\xe5\x061\x00',
b'\xe5\xf5\x04\x00\x00',
b'\xe3\xf5G\x00\x00',
b'\xe3\xf5\a\x00\x00',
b'\xe3\xf5C\x00\x00',
b'\xe5\xf5B\x00\x00',
b'\xe5\xf5$\000\000',
b'\xe4\xf5\a\000\000',
b'\xe3\xf5F\000\000',
b'\xe4\xf5\002\000\000',
b'\xe3\xd0\x081\x00',
b'\xe3\xf5\x06\x00\x00',
b'\xf1\x00\xa4\x10@',
],
},
CAR.IMPREZA_2020: {
(Ecu.esp, 0x7b0, None): [
b'\xa2 \0314\000',
b'\xa2 \0313\000',
b'\xa2 !i\000',
b'\xa2 !`\000',
],
(Ecu.eps, 0x746, None): [
b'\x9a\xc0\000\000',
b'\n\xc0\004\000',
],
(Ecu.fwdCamera, 0x787, None): [
b'\000\000eb\037@ \"',
b'\000\000e\x8f\037@ )',
],
(Ecu.engine, 0x7e0, None): [
b'\xca!ap\a',
b'\xca!`p\a',
b'\xca!`0\a',
b'\xcc\"f0\a',
b'\xcc!fp\a',
],
(Ecu.transmission, 0x7e1, None): [
b'\xe6\xf5\004\000\000',
b'\xe6\xf5$\000\000',
b'\xe7\xf6B0\000',
b'\xe7\xf5D0\000',
],
},
CAR.FORESTER: {
(Ecu.esp, 0x7b0, None): [
b'\xa3 \030\024\000',
b'\xa3 \024\000',
b'\xa3 \031\024\000',
b'\xa3 \024\001',
],
(Ecu.eps, 0x746, None): [
b'\x8d\xc0\004\000',
],
(Ecu.fwdCamera, 0x787, None): [
b'\000\000e!\037@ \021',
b'\000\000e\x97\037@ 0',
b'\000\000e`\037@ ',
b'\xf1\x00\xac\x02\x00',
],
(Ecu.engine, 0x7e0, None): [
b'\xb6\"`A\a',
b'\xcf"`0\a',
b'\xcb\"`@\a',
b'\xcb\"`p\a',
b'\xf1\x00\xa2\x10\n',
],
(Ecu.transmission, 0x7e1, None): [
b'\032\xf6B0\000',
b'\032\xf6F`\000',
b'\032\xf6b`\000',
b'\032\xf6B`\000',
b'\xf1\x00\xa4\x10@',
],
},
CAR.FORESTER_PREGLOBAL: {
(Ecu.esp, 0x7b0, None): [
b'\x7d\x97\x14\x40',
b'\xf1\x00\xbb\x0c\x04',
],
(Ecu.eps, 0x746, None): [
b'}\xc0\x10\x00',
b'm\xc0\x10\x00',
],
(Ecu.fwdCamera, 0x787, None): [
b'\x00\x00\x64\x35\x1f\x40\x20\x09',
b'\x00\x00c\xe9\x1f@ \x03',
b'\x00\x00d\xd3\x1f@ \t'
],
(Ecu.engine, 0x7e0, None): [
b'\xba"@p\a',
b'\xa7)\xa0q\a',
b'\xf1\x82\xa7)\xa0q\a',
b'\xba"@@\a',
],
(Ecu.transmission, 0x7e1, None): [
b'\xdc\xf2\x60\x60\x00',
b'\xdc\xf2@`\x00',
b'\xda\xfd\xe0\x80\x00',
b'\xdc\xf2`\x81\000',
b'\xdc\xf2`\x80\x00',
],
},
CAR.LEGACY_PREGLOBAL: {
(Ecu.esp, 0x7b0, None): [
b'k\x97D\x00',
b'[\xba\xc4\x03',
b'{\x97D\x00',
b'[\x97D\000',
],
(Ecu.eps, 0x746, None): [
b'[\xb0\x00\x01',
b'K\xb0\x00\x01',
b'k\xb0\x00\x00',
],
(Ecu.fwdCamera, 0x787, None): [
b'\x00\x00c\xb7\x1f@\x10\x16',
b'\x00\x00c\x94\x1f@\x10\x08',
b'\x00\x00c\xec\x1f@ \x04',
],
(Ecu.engine, 0x7e0, None): [
b'\xab*@r\a',
b'\xa0+@p\x07',
b'\xb4"@0\x07',
b'\xa0"@q\a',
],
(Ecu.transmission, 0x7e1, None): [
b'\xbe\xf2\x00p\x00',
b'\xbf\xfb\xc0\x80\x00',
b'\xbd\xf2\x00`\x00',
b'\xbf\xf2\000\x80\000',
],
},
CAR.OUTBACK_PREGLOBAL: {
(Ecu.esp, 0x7b0, None): [
b'{\x9a\xac\x00',
b'k\x97\xac\x00',
b
neumerance/deploy | openstack_dashboard/dashboards/project/firewalls/views.py | Python | apache-2.0 | 10,365 | 0
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2013, Big Switch Networks, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# @author: KC Wang, Big Switch Networks
import logging
import re
from django.core.urlresolvers import reverse_lazy # noqa
from django.utils.translation import ugettext_lazy as _ # noqa
from horizon import exceptions
from horizon import forms
from horizon import messages
from horizon import tabs
from horizon import workflows
from openstack_dashboard import api
from openstack_dashboard.dashboards.project.firewalls \
import forms as fw_forms
from openstack_dashboard.dashboards.project.firewalls \
import tabs as fw_tabs
from openstack_dashboard.dashboards.project.firewalls \
import workflows as fw_workflows
InsertRuleToPolicy = fw_forms.InsertRuleToPolicy
RemoveRuleFromPolicy = fw_forms.RemoveRuleFromPolicy
UpdateFirewall = fw_forms.UpdateFirewall
UpdatePolicy = fw_forms.UpdatePolicy
UpdateRule = fw_forms.UpdateRule
FirewallDetailsTabs = fw_tabs.FirewallDetailsTabs
FirewallTabs = fw_tabs.FirewallTabs
PolicyDetailsTabs = fw_tabs.PolicyDetailsTabs
RuleDetailsTabs = fw_tabs.RuleDetailsTabs
AddFirewall = fw_workflows.AddFirewall
AddPolicy = fw_workflows.AddPolicy
AddRule = fw_workflows.AddRule
LOG = logging.getLogger(__name__)
class IndexView(tabs.TabView):
tab_group_class = (FirewallTabs)
template_name = 'project/firewalls/details_tabs.html'
def post(self, request, *args, **kwargs):
obj_ids = request.POST.getlist('object_ids')
action = request.POST['action']
obj_type = re.search('.delete([a-z]+)', action).group(1)
if not obj_ids:
obj_ids.append(re.search('([0-9a-z-]+)$', action).group(1))
if obj_type == 'rule':
for obj_id in obj_ids:
try:
api.fwaas.rule_delete(request, obj_id)
                    messages.success(request, 'Deleted rule %s' % obj_id)
except Exception as e:
exceptions.handle(request,
_('Unable to delete rule. %s' % e))
if obj_type == 'policy':
for obj_id in obj_ids:
try:
api.fwaas.policy_delete(request, obj_id)
messages.success(request, 'Deleted policy %s' % obj_id)
except Exception as e:
                    exceptions.handle(request,
_('Unable to delete policy. %s' % e))
if obj_type == 'firewall':
for obj_id in obj_ids:
try:
api.fwaas.firewall_delete(request, obj_id)
messages.success(request, 'Deleted firewall %s' % obj_id)
except Exception as e:
exceptions.handle(request,
_('Unable to delete firewall. %s' % e))
return self.get(request, *args, **kwargs)
class AddRuleView(workflows.WorkflowView):
workflow_class = AddRule
template_name = "project/firewalls/addrule.html"
class AddPolicyView(workflows.WorkflowView):
workflow_class = AddPolicy
template_name = "project/firewalls/addpolicy.html"
class AddFirewallView(workflows.WorkflowView):
workflow_class = AddFirewall
template_name = "project/firewalls/addfirewall.html"
class RuleDetailsView(tabs.TabView):
tab_group_class = (RuleDetailsTabs)
template_name = 'project/firewalls/details_tabs.html'
class PolicyDetailsView(tabs.TabView):
tab_group_class = (PolicyDetailsTabs)
template_name = 'project/firewalls/details_tabs.html'
class FirewallDetailsView(tabs.TabView):
tab_group_class = (FirewallDetailsTabs)
template_name = 'project/firewalls/details_tabs.html'
class UpdateRuleView(forms.ModalFormView):
form_class = UpdateRule
template_name = "project/firewalls/updaterule.html"
context_object_name = 'rule'
success_url = reverse_lazy("horizon:project:firewalls:index")
def get_context_data(self, **kwargs):
context = super(UpdateRuleView, self).get_context_data(**kwargs)
context['rule_id'] = self.kwargs['rule_id']
obj = self._get_object()
if obj:
context['name'] = obj.name
return context
def _get_object(self, *args, **kwargs):
if not hasattr(self, "_object"):
rule_id = self.kwargs['rule_id']
try:
self._object = api.fwaas.rule_get(self.request, rule_id)
self._object.set_id_as_name_if_empty()
except Exception:
redirect = self.success_url
msg = _('Unable to retrieve rule details.')
exceptions.handle(self.request, msg, redirect=redirect)
return self._object
def get_initial(self):
rule = self._get_object()
initial = rule.get_dict()
return initial
class UpdatePolicyView(forms.ModalFormView):
form_class = UpdatePolicy
template_name = "project/firewalls/updatepolicy.html"
context_object_name = 'policy'
success_url = reverse_lazy("horizon:project:firewalls:index")
def get_context_data(self, **kwargs):
context = super(UpdatePolicyView, self).get_context_data(**kwargs)
context["policy_id"] = self.kwargs['policy_id']
obj = self._get_object()
if obj:
context['name'] = obj.name
return context
def _get_object(self, *args, **kwargs):
if not hasattr(self, "_object"):
policy_id = self.kwargs['policy_id']
try:
self._object = api.fwaas.policy_get(self.request, policy_id)
self._object.set_id_as_name_if_empty()
except Exception:
redirect = self.success_url
msg = _('Unable to retrieve policy details.')
exceptions.handle(self.request, msg, redirect=redirect)
return self._object
def get_initial(self):
policy = self._get_object()
initial = policy.get_dict()
return initial
class UpdateFirewallView(forms.ModalFormView):
form_class = UpdateFirewall
template_name = "project/firewalls/updatefirewall.html"
context_object_name = 'firewall'
success_url = reverse_lazy("horizon:project:firewalls:index")
def get_context_data(self, **kwargs):
context = super(UpdateFirewallView, self).get_context_data(**kwargs)
context["firewall_id"] = self.kwargs['firewall_id']
obj = self._get_object()
if obj:
context['name'] = obj.name
return context
def _get_object(self, *args, **kwargs):
if not hasattr(self, "_object"):
firewall_id = self.kwargs['firewall_id']
try:
self._object = api.fwaas.firewall_get(self.request,
firewall_id)
self._object.set_id_as_name_if_empty()
except Exception:
redirect = self.success_url
msg = _('Unable to retrieve firewall details.')
exceptions.handle(self.request, msg, redirect=redirect)
return self._object
def get_initial(self):
firewall = self._get_object()
initial = firewall.get_dict()
return initial
class InsertRuleToPolicyView(forms.ModalFormView):
form_class = InsertRuleToPolicy
template_name = "project/firewalls/insert_rule_to_policy.html"
context_object_name = 'policy'
success_url = reverse_lazy("horizon:project:firewalls:index")
def get_context_data(self, **kwargs):
        context = super(InsertRuleToPolicyView, self).get_context_data(**kwargs)
BackupTheBerlios/pixies-svn | pixies/reportlab/pdfgen/textobject.py | Python | gpl-2.0 | 14,031 | 0.005274
#Copyright ReportLab Europe Ltd. 2000-2004
#see license.txt for license details
#history http://www.reportlab.co.uk/cgi-bin/viewcvs.cgi/public/reportlab/trunk/reportlab/pdfgen/textobject.py
__version__=''' $Id$ '''
__doc__="""
PDFTextObject is an efficient way to add text to a Canvas. Do not
instantiate directly, obtain one from the Canvas instead.
Progress Reports:
8.83, 2000-01-13, gmcm:
created from pdfgen.py
"""
import string
from types import *
from reportlab.lib import colors
from reportlab.lib.colors import ColorType
from reportlab.lib.utils import fp_str
from reportlab.pdfbase import pdfmetrics
_SeqTypes=(TupleType,ListType)
class PDFTextObject:
"""PDF logically separates text and graphics drawing; text
operations need to be bracketed between BT (Begin text) and
ET operators. This class ensures text operations are
    properly encapsulated. Ask the canvas for a text object
with beginText(x, y). Do not construct one directly.
Do not use multiple text objects in parallel; PDF is
not multi-threaded!
It keeps track of x and y coordinates relative to its origin."""
def __init__(self, canvas, x=0,y=0):
self._code = ['BT'] #no point in [] then append RGB
self._canvas = canvas #canvas sets this so it has access to size info
self._fontname = self._canvas._fontname
self._fontsize = self._canvas._fontsize
self._leading = self._canvas._leading
font = pdfmetrics.getFont(self._fontname)
self._dynamicFont = getattr(font, '_dynamicFont', 0)
self._curSubset = -1
self.setTextOrigin(x, y)
def getCode(self):
"pack onto one line; used internally"
self._code.append('ET')
return string.join(self._code, ' ')
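    # Editor's note (illustrative, not from the original source): for a text
    # object created at (72, 720) on a bottom-up canvas, getCode() returns
    # the operator stream 'BT 1 0 0 1 72 720 Tm ET'.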
def setTextOrigin(self, x, y):
if self._canvas.bottomup:
self._code.append('1 0 0 1 %s Tm' % fp_str(x, y)) #bottom up
else:
self._code.append('1 0 0 -1 %s Tm' % fp_str(x, y)) #top down
# The current cursor position is at the text origin
self._x0 = self._x = x
self._y0 = self._y = y
def setTextTransform(self, a, b, c, d, e, f):
"Like setTextOrigin, but does rotation, scaling etc."
if not self._canvas.bottomup:
c = -c #reverse bottom row of the 2D Transform
d = -d
self._code.append('%s Tm' % fp_str(a, b, c, d, e, f))
# The current cursor position is at the text origin Note that
# we aren't keeping track of all the transform on these
# coordinates: they are relative to the rotations/sheers
# defined in the matrix.
self._x0 = self._x = e
self._y0 = self._y = f
def moveCursor(self, dx, dy):
"""Starts a new line at an offset dx,dy from the start of the
current line. This does not move the cursor relative to the
current position, and it changes the current offset of every
future line drawn (i.e. if you next do a textLine() call, it
will move the cursor to a position one line lower than the
        position specified in this call)."""
# Check if we have a previous move cursor call, and combine
# them if possible.
if self._code and self._code[-1][-3:]==' Td':
L = string.split(self._code[-1])
if len(L)==3:
del self._code[-1]
else:
self._code[-1] = string.join(L[:-4])
# Work out the last movement
lastDx = float(L[-3])
lastDy = float(L[-2])
            # Combine the two movements
dx += lastDx
dy -= lastDy
# We will soon add the movement to the line origin, so if
# we've already done this for lastDx, lastDy, remove it
# first (so it will be right when added back again).
self._x0 -= lastDx
self._y0 -= lastDy
# Output the move text cursor call.
self._code.append('%s Td' % fp_str(dx, -dy))
# Keep track of the new line offsets and the cursor position
self._x0 += dx
self._y0 += dy
        self._x = self._x0
self._y = self._y0
def setXPos(self, dx):
"""Starts a new line dx away from the start of the
current line - NOT from the current point! So if
you call it in mid-sentence, watch out."""
self.moveCursor(dx,0)
def getCursor(self):
"""Returns current text position relative to the last origin."""
return (self._x, self._y)
def getStartOfLine(self):
"""Returns a tuple giving the text position of the start of the
current line."""
return (self._x0, self._y0)
def getX(self):
"""Returns current x position relative to the last origin."""
return self._x
def getY(self):
"""Returns current y position relative to the last origin."""
return self._y
def _setFont(self, psfontname, size):
"""Sets the font and fontSize
Raises a readable exception if an illegal font
is supplied. Font names are case-sensitive! Keeps track
        of font name and size for metrics."""
self._fontname = psfontname
self._fontsize = size
font = pdfmetrics.getFont(self._fontname)
self._dynamicFont = getattr(font, '_dynamicFont', 0)
if self._dynamicFont:
self._curSubset = -1
else:
pdffontname = self._canvas._doc.getInternalFontName(psfontname)
self._code.append('%s %s Tf' % (pdffontname, fp_str(size)))
def setFont(self, psfontname, size, leading = None):
"""Sets the font. If leading not specified, defaults to 1.2 x
font size. Raises a readable exception if an illegal font
is supplied. Font names are case-sensitive! Keeps track
        of font name and size for metrics."""
self._fontname = psfontname
self._fontsize = size
if leading is None:
leading = size * 1.2
self._leading = leading
font = pdfmetrics.getFont(self._fontname)
self._dynamicFont = getattr(font, '_dynamicFont', 0)
if self._dynamicFont:
self._curSubset = -1
else:
pdffontname = self._canvas._doc.getInternalFontName(psfontname)
self._code.append('%s %s Tf %s TL' % (pdffontname, fp_str(size), fp_str(leading)))
def setCharSpace(self, charSpace):
"""Adjusts inter-character spacing"""
self._charSpace = charSpace
self._code.append('%s Tc' % fp_str(charSpace))
def setWordSpace(self, wordSpace):
"""Adjust inter-word spacing. This can be used
to flush-justify text - you get the width of the
words, and add some space between them."""
self._wordSpace = wordSpace
self._code.append('%s Tw' % fp_str(wordSpace))
def setHorizScale(self, horizScale):
"Stretches text out horizontally"
self._horizScale = 100 + horizScale
self._code.append('%s Tz' % fp_str(horizScale))
def setLeading(self, leading):
"How far to move down at the end of a line."
self._leading = leading
self._code.append('%s TL' % fp_str(leading))
def setTextRenderMode(self, mode):
"""Set the text rendering mode.
0 = Fill text
1 = Stroke text
2 = Fill then stroke
3 = Invisible
4 = Fill text and add to clipping path
5 = Stroke text and add to clipping path
6 = Fill then stroke and add to clipping path
7 = Add to clipping path"""
assert mode in (0,1,2,3,4,5,6,7), "mode must be in (0,1,2,3,4,5,6,7)"
self._textRenderMode = mode
self._code.append('%d Tr' % mode)
def setRise(self, rise):
"Move text baseline up or down to allow superscrip/subscripts"
self._rise = rise
self._y = self._y - rise # + ? _textLineMatrix?
self._code.append('%s Ts' % fp_str(rise))
def setStrokeColorRGB(self, r, g, b):
self._strokeColorRGB = (r, g, b)
self._code.append('%s RG' % fp_str(r,g,b))
def setF
takeshineshiro/nova | nova/tests/functional/v3/test_console_auth_tokens.py | Python | apache-2.0 | 2,711 | 0
# Copyright 2013 Cloudbase Solutions Srl
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import re
from oslo_config import cfg
from oslo_serialization import jsonutils
from nova.tests.functional.v3 import test_servers
CONF = cfg.CONF
CONF.import_opt('osapi_compute_extension',
'nova.api.openstack.compute.legacy_v2.extensions')
class ConsoleAuthTokensSampleJsonTests(test_servers.ServersSampleBase):
ADMIN_API = True
extension_name = "os-conso
|
le-auth-tokens"
extra_extensions_to_load = ["os-remote-consoles", "os-access-ips"]
# TODO(gmann): Overriding '_api_version' till all functional tests
# are merged between v2 and v2.1. After that base class variable
# itself can be changed to 'v2'
_api_version = 'v2'
def _get_flags(self):
f = super(ConsoleAuthTokensSampleJsonTests, self)._get_flags()
f['osapi_compute_extension'] = CONF.osapi_compute_extension[:]
f['osapi_compute_extension'].append(
'nova.api.openstack.compute.contrib.consoles.Consoles')
f['osapi_compute_extension'].append(
'nova.api.openstack.compute.contrib.console_auth_tokens.'
'Console_auth_tokens')
return f
def _get_console_url(self, data):
return jsonutils.loads(data)["console"]["url"]
def _get_console_token(self, uuid):
response = self._do_post('servers/%s/action' % uuid,
'get-rdp-console-post-req',
{'action': 'os-getRDPConsole'})
url = self._get_console_url(response.content)
return re.match('.+?token=([^&]+)', url).groups()[0]
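    # Editor's note (illustrative, not from the original source): for a
    # console URL like "http://example.net:6083/?token=abc123&title=vm" the
    # regex above extracts "abc123" as the token.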
def test_get_console_connect_info(self):
self.flags(enabled=True, group='rdp')
uuid = self._post_server()
token = self._get_console_token(uuid)
response = self._do_get('os-console-auth-tokens/%s' % token)
subs = self._get_regexes()
subs["uuid"] = uuid
subs["host"] = r"[\w\.\-]+"
subs["port"] = "[0-9]+"
subs["internal_access_path"] = ".*"
self._verify_response('get-console-connect-info-get-resp', subs,
response, 200)
khizkhiz/swift | utils/split_file.py | Python | apache-2.0 | 1,030 | 0.000971
#!/usr/bin/env python
"""
split_file.py [-o <dir>] <path>
Take the file at <path> and write it to multiple files, switching to a new file
every time an annotation of the form "// BEGIN file1.swift" is encountered. If
<dir> is specified, place the files in <dir>; otherwise, put them in the
current directory.
"""
import getopt
import os
import re
import sys
def usage():
sys.stderr.write(__doc__.strip() + "\n")
sys.exit(1)
fp_out = None
dest_dir = '.'
try:
opts, args = getopt.getopt(sys.argv[1:], 'o:h')
for (opt, arg) in opts:
if opt == '-o':
dest_dir = arg
elif opt == '-h':
usage()
except getopt.GetoptError:
usage()
if len(args) != 1:
usage()
fp_in = open(args[0], 'r')
for line in fp_in:
m = re.match(r'^//\s*BEGIN\s+([^\s]+)\s*$', line)
if m:
if fp_out:
fp_out.close()
fp_out = open(os.path.join(dest_dir, m.group(1)), 'w')
elif fp_out:
fp_out.write(line)
fp_in.close()
if fp_out:
fp_out.close()
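# --- Editor's illustrative sketch (not part of the original file) ---
# Example input (file names are hypothetical): given input.txt containing
#
#   // BEGIN a.swift
#   func a() {}
#   // BEGIN b.swift
#   func b() {}
#
# `split_file.py -o out input.txt` writes out/a.swift and out/b.swift, each
# receiving the lines that follow its BEGIN marker.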
crazyleen/msp430-gdb-7.2a | gdb/testsuite/gdb.python/py-prettyprint.py | Python | gpl-2.0 | 7,344 | 0.012527
# Copyright (C) 2008, 2009, 2010 Free Software Foundation, Inc.
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# This file is part of the GDB testsuite. It tests python pretty
# printers.
import re
# Test returning a Value from a printer.
class string_print:
def __init__(self, val):
self.val = val
def to_string(self):
return self.val['whybother']['contents']
# Test a class-based printer.
class ContainerPrinter:
class _iterator:
def __init__ (self, pointer, len):
self.start = pointer
self.pointer = pointer
self.end = pointer + len
def __iter__(self):
return self
def next(self):
if self.pointer == self.end:
raise StopIteration
result = self.pointer
self.pointer = self.pointer + 1
return ('[%d]' % int (result - self.start), result.dereference())
def __init__(self, val):
self.val = val
def to_string(self):
return 'container %s with %d elements' % (self.val['name'], self.val['len'])
def children(self):
return self._iterator(self.val['elements'], self.val['len'])
# Test a printer where to_string is None
class NoStringContainerPrinter:
class _iterator:
def __init__ (self, pointer, len):
self.start = pointer
self.pointer = pointer
self.end = pointer + len
def __iter__(self):
return self
def next(self):
if self.pointer == self.end:
raise StopIteration
result = self.pointer
self.pointer = self.pointer + 1
return ('[%d]' % int (result - self.start), result.dereference())
def __init__(self, val):
self.val = val
def to_string(self):
return None
def children(self):
return self._iterator(self.val['elements'], self.val['len'])
class pp_s:
def __init__(self, val):
self.val = val
def to_string(self):
a = self.val["a"]
b = self.val["b"]
if a.address != b:
raise Exception("&a(%s) != b(%s)" % (str(a.address), str(b)))
return " a=<" + str(self.val["a"]) + "> b=<" + str(self.val["b"]) + ">"
class pp_ss:
def __init__(self, val):
self.val = val
def to_string(self):
return "a=<" + str(self.val["a"]) + "> b=<" + str(self.val["b"]) + ">"
class pp_sss:
def __init__(self, val):
self.val = val
def to_string(self):
return "a=<" + str(self.val['a']) + "> b=<" + str(self.val["b"]) + ">"
class pp_multiple_virtual:
def __init__ (self, val):
self.val = val
def to_string (self):
return "pp value variable is: " + str (self.val['value'])
class pp_vbase1:
def __init__ (self, val):
self.val = val
def to_string (self):
return "pp class name: " + self.val.type.tag
class pp_nullstr:
def __init__(self, val):
self.val = val
def to_string(self):
return self.val['s'].string(gdb.target_charset())
class pp_ns:
"Print a std::basic_string of some kind"
def __init__(self, val):
self.val = val
def to_string(self):
len = self.val['length']
return self.val['null_str'].string (gdb.target_charset(), length = len)
def display_hint (self):
return 'string'
class pp_ls:
"Print a std::basic_string of some kind"
def __init__(self, val):
self.val = val
def to_string(self):
return self.val['lazy_str'].lazy_string()
def display_hint (self):
return 'string'
class pp_outer:
"Print struct outer"
def __init__ (self, val):
self.val = val
def to_string (self):
return "x = %s" % self.val['x']
def children (self):
yield 's', self.val['s']
yield 'x', self.val['x']
def lookup_function (val):
"Look-up and return a pretty-printer that can print val."
# Get the type.
type = val.type
# If it points to a reference, get the reference.
if type.code == gdb.TYPE_CODE_REF:
type = type.target ()
# Get the unqualified type, stripped of typedefs.
type = type.unqualified ().strip_typedefs ()
# Get the type name.
typename = type.tag
    if typename is None:
return None
# Iterate over local dictionary of types to determine
# if a printer is registered for that type. Return an
# instantiation of the printer if found.
for function in pretty_printers_dict:
if function.match (typename):
return pretty_printers_dict[function] (val)
# Cannot find a pretty printer. Return None.
return None
def disable_lookup_function ():
lookup_function.enabled = False
def enable_lookup_function ():
lookup_function.enabled = True
def register_pretty_printers ():
pretty_printers_dict[re.compile ('^struct s$')] = pp_s
pretty_printers_dict[re.compile ('^s$')] = pp_s
pretty_printers_dict[re.compile ('^S$')] = pp_s
pretty_printers_dict[re.compile ('^struct ss$')] = pp_ss
pretty_printers_dict[re.compile ('^ss$')] = pp_ss
pretty_printers_dict[re.compile ('^const S &$')] = pp_s
pretty_printers_dict[re.compile ('^SSS$')] = pp_sss
pretty_printers_dict[re.compile ('^VirtualTest$')] = pp_multiple_virtual
pretty_printers_dict[re.compile ('^Vbase1$')] = pp_vbase1
pretty_printers_dict[re.compile ('^struct nullstr$')] = pp_nullstr
pretty_printers_dict[re.compile ('^nullstr$')] = pp_nullstr
# Note that we purposely omit the typedef names here.
# Printer lookup is based on canonical name.
# However, we do need both tagged and untagged variants, to handle
# both the C and C++ cases.
pretty_printers_dict[re.compile ('^struct string_repr$')] = string_print
    pretty_printers_dict[re.compile ('^struct container$')] = ContainerPrinter
    pretty_printers_dict[re.compile ('^struct justchildren$')] = NoStringContainerPrinter
pretty_printers_dict[re.compile ('^string_repr$')] = string_print
pretty_printers_dict[re.compile ('^container$')] = ContainerPrinter
pretty_printers_dict[re.compile ('^justchildren$')] = NoStringContainerPrinter
pretty_printers_dict[re.compile ('^struct ns$')] = pp_ns
pretty_printers_dict[re.compile ('^ns$')] = pp_ns
pretty_printers_dict[re.compile ('^struct lazystring$')] = pp_ls
pretty_printers_dict[re.compile ('^lazystring$')] = pp_ls
pretty_printers_dict[re.compile ('^struct outerstruct$')] = pp_outer
pretty_printers_dict[re.compile ('^outerstruct$')] = pp_outer
pretty_printers_dict = {}
register_pretty_printers ()
gdb.pretty_printers.append (lookup_function)
playandbuild/scrapy-history-middleware | history/middleware.py | Python | mit | 3,854 | 0.002076
from datetime import datetime
from parsedatetime import parsedatetime, Constants
from scrapy import signals
from scrapy.xlib.pydispatch import dispatcher
from scrapy.exceptions import NotConfigured, IgnoreRequest
from scrapy.utils.misc import load_object
class HistoryMiddleware(object):
DATE_FORMAT = '%Y%m%d'
def __init__(self, crawler):
self.stats = crawler.stats
settings = crawler.settings
history = settings.get('HISTORY', None)
if not history:
raise NotConfigured()
# EPOCH:
# == False: don't retrieve historical data
# == True : retrieve most recent version
# == datetime(): retrieve next version after datetime()
self.epoch = self.parse_epoch(settings.get('EPOCH', False))
self.retrieve_if = load_object(history.get(
'RETRIEVE_IF', 'history.logic.RetrieveNever'))(settings)
self.store_if = load_object(history.get(
'STORE_IF', 'history.logic.StoreAlways'))(settings)
self.storage = load_object(history.get(
'BACKEND', 'history.storage.S3CacheStorage'))(settings)
self.ignore_missing = settings.getbool('HTTPCACHE_IGNORE_MISSING')
dispatcher.connect(self.spider_opened, signal=signals.spider_opened)
dispatcher.connect(self.spider_closed, signal=signals.spider_closed)
@classmethod
def from_crawler(cls, crawler):
return cls(crawler)
def spider_opened(self, spider):
self.storage.open_spider(spider)
self.store_if.spider_opened(spider)
self.retrieve_if.spider_opened(spider)
def spider_closed(self, spider):
self.storage.close_spider(spider)
self.store_if.spider_closed(spider)
self.retrieve_if.spider_closed(spider)
def process_request(self, request, spider):
"""
A request is approaching the Downloader.
Decide if we would like to intercept the request and supply a
response ourselves.
"""
if self.epoch and self.retrieve_if(spider, request):
request.meta['epoch'] = self.epoch
response = self.storage.retrieve_response(spider, request)
if response:
response.flags.append('historic')
return response
elif self.ignore_missing:
raise IgnoreRequest("Ignored; request not in history: %s" % request)
def process_response(self, request, response, spider):
"""
        A response is leaving the Downloader. It was either retrieved
from the web or from another middleware.
Decide if we would like to store it in the history.
"""
if self.store_if(spider, request, response):
self.storage.store_response(spider, request, response)
self.stats.set_value('history/cached', True, spider=spider)
return response
def parse_epoch(self, epoch):
"""
bool => bool
datetime => datetime
str => datetime
"""
if isinstance(epoch, bool) or isinstance(epoch, datetime):
            return epoch
elif epoch == 'True':
return True
elif epoch == 'False':
return False
try:
return datetime.strptime(epoch, self.DATE_FORMAT)
except ValueError:
pass
        parser = parsedatetime.Calendar(Constants())
        time_tuple = parser.parse(epoch)  # 'yesterday' => (time.struct_time, int)
        if not time_tuple[1]:
            raise NotConfigured('Could not parse epoch: %s' % epoch)
        time_struct = time_tuple[0]  #=> time.struct_time(tm_year=2012, tm_mon=4, tm_mday=7, tm_hour=22, tm_min=8, tm_sec=6, tm_wday=5, tm_yday=98, tm_isdst=-1)
        return datetime(*time_struct[:6])  #=> datetime.datetime(2012, 4, 7, 22, 8, 6)
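    # --- Editor's illustrative sketch (not part of the original module) ---
    # Hedged examples of the conversions above, where `mw` stands for a
    # hypothetical HistoryMiddleware instance:
    #
    #   mw.parse_epoch(True)         # -> True (retrieve most recent version)
    #   mw.parse_epoch('20120407')   # -> datetime(2012, 4, 7) via DATE_FORMAT
    #   mw.parse_epoch('yesterday')  # -> yesterday's datetime via parsedatetime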
d120/pyofahrt | workshops/migrations/0028_auto_20161110_1931.py | Python | agpl-3.0 | 795 | 0
# -*- coding: utf-8 -*-
# Generated by Django 1.10.2 on 2016-11-10 18:31
from __future__ import unicode_literals
import datetime
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('workshops', '0027_auto_20161110_0311'),
]
operations = [
migrations.AlterField(
model_name='slot',
name='begin',
            field=models.DateTimeField(
                default=datetime.datetime(2016, 11, 11, 0, 0),
verbose_name='Start'),
),
migrations.AlterField(
model_name='slot',
name='end',
field=models.DateTimeField(
default=datetime.datetime(2016, 11, 11, 0, 0),
verbose_name='Ende'),
),
]
ANR-COMPASS/shesha | shesha/constants.py | Python | gpl-3.0 | 5,648 | 0.003718
## @package shesha.constants
## @brief Numerical constants for shesha and config enumerations for safe-typing
## @author COMPASS Team <https://github.com/ANR-COMPASS>
## @version 5.2.1
## @date 2022/01/24
## @copyright GNU Lesser General Public License
#
# This file is part of COMPASS <https://anr-compass.github.io/compass/>
#
# Copyright (C) 2011-2022 COMPASS Team <https://github.com/ANR-COMPASS>
# All rights reserved.
# Distributed under GNU - LGPL
#
# COMPASS is free software: you can redistribute it and/or modify it under the terms of the GNU Lesser
# General Public License as published by the Free Software Foundation, either version 3 of the License,
# or any later version.
#
# COMPASS: End-to-end AO simulation tool using GPU acceleration
# The COMPASS platform was designed to meet the need of high-performance for the simulation of AO systems.
#
# The final product includes a software package for simulating all the critical subcomponents of AO,
# particularly in the context of the ELT and a real-time core based on several control approaches,
# with performances consistent with its integration into an instrument. Taking advantage of the specific
# hardware architecture of the GPU, the COMPASS tool allows to achieve adequate execution speeds to
# conduct large simulation campaigns called to the ELT.
#
# The COMPASS platform can be used to carry a wide variety of simulations to both test specific components
# of AO of the E-ELT (such as wavefront analysis device with a pyramid or elongated Laser star), and
# various systems configurations such as multi-conjugate AO.
#
# COMPASS is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the
# implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License along with COMPASS.
# If not, see <https://www.gnu.org/licenses/lgpl-3.0.txt>.
import numpy as np
from aenum import MultiValueEnum
class CONST:
RAD2ARCSEC = 3600. * 360. / (2 * np.pi)
ARCSEC2RAD = 2. * np.pi / (360. * 3600.)
RAD2DEG = 180. / np.pi
DEG2RAD = np.pi / 180.
def check_enum(cls, name):
"""
Create a safe-type enum instance from bytes contents
"""
    if not isinstance(name, str) or \
            name not in vars(cls).values():
raise ValueError("Invalid enumeration value for enum %s, value %s" % (cls, name))
return name
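# --- Editor's illustrative sketch (not part of the original file) ---
# Hedged usage of check_enum with the string-holder classes defined below:
#
#   check_enum(DmType, 'pzt')  # -> 'pzt' (valid value, returned unchanged)
#   check_enum(DmType, 'foo')  # raises ValueError (not a DmType value)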
class DmType:
"""
Types of deformable mirrors
"""
PZT = 'pzt'
TT = 'tt'
KL = 'kl'
class PatternType:
"""
Types of Piezo DM patterns
"""
SQUARE = 'square'
HEXA = 'hexa'
HEXAM4 = 'hexaM4'
class KLType:
"""
Possible KLs for computations
"""
KOLMO = 'kolmo'
KARMAN = 'karman'
class InfluType:
"""
Influence function types
"""
DEFAULT = 'default'
RADIALSCHWARTZ = 'radialSchwartz'
SQUARESCHWARTZ = 'squareSchwartz'
BLACKNUTT = 'blacknutt'
GAUSSIAN = 'gaussian'
BESSEL = 'bessel'
PETAL = 'petal'
class ControllerType:
"""
Controller types
"""
GENERIC = 'generic'
GENERIC_LINEAR = 'generic_linear'
LS = 'ls'
MV = 'mv'
CURED = 'cured'
GEO = 'geo'
class CommandLawType:
"""
Command law types for generic controller only
"""
INTEGRATOR = 'integrator'
MODAL_INTEGRATOR = 'modal_integrator'
TWO_MATRICES = '2matrices'
class CentroiderType:
"""
Centroider types
"""
COG = 'cog'
TCOG = 'tcog'
WCOG = 'wcog'
BPCOG = 'bpcog'
CORR = 'corr'
PYR = 'pyr'
MASKEDPIX = 'maskedpix'
class CentroiderFctType:
MODEL = 'model'
GAUSS = 'gauss'
class PyrCentroiderMethod:
"""
Pyramid centroider methods
Local flux normalization (eq SH quad-cell, ray optics. Ragazzonni 1996)
Global flux normalization (Verinaud 2004, most > 2010 Pyr applications)
Resulting (A+/-B-/+C-D)/(A+B+C+D) or sin((A+/-B-/+C-D)/(A+B+C+D))
ref. code sutra_centroider_pyr.h
"""
NOSINUSGLOBAL = 0
SINUSGLOBAL = 1
NOSINUSLOCAL = 2
SINUSLOCAL = 3
OTHER = 4
class WFSType:
"""
WFS Types
"""
SH = 'sh'
PYRHR = 'pyrhr'
PYRLR = 'pyrlr'
class TargetImageType:
"""
Target Images
"""
SE = 'se'
LE = 'le'
class ApertureType:
"""
Telescope apertures
"""
GENERIC = 'Generic'
EELT_NOMINAL = 'EELT-Nominal' # Alexis Carlotti method
EELT = 'EELT' # E. Gendron method
EELT_BP1 = 'EELT-BP1'
EELT_BP3 = 'EELT-BP3'
EELT_BP5 = 'EELT-BP5'
EELT_CUSTOM = 'EELT-Custom'
VLT = 'VLT'
KECK = 'keck'
class SpiderType:
"""
Spiders
"""
FOUR = 'four'
SIX = 'six'
class ProfType:
"""
Sodium profile for LGS
"""
GAUSS1 = 'Gauss1'
GAUSS2 = 'Gauss2'
GAUSS3 = 'Gauss3'
EXP = 'Exp'
MULTIPEAK = 'Multipeak'
FILES = dict({
GAUSS1: "allProfileNa_withAltitude_1Gaussian.npy",
GAUSS2: "allProfileNa_withAltitude_2Gaussian.npy",
GAUSS3: "allProfileNa_withAltitude_3Gaussian.npy",
EXP: "allProfileNa_withAltitude.npy",
MULTIPEAK: "multipeakProfileNa_withAltitude.npy"
})
class FieldStopType:
"""
WFS field stop
"""
SQUARE = 'square'
ROUND = 'round'
class PupilType(MultiValueEnum):
"""Compass pupil enumeration
"""
SPUPIL = "spupil", "s"
MPUPIL = "mpupil", "m"
IPUPIL = "ipupil", "i"
ufieeehw/IEEE2015 | ros/ieee2015_simulator/nodes/mecanum_simulation.py | Python | gpl-2.0 | 7,968 | 0.009538
#!/usr/bin/python
from __future__ import division
## Math
import numpy as np
import math
## Display
import pygame
import time
## Ros
import rospy
import os, sys
from geometry_msgs.msg import Twist, Pose, PoseStamped, Point, Quaternion
from tf import transformations as tf_trans
from std_msgs.msg import Header
from ieee2015_msgs.msg import Mecanum
from xmega_connector.srv import *
#constants
SCREEN_DIM = (700, 350)
fpath = os.path.dirname(os.path.realpath(__file__))
background = pygame.image.load(os.path.join(fpath, "stage.jpg"))
background = pygame.transform.scale(background, SCREEN_DIM)
rect = background.get_rect()
PXL_PER_METER = 50
#3779.527559055 #Change the number to the correct value!
"""Dimensions of the IEEE Competition
Course:
4 ft. x 8 ft.
1.2192 m x 1.8288 m
Robot:
1 ft. x 1 ft.
0.3048 m x 0.3048 m"""
dt = .5
radius = 10
ORIGIN = np.array([SCREEN_DIM[0]/2.0, SCREEN_DIM[1]/2.0])
## Function to calculate the cross-torque of an array of velocities (top left, top right, bottom left, bottom right)
def crosstorque(velocity):
forcex = 0.0
forcey = 0.0
forcex += velocity[0] * np.sin(np.pi / 4)
forcex += velocity[1] * np.sin(np.pi / 4)
forcex += velocity[2] * np.sin(np.pi / 4)
forcex += velocity[3] * np.sin(np.pi / 4)
# N = kg m/s^2 = m/s * (r)
# M_i =
forcey += velocity[0] * np.sin(np.pi / 4)
forcey -= velocity[1] * np.sin(np.pi / 4)
forcey += velocity[2] * np.sin(np.pi / 4)
forcey -= velocity[3] * np.sin(np.pi / 4)
return np.array([forcex, forcey])
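# Editor's worked example (not part of the original file): with equal wheel
# velocities, e.g. crosstorque([1, 1, 1, 1]), the x terms sum to
# 4*sin(pi/4) ~= 2.83 while the alternating signs cancel forcey to 0, so the
# robot translates straight along x.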
## Class to define a robot object that moves with velocity, acceleration, and force
class Robot(object):
def __init__(self, (x, y), height, width):
self.position = np.array([x, y], dtype=np.float32)
self.position += [0, (150 - height)]
self.velocity = np.array([0, 0], dtype=np.float32)
self.acceleration = np.array([0, 0], dtype=np.float32)
self.force = np.array([0, 0], dtype=np.float32)
# Implementation for rotation of object
self.angle = 0
self.omega = 0
self.alpha = 0
self.mecanum = np.array([0, 0, 0, 0], dtype=np.float32)
rospy.init_node('simulation', anonymous=True)
self.pose_pub = rospy.Publisher('pose', PoseStamped, queue_size=10)
self.height = height
self.width = width
a = +(self.width / 2)
b = -(self.width / 2)
c = +(self.height / 2)
d = -(self.height / 2)
self.pointlist = map(lambda vector: np.array(vector, dtype=np.float32), [[b, d], [a, d], [a, c], [b, c]])
        rospy.Service('/xmega_connector/set_wheel_speeds', SetWheelSpeeds, self.set_wheel_speed_service)
def set_wheel_speed_service(self, ws_req):
if abs(ws_req.wheel1) > .00000000001 and abs(ws_req.wheel2) > .00000000000001 and abs(ws_req.wheel3) > .00000000000001 and abs(ws_req.wheel4) > .00000000000001:
self.mecanum[0] = ws_req.wheel1
self.mecanum[1] = ws_req.wheel2
self.mecanum[2] = ws_req.wheel3
self.mecanum[3] = ws_req.wheel4
else:
self.mecanum[0] = 0
self.mecanum[1] = 0
self.mecanum[2] = 0
self.mecanum[3] = 0
#print('Wheel speeds set')
return SetWheelSpeedsResponse()
def update(self):
self.publish_pose()
# Update velocity and position
#self.position[0] += self.velocity[0] * dt
#self.position[1] += self.velocity[1] * dt
#self.velocity += self.acceleration * dt
self.position += self.force * dt
#self.acceleration = self.force
# Update rotation of object
self.angle += self.omega * dt
self.omega += self.alpha * dt
self.force = crosstorque(self.mecanum)
# Makes sure the object stays in the window
if self.position[0] + (self.width / 2) >= 700 or self.position[0] - (self.width / 2) <= 0:
self.velocity[0] *= -1
if self.position[1] + (self.height / 2) >= 350 or self.position[1] - (self.height / 2) <= 0:
self.velocity[1] *= -1
def rotate(self):
# Rotates the object by some angle, in degrees, clockwise
radangle = math.radians(self.angle)
        rotmatrix = np.matrix([[math.cos(radangle), -math.sin(radangle)], [math.sin(radangle), math.cos(radangle)]])
templist = []
"""one = np.matrix([
[1, 0, 50],
[0, 1, 0],
[0, 0, 1]])
two = np.matrix([[math.cos(radangle), -math.sin(radangle), 0], [math.sin(radangle), math.cos(radangle), 0], [0, 0, 1]])
three = np.matrix([
[1, 0, -50],
[0, 1, 0],
[0, 0, 1]])
rotmatrix = one * two * three"""
for point in self.pointlist:
matpoint = np.matrix(point).T
matpoint = rotmatrix * matpoint
point = np.array(matpoint.T)[0]
templist.append(point)
self.pointlist = templist
def publish_pose(self):
'''Publish Pose'''
_orientation = tf_trans.quaternion_from_euler(0, 0, self.angle)
self.pose_pub.publish(
PoseStamped(
header = Header(
stamp=rospy.Time.now(),
frame_id='/world',
),
pose = Pose(
position = Point(self.position[0], self.position[1], 0.0),
orientation = Quaternion(*_orientation) #Radians
)
)
)
def draw(self, display):
# polygon(Surface, color, pointlist, width=0) -> Rect
# pointlist = [(1, 2), (7, 9), (21, 50)]
"""roundedlist = []
for point in self.pointlist:
roundedlist.append(round_point(point + self.position))
print roundedlist"""
"""Change position coordinates from meters to pixels."""
listInMeters = (self.position + self.pointlist)
display.blit(background, (0,0))
pygame.draw.polygon(display, (0,255,0), listInMeters, 0)
class World:
def __init__(self, Robot, waypoints):
self.Robot = Robot
self.waypoints = waypoints
def main():
pygame.init()
display = pygame.display.set_mode(SCREEN_DIM)
background.convert()
dimensions = (background.get_width(), background.get_height())
clock = pygame.time.Clock()
Rob = Robot((50, 50), 50, 50)
while not rospy.is_shutdown():
for event in pygame.event.get():
if event.type == pygame.QUIT:
return
if event.type == pygame.MOUSEBUTTONDOWN:
Rob.desired_pose_pub = rospy.Publisher('desired_pose', PoseStamped, queue_size=10)
pt = pygame.mouse.get_pos()
# Publish this coordinate in meters as the desired pose
print 'publishing desired pose'
# _orientation = tf_trans.quaternion_from_euler(0, 0, Rob.angle)
_orientation = tf_trans.quaternion_from_euler(0.0, 0.0, 0.0)
Rob.desired_pose_pub.publish(
PoseStamped(
header = Header(
stamp=rospy.Time.now(),
frame_id='/world'
),
pose = Pose(
# Update the position to reflect meters per second, not pixels
position = Point(pt[0], pt[1], 0.0),
orientation = Quaternion(*_orientation) #Radians
)
)
)
Rob.rotate()
Rob.draw(display)
Rob.update()
pygame.display.update()
clock.tick(20)
display.fill((0, 0, 0))
if __name__ == '__main__':
main()
powerds/python-tacoclient | tacoclient/shell.py | Python | apache-2.0 | 904 | 0.002212
import sys
from cliff import app
from cliff import commandmanager as cm
from conf import default
import tacoclient
class TacoClientApp(app.App):
def __init__(self, **kwargs):
super(TacoClientApp, self).__init__(
description='tacoclient - CLI client for TACO(SKT All Container \
Openstack)',
version=tacoclient.__version__,
command_manager=cm.CommandManager('tacoclient'),
**kwargs)
def build_option_parser(self, description, version, argparse_kwargs=None):
parser = super(TacoClientApp, self).build_option_parser(
description, version, argparse_kwargs)
return parser
    def configure_logging(self):
super(TacoClientApp, self).configure_logging()
default.register_opts()
def main(argv=None):
if argv is None:
argv = sys.argv[1:]
return TacoClientApp().run(argv)
jalr/privacyidea | privacyidea/lib/eventhandler/base.py | Python | agpl-3.0 | 2,637 | 0
# -*- coding: utf-8 -*-
#
# 2016-05-04 Cornelius Kölbel <cornelius.koelbel@netknights.it>
#            Initial write-up
#
# License: AGPLv3
# (c) 2016. Cornelius Kölbel
#
# This code is free software; you can redistribute it and/or
# modify it under the terms of the GNU AFFERO GENERAL PUBLIC LICENSE
# License as published by the Free Software Foundation; either
# version 3 of the License, or any later version.
#
# This code is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU AFFERO GENERAL PUBLIC LICENSE for more details.
#
# You should have received a copy of the GNU Affero General Public
# License along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#
__doc__ = """This is the base class for an event handler module.
The event handler module is bound to an event together with
* a condition and
* an action
* optional options ;-)
"""
import logging
log = logging.getLogger(__name__)
class BaseEventHandler(object):
"""
An Eventhandler needs to return a list of actions, which it can handle.
    It also returns a list of allowed actions and conditions.
    It returns an identifier, which can be used in the event handling definitions
"""
identifier = "BaseEventHandler"
description = "This is the base class of an EventHandler with no " \
"functionality"
def __init__(self):
pass
@property
def actions(cls):
"""
This method returns a list of available actions, that are provided
by this event handler.
:return: list of actions
"""
actions = ["sample_action_1", "sample_action_2"]
return actions
@property
def events(cls):
"""
        This method returns a list of allowed events, that this event handler
can be bound to and which it can handle with the corresponding actions.
        An eventhandler may return an asterisk ["*"] indicating that it can
        be used in all events.
:return: list of events
"""
events = ["*"]
return events
def check_condition(self):
"""
TODO
:return:
"""
# TODO
return True
def do(self, action, options=None):
"""
This method executes the defined action in the given event.
:param action:
:param options:
:return:
"""
log.info("In fact we are doing nothing, be we presume we are doing"
"{0!s}".format(action))
return True
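# --- Editor's illustrative sketch (not part of the original module) ---
# A minimal concrete handler, assuming only the BaseEventHandler contract
# defined above; the action name "log" is hypothetical:
class LoggingEventHandler(BaseEventHandler):
    identifier = "LoggingEventHandler"
    description = "Example handler that only logs the requested action"

    @property
    def actions(cls):
        return ["log"]

    def do(self, action, options=None):
        # Mirror the base class: report the action and signal success.
        log.info("handling action {0!s}".format(action))
        return True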
MediaKraken/MediaKraken_Deployment | source/web_app_sanic/blueprint/user/bp_user_metadata_periodical.py | Python | gpl-3.0 | 4,831 | 0.006417
from common import common_global
from common import common_isbn
from common import common_logging_elasticsearch_httpx
from common import common_pagination_bootstrap
from sanic import Blueprint
blueprint_user_metadata_periodical = Blueprint('name_blueprint_user_metadata_periodical',
url_prefix='/user')
@blueprint_user_metadata_periodical.route('/user_meta_periodical', methods=['GET', 'POST'])
@common_global.jinja_template.template('bss_user/metadata/bss_user_metadata_periodical.html')
@common_global.auth.login_required
async def url_bp_user_metadata_periodical(request):
"""
Display periodical list page
"""
page, offset = common_pagination_bootstrap.com_pagination_page_calc(request)
item_list = []
db_connection = await request.app.db_pool.acquire()
for item_data in await request.app.db_functions.db_meta_periodical_list(offset,
int(request.ctx.session[
'per_page']),
request.ctx.session[
'search_text'],
db_connection=db_connection):
await common_logging_elasticsearch_httpx.com_es_httpx_post_async(message_type='info',
message_text={
'person data': item_data})
item_image = "img/missing_icon.jpg"
item_list.append((item_data['mm_metadata_book_guid'],
item_data['mm_metadata_book_name'], item_image))
request.ctx.session['search_page'] = 'meta_periodical'
pagination = common_pagination_bootstrap.com_pagination_boot_html(page,
url='/user/user_meta_periodical',
item_count=await request.app.db_functions.db_meta_periodical_list_count(
request.ctx.session[
'search_text'],
db_connection=db_connection),
client_items_per_page=
int(request.ctx.session[
'per_page']),
format_number=True)
await request.app.db_pool.release(db_connection)
return {
'media_person': item_list,
'pagination_links': pagination,
}
@blueprint_user_metadata_periodical.route('/user_meta_periodical_detail/<guid>')
@common_global.jinja_template.template('bss_user/metadata/bss_user_metadata_periodical_detail.html')
@common_global.auth.login_required
async def url_bp_user_metadata_periodical_detail(request, guid):
"""
Display periodical detail page
"""
db_connection = await request.app.db_pool.acquire()
json_metadata = await request.app.db_functions.db_meta_periodical_by_uuid(guid,
db_connection=db_connection)
    await request.app.db_pool.release(db_connection)
try:
data_name = json_metadata['mm_metadata_book_json']['title']
except KeyError:
data_name = 'NA'
try:
data_isbn = common_isbn.com_isbn_mask(json_metadata['mm_metadata_book_json']['isbn10'])
except KeyError:
data_isbn = 'NA'
try:
data_overview = json_metadata['mm_metadata_book_json']['summary']
except KeyError:
data_overview = 'NA'
try:
data_author = json_metadata['mm_metadata_book_json']['author_data'][0]['name']
except KeyError:
data_author = 'NA'
try:
data_publisher = json_metadata['mm_metadata_book_json']['publisher_name']
except KeyError:
data_publisher = 'NA'
try:
data_pages = json_metadata['mm_metadata_book_json']['physical_description_text']
except KeyError:
data_pages = 'NA'
return {
'data_name': data_name,
'data_isbn': data_isbn,
'data_overview': data_overview,
'data_author': data_author,
'data_publisher': data_publisher,
'data_pages': data_pages,
'data_item_image': "img/missing_icon.jpg",
}
|
cshallue/models
|
research/cognitive_planning/embedders.py
|
Python
|
apache-2.0
| 19,820
| 0.006105
|
# Copyright 2018 The TensorFlow Authors All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Interface for different embedders for modalities."""
import abc
import numpy as np
import tensorflow as tf
import preprocessing
from tensorflow.contrib.slim.nets import resnet_v2
slim = tf.contrib.slim
class Embedder(object):
"""Represents the embedder for different modalities.
Modalities can be semantic segmentation, depth channel, object detection and
  so on, each of which requires a specific embedder.
"""
__metaclass__ = abc.ABCMeta
@abc.abstractmethod
def build(self, observation):
"""Builds the model to embed the observation modality.
Args:
observation: tensor that contains the raw observation from modality.
Returns:
Embedding tensor for the given observation tensor.
"""
raise NotImplementedError(
'Needs to be implemented as part of Embedder Interface')
class DetectionBoxEmbedder(Embedder):
"""Represents the model that encodes the detection boxes from images."""
def __init__(self, rnn_state_size, scope=None):
self._rnn_state_size = rnn_state_size
self._scope = scope
def build(self, observations):
"""Builds the model to embed object detection observations.
Args:
observations: a tuple of (dets, det_num).
dets is a tensor of BxTxLxE that has the detection boxes in all the
images of the batch. B is the batch size, T is the maximum length of
episode, L is the maximum number of detections per image in the batch
and E is the size of each detection embedding.
        det_num is a tensor of BxT that contains the number of detected boxes
        in each image of each sequence in the batch.
Returns:
For each image in the batch, returns the accumulative embedding of all the
detection boxes in that image.
"""
with tf.variable_scope(self._scope, default_name=''):
shape = observations[0].shape
dets = tf.reshape(observations[0], [-1, shape[-2], shape[-1]])
det_num = tf.reshape(observations[1], [-1])
lstm_cell = tf.nn.rnn_cell.BasicLSTMCell(self._rnn_state_size)
batch_size = tf.shape(dets)[0]
lstm_outputs, _ = tf.nn.dynamic_rnn(
cell=lstm_cell,
inputs=dets,
sequence_length=det_num,
initial_state=lstm_cell.zero_state(batch_size, dtype=tf.float32),
dtype=tf.float32)
# Gathering the last state of each sequence in the batch.
batch_range = tf.range(batch_size)
indices = tf.stack([batch_range, det_num - 1], axis=1)
last_lstm_outputs = tf.gather_nd(lstm_outputs, indices)
last_lstm_outputs = tf.reshape(last_lstm_outputs,
[-1, shape[1], self._rnn_state_size])
return last_lstm_outputs
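# Illustrative usage (not part of the original file). Shapes follow the
# BxTxLxE / BxT convention documented above; the sizes are hypothetical:
#   embedder = DetectionBoxEmbedder(rnn_state_size=64, scope='det_embedder')
#   dets = tf.placeholder(tf.float32, [8, 10, 20, 128])  # B=8, T=10, L=20, E=128
#   det_num = tf.placeholder(tf.int32, [8, 10])          # detections per image
#   embedding = embedder.build((dets, det_num))          # -> [8, 10, 64]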
class ResNet(Embedder):
"""Residual net embedder for image data."""
def __init__(self, params, *args, **kwargs):
super(ResNet, self).__init__(*args, **kwargs)
self._params = params
self._extra_train_ops = []
def build(self, images):
shape = images.get_shape().as_list()
if len(shape) == 5:
images = tf.reshape(images,
[shape[0] * shape[1], shape[2], shape[3], shape[4]])
embedding = self._build_model(images)
if len(shape) == 5:
embedding = tf.reshape(embedding, [shape[0], shape[1], -1])
return embedding
@property
def extra_train_ops(self):
return self._extra_train_ops
def _build_model(self, images):
"""Builds the model."""
# Convert images to floats and normalize them.
images = tf.to_float(images)
bs = images.get_shape().as_list()[0]
images = [
tf.image.per_image_standardization(tf.squeeze(i))
for i in tf.split(images, bs)
]
images = tf.concat([tf.expand_dims(i, axis=0) for i in images], axis=0)
with tf.variable_scope('init'):
x = self._conv('init_conv', images, 3, 3, 16, self._stride_arr(1))
strides = [1, 2, 2]
activate_before_residual = [True, False, False]
if self._params.use_bottleneck:
res_func = self._bottleneck_residual
filters = [16, 64, 128, 256]
else:
res_func = self._residual
filters = [16, 16, 32, 128]
with tf.variable_scope('unit_1_0'):
x = res_func(x, filters[0], filters[1], self._stride_arr(strides[0]),
activate_before_residual[0])
for i in xrange(1, self._params.num_residual_units):
with tf.variable_scope('unit_1_%d' % i):
x = res_func(x, filters[1], filters[1], self._stride_arr(1), False)
with tf.variable_scope('unit_2_0'):
x = res_func(x, filters[1], filters[2], self._stride_arr(strides[1]),
activate_before_residual[1])
for i in xrange(1, self._params.num_residual_units):
with tf.variable_scope('unit_2_%d' % i):
x = res_func(x, filters[2], filters[2], self._stride_arr(1), False)
with tf.variable_scope('unit_3_0'):
x = res_func(x, filters[2], filters[3], self._stride_arr(strides[2]),
activate_before_residual[2])
for i in xrange(1, self._params.num_residual_units):
with tf.variable_scope('unit_3_%d' % i):
x = res_func(x, filters[3], filters[3], self._stride_arr(1), False)
with tf.variable_scope('unit_last'):
x = self._batch_norm('final_bn', x)
x = self._relu(x, self._params.relu_leakiness)
with tf.variable_scope('pool_logit'):
x = self._global_avg_pooling(x)
return x
def _stride_arr(self, stride):
return [1, stride, stride, 1]
def _batch_norm(self, name, x):
"""batch norm implementation."""
with tf.variable_scope(name):
params_shape = [x.shape[-1]]
beta = tf.get_variable(
'beta',
params_shape,
tf.float32,
initializer=tf.constant_initializer(0.0, tf.float32))
gamma = tf.get_variable(
'gamma',
params_shape,
tf.float32,
initializer=tf.constant_initializer(1.0, tf.float32))
      if self._params.is_train:
mean, variance = tf.nn.moments(x, [0, 1, 2], name='moments')
moving_mean = tf.get_variable(
'moving_mean',
params_shape,
tf.float32,
initializer=tf.constant_initializer(0.0, tf.float32),
trainable=False)
moving_variance = tf.get_variable(
'moving_variance',
params_shape,
            tf.float32,
initializer=tf.constant_initializer(1.0, tf.float32),
trainable=False)
self._extra_train_ops.append(
tf.assign_moving_average(moving_mean, mean, 0.9))
self._extra_train_ops.append(
tf.assign_moving_average(moving_variance, variance, 0.9))
else:
mean = tf.get_variable(
'moving_mean',
params_shape,
tf.float32,
initializer=tf.constant_initializer(0.0, tf.float32),
trainable=False)
variance = tf.get_variable(
'moving_variance',
params_shape,
tf.float32,
initializer=tf.constant_initializer(1.0, tf.float32),
trainable=False)
tf.summary.histogram(mean.op.name, mean)
tf.summary.histogram(variance.op.name, variance)
      # epsilon used to be 1e-5. Maybe 0.001 solves NaN problem in deeper net.
y = tf.nn.batch_normalization(x, mean, variance, beta, gamma, 0.001)
y.set_shape(x.shape)
return y
def _residual(self,
x,
in_filter,
o
|
googleapis/python-dataplex
|
samples/generated_samples/dataplex_v1_generated_dataplex_service_delete_task_sync.py
|
Python
|
apache-2.0
| 1,521
| 0.000657
|
# -*- coding: utf-8 -*-
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Generated code. DO NOT EDIT!
#
# Snippet for DeleteTask
# NOTE: This snippet has been automatically generated for illustrative purposes only.
# It may require modifications to work in your environment.
# To install the latest published package dependency, execute the following:
# python3 -m pip install google-cloud-dataplex
|
# [START dataplex_v1_generated_DataplexService_DeleteTask_sync]
from google.cloud import dataplex_v1
def sample_delete_task():
# Create a client
client = dataplex_v1.DataplexServiceClient()
# Initialize request argument(s)
request = dataplex_v1.DeleteTaskRequest(
name="name_value",
)
# Make the request
operation = client.delete_task(request=request)
print("Waiting for operation to complete...")
response = operation.result()
# Handle the response
print(response)
# [END dataplex_v1_generated_DataplexService_DeleteTask_sync]
|
Venturi/cms
|
env/lib/python2.7/site-packages/aldryn_people/south_migrations/0013_auto__add_field_person_vcard_enabled.py
|
Python
|
gpl-2.0
| 14,400
| 0.007917
|
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'Person.vcard_enabled'
db.add_column(u'aldryn_people_person', 'vcard_enabled',
self.gf('django.db.models.fields.BooleanField')(default=True),
keep_default=False)
def backwards(self, orm):
# Deleting field 'Person.vcard_enabled'
db.delete_column(u'aldryn_people_person', 'vcard_enabled')
models = {
u'aldryn_people.group': {
'Meta': {'object_name': 'Group'},
'address': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'city': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'email': ('django.db.models.fields.EmailField', [], {'default': "''", 'max_length': '75', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'phone': ('phonenumber_field.modelfields.PhoneNumberField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'}),
'postal_code': ('django.db.models.fields.CharField', [], {'max_length': '20', 'blank': 'True'})
},
u'aldryn_people.grouptranslation': {
'Meta': {'unique_together': "[('language_code', 'master')]", 'object_name': 'GroupTranslation', 'db_table': "u'aldryn_people_group_translation'"},
'company_description': ('djangocms_text_ckeditor.fields.HTMLField', [], {'blank': 'True'}),
'company_name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language_code': ('django.db.models.fields.CharField', [], {'max_length': '15', 'db_index': 'True'}),
'master': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'translations'", 'null': 'True', 'to': u"orm['aldryn_people.Group']"})
},
u'aldryn_people.peopleplugin': {
'Meta': {'object_name': 'PeoplePlugin', '_ormbases': ['cms.CMSPlugin']},
u'cmsplugin_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['cms.CMSPlugin']", 'unique': 'True', 'primary_key': 'True'}),
'group_by_group': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'people': ('sortedm2m.fields.SortedManyToManyField', [], {'symmetrical': 'False', 'to': u"orm['aldryn_people.Person']", 'null': 'True', 'blank': 'True'}),
            'show_links': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'style': ('django.db.models.fields.CharField', [], {'default': "'standard'", 'max_length': '50'})
},
u'aldryn_people.person': {
'Meta': {'object_name': 'Person'},
            'email': ('django.db.models.fields.EmailField', [], {'default': "''", 'max_length': '75', 'blank': 'True'}),
'group': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['aldryn_people.Group']", 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'mobile': ('phonenumber_field.modelfields.PhoneNumberField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'phone': ('phonenumber_field.modelfields.PhoneNumberField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'}),
'slug': ('django.db.models.fields.CharField', [], {'max_length': '255', 'unique': 'True', 'null': 'True', 'blank': 'True'}),
'vcard_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'visual': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': "orm['filer.Image']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'})
},
u'aldryn_people.persontranslation': {
'Meta': {'unique_together': "[('language_code', 'master')]", 'object_name': 'PersonTranslation', 'db_table': "u'aldryn_people_person_translation'"},
'description': ('django.db.models.fields.TextField', [], {'default': "''", 'blank': 'True'}),
'function': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '255', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language_code': ('django.db.models.fields.CharField', [], {'max_length': '15', 'db_index': 'True'}),
'master': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'translations'", 'null': 'True', 'to': u"orm['aldryn_people.Person']"})
},
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'cms.cmsplugin': {
'Meta': {'object_name': 'CMSPlugin'},
'changed_date': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'creation_date': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language': ('django.db.models.fields.CharField', [], {'max_length': '15', 'db_index': 'True'}),
'level': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'lft': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'parent': ('django.db.models.fields
|
hongquan/saleor
|
saleor/product/models/products.py
|
Python
|
bsd-3-clause
| 1,423
| 0
|
from __future__ import unicode_literals
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
from django.utils.translation import pgettext_lazy
from .base import Product
from .variants import (ProductVariant, PhysicalProduct, ColoredVariant,
StockedProduct)
class Bag(PhysicalProduct, Product, ColoredVariant):
class Meta:
app_label = 'product'
class Shirt(PhysicalProduct, Product, ColoredVariant):
class Meta:
app_label = 'product'
class BagVariant(ProductVariant, StockedProduct):
product = models.ForeignKey(Bag, related_name='variants')
class Meta:
        app_label = 'product'
@python_2_unicode_compatible
class ShirtVariant(ProductVariant, StockedProduct):
SIZE_CHOICES = (
('xs', pgettext_lazy('Variant size', 'XS')),
('s', pgettext_lazy('Variant size', 'S')),
('m', pgettext_lazy('Variant size', 'M')),
('l', pgettext_lazy('Variant size', 'L')),
('xl', pgettext_lazy('Variant size', 'XL')),
('xxl', pgettext_lazy('Variant size', 'XXL')))
product = models.ForeignKey(Shirt, related_name='variants')
size = models.CharField(
pgettext_lazy('Variant field', 'size'), choices=SIZE_CHOICES,
max_length=3)
class Meta:
app_label = 'product'
def __str__(self):
return '%s (%s)' % (self.product.name, self.size)
|
jmlong1027/multiscanner
|
analytics/ssdeep_analytics.py
|
Python
|
mpl-2.0
| 10,551
| 0.001516
|
#!/usr/bin/env python
'''
Set of analytics based on ssdeep hash.
- compare
    Simple implementation of ssdeep comparisons using a few optimizations
described at the links below
https://www.virusbulletin.com/virusbulletin/2015/11/optimizing-ssdeep-use-scale
http://www.intezer.com/intezer-community-tip-ssdeep-comparisons-with-elasticsearch/
Designed to be run on a regular basis (e.g., nightly).
    For each sample on which the ssdeep analytic has not yet run, search for
    samples where ssdeep.compare > 0, using optimizations based on chunksize,
    chunk 7-grams, and double-chunk 7-grams. Update the sample with any
    matches and mark the ssdeep analytic as having run.
- group
Returns SHA256 hashes of samples grouped based on ssdeep hash.
'''
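# Illustrative ssdeep usage (not part of this module). The analytic below only
# compares hashes whose chunksizes are within a factor of two of each other;
# ssdeep.compare returns a 0-100 similarity score:
#   import ssdeep
#   h1 = ssdeep.hash(open('sample_a.bin', 'rb').read())  # hypothetical files
#   h2 = ssdeep.hash(open('sample_b.bin', 'rb').read())
#   score = ssdeep.compare(h1, h2)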
import argparse
import configparser
import json
import os
import sys
from pprint import pprint
import ssdeep
MS_WD = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
if os.path.join(MS_WD, 'storage') not in sys.path:
sys.path.insert(0, os.path.join(MS_WD, 'storage'))
if MS_WD not in sys.path:
sys.path.insert(0, os.path.join(MS_WD))
import common
import elasticsearch_storage
import multiscanner
class SSDeepAnalytic:
def __init__(self, debug=False):
storage_conf = multiscanner.common.get_config_path(multiscanner.CONFIG, 'storage')
config_object = configparser.SafeConfigParser()
config_object.optionxform = str
config_object.read(storage_conf)
conf = common.parse_config(config_object)
storage_handler = multiscanner.storage.StorageHandler(configfile=storage_conf)
es_handler = None
for handler in storage_handler.loaded_storage:
if isinstance(handler, elasticsearch_storage.ElasticSearchStorage):
es_handler = handler
break
if not es_handler:
            print('[!] ERROR: This analytic only works with the ES storage module.')
sys.exit(0)
# probably not ideal...
self.es = es_handler.es
self.index = conf['ElasticSearchStorage']['index']
self.doc_type = 'sample'
self.debug = debug
def ssdeep_compare(self):
# get all of the samples where ssdeep_compare has not been run
# e.g., ssdeepmeta.analyzed == false
query = {
'_source': ['ssdeep', 'SHA256'],
'query': {
'bool': {
'must': [
{'match': {'ssdeep.analyzed': 'false'}}
]
}
}
}
page = self.es.search(
self.index,
scroll='2m',
size=1000,
body=query)
records_list = []
while len(page['hits']['hits']) > 0:
for hit in page['hits']['hits']:
records_list.append(hit)
sid = page['_scroll_id']
page = self.es.scroll(scroll_id=sid, scroll='2m')
for new_ssdeep_hit in records_list:
new_ssdeep_hit_src = new_ssdeep_hit.get('_source')
            chunksize = new_ssdeep_hit_src.get('ssdeep').get('chunksize')
chunk = new_ssdeep_hit_src.get('ssdeep').get('chunk')
            double_chunk = new_ssdeep_hit_src.get('ssdeep').get('double_chunk')
new_sha256 = new_ssdeep_hit_src.get('SHA256')
# build new query for docs that match our optimizations
# https://github.com/intezer/ssdeep-elastic/blob/master/ssdeep_elastic/ssdeep_querying.py#L35
opti_query = {
'_source': ['ssdeep', 'SHA256'],
'query': {
'bool': {
'must': [
{
'terms': {
'ssdeep.chunksize': [chunksize, chunksize / 2, chunksize * 2]
}
},
{
'bool': {
'should': [
{
'match': {
'ssdeep.chunk': {
'query': chunk
}
}
},
{
'match': {
'ssdeep.double_chunk': {
'query': double_chunk
}
}
}
],
'minimum_should_match': 1
}
},
{
'bool': {
'must_not': {
'match': {
'SHA256': new_sha256
}
}
}
}
]
}
}
}
# this bool condition isn't working how I expect
# if we have already updated the match dictionary to
# include a hit, don't rerun it for the inverse
# {
# 'bool': {
# 'must_not': {
# 'exists': {
# 'field': 'ssdeep.matches.' + new_sha256
# }
# }
# }
# }
opti_page = self.es.search(
self.index,
scroll='2m',
size=1000,
body=opti_query)
while len(opti_page['hits']['hits']) > 0:
# for each hit, ssdeep.compare != 0; update the matches
for opti_hit in opti_page['hits']['hits']:
opti_hit_src = opti_hit.get('_source')
opti_sha256 = opti_hit_src.get('SHA256')
result = ssdeep.compare(
new_ssdeep_hit_src.get('ssdeep').get('ssdeep_hash'),
opti_hit_src.get('ssdeep').get('ssdeep_hash'))
if self.debug:
print(
new_ssdeep_hit_src.get('SHA256'),
opti_hit_src.get('SHA256'),
result)
msg = {'doc': {'ssdeep': {'matches': {opti_sha256: result}}}}
self.es.update(
index=self.index,
doc_type=self.doc_type,
id=new_ssdeep_hit.get('_id'),
body=json.dumps(msg))
msg = {'doc': {'ssdeep': {'matches': {new_sha256: result}}}}
self.es.update(
index=self.index,
doc_type=self.doc_type,
id=opti_hit.get('_id'),
body=json.dumps(msg))
opti_sid = opti_page['_scroll_id']
opti_page = self.es.scroll(scroll_id=opti_sid, scroll='2m')
# analytic has run against sample, set ssdeep.analyzed = true
msg = {'doc': {'ssdeep': {'analyzed': 'true'}}}
self.es.update(
index=self.index,
doc_type=self.doc_type,
id=new_ssdeep_hit.get('_id'),
body=json.dumps(msg))
def ssdeep_group(self):
# get all of the samples where ssdeep_compare has not been run
# e.g., ssdeepmeta.analyzed == false
query = {
'_source': ['ssdeep', 'SHA256'],
|
RDFLib/PyRDFa
|
pyRdfa/rdfs/cache.py
|
Python
|
bsd-3-clause
| 15,733
| 0.038075
|
# -*- coding: utf-8 -*-
"""
Managing Vocab Caching.
@summary: RDFa parser (distiller)
@requires: U{RDFLib<http://rdflib.net>}
@organization: U{World Wide Web Consortium<http://www.w3.org>}
@author: U{Ivan Herman<a href="http://www.w3.org/People/Ivan/">}
@license: This software is available for use under the
U{W3C® SOFTWARE NOTICE AND LICENSE<href="http://www.w3.org/Consortium/Legal/2002/copyright-software-20021231">}
"""
import os, sys, datetime, re
import rdflib
from rdflib import URIRef
from rdflib import Literal
from rdflib import BNode
from rdflib import Namespace
if rdflib.__version__ >= "3.0.0" :
from rdflib import RDF as ns_rdf
from rdflib import RDFS as ns_rdfs
from rdflib import Graph
else :
from rdflib.RDFS import RDFSNS as ns_rdfs
from rdflib.RDF import RDFNS as ns_rdf
from rdflib.Graph import Graph
import urllib, urlparse, urllib2
from pyRdfa import HTTPError, RDFaError
from pyRdfa.host import MediaTypes, HostLanguage
from pyRdfa.utils import create_file_name, URIOpener, quote_URI
from pyRdfa.options import Options
from pyRdfa import ns_rdfa
from pyRdfa.rdfs import err_outdated_cache
from pyRdfa.rdfs import err_unreachable_vocab
from pyRdfa.rdfs import err_unparsable_Turtle_vocab
from pyRdfa.rdfs import err_unparsable_xml_vocab
from pyRdfa.rdfs import err_unparsable_ntriples_vocab
from pyRdfa.rdfs import err_unparsable_rdfa_vocab
from pyRdfa.rdfs import err_unrecognised_vocab_type
from pyRdfa.rdfs import VocabCachingInfo
# Regular expression object for a general XML application media type
xml_application_media_type = re.compile("application/[a-zA-Z0-9]+\+xml")
from pyRdfa.utils import URIOpener
#===========================================================================================
import cPickle as pickle
# Protocol to be used for pickle files. 0 is good for debug, it stores the data in ASCII; 1 is better for deployment,
# it stores data in binary format. Care should be taken for consistency; when changing from 0 to 1 or back, all
# cached data should be removed/regenerated, otherwise mess may occur...
_Pickle_Protocol = 1
# If I could rely on python 2.5 or 2.6 (or higher) I could use the with...as... idiom for what is below, it
# is indeed nicer. But I cannot...
def _load(fname) :
"""
Load a cached file and return the resulting object
@param fname: file name
"""
try :
		f = open(fname, "rb")
return pickle.load(f)
finally :
f.close()
def _dump(obj, fname) :
"""
Dump an object into cached file
@param obj: Python object to store
@param fname: file name
"""
try :
f = open(fname, "w")
pickle.dump(obj, f, _Pickle_Protocol)
f.flush()
finally :
f.close()
#===========================================================================================
class CachedVocabIndex :
"""
Class to manage the cache index. Takes care of finding the vocab directory, and manages the index
to the individual vocab data.
The vocab directory is set to a platform specific area, unless an environment variable
sets it explicitly. The environment variable is "PyRdfaCacheDir"
Every time the index is changed, the index is put back (via pickle) to the directory.
@ivar app_data_dir: directory for the vocabulary cache directory
@ivar index_fname: the full path of the index file on the disc
	@ivar indeces: the in-memory version of the index (a dictionary mapping URIs to tuples)
@ivar options: the error handler (option) object to send warnings to
@type options: L{options.Options}
@ivar report: whether details on the caching should be reported
@type report: Boolean
@cvar vocabs: File name used for the index in the cache directory
@cvar preference_path: Cache directories for the three major platforms (ie, mac, windows, unix)
	@type preference_path: dictionary, keyed by "mac", "win", and "unix"
@cvar architectures: Various 'architectures' as returned by the python call, and their mapping on one of the major platforms. If an architecture is missing, it is considered to be "unix"
	@type architectures: dictionary, mapping architectures to "mac", "win", or "unix"
"""
# File Name used for the index in the cache directory
vocabs = "cache_index"
# Cache directories for the three major platforms...
preference_path = {
"mac" : "Library/Application Support/pyRdfa-cache",
"win" : "pyRdfa-cache",
"unix" : ".pyRdfa-cache"
}
	# various architectures as returned by the python call, and their mapping on platform. If an architecture is not here, it is considered as unix
architectures = {
"darwin" : "mac",
"nt" : "win",
"win32" : "win",
"cygwin" : "win"
}
def __init__(self, options = None) :
"""
@param options: the error handler (option) object to send warnings to
@type options: L{options.Options}
"""
self.options = options
self.report = (options != None) and options.vocab_cache_report
# This is where the cache files should be
self.app_data_dir = self._give_preference_path()
self.index_fname = os.path.join(self.app_data_dir, self.vocabs)
self.indeces = {}
# Check whether that directory exists.
if not os.path.isdir(self.app_data_dir) :
try :
os.mkdir(self.app_data_dir)
except Exception, e:
(type,value,traceback) = sys.exc_info()
if self.report: options.add_info("Could not create the vocab cache area %s" % value, VocabCachingInfo)
return
else :
# check whether it is at least readable
if not os.access(self.app_data_dir, os.R_OK) :
if self.report: options.add_info("Vocab cache directory is not readable", VocabCachingInfo)
return
if not os.access(self.app_data_dir, os.W_OK) :
if self.report: options.add_info("Vocab cache directory is not writeable, but readable", VocabCachingInfo)
return
if os.path.exists(self.index_fname) :
if os.access(self.index_fname, os.R_OK) :
self.indeces = _load(self.index_fname)
else :
if self.report: options.add_info("Vocab cache index not readable", VocabCachingInfo)
else :
# This is the very initial phase, creation
			# of a new index
if os.access(self.app_data_dir, os.W_OK) :
# This is then put into a pickle file to put the stake in the ground...
try :
_dump(self.indeces, self.index_fname)
except Exception, e:
if self.report: options.add_info("Could not create the vocabulary index %s" % e.msg, VocabCachingInfo)
else :
if self.report: options.add_info("Vocabulary cache directory is not writeable", VocabCachingInfo)
self.cache_writeable = False
def add_ref(self, uri, vocab_reference) :
"""
Add a new entry to the index, possibly removing the previous one.
@param uri: the URI that serves as a key in the index directory
@param vocab_reference: tuple consisting of file name, modification date, and expiration date
"""
# Store the index right away
self.indeces[uri] = vocab_reference
try :
_dump(self.indeces, self.index_fname)
except Exception, e:
(type,value,traceback) = sys.exc_info()
if self.report: self.options.add_info("Could not store the cache index %s" % value, VocabCachingInfo)
def get_ref(self, uri) :
"""
		Get an index entry, if available, None otherwise.
The return value is a tuple: file name, modification date, and expiration date
@param uri: the URI that serves as a key in the index directory
"""
if uri in self.indeces :
return tuple(self.indeces[uri])
else :
return None
def _give_preference_path(self) :
"""
Find the vocab cache directory.
"""
from pyRdfa import CACHE_DIR_VAR
if CACHE_DIR_VAR in os.environ :
			return os.environ[CACHE_DIR_VAR]
else :
# find the preference path on the architecture
platform = sys.platform
if platform in self.architectures :
system = self.architectures[platform]
else :
system = "unix"
if system == "win" :
# there is a user variable set for that purpose
app_data = os.path.expandvars("%APPDATA%")
return os.path.join(app_data,self.preference_path[system])
else :
return os.path.join(os.path.expanduser('~'),self.preference_path[system])
#===========================================================================================
class CachedVocab(Cache
|
kyleabeauchamp/DBayes
|
dbayes/analysis/test_moe.py
|
Python
|
gpl-2.0
| 1,864
| 0.004292
|
import moe
from moe.easy_interface.experiment import Experiment
from moe.easy_interface.simple_endpoint import gp_next_points, gp_hyper_opt
import pymbar
import seaborn as sns
import scipy.interpolate
import pymc
import sklearn.gaussian_process
import os
import pandas as pd
import glob
import numpy as np
keys = ["q0", "sigma0"]
data = pd.read_hdf("./symmetric.h5", 'data')
indexed_data = data.set_index(keys + ["temperature"])
q0 = pymc.Uniform("q0", 0.4, 0.8)
sigma0 = pymc.Uniform("sigma0", 0.2, 0.3)
temperatures = [280, 300, 320]
measurements = np.array([1.000998, 1.043560, 1.084166])
relative_error = 0.001
def objective(q0_val, sigma0_val):
variables = []
q0.value = q0_val
sigma0.value = sigma0_val
print(q0.value)
print(sigma0.value)
for k, temperature in enumerate(temperatures):
observed = measurements[k]
predicted = indexed_data.density.ix[(q0_val, sigma0_val, temperature)]
tau = (observed * relative_error) ** -2.
var = pymc.Normal("obs_%d" % k, mu=predicted, tau=tau, observed=True, value=observed)
print(predicted, observed, tau, var.logp)
variables.append(var)
model = pymc.MCMC(variables)
return model.logp
a, b = data[keys].iloc[0].values
logp = objective(a, b)
get_bounds = lambda variable: (variable.parents["lower"], variable.parents["upper"])
experiment_bounds = [get_bounds(q0), get_bounds(sigma0)]
exp = Experiment(experiment_bounds)
for (q0_val, sigma0_val) in data.set_index(keys).index:
value = objective(q0_val, sigma0_val)
    print(q0_val, sigma0_val, value)
error = 0.001
    exp.historical_data.append_sample_points([[(q0_val, sigma0_val), value, error]])
covariance_info = gp_hyper_opt(exp.historical_data.to_list_of_sample_points())
next_point_to_sample = gp_next_points(exp, covariance_info=covariance_info)
print(next_point_to_sample)
|
wrongu/AlphaGo
|
AlphaGo/models/value.py
|
Python
|
mit
| 1,711
| 0.01052
|
from keras.models import Sequential
from keras.layers import convolutional
from keras.layers.core import Dense, Flatten
from SGD_exponential_decay import SGD_exponential_decay as SGD
### Parameters obtained from paper ###
K = 152 # depth of convolutional layers
LEARNING_RATE = .003 # initial learning rate
DECAY = 8.664339379294006e-08 # rate of exponential learning_rate decay
class value_trainer:
def __init__(self):
self.model = Sequential()
self.model.add(convolutional.Convolution2D(input_shape=(49, 19, 19), nb_filter=K, nb_row=5, nb_col=5,
init='uniform', activation='relu', border_mode='same'))
for i in range(2,13):
self.model.add(convolutional.Convolution2D(nb_filter=K, nb_row=3, nb_col=3,
init='uniform', activation='relu', border_mode='same'))
        self.model.add(convolutional.Convolution2D(nb_filter=1, nb_row=1, nb_col=1,
init='uniform', activation='linear', border_mode='same'))
self.model.add(Flatten())
self.model.add(Dense(256,init='uniform'))
self.model.add(Dense(1,init='uniform',activation="tanh"))
sgd = SGD(lr=LEARNING_RATE, decay=DECAY)
self.model.compile(loss='mean_squared_error', optimizer=sgd)
def get_samples(self):
# TODO non-terminating loop that draws training samples uniformly at random
pass
def train(self):
# TODO use self.model.fit_generator to train from data source
pass
if __name__ == '__main__':
trainer = value_trainer()
# TODO command line instantiation
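A minimal, hypothetical sketch of scoring board positions with the model
defined above; the input planes are random stand-ins, and `predict` is the
standard Keras call:

import numpy as np

trainer = value_trainer()
boards = np.random.rand(8, 49, 19, 19)  # batch of 8 boards, 49 feature planes
values = trainer.model.predict(boards)  # shape (8, 1); tanh output in [-1, 1]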
|
ministryofjustice/opg-docker
|
mongodb/docker/opt/reindex_database.py
|
Python
|
mit
| 784
| 0.019133
|
#!/usr/bin/env python
import os
import argparse
from subprocess import call
admin_username = 'admin'
admin_password = os.environ['MONGO_ADMIN_PASSWORD']
parser = argparse.ArgumentParser()
parser.add_argument("-d", "--db-name", help="the DB to create the user in", required=True)
parser.add_argument("-c", "--collection", help="the collection to index", required=True)
parser.add_argument("-i", "--index-definition", help="the index definition", required=True)
args = parser.parse_args()
reindex_js = "db.getSiblin
|
gDB('" + args.db_name + "').getCollection('" + args.collection + "').ensureIndex( " + args.index_definition + " );"
print 'Creating index'
call(["/usr/bin/mongo","admin","-u",admin_username,"-p",admin_password,"--authenticationDatabase","admin","--eval",reindex_js])
|
endlessm/chromium-browser
|
third_party/llvm/clang/bindings/python/tests/cindex/test_linkage.py
|
Python
|
bsd-3-clause
| 1,175
| 0.001702
|
import os
from clang.cindex import Config
if 'CLANG_LIBRARY_PATH' in os.environ:
Config.set_library_path(os.environ['CLANG_LIBRARY_PATH'])
from clang.cindex import LinkageKind
from clang.cindex import Cursor
from clang.cindex import TranslationUnit
from .util import get_cursor
from .util import get_tu
import unittest
class TestLinkage(unittest.TestCase):
def test_linkage(self):
"""Ensure that linkage specifers are available on cursors"""
tu = get_tu("""
void foo() { int no_linkage; }
static int internal;
namespace { struct unique_external_type {} }
unique_external_type unique_external;
extern int external;
""", lang = 'cpp')
no_linkage = get_cursor(tu.cursor, 'no_linkage')
self.assertEqual(no_linkage.linkage, LinkageKind.NO_LINKAGE)
internal = get_cursor(tu.cursor, 'internal')
        self.assertEqual(internal.linkage, LinkageKind.INTERNAL)
        unique_external = get_cursor(tu.cursor, 'unique_external')
self.assertEqual(unique_external.linkage, LinkageKind.UNIQUE_EXTERNAL)
external = get_cursor(tu.cursor, 'external')
self.assertEqual(external.linkage, LinkageKind.EXTERNAL)
|
bxlab/bx-python
|
lib/bx/seqmapping.py
|
Python
|
mit
| 2,856
| 0
|
"""
Classes for char-to-int mapping and int-to-int mapping.
:Author: James Taylor (james@bx.psu.edu)
The char-to-int mapping can be used to translate a list of strings
over some alphabet to a single int array (example for encoding a multiple
sequence alignment).
The int-to-int mapping is particularly useful for creating partitions,
and provides methods to merge/split symbols in the output mapping.
The two forms of mapping can be combined, for example to encode a
multiple sequence alignment in a reduced alphabet defined by a partition
of alignment columns. Many of the helper methods provided are for
solving such alignment-oriented problems.
This code was originally written for the `ESPERR`_ project which includes
software for searching for alignment encodings that work well for specific
classification problems using various Markov chain classifiers over the
reduced encodings.
Most of the core implementation is in the pyrex/C extension
"_seqmapping.pyx" for performance reasons (specifically to avoid the
excessive bounds checking that would make a sequence/array lookup heavy
problem like this slow in pure python).
.. _ESPERR: http://www.bx.psu.edu/projects/esperr/
"""
from ._seqmapping import (
CharToIntArrayMapping,
IntToIntMapping,
)
# Char->Int mapping for DNA characters with missing data
DNA = CharToIntArrayMapping()
DNA.set_mapping("a", 0)
DNA.set_mapping("A", 0)
DNA.set_mapping("c", 1)
DNA.set_mapping("C", 1)
DNA.set_mapping("g", 2)
DNA.set_mapping("G", 2)
DNA.set_mapping("t", 3)
DNA.set_mapping("T", 3)
DNA.set_mapping("-", 4)
DNA.set_mapping("*", 5)
# Creating mappings
def alignment_mapping_from_file(f, char_mapping=DNA):
"""
Create a mapping from a file of alignment columns.
"""
columns, symbols = [], []
for line in f:
column, symbol = line.split()
columns.append(column)
symbols.append(int(symbol))
align_count = len(columns[0])
mapping = IntToIntMapping(char_mapping.get_out_size() ** align_count)
for column, symbol in zip(columns, symbols):
index = char_mapping.translate_list(list(column))[0]
mapping.set_mapping(index, symbol)
return align_count, mapping
def second_mapping_from_file(f, first_mapping, char_mapping=DNA):
columns, symbols = [], []
for line in f:
column, symbol = line.split()
columns.append(column)
symbols.append(int(symbol))
mapping = IntToIntMapping(first_mapping.get_out_size())
for column, symbol in zip(columns, symbols):
index = char_mapping.translate_list(list(column))[0]
if first_mapping[index] >= 0:
mapping.set_mapping(first_mapping[index], symbol)
return mapping
def identity_mapping(size):
mapping = IntToIntMapping(size)
for i in range(size):
mapping.set_mapping(i, i)
return mapping
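A short sketch (not from the original module, assuming the compiled
`_seqmapping` extension is available) of encoding one alignment column with
the DNA mapping defined above, mirroring what `alignment_mapping_from_file`
does per input line; the column contents are hypothetical:

column = list("AC-")                              # one 3-sequence alignment column
index = DNA.translate_list(column)[0]             # combined index in [0, 6 ** 3)
mapping = identity_mapping(DNA.get_out_size() ** len(column))
symbol = mapping[index]                           # identity partition: symbol == index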
|
10n1z3d/YAVD
|
setup.py
|
Python
|
gpl-3.0
| 391
| 0.046036
|
from distutils.core import setup
import yavd
setup (
name = 'YAVD',
    version = yavd.__version__,
description= 'Download videos from Youtube and others.',
author = '10n1z3d',
author_email = '10n1z3d@w.cn',
url = '',
license = 'GPLv3',
packages = ['YAVD'],
data_files = [('YAVD/', ['README', 'COPYING', 'TODO'])]
)
|
denverfoundation/storybase
|
apps/storybase_geo/migrations/0004_auto.py
|
Python
|
mit
| 7,705
| 0.007787
|
# encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding index on 'Place', fields ['place_id']
db.create_index('storybase_geo_place', ['place_id'])
# Adding index on 'Location', fields ['location_id']
db.create_index('storybase_geo_location', ['location_id'])
    def backwards(self, orm):
# Removing index on 'Location', fields ['location_id']
db.delete_index('storybase_geo_location', ['location_id'])
# Removing index on 'Place', fields ['place_id']
db.delete_index('storybase_geo_place', ['place_id'])
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'storybase_geo.geolevel': {
'Meta': {'object_name': 'GeoLevel'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'level': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'lft': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
'parent': ('mptt.fields.TreeForeignKey', [], {'blank': 'True', 'related_name': "'children'", 'null': 'True', 'to': "orm['storybase_geo.GeoLevel']"}),
'rght': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '50', 'db_index': 'True'}),
'tree_id': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'})
},
'storybase_geo.location': {
'Meta': {'object_name': 'Location'},
'address': ('storybase.fields.ShortTextField', [], {'blank': 'True'}),
'address2': ('storybase.fields.ShortTextField', [], {'blank': 'True'}),
'city': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'lat': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'lng': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'location_id': ('uuidfield.fields.UUIDField', [], {'db_index': 'True', 'unique': 'True', 'max_length': '32', 'blank': 'True'}),
'name': ('storybase.fields.ShortTextField', [], {'blank': 'True'}),
'owner': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'locations'", 'null': 'True', 'to': "orm['auth.User']"}),
'point': ('django.contrib.gis.db.models.fields.PointField', [], {'null': 'True', 'blank': 'True'}),
'postcode': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'raw': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'state': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'})
},
'storybase_geo.place': {
'Meta': {'object_name': 'Place'},
'boundary': ('django.contrib.gis.db.models.fields.MultiPolygonField', [], {'null': 'True', 'blank': 'True'}),
'children': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['storybase_geo.Place']", 'null': 'True', 'through': "orm['storybase_geo.PlaceRelation']", 'blank': 'True'}),
'geolevel': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'places'", 'null': 'True', 'to': "orm['storybase_geo.GeoLevel']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('storybase.fields.ShortTextField', [], {}),
'place_id': ('uuidfield.fields.UUIDField', [], {'db_index': 'True', 'unique': 'True', 'max_length': '32', 'blank': 'True'})
},
'storybase_geo.placerelation': {
'Meta': {'unique_together': "(('parent', 'child'),)", 'object_name': 'PlaceRelation'},
'child': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'place_parent'", 'to': "orm['storybase_geo.Place']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'place_child'", 'to': "orm['storybase_geo.Place']"})
}
}
complete_apps = ['storybase_geo']
|
sebrandon1/neutron
|
neutron/tests/unit/services/trunk/drivers/openvswitch/test_driver.py
|
Python
|
apache-2.0
| 2,829
| 0
|
# Copyright 2016 Hewlett Packard Enterprise Development LP
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from neutron_lib import constants
from oslo_config import cfg
from neutron.callbacks import events
from neutron.callbacks import registry
from neutron.plugins.ml2.drivers.openvswitch.agent.common import (
constants as agent_consts)
from neutron.services.trunk.drivers.openvswitch import driver
from neutron.tests import base
GEN_TRUNK_BR_NAME_PATCH = (
'neutron.services.trunk.drivers.openvswitch.utils.gen_trunk_br_name')
class OVSDriverTestCase(base.BaseTestCase):
def test_driver_creation(self):
ovs_driver = driver.OVSDriver.create()
self.assertFalse(ovs_driver.is_loaded)
self.assertEqual(driver.NAME, ovs_driver.name)
self.assertEqual(driver.SUPPORTED_INTERFACES, ovs_driver.interfaces)
self.assertEqual(driver.SUPPORTED_SEGMENTATION_TYPES,
ovs_driver.segmentation_types)
self.assertEqual(constants.AGENT_TYPE_OVS, ovs_driver.agent_type)
self.assertFalse(ovs_driver.can_trunk_bound_port)
self.assertTrue(
ovs_driver.is_agent_compatible(constants.AGENT_TYPE_OVS))
self.assertTrue(
ovs_driver.is_interface_compatible(driver.SUPPORTED_INTERFACES[0]))
def test_driver_is_loaded(self):
cfg.CONF.set_override('mechanism_drivers',
'openvswitch', group='ml2')
ovs_driver = driver.OVSDriver.create()
self.assertTrue(ovs_driver.is_loaded)
def test_driver_is_not_loaded(self):
cfg.CONF.set_override('core_plugin', 'my_foo_plugin')
ovs_driver = driver.OVSDriver.create()
self.assertFalse(ovs_driver.is_loaded)
@mock.patch(GEN_TRUNK_BR_NAME_PATCH)
def test_vif_details_bridge_name_handler_registration(self,
mock_gen_br_name):
driver.register()
mock_gen_br_name.return_value = 'fake-trunk-br-name'
test_trigger = mock.Mock()
registry.notify(agent_consts.OVS_BRIDGE_NAME, events.BEFORE_READ,
test_trigger, **{'port': {'trunk_details':
{'trunk_id': 'foo'}}})
test_trigger.assert_called_once_with('fake-trunk-br-name')
|
dfm/python-finufft
|
tests/test_1d.py
|
Python
|
apache-2.0
| 1,814
| 0.000551
|
# -*- coding: utf-8 -*-
from __future__ import division, print_function
import finufft
from finufft import interface
import numpy as np
import pytest
__all__ = [
"test_nufft1d1", "test_nufft1d2", "test_nufft1d3",
]
def test_nufft1d1(seed=42, iflag=1):
np.random.seed(seed)
ms = int(1e3)
n = int(2e3)
tol = 1.0e-9
x = np.random.uniform(-np.pi, np.pi, n)
c = np.random.uniform(-1.0, 1.0, n) + 1.0j*np.random.uniform(-1.0, 1.0, n)
f = finufft.nufft1d1(x, c, ms, eps=tol, iflag=iflag)
# Make sure that this also works with other values of 'fftw'
f = finufft.nufft1d1(x, c, ms, eps=tol, iflag=iflag,
fftw=interface.FFTWOptions.measure)
with pytest.raises(TypeError):
        f = finufft.nufft1d1(x, c, ms, eps=tol, iflag=iflag, fftw=100)
f0 = interface.dirft1d1(x, c, ms, iflag=iflag)
assert np.all(np.abs((f - f0) / f0) < 1e-6)
def test_nufft1d2(seed=42, iflag=1):
np.random.seed(seed)
ms = int(1e3)
n = int(2e3)
tol = 1.0e-9
x = np.random.uniform(-np.pi, np.pi, n)
c = np.random.uniform(-1.0, 1.0, n) + 1.0j*np.random.uniform(-1.0, 1.0, n)
f = finufft.nufft1d1(x, c, ms, eps=tol, iflag=iflag)
    c = finufft.nufft1d2(x, f, eps=tol, iflag=iflag)
c0 = interface.dirft1d2(x, f, iflag=iflag)
assert np.all(np.abs((c - c0) / c0) < 1e-6)
def test_nufft1d3(seed=42, iflag=1):
np.random.seed(seed)
ms = int(1e3)
n = int(2e3)
tol = 1.0e-9
x = np.random.uniform(-np.pi, np.pi, n)
c = np.random.uniform(-1.0, 1.0, n) + 1.0j*np.random.uniform(-1.0, 1.0, n)
s = 0.5 * n * (1.7 + np.random.uniform(-1.0, 1.0, ms))
f = finufft.nufft1d3(x, c, s, eps=tol, iflag=iflag)
f0 = interface.dirft1d3(x, c, s, iflag=iflag)
assert np.all(np.abs((f - f0) / f0) < 1e-6)
|
RobinCPC/algorithm-practice
|
LinkedList/deleteNode.py
|
Python
|
mit
| 1,861
| 0.005911
|
# Write a function to delete a node (except the tail) in a singly linked list,
# given only access to that node.
#
# Suppose the linked list is 1 -> 2 -> 3 -> 4 and you are given the third node
# with value 3, the linked list should become 1 -> 2 -> 4 after calling your function.
#
# time: O(1)
# space: O(1)
# Definition for singly-linked list.
class ListNode(object):
def __init__(self, x):
self.val = x
self.next = None
class Solution(object):
def deleteNode2(self, node):
"""
:type node: ListNode
:rtype: void Do not return anything, modify node in-place instead.
"""
curt = node
prev = None
while curt.next is not None:
            curt.val = curt.next.val
prev = curt
curt = curt.next
if prev is not None:
prev.next = None
return
def deleteNode1(self, node):
"""
:type node: ListNode
:rtype: void Do not return anything, modify node in-place instead.
"""
curt = node
while curt.next is not None:
curt.val = curt.next.val
if curt.next.next is None:
curt.next = None
break
curt = curt.next
return
def deleteNode(self, node):
"""
:type node: ListNode
:rtype: void Do not return anything, modify node in-place instead.
"""
node.val = node.next.val
node.next = node.next.next
return
if __name__ == '__main__':
n1 = ListNode(1)
n2 = ListNode(2)
n3 = ListNode(3)
n4 = ListNode(4)
n1.next = n2
n2.next = n3
n3.next = n4
sol = Solution()
sol.deleteNode(n1)
print n1.val, n1.next.val, n1.next.next.val
try:
print n1.next.next.next.val
except:
print 'None Type!'
pass
|
editorsnotes/editorsnotes
|
editorsnotes/api/urls.py
|
Python
|
agpl-3.0
| 3,744
| 0.006143
|
# vim: set tw=0:
from django.conf.urls import url, include
from django.core.urlresolvers import RegexURLPattern
from rest_framework.urlpatterns import format_suffix_patterns
from rest_framework.authtoken.views import obtain_auth_token
from . import views
def format_patterns(urlpatterns):
"If a URL pattern ends in a slash, it should be able to be rendered as different types"
suffixes = ['json', 'jsonld', 'jsonld-browse', 'ttl', 'ttl-browse']
ret = []
for urlpattern in urlpatterns:
if isinstance(urlpattern, RegexURLPattern):
pattern = urlpattern.regex.pattern
is_empty = pattern == '^$'
if is_empty or pattern.endswith('/$'):
regex = '^' if is_empty else urlpattern.regex.pattern[:-2]
view = urlpattern._callback or urlpattern._callback_str
kwargs = urlpattern.default_args
name = urlpattern.name
stripped_url = url(regex, view, kwargs, name)
ret.append(format_suffix_patterns([stripped_url], True, suffixes)[0])
ret.append(urlpattern)
return ret
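# For example (illustrative): format_patterns turns url(r'^topics/$', ...) into
# patterns that also match /topics.json, /topics.jsonld, /topics.ttl, etc.,
# one per suffix listed above.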
project_specific_patterns = [
### Project (general) ###
url(r'^$', views.ProjectDetail.as_view(), name='projects-detail'),
url(r'^vocab$', views.ProjectAPIDocumentation.as_view(), name='projects-api-documentation'),
url(r'^activity/$', views.ActivityView.as_view(), name='projects-activity'),
### Topics ###
url(r'^topics/$', views.TopicList.as_view(), name='topics-list'),
url(r'^topics/(?P<pk>\d+)/$', views.TopicDetail.as_view(), name='topics-detail'),
url(r'^topics/(?P<pk>\d+)/w/$', views.ENTopicDetail.as_view(), name='topics-wn-detail'),
url(r'^topics/(?P<pk>\d+)/p/$', views.TopicLDDetail.as_view(), name='topics-proj-detail'),
    url(r'^topics/(?P<pk>\d+)/confirm_delete$', views.TopicConfirmDelete.as_view(), name='topics-confirm-delete'),
### Notes ###
url(r'^notes/$', views.NoteList.as_view(), name='notes-list'),
url(r'^notes/(?P<pk>\d+)/$', views.NoteDetail.as_view(), name='notes-detail'),
url(r'^notes/(?P<pk>\d+)/confirm_delete$', views.NoteConfirmDelete.as_view(), name='notes-confirm-delete'),
### Documents ###
    url(r'^documents/$', views.DocumentList.as_view(), name='documents-list'),
url(r'^documents/(?P<pk>\d+)/$', views.DocumentDetail.as_view(), name='documents-detail'),
url(r'^documents/(?P<pk>\d+)/confirm_delete$', views.DocumentConfirmDelete.as_view(), name='documents-confirm-delete'),
url(r'^documents/(?P<document_id>\d+)/scans/$', views.ScanList.as_view(), name='scans-list'),
url(r'^documents/(?P<document_id>\d+)/scans/(?P<scan_id>\d+)/$', views.ScanDetail.as_view(), name='scans-detail'),
url(r'^documents/(?P<document_id>\d+)/transcript/$', views.Transcript.as_view(), name='transcripts-detail'),
]
project_specific_patterns = format_patterns(project_specific_patterns)
urlpatterns = [
url(r'^$', views.browse.root, name='root'),
url(r'^browse/$', views.browse.browse_items, name='browse'),
url(r'^auth-token/$', obtain_auth_token, name='obtain-auth-token'),
url(r'^search/$', views.SearchView.as_view(), name='search'),
url(r'^notes/$', views.AllProjectNoteList.as_view(), name='all-projects-notes-list'),
url(r'^projects/$', views.ProjectList.as_view(), name='projects-list'),
url(r'^projects/(?P<project_slug>[\w\-]+)/', include(project_specific_patterns)),
url(r'^users/(?P<pk>\d+)/$', views.UserDetail.as_view(), name='users-detail'),
url(r'^users/(?P<pk>\d+)/activity/$', views.ActivityView.as_view(), name='users-activity'),
url(r'^me/$', views.SelfUserDetail.as_view(), name='users-detail-self'),
]
urlpatterns = format_patterns(urlpatterns)
|
sounak98/coala-bears
|
tests/java/InferBearTest.py
|
Python
|
agpl-3.0
| 585
| 0
|
from bears.java.InferBear import InferBear
from tests.LocalBearTestHelper import verify_local_bear
good_file = """
class InferGood {
int test() {
String s = null;
return s == null ? 0 : s.length();
}
}
"""
bad_file = """
class InferBad {
int test() {
String s = null;
return s.length();
}
}
"""
InferBearTest = verify_local_bear(InferBear,
valid_files=(good_file,),
invalid_files=(bad_file,),
tempfile_kwargs={'suffix': '.java'})
|
palladius/gcloud
|
packages/gcutil-1.7.1/lib/google_api_python_client/apiclient/discovery.py
|
Python
|
gpl-3.0
| 26,209
| 0.008623
|
# Copyright (C) 2010 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Client for discovery based APIs
A client library for Google's discovery based APIs.
"""
__all__ = [
'build',
'build_from_document',
'fix_method_name',
'key2param'
]
import copy
import httplib2
import logging
import os
import random
import re
import uritemplate
import urllib
import urlparse
import mimeparse
import mimetypes
try:
from urlparse import parse_qsl
except ImportError:
from cgi import parse_qsl
from apiclient.errors import HttpError
from apiclient.errors import InvalidJsonError
from apiclient.errors import MediaUploadSizeError
from apiclient.errors import UnacceptableMimeTypeError
from apiclient.errors import UnknownApiNameOrVersion
from apiclient.errors import UnknownLinkType
from apiclient.http import HttpRequest
from apiclient.http import MediaFileUpload
from apiclient.http import MediaUpload
from apiclient.model import JsonModel
from apiclient.model import MediaModel
from apiclient.model import RawModel
from apiclient.schema import Schemas
from email.mime.multipart import MIMEMultipart
from email.mime.nonmultipart import MIMENonMultipart
from oauth2client.anyjson import simplejson
logger = logging.getLogger(__name__)
URITEMPLATE = re.compile('{[^}]*}')
VARNAME = re.compile('[a-zA-Z0-9_-]+')
DISCOVERY_URI = ('https://www.googleapis.com/discovery/v1/apis/'
'{api}/{apiVersion}/rest')
DEFAULT_METHOD_DOC = 'A description of how to use this function'
# Parameters accepted by the stack, but not visible via discovery.
STACK_QUERY_PARAMETERS = ['trace', 'pp', 'userip', 'strict']
# Python reserved words.
RESERVED_WORDS = ['and', 'assert', 'break', 'class', 'continue', 'def', 'del',
'elif', 'else', 'except', 'exec', 'finally', 'for', 'from',
'global', 'if', 'import', 'in', 'is', 'lambda', 'not', 'or',
'pass', 'print', 'raise', 'return', 'try', 'while' ]
def fix_method_name(name):
"""Fix method names to avoid reserved word conflicts.
Args:
name: string, method name.
Returns:
The name with a '_' appended if the name is a reserved word.
"""
if name in RESERVED_WORDS:
return name + '_'
else:
return name
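# Illustrative examples (hypothetical calls): fix_method_name('print')
# returns 'print_', while a name not in RESERVED_WORDS, e.g. 'get', is
# returned unchanged.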
def _add_query_parameter(url, name, value):
"""Adds a query parameter to a url.
Replaces the current value if it already exists in the URL.
Args:
url: string, url to add the query parameter to.
name: string, query parameter name.
value: string, query parameter value.
Returns:
The updated url; the url is returned unchanged if value is None.
"""
if value is None:
return url
else:
parsed = list(urlparse.urlparse(url))
q = dict(parse_qsl(parsed[4]))
q[name] = value
parsed[4] = urllib.urlencode(q)
return urlparse.urlunparse(parsed)
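# A minimal sketch of the behaviour above (values are examples only):
# _add_query_parameter('http://example.com/api?foo=1', 'userIp', '1.2.3.4')
# returns a url equivalent to 'http://example.com/api?foo=1&userIp=1.2.3.4'
# (parameter order may vary), while value=None returns the url unchanged.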
def key2param(key):
"""Converts key names into parameter names.
For example, converting "max-results" -> "max_results"
Args:
key: string, the method key name.
Returns:
A safe method name based on the key name.
"""
result = []
key = list(key)
if not key[0].isalpha():
result.append('x')
for c in key:
if c.isalnum():
result.append(c)
else:
result.append('_')
return ''.join(result)
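# Further illustration (hypothetical keys): key2param('max-results') gives
# 'max_results'; a key starting with a non-alphabetic character gets an 'x'
# prepended, so key2param('2nd-page') gives 'x2nd_page'.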
def build(serviceName,
version,
http=None,
discoveryServiceUrl=DISCOVERY_URI,
developerKey=None,
model=None,
requestBuilder=HttpRequest):
"""Construct a Resource for interacting with an API.
Construct a Resource object for interacting with an API. The serviceName and
version are the names from the Discovery service.
Args:
serviceName: string, name of the service.
version: string, the version of the service.
http: httplib2.Http, An instance of httplib2.Http or something that acts
like it that HTTP requests will be made through.
discoveryServiceUrl: string, a URI Template that points to the location of
the discovery service. It should have two parameters {api} and
{apiVersion} that when filled in produce an absolute URI to the discovery
document for that service.
developerKey: string, key obtained from
https://code.google.com/apis/console.
model: apiclient.Model, converts to and from the wire format.
requestBuilder: apiclient.http.HttpRequest, encapsulator for an HTTP
request.
Returns:
A Resource object with methods for interacting with the service.
"""
params = {
'api': serviceName,
'apiVersion': version
}
if http is None:
http = httplib2.Http()
requested_url = uritemplate.expand(discoveryServiceUrl, params)
# REMOTE_ADDR is defined by the CGI spec [RFC3875] as the environment
# variable that contains the network address of the client sending the
# request. If it exists then add that to the request for the discovery
# document to avoid exceeding the quota on discovery requests.
if 'REMOTE_ADDR' in os.environ:
requested_url = _add_query_parameter(requested_url, 'userIp',
os.environ['REMOTE_ADDR'])
logger.info('URL being requested: %s' % requested_url)
resp, content = http.request(requested_url)
if resp.status == 404:
raise UnknownApiNameOrVersion("name: %s version: %s" % (serviceName,
version))
if resp.status >= 400:
raise HttpError(resp, content, requested_url)
try:
service = simplejson.loads(content)
except ValueError, e:
logger.error('Failed to parse as JSON: ' + content)
raise InvalidJsonError()
return build_from_document(content, discoveryServiceUrl, http=http,
developerKey=developerKey, model=model, requestBuilder=requestBuilder)
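# A hedged usage sketch; the resource surface below is generated from the
# discovery document, so the exact methods depend on the API requested, and
# 'my-key' is a placeholder:
# service = build('books', 'v1', developerKey='my-key')
# request = service.volumes().list(q='python')
# print request.execute()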
def build_from_document(
service,
base,
future=None,
http=None,
developerKey=None,
model=None,
requestBuilder=HttpRequest):
"""Create a Resource for interacting with an API.
Same as `build()`, but constructs the Resource object from a discovery
document that it is given, as opposed to retrieving one over HTTP.
Args:
service: string, discovery document.
base: string, base URI for all HTTP requests, usually the discovery URI.
future: string, discovery document with future capabilities (deprecated).
http: httplib2.Http, An instance of httplib2.Http or something that acts
like it that HTTP requests will be made through.
developerKey: string, Key for controlling API usage, generated
from the API Console.
model: Model class instance that serializes and de-serializes requests and
responses.
requestBuilder: Takes an http request and packages it up to be executed.
Returns:
A Resource object with methods for interacting with the service.
"""
# future is no longer used.
future = {}
service = simplejson.loads(service)
base = urlparse.urljoin(base, service['basePath'])
schema = Schemas(service)
if model is None:
features = service.get('features', [])
model = JsonModel('dataWrapper' in features)
resource = _createResource(http, base, model, requestBuilder, developerKey,
service, service, schema)
return resource
def _cast(value, schema_type):
"""Convert value to a string based on JSON Schema type.
See http://tools.ietf.org/html/draft-zyp-json-schema-03 for more details on
JSON Schema.
Args:
value: any, the value to convert
schema_type: string, the type that value should be interpreted as
Returns:
A string representation of 'value' based on the schema_type.
"""
if schema_type == 'string':
if type(va
|
GadgeurX/NetworkLiberator
|
Daemon/AttackProcess.py
|
Python
|
gpl-3.0
| 586
| 0.001706
|
from threading import Thread
import time
from scapy.all import *
class AttackProcess(Thread):
def __init__(self, main):
Thread.__init__(self)
self.main = main
self.selected_hosts = []
self.is_attacking = False
def run(self):
while True:
while self.is_attacking:
packets = []
for host in self.main.HostMgr.hosts:
if host.is_selected:
packets.append(host.packet)
time.sleep(1)
send(packets)
time.sleep(5)
|
7Pros/circuit
|
circuit/wsgi.py
|
Python
|
gpl-2.0
| 391
| 0
|
"""
WSGI config for circuit project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "circuit.settings")
application = get_wsgi_application()
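# A hedged deployment sketch (the server choice is an assumption, not part
# of this project): any WSGI server can load the callable above, e.g.
# gunicorn circuit.wsgi:application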
|
jabaier/iic1103.20152.s4
|
estriangulo_prueba.py
|
Python
|
unlicense
| 428
| 0.063084
|
def estriangulo(a,b,c):
print("the first argument is:",a)
print("the second argument is:",b)
print("the third argument is:",c)
return a+b>c and a+c>b and c+b>a
def espitagorico(a,b,c):
return a**2+b**2==c**2 or a**2+c**2==b**2 or b**2+c**2==a**2
def esisosceles(a,b,c):
return a==b or a==c or b==c
print(estriangulo(int(input("number? ")),4,5))
print(espitagorico(3,4,5))
print(esisosceles(3,4,5))
|
UBayouski/RaspberryPiPowerButton
|
power_button.py
|
Python
|
mit
| 315
| 0
|
#!/usr/bin/python
import RPi.GPIO as GPIO
import subprocess
# Starting up
GPIO.setmode(GPIO.BCM)
GPIO.setup(3, GPIO.IN)
# Wait until power button is off
# Recommended to use GPIO.BOTH for cases with a switch
GPIO.wait_for_edge(3, GPIO.BOTH)
# Shutting down
subprocess.call(['shutdown', '-h', 'now'], shell=False)
|
protochron/aurora
|
src/test/python/apache/aurora/client/cli/test_quota.py
|
Python
|
apache-2.0
| 4,844
| 0.0064
|
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import json
from mock import patch
from apache.aurora.client.cli.client import AuroraCommandLine
from .util import AuroraClientCommandTest, FakeAuroraCommandContext
from gen.apache.aurora.api.ttypes import GetQuotaResult, ResourceAggregate, Result
class TestGetQuotaCommand(AuroraClientCommandTest):
@classmethod
def setup_mock_quota_call_no_consumption(cls, mock_context):
api = mock_context.get_api('west')
response = cls.create_simple_success_response()
response.result = Result(getQuotaResult=GetQuotaResult(
quota=ResourceAggregate(numCpus=5, ramMb=20480, diskMb=40960),
prodSharedConsumption=None,
prodDedicatedConsumption=None,
nonProdSharedConsumption=None,
nonProdDedicatedConsumption=None
))
api.get_quota.return_value = response
@classmethod
def setup_mock_quota_call_with_consumption(cls, mock_context):
api = mock_context.get_api('west')
response = cls.create_simple_success_response()
response.result = Result(getQuotaResult=GetQuotaResult(
quota=ResourceAggregate(numCpus=5, ramMb=20480, diskMb=40960),
prodSharedConsumption=ResourceAggregate(numCpus=1, ramMb=512, diskMb=1024),
prodDedicatedConsumption=ResourceAggregate(numCpus=2, ramMb=1024, diskMb=2048),
nonProdSharedConsumption=ResourceAggregate(numCpus=3, ramMb=2048, diskMb=4096),
nonProdDedicatedConsumption=ResourceAggregate(numCpus=4, ramMb=4096, diskMb=8192),
))
api.get_quota.return_value = response
def test_get_quota_no_consumption(self):
assert ('Allocated:\n CPU: 5\n RAM: 20.000000 GB\n Disk: 40.000000 GB' ==
self._get_quota(False, ['quota', 'get', 'west/bozo']))
def test_get_quota_with_consumption(self):
expected_output = ('Allocated:\n CPU: 5\n RAM: 20.000000 GB\n Disk: 40.000000 GB\n'
'Production shared pool resources consumed:\n'
' CPU: 1\n RAM: 0.500000 GB\n Disk: 1.000000 GB\n'
'Production dedicated pool resources consumed:\n'
' CPU: 2\n RAM: 1.000000 GB\n Disk: 2.000000 GB\n'
'Non-production shared pool resources consumed:\n'
' CPU: 3\n RAM: 2.000000 GB\n Disk: 4.000000 GB\n'
'Non-production dedicated pool resources consumed:\n'
' CPU: 4\n RAM: 4.000000 GB\n Disk: 8.000000 GB')
assert expected_output == self._get_quota(True, ['quota', 'get', 'west/bozo'])
def test_get_quota_with_no_consumption_json(self):
assert (json.loads('{"quota":{"numCpus":5,"ramMb":20480,"diskMb":40960}}') ==
json.loads(self._get_quota(False, ['quota', 'get', '--write-json', 'west/bozo'])))
def test_get_quota_with_consumption_json(self):
expected_response = json.loads(
'{"quota":{"numCpus":5,"ramMb":20480,"diskMb":40960},'
'"prodSharedConsumption":{"numCpus":1,"ramMb":512,"diskMb":1024},'
'"prodDedicatedConsumption":{"numCpus":2,"ramMb":1024,"diskMb":2048},'
'"nonProdSharedConsumption":{"numCpus":3,"ramMb":2048,"diskMb":4096},'
'"nonProdDedicatedConsumption":{"numCpus":4,"ramMb":4096,"diskMb":8192}}')
assert (expected_response ==
json.loads(self._get_quota(True, ['quota', 'get', '--write-json', 'west/bozo'])))
def test_get_quota_failed(self):
fake_context = FakeAuroraCommandContext()
api = fake_context.get_api('')
api.get_quota.return_value = self.create_error_response()
self._call_get_quota(fake_context, ['quota', 'get', 'west/bozo'])
assert fake_context.get_err() == ['Error retrieving quota for role bozo', '\tWhoops']
def _get_quota(self, include_consumption, command_args):
mock_context = FakeAuroraCommandContext()
if include_consumption:
self.setup_mock_quota_call_with_consumption(mock_context)
else:
self.setup_mock_quota_call_no_consumption(mock_context)
return self._call_get_quota(mock_context, command_args)
def _call_get_quota(self, mock_context, command_args):
with patch('apache.aurora.client.cli.quota.Quota.create_context', return_value=mock_context):
cmd = AuroraCommandLine()
cmd.execute(command_args)
out = '\n'.join(mock_context.get_out())
return out
|
kdebrab/pandas
|
pandas/core/arrays/__init__.py
|
Python
|
bsd-3-clause
| 325
| 0
|
from .base import (ExtensionArray, # noqa
ExtensionScalarOpsMixin)
from .categorical import Categorical # noqa
from .datetimes import DatetimeArrayMixin # noqa
from .interval import IntervalArray # noqa
from .period import PeriodArrayMixin # noqa
from .timedeltas import TimedeltaArrayMixin # noqa
|
vejeshv/main_project
|
knockknock/MacFailedException.py
|
Python
|
gpl-3.0
| 790
| 0
|
# Copyright (c) 2009 Moxie Marlinspike
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
# USA
#
class MacFailedException(Exception):
pass
|
gamesun/MyTerm-for-YellowStone
|
setup.py
|
Python
|
bsd-3-clause
| 4,648
| 0.010327
|
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
#
# Copyright (c) 2013, gamesun
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of gamesun nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY GAMESUN "AS IS" AND ANY EXPRESS OR
# IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL GAMESUN BE LIABLE FOR ANY DIRECT,
# INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
# HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
# STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING
# IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
from distutils.core import setup
import sys
import py2exe
import os
import glob
from py2exe.build_exe import py2exe as build_exe
import appInfo
if len(sys.argv) == 1:
sys.argv.append("py2exe")
# sys.argv.append("-q")
manifest_template = '''
<assembly xmlns="urn:schemas-microsoft-com:asm.v1"
manifestVersion="1.0">
<assemblyIdentity
version="0.6.8.0"
processorArchitecture="x86"
name="%(prog)s"
type="win32"
/>
<description>%(prog)s</description>
<trustInfo xmlns="urn:schemas-microsoft-com:asm.v3">
<security>
<requestedPrivileges>
<requestedExecutionLevel
level="asInvoker"
uiAccess="false"
/>
</requestedPrivileges>
</security>
</trustInfo>
<dependency>
<dependentAssembly>
<assemblyIdentity
type="win32"
name="Microsoft.VC90.CRT"
version="9.0.21022.8"
processorArchitecture="x86"
publicKeyToken="1fc8b3b9a1e18e3b"
/>
</dependentAssembly>
</dependency>
<dependency>
<dependentAssembly>
<assemblyIdentity
type="win32"
name="Microsoft.Windows.Common-Controls"
version="6.0.0.0"
processorArchitecture="x86"
publicKeyToken="6595b64144ccf1df"
language="*"
/>
</dependentAssembly>
</dependency>
</assembly>
'''
CONTENT_DIRS = [ "media" ]
# EXTRA_FILES = [ "./media/icon16.ico", "./media/icon32.ico" ]
EXTRA_FILES = []
class MediaCollector(build_exe):
def addDirectoryToZip(self, folder):
full = os.path.join(self.collect_dir, folder)
if not os.path.exists(full):
self.mkpath(full)
for f in glob.glob("%s/*" % folder):
if os.path.isdir(f):
self.addDirectoryToZip(f)
else:
name = os.path.basename(f)
self.copy_file(f, os.path.join(full, name))
self.compiled_files.append(os.path.join(folder, name))
def copy_extensions(self, extensions):
#super(MediaCollector, self).copy_extensions(extensions)
build_exe.copy_extensions(self, extensions)
for folder in CONTENT_DIRS:
self.addDirectoryToZip(folder)
for fileName in EXTRA_FILES:
name = os.path.basename(fileName)
self.copy_file(fileName, os.path.join(self.collect_dir, name))
self.compiled_files.append(name)
myOptions = {
"py2exe":{
"compressed": 1,
"optimize": 2,
"ascii": 1,
# "includes":,
"dll_excludes": ["MSVCP90.dll","w9xpopen.exe"],
"bundle_files": 2
}
}
RT_MANIFEST = 24
class Target:
def __init__(self, **kw):
self.__dict__.update(kw)
MyTerm_windows = Target(
# used for the versioninfo resource
copyright = "Copywrong All Lefts Unreserved.",
name = appInfo.title,
version = appInfo.version,
description = appInfo.file_name,
author = appInfo.author,
url = appInfo.url,
# what to build
script = "main.py",
dest_base = appInfo.file_name,
icon_resources = [(1, "icon\icon.ico")],
other_resources= [(RT_MANIFEST, 1, manifest_template % dict(prog = appInfo.title))]
)
setup(
options = myOptions,
cmdclass= {'py2exe': MediaCollector},
data_files = [("", ["COPYING",]),],
windows = [MyTerm_windows]
)
|
emencia/emencia_paste_djangocms_3
|
emencia_paste_djangocms_3/django_buildout/project/mods_available/admin_tools/dashboard.py
|
Python
|
mit
| 3,336
| 0.014688
|
# -*- coding: utf-8 -*-
"""
Dashboard stuff for admin_tools
"""
from django.core.urlresolvers import reverse
from django.utils.translation import ugettext_lazy as _
from admin_tools.dashboard import modules, Dashboard, AppIndexDashboard
from admin_tools.utils import get_admin_site_name
class CustomIndexDashboard(Dashboard):
"""
Custom index dashboard for project.
"""
def init_with_context(self, context):
site_name = get_admin_site_name(context)
# append a link list module for "quick links"
self.children.append(modules.LinkList(
_('Quick links'),
layout='inline',
draggable=False,
deletable=False,
collapsible=False,
children=[
[_('Return to site'), '/'],
[_('Change password'),
reverse('%s:password_change' % site_name)],
[_('Log out'), reverse('%s:logout' % site_name)],
]
))
# append an app list module for "Applications"
self.children.append(modules.AppList(
_('Applications'),
exclude=('django.contrib.*',),
))
# append an app list module for "Administration"
self.children.append(modules.AppList(
_('Administration'),
models=('django.contrib.*',),
))
# append a recent actions module
self.children.append(modules.RecentActions(_('Recent Actions'), 5))
## append a feed module
#self.children.append(modules.Feed(
#_('Latest Django News'),
#feed_url='http://www.djangoproject.com/rss/weblog/',
#limit=5
#))
## append another link list module for "support".
#self.children.append(modules.LinkList(
#_('Support'),
#children=[
#{
#'title': _('Django documentation'),
#'url': 'http://docs.djangoproject.com/',
#'external': True,
#},
#{
#'title': _('Django "django-users" mailing list'),
#'url': 'http://groups.google.com/group/django-users',
#'external': True,
#},
#{
#'title': _('Django irc channel'),
#'url': 'irc://irc.freenode.net/django',
#'external': True,
#},
#]
#))
class CustomAppIndexDashboard(AppIndexDashboard):
"""
Custom app index dashboard for project.
"""
# we disable title because its redundant with the model list module
title = ''
def __init__(self, *args, **kwargs):
AppIndexDashboard.__init__(self, *args, **kwargs)
# append a model list module and a recent actions module
self.children += [
modules.ModelList(self.app_title, self.models),
modules.RecentActions(
_('Recent Actions'),
include_list=self.get_app_content_types(),
limit=5
)
]
def init_with_context(self, context):
"""
Use this method if you need to access the request context.
"""
return super(CustomAppIndexDashboard, self).init_with_context(context)
|
mudbungie/NetExplorer
|
Host.py
|
Python
|
mit
| 17,053
| 0.002346
|
# Class for a network object.
from NetworkPrimitives import Ip, Mac
from Config import config
from Exceptions import *
import Toolbox
import easysnmp
import requests
import json
import time
from datetime import datetime
import uuid
import geocoder
# Disable security warnings.
from requests.packages.urllib3.exceptions import InsecureRequestWarning
requests.packages.urllib3.disable_warnings(InsecureRequestWarning)
class Host:
def __init__(self, network, ip=None, mac=None, hash=None):
self.serial = uuid.uuid4().int
self.network = network
# Set the timestamp for unix epoch, unless it was set during init.
self.network.add_node(self)
self.network.node[self]['updated'] = 0
self.community = None
# If supplied with an IP address or a MAC address, add those.
if ip:
if type(ip) != Ip:
ip = Ip(ip)
self.addAddress(ip)
if mac:
if type(mac) != Mac:
mac = Mac(mac)
self.addAddress(mac)
print(self.ips)
def __str__(self):
return 'Host:' + str(self.serial)
def __hash__(self):
return self.serial
@property
def ips(self):
if self.mgmntip:
# Always return management ips first.
ips = self.network.typedNeighbors(self, Ip)
ips.remove(self.mgmntip)
return [self.mgmntip] + ips
else:
return sorted(self.network.typedNeighbors(self, Ip))
@property
def macs(self):
return sorted(self.network.typedNeighbors(self, Mac))
@property
def community(self):
return self.network.node[self]['community']
@community.setter
def community(self, community):
self.network.node[self]['community'] = community
@property
def addresses(self):
# Aggregation of all MAC and IP addresses
return self.macs + self.ips
@property
def hostname(self):
try:
return self.network.node[self]['hostname']
except KeyError:
return None
@hostname.setter
def hostname(self, hostname):
self.network.node[self]['hostname'] = hostname
@property
def updated(self):
return self.network.node[self]['updated']
def touch(self):
# Update timestamp on host.
self.network.node[self]['updated'] = Toolbox.timestamp()
@property
def vendor(self):
# Take the first recognizable MAC vendor we find.
for mac in self.macs:
if mac.vendor:
return mac.vendor
return None
@property
def location(self):
try:
return self.network.node[self]['location']
except KeyError:
return None
@property
def coords(self):
# Geocoords lookup to get address for host.
return geocoder.google(self.location).latlng
@property
def lat(self):
return self.coords[0]
@property
def lng(self):
return self.coords[1]
@property
def arpNeighbors(self):
return self.network.findAdj(self, ntype=Host, etype='arp')
@property
def mgmntip(self):
# An IP address that is confirmed to work with this host.
try:
for ip in self.network.typedNeighbors(self, Ip):
edge = self.network[self][ip]
if 'mgmnt' in edge and edge['mgmnt'] == 1:
return ip
# Unless we don't know one.
except TypeError:
# Means that there are no IPs.
pass
return False
def setmgmntip(self, ip, isit):
if isit:
self.network[self][ip]['mgmnt'] = 1
else:
self.network[self][ip]['mgmnt'] = 0
def addAddress(self, address, ifnum=None):
# Add an IP or MAC address.
if not address.local:
if address in self.addresses:
# Add the ifnum, in case it's not there.
self.network.node[address]['ifnum'] = ifnum
else:
# This is a new mac, or at least not attached to this host.
self.network.removeSafely(address)
self.network.add_node(address, ifnum=ifnum)
self.network.add_edge(self, address, etype='owns')
# Associate it with any similarly-numbered IPs.
if ifnum:
for a in self.addresses:
if 'ifnum' in self.network.node[a] and \
self.network.node[a]['ifnum'] == ifnum:
self.network.add_edge(address, a, etype='interface')
def snmpInit(self, ip, community):
print(ip, community)
session = easysnmp.Session(hostname=ip, community=community, version=1, timeout=1)
return session
def snmpwalk(self, mib):
# Walks specified mib
ips = self.ips
# Get a list of communities, starting with any that are known to
# work on this host.
communities = self.network.communities.copy()
if self.community:
# Means we have a functional community string. Use that first.
communities.append(self.community)
communities.reverse()
def scanAllCommunities(ip):
for community in communities:
results = scan(ip, community)
if results:
return results
return False
def scan(ip, community):
session = self.snmpInit(ip, community)
try:
responses = session.walk(mib)
self.community = community
self.setmgmntip(ip, True)
print('Response on', ip, 'with', community)
return responses
except easysnmp.exceptions.EasySNMPNoSuchNameError:
# Probably means that you're hitting the wrong kind of device.
self.community = None
self.setmgmntip(ip, False)
raise
except easysnmp.exceptions.EasySNMPTimeoutError:
# Either the community string is wrong, or the address is dead.
print('No response on', ip, 'with', community)
self.community = None
self.setmgmntip(ip, False)
pass
return False
# First, we try using known-good settings for communicating with this
# host.
if self.mgmntip:
if self.community:
results = scan(self.mgmntip, self.community)
if results:
return results
results = scanAllCommunities(self.mgmntip)
if results:
return results
# If we have no known-good settings, we just iterate over everything.
for ip in ips:
if not Toolbox.ipInNetworks(ip, self.network.inaccessiblenets):
results = scanAllCommunities(ip)
if results:
return results
return False
def getStatusPage(self, path, tries=0):
# Negotiates HTTP auth and JSON decoding for getting data from
# the web interface. Functions by requesting IPs until something gives
# a non-empty response, then authenticates until it gives the correct
# response.
# HTTPS redirect is managed automatically in applicable devices.
# Mostly got its own function for exception handling.
def webrequest(verb, session, url, data=None, tries=0):
if tries < 3:
try:
if verb == 'get':
return session.get(url, verify=False, timeout=2)
elif verb == 'post':
return session.post(url, data=data, verify=False,
timeout=2)
# Requests has like a billion error codes...
except (requests.exceptions.ConnectionError,
requests.exceptions.ReadTimeout,
requests.exceptions.ConnectTimeout):
tries += 1
|
iftekeriba/softlayer-python
|
SoftLayer/CLI/virt/upgrade.py
|
Python
|
mit
| 1,538
| 0
|
"""Upgrade a virtual server."""
# :license: MIT, see LICENSE for more details.
import SoftLayer
from SoftLayer.CLI import environment
from SoftLayer.CLI import exceptions
from SoftLayer.CLI import formatting
from SoftLayer.CLI import helpers
from SoftLayer.CLI import virt
import click
@click.command(epilog="""Note: SoftLayer automatically reboots the VS once
the upgrade request is placed. The VS is halted until the upgrade transaction
is completed. However, for network upgrades no reboot is required.""")
@click.argument('identifier')
@click.option('--cpu', type=click.INT, help="Number of CPU cores")
@click.option('--private',
is_flag=True,
help="CPU core will be on a dedicated host server.")
@click.option('--memory', type=virt.MEM_TYPE, help="Memory in megabytes")
@click.option('--network', type=click.INT, help="Network port speed in Mbps")
@environment.pass_env
def cli(env, identifier, cpu, private, memory, network):
"""Upgrade a virtual server."""
vsi = SoftLayer.VSManager(env.client)
vs_id = helpers.resolve_id(vsi.resolve_ids, identifier, 'VS')
if not (env.skip_confirmations or formatting.confirm(
"This action will incur charges on your account. "
"Continue?")):
raise exceptions.CLIAbort('Aborted')
if not vsi.upgrade(vs_id,
cpus=cpu,
memory=memory/1024,
nic_speed=network,
public=not private):
raise exceptions.CLIAbort('VS Upgrade Failed')
|
wakermahmud/sync-engine
|
inbox/models/message.py
|
Python
|
agpl-3.0
| 22,094
| 0.000181
|
import datetime
import itertools
from hashlib import sha256
from collections import defaultdict
from flanker import mime
from sqlalchemy import (Column, Integer, BigInteger, String, DateTime,
Boolean, Enum, ForeignKey, Index)
from sqlalchemy.dialects.mysql import LONGBLOB
from sqlalchemy.orm import relationship, backref, validates
from sqlalchemy.sql.expression import false
from sqlalchemy.ext.associationproxy import association_proxy
from inbox.util.html import plaintext2html, strip_tags
from inbox.sqlalchemy_ext.util import JSON, json_field_too_long
from inbox.util.addr import parse_mimepart_address_header
from inbox.util.misc import parse_references, get_internaldate
from inbox.models.mixins import HasPublicID, HasRevisions
from inbox.models.base import MailSyncBase
from inbox.models.namespace import Namespace
from inbox.models.category import Category
from inbox.security.blobstorage import encode_blob, decode_blob
from inbox.log import get_logger
log = get_logger()
def _trim_filename(s, mid, max_len=64):
if s and len(s) > max_len:
log.warning('filename is too long, truncating',
mid=mid, max_len=max_len, filename=s)
return s[:max_len - 8] + s[-8:] # Keep extension
return s
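# Illustrative (hypothetical filename): with max_len=64, a 99-character name
# keeps its first 56 characters plus its last 8, so a trailing extension such
# as '.tar.gz' survives the truncation.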
class Message(MailSyncBase, HasRevisions, HasPublicID):
@property
def API_OBJECT_NAME(self):
return 'message' if not self.is_draft else 'draft'
namespace_id = Column(ForeignKey(Namespace.id, ondelete='CASCADE'),
index=True, nullable=False)
namespace = relationship(
'Namespace',
lazy='joined',
load_on_pending=True)
# Do delete messages if their associated thread is deleted.
thread_id = Column(Integer, ForeignKey('thread.id', ondelete='CASCADE'),
nullable=False)
thread = relationship(
'Thread',
backref=backref('messages', order_by='Message.received_date',
passive_deletes=True, cascade='all, delete-orphan'))
from_addr = Column(JSON, nullable=False, default=lambda: [])
sender_addr = Column(JSON, nullable=True)
reply_to = Column(JSON, nullable=True, default=lambda: [])
to_addr = Column(JSON, nullable=False, default=lambda: [])
cc_addr = Column(JSON, nullable=False, default=lambda: [])
bcc_addr = Column(JSON, nullable=False, default=lambda: [])
in_reply_to = Column(JSON, nullable=True)
# From: http://tools.ietf.org/html/rfc4130, section 5.3.3,
# max message_id_header is 998 characters
message_id_header = Column(String(998), nullable=True)
# There is no hard limit on subject limit in the spec, but 255 is common.
subject = Column(String(255), nullable=True, default='')
received_date = Column(DateTime, nullable=False, index=True)
size = Column(Integer, nullable=False)
data_sha256 = Column(String(255), nullable=True)
is_read = Column(Boolean, server_default=false(), nullable=False)
is_starred = Column(Boolean, server_default=false(), nullable=False)
# For drafts (both Inbox-created and otherwise)
is_draft = Column(Boolean, server_default=false(), nullable=False)
is_sent = Column(Boolean, server_default=false(), nullable=False)
# DEPRECATED
state = Column(Enum('draft', 'sending', 'sending failed', 'sent'))
_compacted_body = Column(LONGBLOB, nullable=True)
snippet = Column(String(191), nullable=False)
SNIPPET_LENGTH = 191
# A reference to the block holding the full contents of the message
full_body_id = Column(ForeignKey('block.id', name='full_body_id_fk'),
nullable=True)
full_body = relationship('Block', cascade='all, delete')
# this might be a mail-parsing bug, or just a message from a bad client
decode_error = Column(Boolean, server_default=false(), nullable=False,
index=True)
# In accordance with JWZ (http://www.jwz.org/doc/threading.html)
references = Column(JSON, nullable=True)
# Only used for drafts.
version = Column(Integer, nullable=False, server_default='0')
# only on messages from Gmail (TODO: use different table)
#
# X-GM-MSGID is guaranteed unique across an account but not globally
# across all Gmail.
#
# Messages between different accounts *may* have the same X-GM-MSGID,
# but it's unlikely.
#
# (Gmail info from
# http://mailman13.u.washington.edu/pipermail/imap-protocol/
# 2014-July/002290.html.)
g_msgid = Column(BigInteger, nullable=True, index=True, unique=False)
g_thrid = Column(BigInteger, nullable=True, index=True, unique=False)
# The uid as set in the X-INBOX-ID header of a sent message we create
inbox_uid = Column(String(64), nullable=True, index=True)
def regenerate_inbox_uid(self):
"""
The value of inbox_uid is simply the draft public_id and version,
concatenated. Because the inbox_uid identifies the draft on the remote
provider, we regenerate it on each draft revision so that we can delete
the old draft and add the new one on the remote."""
from inbox.sendmail.message import generate_message_id_header
self.inbox_uid = '{}-{}'.format(self.public_id, self.version)
self.message_id_header = generate_message_id_header(self.inbox_uid)
categories = association_proxy(
'messagecategories', 'category',
creator=lambda category: MessageCategory(category=category))
# FOR INBOX-CREATED MESSAGES:
is_created = Column(Boolean, server_default=false(), nullable=False)
# Whether this draft is a reply to an existing thread.
is_reply = Column(Boolean)
reply_to_message_id = Column(Integer, ForeignKey('message.id'),
nullable=True)
reply_to_message = relationship('Message', uselist=False)
def mark_for_deletion(self):
"""
Mark this message to be deleted by an asynchronous delete
handler.
"""
self.deleted_at = datetime.datetime.utcnow()
@validates('subject')
def sanitize_subject(self, key, value):
# Trim overlong subjects, and remove null bytes. The latter can result
# when, for example, UTF-8 text decoded from an RFC2047-encoded header
# contains null bytes.
if value is None:
return
if len(value) > 255:
value = value[:255]
value = value.replace('\0', '')
return value
@classmethod
def create_from_synced(cls, account, mid, folder_name, received_date,
body_string):
"""
Parses message data and writes out db metadata and MIME blocks.
Returns the new Message, which links to the new Part and Block objects
through relationships. All new objects are uncommitted.
Threads are not computed here; you gotta do that separately.
Parameters
----------
mid : int
The account backend-specific message identifier; it's only used for
logging errors.
raw_message : str
The full message including headers (encoded).
"""
_rqd = [account, mid, folder_name, body_string]
if not all([v is not None for v in _rqd]):
raise ValueError(
'Required keyword arguments: account, mid, folder_name, '
'body_string')
# stop trickle-down bugs
assert account.namespace is not None
assert not isinstance(body_string, unicode)
msg = Message()
from inbox.models.block import Block
body_block = Block()
body_block.namespace_id = account.namespace.id
body_block.data = body_string
body_block.content_type = "text/plain"
msg.full_body = body_block
msg.namespace_id = account.namespace.id
try:
parsed = mime.from_string(body_string)
msg._parse_metadata(parsed, body_string, received_date, account.id,
folder_name, mid)
except (mime.DecodingError, AttributeError, RuntimeError,
TypeErr
|
ael-code/libreant
|
setup.py
|
Python
|
agpl-3.0
| 4,963
| 0.002015
|
import os
import sys
import msgfmt
from setuptools import setup
from setuptools.command.install_lib import install_lib as _install_lib
from setuptools.command.develop import develop as _develop
from distutils.command.build import build as _build
from setuptools.command.test import test as TestCommand
from distutils.cmd import Command
class compile_translations(Command):
description = 'compile message catalogs to .mo files'
user_options = [('force', 'f', "compile also not updated message catalogs")]
boolean_options = ['force']
def initialize_options(self):
self.force = False
def finalize_options(self):
pass
def run(self):
"""
Compile all message catalog .po files into .mo files.
Skips unchanged files based on the source mtime.
"""
# thanks to deluge guys ;)
po_dir = os.path.join(os.path.dirname(__file__), 'webant', 'translations')
print('Compiling po files from "{}"...'.format(po_dir))
for lang in os.listdir(po_dir):
sys.stdout.write("\tCompiling {}... ".format(lang))
sys.stdout.flush()
curr_lang_path = os.path.join(po_dir, lang)
for path, dirs, filenames in os.walk(curr_lang_path):
for f in filenames:
if f.endswith('.po'):
src = os.path.join(path, f)
dst = os.path.join(path, f[:-3] + ".mo")
if not os.path.exists(dst) or self.force:
msgfmt.make(src, dst)
print("ok.")
else:
src_mtime = os.stat(src)[8]
dst_mtime = os.stat(dst)[8]
if src_mtime > dst_mtime:
msgfmt.make(src, dst)
print("ok.")
else:
print("already up to date.")
print('Finished compiling translation files.')
class build(_build):
sub_commands = [('compile_translations', None)] + _build.sub_commands
class install_lib(_install_lib):
def run(self):
self.run_command('compile_translations')
_install_lib.run(self)
class develop(_develop):
def run(self):
self.run_command('compile_translations')
_develop.run(self)
class NoseTestCommand(TestCommand):
def finalize_options(self):
TestCommand.finalize_options(self)
self.test_args = []
self.test_suite = True
def run_tests(self):
# Run nose ensuring that argv simulates running nosetests directly
import nose
nose.run_exit(argv=['nosetests'])
def read(fname):
with open(os.path.join(os.path.dirname(__file__), fname)) as buf:
return buf.read()
conf = dict(
name='libreant',
version='0.3',
description='{e,}book archive focused on small grass root archives, distributed search, low assumptions',
long_description=read('README.rst'),
author='insomnialab',
author_email='insomnialab@hacari.org',
url='https://github.com/insomnia-lab/libreant',
license='AGPL',
packages=['libreantdb',
'webant',
'webant.api',
'presets',
'archivant',
'users',
'utils',
'cli',
'conf'],
install_requires=[
'gevent',
'elasticsearch >=1, <2',
'flask-bootstrap',
'Flask-Babel',
'flask-script',
'Flask-Authbone >=0.2',
'Flask',
'opensearch',
'Fsdb',
'click',
'peewee',
'passlib >=1.6, <1.7' # version 1.7 will drop python2 support
],
package_data = {
# If any package contains *.mo include them
# important! leave all the stars!
'webant': ['translations/*/*/*.mo']
},
include_package_data=True,
tests_require=['nose', 'coverage'],
zip_safe=False,
cmdclass={'build': build,
'test': NoseTestCommand,
'install_lib': install_lib,
'develop': develop,
'compile_translations': compile_translations},
entry_points={'console_scripts': [
'libreant=cli.libreant:libreant',
'agherant=cli.agherant:agherant',
'libreant-users=cli.libreant_users:libreant_users',
'libreant-db=cli.libreant_db:libreant_db'
]},
classifiers=[
"Framework :: Flask",
"License :: OSI Approved :: GNU Affero General Public License v3",
"Operating System :: POSIX :: Linux",
"Programming Language :: Python :: 2",
"Development Status :: 4 - Beta"
])
if __name__ == '__main__':
setup(**conf)
|
kaushik94/sympy
|
sympy/polys/factortools.py
|
Python
|
bsd-3-clause
| 34,338
| 0.00067
|
"""Polynomial factorization routines in characteristic zero. """
from __future__ import print_function, division
from sympy.polys.galoistools import (
gf_from_int_poly, gf_to_int_poly,
gf_lshift, gf_add_mul, gf_mul,
gf_div, gf_rem,
gf_gcdex,
gf_sqf_p,
gf_factor_sqf, gf_factor)
from sympy.polys.densebasic import (
dup_LC, dmp_LC, dmp_ground_LC,
dup_TC,
dup_convert, dmp_convert,
dup_degree, dmp_degree,
dmp_degree_in, dmp_degree_list,
dmp_from_dict,
dmp_zero_p,
dmp_one,
dmp_nest, dmp_raise,
dup_strip,
dmp_ground,
dup_inflate,
dmp_exclude, dmp_include,
dmp_inject, dmp_eject,
dup_terms_gcd, dmp_terms_gcd)
from sympy.polys.densearith import (
dup_neg, dmp_neg,
dup_add, dmp_add,
dup_sub, dmp_sub,
dup_mul, dmp_mul,
dup_sqr,
dmp_pow,
dup_div, dmp_div,
dup_quo, dmp_quo,
dmp_expand,
dmp_add_mul,
dup_sub_mul, dmp_sub_mul,
dup_lshift,
dup_max_norm, dmp_max_norm,
dup_l1_norm,
dup_mul_ground, dmp_mul_ground,
dup_quo_ground, dmp_quo_ground)
from sympy.polys.densetools import (
dup_clear_denoms, dmp_clear_denoms,
dup_trunc, dmp_ground_trunc,
dup_content,
dup_monic, dmp_ground_monic,
dup_primitive, dmp_ground_primitive,
dmp_eval_tail,
dmp_eval_in, dmp_diff_eval_in,
dmp_compose,
dup_shift, dup_mirror)
from sympy.polys.euclidtools import (
dmp_primitive,
dup_inner_gcd, dmp_inner_gcd)
from sympy.polys.sqfreetools import (
dup_sqf_p,
dup_sqf_norm, dmp_sqf_norm,
dup_sqf_part, dmp_sqf_part)
from sympy.polys.polyutils import _sort_factors
from sympy.polys.polyconfig import query
from sympy.polys.polyerrors import (
ExtraneousFactors, DomainError, CoercionFailed, EvaluationFailed)
from sympy.ntheory import nextprime, isprime, factorint
from sympy.utilities import subsets
from math import ceil as _ceil, log as _log
from sympy.core.compatibility import range
def dup_trial_division(f, factors, K):
"""
Determine multiplicities of factors for a univariate polynomial
using trial division.
"""
result = []
for factor in factors:
k = 0
while True:
q, r = dup_div(f, factor, K)
if not r:
f, k = q, k + 1
else:
break
result.append((factor, k))
return _sort_factors(result)
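# A small sketch in the dense representation used here (assuming K = ZZ):
# f = x**3 - x is [1, 0, -1, 0]; trial division by the factors [1, 0] (x),
# [1, -1] (x - 1) and [1, 1] (x + 1) finds each exactly once, so every
# factor is returned with multiplicity 1 after _sort_factors orders them.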
def dmp_trial_division(f, factors, u, K):
"""
Determine multiplicities of factors for a multivariate polynomial
using trial division.
"""
result = []
for factor in factors:
k = 0
while True:
q, r = dmp_div(f, factor, u, K)
if dmp_zero_p(r, u):
f, k = q, k + 1
else:
break
result.append((factor, k))
return _sort_factors(result)
def dup_zz_mignotte_bound(f, K):
"""Mignotte bound for univariate polynomials in `K[x]`. """
a = dup_max_norm(f, K)
b = abs(dup_LC(f, K))
n = dup_degree(f)
return K.sqrt(K(n + 1))*2**n*a*b
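# A worked instance (illustrative only): for f = x**2 + 1, i.e. [1, 0, 1],
# a = 1 (max norm), b = 1 (|lc|), n = 2, so the bound is sqrt(3)*2**2*1*1,
# roughly 6.93.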
def dmp_zz_mignotte_bound(f, u, K):
"""Mignotte bound for multivariate polynomials in `K[X]`. """
a = dmp_max_norm(f, u, K)
b = abs(dmp_ground_LC(f, u, K))
n = sum(dmp_degree_list(f, u))
return K.sqrt(K(n + 1))*2**n*a*b
def dup_zz_hensel_step(m, f, g, h, s, t, K):
"""
One step in Hensel lifting in `Z[x]`.
Given positive integer `m` and `Z[x]` polynomials `f`, `g`, `h`, `s`
and `t` such that::
f = g*h (mod m)
s*g + t*h = 1 (mod m)
lc(f) is not a zero divisor (mod m)
lc(h) = 1
deg(f) = deg(g) + deg(h)
deg(s) < deg(h)
deg(t) < deg(g)
returns polynomials `G`, `H`, `S` and `T`, such that::
f = G*H (mod m**2)
S*G + T*H = 1 (mod m**2)
References
==========
.. [1] [Gathen99]_
"""
M = m**2
e = dup_sub_mul(f, g, h, K)
e = dup_trunc(e, M, K)
q, r = dup_div(dup_mul(s, e, K), h, K)
q = dup_trunc(q, M, K)
r = dup_trunc(r, M, K)
u = dup_add(dup_mul(t, e, K), dup_mul(q, g, K), K)
G = dup_trunc(dup_add(g, u, K), M, K)
H = dup_trunc(dup_add(h, r, K), M, K)
u = dup_add(dup_mul(s, G, K), dup_mul(t, H, K), K)
b = dup_trunc(dup_sub(u, [K.one], K), M, K)
c, d = dup_div(dup_mul(s, b, K), H, K)
c = dup_trunc(c, M, K)
d = dup_trunc(d, M, K)
u = dup_add(dup_mul(t, b, K), dup_mul(c, G, K), K)
S = dup_trunc(dup_sub(s, d, K), M, K)
T = dup_trunc(dup_sub(t, u, K), M, K)
return G, H, S, T
def dup_zz_hensel_lift(p, f, f_list, l, K):
"""
Multifactor Hensel lifting in `Z[x]`.
Given a prime `p`, polynomial `f` over `Z[x]` such that `lc(f)`
is a unit modulo `p`, monic pair-wise coprime polynomials `f_i`
over `Z[x]` satisfying::
f = lc(f) f_1 ... f_r (mod p)
and a positive integer `l`, returns a list of monic polynomials
`F_1`, `F_2`, ..., `F_r` satisfying::
f = lc(f) F_1 ... F_r (mod p**l)
F_i = f_i (mod p), i = 1..r
References
==========
.. [1] [Gathen99]_
"""
r = len(f_list)
lc = dup_LC(f, K)
if r == 1:
F = dup_mul_ground(f, K.gcdex(lc, p**l)[0], K)
return [ dup_trunc(F, p**l, K) ]
m = p
k = r // 2
d = int(_ceil(_log(l, 2)))
g = gf_from_int_poly([lc], p)
for f_i in f_list[:k]:
g = gf_mul(g, gf_from_int_poly(f_i, p), p, K)
h = gf_from_int_poly(f_list[k], p)
for f_i in f_list[k + 1:]:
h = gf_mul(h, gf_from_int_poly(f_i, p), p, K)
s, t, _ = gf_gcdex(g, h, p, K)
g = gf_to_int_poly(g, p)
h = gf_to_int_poly(h, p)
s = gf_to_int_poly(s, p)
t = gf_to_int_poly(t, p)
for _ in range(1, d + 1):
(g, h, s, t), m = dup_zz_hensel_step(m, f, g, h, s, t, K), m**2
return dup_zz_hensel_lift(p, g, f_list[:k], l, K) \
+ dup_zz_hensel_lift(p, h, f_list[k:], l, K)
def _test_pl(fc, q, pl):
if q > pl // 2:
q = q - pl
if not q:
return True
return fc % q == 0
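# Illustrative behaviour: with pl = 25, q = 24 maps to the symmetric
# representative -1 and the test passes for any fc; q = 0 is accepted
# outright via the early return.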
def dup_zz_zassenhaus(f, K):
"""Factor primitive square-free polynomials in `Z[x]`. """
n = dup_degree(f)
if n == 1:
return [f]
fc = f[-1]
A = dup_max_norm(f, K)
b = dup_LC(f, K)
B = int(abs(K.sqrt(K(n + 1))*2**n*A*b))
C = int((n + 1)**(2*n)*A**(2*n - 1))
gamma = int(_ceil(2*_log(C, 2)))
bound = int(2*gamma*_log(gamma))
a = []
# choose a prime number `p` such that `f` is square-free in Z_p
# if there are many factors in Z_p, choose among a few different `p`
# the one with the fewest factors
for px in range(3, bound + 1):
if not isprime(px) or b % px == 0:
continue
px = K.convert(px)
F = gf_from_int_poly(f, px)
if not gf_sqf_p(F, px, K):
continue
fsqfx = gf_factor_sqf(F, px, K)[1]
a.append((px, fsqfx))
if len(fsqfx) < 15 or len(a) > 4:
break
p, fsqf = min(a, key=lambda x: len(x[1]))
l = int(_ceil(_log(2*B + 1, p)))
modular = [gf_to_int_poly(ff, p) for ff in fsqf]
g = dup_zz_hensel_lift(p, f, modular, l, K)
sorted_T = range(len(g))
T = set(sorted_T)
factors, s = [], 1
pl = p**l
while 2*s <= len(T):
for S in subsets(sorted_T, s):
# lift the constant coefficient of the product `G` of the factors
# in the subset `S`; if it does not divide `fc`, `G` does
# not divide the input polynomial
if b == 1:
q = 1
for i in S:
q = q*g[i][-1]
q = q % pl
if not _test_pl(fc, q, pl):
continue
else:
G = [b]
for i in S:
G = dup_mul(G, g[i], K)
G = dup_trunc(G, pl, K)
G = dup_primitive(G, K)[1]
q = G[-1]
if q and fc % q != 0:
continue
H = [b]
S = set(S)
T_S = T - S
if b == 1:
G = [b]
for i in S:
|
dhermes/google-cloud-python
|
vision/google/cloud/vision_v1/types.py
|
Python
|
apache-2.0
| 2,315
| 0
|
# -*- coding: utf-8 -*-
#
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
import sys
from google.api_core.protobuf_helpers import get_messages
from google.api import http_pb2
from google.cloud.vision_v1.proto import geometry_pb2
from google.cloud.vision_v1.proto import image_annotator_pb2
from google.cloud.vision_v1.proto import product_search_pb2
from google.cloud.vision_v1.proto import product_search_service_pb2
from google.cloud.vision_v1.proto import text_annotation_pb2
from google.cloud.vision_v1.proto import web_detection_pb2
from google.longrunning import operations_pb2
from google.protobuf import any_pb2
from google.protobuf import descriptor_pb2
from google.protobuf import empty_pb2
from google.protobuf import field_mask_pb2
from google.protobuf import timestamp_pb2
from google.protobuf import wrappers_pb2
from google.rpc import status_pb2
from google.type import color_pb2
from google.type import latlng_pb2
_shared_modules = [
http_pb2,
operations_pb2,
any_pb2,
descriptor_pb2,
empty_pb2,
field_mask_pb2,
timestamp_pb2,
wrappers_pb2,
status_pb2,
color_pb2,
latlng_pb2,
]
_local_modules = [
geometry_pb2,
image_annotator_pb2,
product_search_pb2,
product_search_service_pb2,
text_annotation_pb2,
web_detection_pb2,
]
names = []
for module in _shared_modules:
for name, message in get_messages(module).items():
setattr(sys.modules[__name__], name, message)
names.append(name)
for module in _local_modules:
for name, message in get_messages(module).items():
message.__module__ = "google.cloud.vision_v1.types"
setattr(sys.modules[__name__], name, message)
names.append(name)
__all__ = tuple(sorted(names))
|
reshadh/Keepnote-LaTeX
|
keepnote/extensions/new_file/__init__.py
|
Python
|
gpl-2.0
| 17,272
| 0.004805
|
"""
KeepNote Extension
new_file
Extension allows adding new filetypes to a notebook
"""
#
# KeepNote
# Copyright (c) 2008-2011 Matt Rasmussen
# Author: Matt Rasmussen <rasmus@mit.edu>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 2 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301, USA.
#
import gettext
import os
import re
import shutil
import sys
import time
import xml.etree.cElementTree as etree
#_ = gettext.gettext
import keepnote
from keepnote import unicode_gtk
from keepnote.notebook import NoteBookError
from keepnote import notebook as notebooklib
from keepnote import tasklib
from keepnote import tarfile
from keepnote.gui import extension
from keepnote.gui import dialog_app_options
# pygtk imports
try:
import pygtk
pygtk.require('2.0')
from gtk import gdk
import gtk.glade
import gobject
except ImportError:
# do not fail on gtk import error,
# extension should be usable for non-graphical uses
pass
class Extension (extension.Extension):
def __init__(self, app):
"""Initialize extension"""
extension.Extension.__init__(self, app)
self.app = app
self._file_types = []
self._default_file_types = [
FileType("Text File (txt)", "untitled.txt", "plain_text.txt"),
FileType("Spreadsheet (xls)", "untitled.xls", "spreadsheet.xls"),
FileType("Word Document (doc)", "untitled.doc", "document.doc")
]
self.enabled.add(self.on_enabled)
def get_filetypes(self):
return self._file_types
def on_enabled(self, enabled):
if enabled:
self.load_config()
def get_depends(self):
return [("keepnote", ">=", (0, 7, 1))]
#===============================
# config handling
def get_config_file(self):
return self.get_data_file("config.xml")
def load_config(self):
config = self.get_config_file()
if not os.path.exists(config):
self.set_default_file_types()
self.save_default_example_files()
self.save_config()
try:
tree = etree.ElementTree(file=config)
# check root
root = tree.getroot()
if root.tag != "file_types":
raise NoteBookError("Root tag is not 'file_types'")
# iterate children
self._file_types = []
for child in root:
if child.tag == "file_type":
filetype = FileType("", "", "")
for child2 in child:
if child2.tag == "name":
filetype.name = child2.text
elif child2.tag == "filename":
filetype.filename = child2.text
elif child2.tag == "example_file":
filetype.example_file = child2.text
self._file_types.append(filetype)
except:
self.app.error("Error reading file type configuration")
self.set_default_file_types()
self.save_config()
def save_config(self):
config = self.get_config_file()
tree = etree.ElementTree(
etree.Element("file_types"))
root = tree.getroot()
for file_type in self._file_types:
elm = etree.SubElement(root, "file_type")
name = etree.SubElement(elm, "name")
name.text = file_type.name
example = etree.SubElement(elm, "example_file")
example.text = file_type.example_file
filename = etree.SubElement(elm, "filename")
filename.text = file_type.filename
tree.write(open(config, "w"), "UTF-8")
def set_default_file_types(self):
self._file_types = list(self._default_file_types)
def save_default_example_files(self):
base = self.get_base_dir()
data_dir = self.get_data_dir()
for file_type in self._default_file_types:
fn = file_type.example_file
shutil.copy(os.path.join(base, fn), os.path.join(data_dir, fn))
def update_all_menus(self):
for window in self.get_windows():
self.set_new_file_menus(window)
#==============================
# UI
def on_add_ui(self, window):
"""Initialize extension for a particular window"""
# add menu options
self.add_action(window, "New File", "New _File")
#("treeview_popup", None, None),
self.add_ui(window,
"""
<ui>
<menubar name="main_menu_bar">
<menu action="File">
<placeholder name="New">
<menuitem action="New File"/>
</placeholder>
</menu>
</menubar>
<!--
<menubar name="popup_menus">
<menu action="treeview_popup">
<placeholder action="New">
<menuitem action="New File"/>
</placeholder>
</menu>
</menubar>
-->
</ui>
""")
self.set_new_file_menus(window)
#=================================
# Options UI setup
def on_add_options_ui(self, dialog):
dialog.add_section(NewFileSection("new_file",
dialog, self._app,
self),
"extensions")
def on_remove_options_ui(self, dialog):
dialog.remove_section("new_file")
#======================================
# callbacks
def on_new_file(self, window, file_type):
"""Callback from gui to add a new file"""
notebook = window.get_notebook()
if notebook is None:
return
nodes = window.get_selected_nodes()
if len(nodes) == 0:
parent = notebook
else:
sibling = nodes[0]
if sibling.get_parent():
parent = sibling.get_parent()
index = sibling.get_attr("order") + 1
else:
parent = sibling
try:
uri = os.path.join(self.get_data_dir(), file_type.example_file)
node = notebooklib.attach_file(uri, parent)
node.rename(file_type.filename)
window.get_viewer().goto_node(node)
except Exception, e:
window.error("Error while attaching file '%s'." % uri, e)
def on_new_file_type(self, window):
"""Callback from gui for adding a new file type"""
self.app.app_options_dialog.show(window, "new_file")
#==========================================
# menu setup
def set_new_file_menus(self, window):
"""Set the recent notebooks in the file menu"""
menu = window.get_uimanager().get_widget("/main_menu_bar/File/New/New File")
if menu:
self.set_new_file_menu(window, menu)
menu = window.get_uimanager().get_widget("/popup_menus/treeview_popup/New/New File")
if menu:
self.set_new_file_menu(window, menu)
def set_new_file_menu(self, window, menu):
"""Set the recent notebooks in the file menu"""
# TODO: perform lookup of filetypes again
# init menu
if menu.get_submenu() is None:
submenu = gtk.Menu()
submenu.show()
menu.set_submenu(submenu)
menu = menu.get_submenu()
# clear menu
menu.foreach(l
|
hschovanec-usgs/magpy
|
magpy/lib/format_iaga02.py
|
Python
|
gpl-3.0
| 20,820
| 0.011479
|
"""
MagPy
IAGA02 input filter
Written by Roman Leonhardt June 2012
- contains test, read and write function
"""
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import absolute_import
from __future__ import division
from io import open
from magpy.stream import *
#global variables
MISSING_DATA = 99999
NOT_REPORTED = 88888
def isIAGA(filename):
"""
Checks whether a file is ASCII IAGA 2002 format.
"""
try:
temp = open(filename, 'rt').readline()
except:
return False
try:
if not temp.startswith(' Format'):
return False
if not 'IAGA-2002' in temp:
return False
except:
return False
return True
def readIAGA(filename, headonly=False, **kwargs):
"""
Reading IAGA2002 format data.
"""
starttime = kwargs.get('starttime')
endtime = kwargs.get('endtime')
debug = kwargs.get('debug')
getfile = True
array = [[] for key in KEYLIST]
fh = open(filename, 'rt')
# read file and split text into channels
stream = DataStream()
# Check whether header information is already present
headers = {}
data = []
key = None
try:
# get day from filename (platform independent)
theday = extractDateFromString(filename)[0]
day = datetime.strftime(theday,"%Y-%m-%d")
# Select only files within eventually defined time range
if starttime:
if not datetime.strptime(day,'%Y-%m-%d') >= datetime.strptime(datetime.strftime(stream._testtime(starttime),'%Y-%m-%d'),'%Y-%m-%d'):
getfile = False
if endtime:
if not datetime.strptime(day,'%Y-%m-%d') <= datetime.strptime(datetime.strftime(stream._testtime(endtime),'%Y-%m-%d'),'%Y-%m-%d'):
getfile = False
except:
logging.warning("Could not identify typical IAGA date for {}. Reading all ...".format(filename))
getfile = True
if getfile:
loggerlib.info('Read: %s Format: %s ' % (filename, "IAGA2002"))
dfpos = KEYLIST.index('df')
for line in fh:
if line.isspace():
# blank line
continue
elif line.startswith(' '):
# data info
infoline = line[:-4]
key = infoline[:23].strip()
val = infoline[23:].strip()
if key.find('Source') > -1:
if not val == '':
stream.header['StationInstitution'] = val
if key.find('Station') > -1:
if not val == '':
stream.header['StationName'] = val
if key.find('IAGA') > -1:
if not val == '':
stream.header['StationIAGAcode'] = val
stream.header['StationID'] = val
if key.find('Latitude') > -1:
if not val == '':
stream.header['DataAcquisitionLatitude'] = val
if key.find('Longitude') > -1:
if not val == '':
stream.header['DataAcquisitionLongitude'] = val
if key.find('Elevation') > -1:
if not val == '':
stream.header['DataElevation'] = val
if key.find('Format') > -1:
if not val == '':
stream.header['DataFormat'] = val
if key.find('Reported') > -1:
if not val == '':
stream.header['DataComponents'] = val
if key.find('Orientation') > -1:
if not val == '':
stream.header['DataSensorOrientation'] = val
if key.find('Digital') > -1:
if not val == '':
stream.header['DataDigitalSampling'] = val
if key.find('Interval') > -1:
if not val == '':
stream.header['DataSamplingFilter'] = val
if key.startswith(' #'):
if key.find('# V-Instrument') > -1:
if not val == '':
stream.header['SensorID'] = val
elif key.find('# PublicationDate') > -1:
if not val == '':
stream.header['DataPublicationDate'] = val
else:
print ("formatIAGA: did not import optional header info {a}".format(a=key))
if key.find('Data Type') > -1:
if not val == '':
if val[0] in ['d','D']:
stream.header['DataPublicationLevel'] = '4'
elif val[0] in ['q','Q']:
stream.header['DataPublicationLevel'] = '3'
elif val[0] in ['p','P']:
stream.header['DataPublicationLevel'] = '2'
else:
stream.header['DataPublicationLevel'] = '1'
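# Level mapping implied above (IAGA-2002 data types): definitive -> '4',
# quasi-definitive -> '3', provisional -> '2', anything else -> '1'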
if key.find('Publication Date') > -1:
if not val == '':
stream.header['DataPublicationDate'] = val
elif line.startswith('DATE'):
# data header
colsstr = line.lower().split()
varstr = ''
for it, elem in enumerate(colsstr):
if it > 2:
varstr += elem[-1]
varstr = varstr[:4]
stream.header["col-x"] = varstr[0].upper()
stream.header["col-y"] = varstr[1].upper()
stream.header["col-z"] = varstr[2].upper()
stream.header["unit-col-x"] = 'nT'
stream.header["unit-col-y"] = 'nT'
stream.header["unit-col-z"] = 'nT'
stream.header["unit-col-f"] = 'nT'
if varstr.endswith('g'):
stream.header["unit-col-df"] = 'nT'
stream.header["col-df"] = 'G'
stream.header["col-f"] = 'F'
else:
stream.header["col-f"] = 'F'
if varstr in ['dhzf','dhzg']:
#stream.header["col-x"] = 'H'
#stream.header["col-y"] = 'D'
#stream.header["col-z"] = 'Z'
stream.header["unit-col-y"] = 'deg'
stream.header['DataComponents'] = 'HDZF'
elif varstr in ['ehzf','ehzg']:
#stream.header["col-x"] = 'H'
#stream.header["col-y"] = 'E'
#stream.header["col-z"] = 'Z'
stream.header['DataComponents'] = 'HEZF'
elif varstr in ['dhif','dhig']:
stream.header["col-x"] = 'I'
stream.header["col-y"] = 'D'
stream.header["col-z"] = 'F'
stream.header["unit-col-x"] = 'deg'
stream.header["unit-col-y"] = 'deg'
stream.header['DataComponents'] = 'IDFF'
elif varstr in ['hdzf','hdzg']:
#stream.header["col-x"] = 'H'
#stream.header["col-y"] = 'D'
stream.header["unit-col-y"] = 'deg'
#stream.header["col-z"] = 'Z'
stream.header['DataComponents'] = 'HDZF'
else:
#stream.header["col-x"] = 'X'
#stream.header["col-y"] = 'Y'
#stream.header["col-z"] = 'Z'
stream.header['DataComponents'] = 'XYZF'
elif headonly:
# skip data for option headonly
continue
elif line.startswith('%'):
pass
else:
# data entry - may be written in multiple columns
# row holds the values of one line
row=[]
# Verwende das letzte Zeichen von "line
|
HailStorm32/Q.bo_stacks
|
qbo_webi/build/catkin_generated/generate_cached_setup.py
|
Python
|
lgpl-2.1
| 1,266
| 0.004739
|
from __future__ import print_function
import argparse
import os
import stat
import sys
# find the import for catkin's python package - either from source space or from an installed underlay
if os.path.exists(os.path.join('/opt/ros/hydro/share/catkin/cmake', 'catkinConfig.cmake.in')):
sys.path.insert(0, os.path.join('/opt/ros/hydro/share/catkin/cmake', '..', 'python'))
try:
from catkin.environment_cache import generate_environment_script
except ImportError:
# search for catkin package in all workspaces and prepend to path
for workspace in "/opt/ros/hydro".split(';'):
python_path = os.path.join(workspace, 'lib/python2.7/dist-packages')
if os.path.isdir(os.path.join(python_path, 'catkin')):
sys.path.insert(0, python_path)
break
from catkin.environment_cache import generate_environment_script
code = generate_environment_script('/opt/ros/hydro/stacks/qbo_webi/build/devel/env.sh')
output_filename = '/opt/ros/hydro/stacks/qbo_webi/build/catkin_generated/setup_cached.sh'
with open(output_filename, 'w') as f:
#print('Generate script for cached setup "%s"' % output_filename)
f.write('\n'.join(code))
mode = os.stat(output_filename).st_mode
os.chmod(output_filename, mode | stat.S_IXUSR)
|
tomturner/django-tenants
|
django_tenants/models.py
|
Python
|
mit
| 9,732
| 0.001747
|
from django.conf import settings
from django.contrib.sites.shortcuts import get_current_site
from django.core.management import call_command
from django.db import models, connections, transaction
from django.urls import reverse
from django_tenants.clone import CloneSchema
from .postgresql_backend.base import _check_schema_name
from .signals import post_schema_sync, schema_needs_to_be_sync
from .utils import get_creation_fakes_migrations, get_tenant_base_schema
from .utils import schema_exists, get_tenant_domain_model, get_public_schema_name, get_tenant_database_alias
class TenantMixin(models.Model):
"""
All tenant models must inherit this class.
"""
auto_drop_schema = False
"""
USE THIS WITH CAUTION!
Set this flag to true on a parent class if you want the schema to be
automatically deleted if the tenant row gets deleted.
"""
auto_create_schema = True
"""
Set this flag to false on a parent class if you don't want the schema
to be automatically created upon save.
"""
schema_name = models.CharField(max_length=63, unique=True, db_index=True,
validators=[_check_schema_name])
domain_url = None
"""
Leave this as None. Stores the current domain url so it can be used in the logs
"""
domain_subfolder = None
"""
Leave this as None. Stores the subfolder if subfolder routing was used
"""
_previous_tenant = []
class Meta:
abstract = True
def __enter__(self):
"""
Syntax sugar which helps in celery tasks, cron jobs, and other scripts
Usage:
with Tenant.objects.get(schema_name='test') as tenant:
# run some code in tenant test
# run some code in previous tenant (public probably)
"""
connection = connections[get_tenant_database_alias()]
self._previous_tenant.append(connection.tenant)
self.activate()
return self
def __exit__(self, exc_type, exc_val, exc_tb):
connection = connections[get_tenant_database_alias()]
connection.set_tenant(self._previous_tenant.pop())
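# _previous_tenant is a stack, so context managers nest; an illustrative
# sketch (the tenant objects are assumptions):
#   with tenant_a:
#       with tenant_b:
#           ...  # queries hit tenant_b's schema
#       ...      # back on tenant_a's schema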
def activate(self):
"""
Syntax sugar that helps at django shell with fast tenant changing
Usage:
Tenant.objects.get(schema_name='test').activate()
"""
connection = connections[get_tenant_database_alias()]
connection.set_tenant(self)
@classmethod
def deactivate(cls):
"""
Syntax sugar, return to public schema
Usage:
test_tenant.deactivate()
# or simpler
Tenant.deactivate()
"""
connection = connections[get_tenant_database_alias()]
connection.set_schema_to_public()
def save(self, verbosity=1, *args, **kwargs):
connection = connections[get_tenant_database_alias()]
is_new = self.pk is None
has_schema = hasattr(connection, 'schema_name')
if has_schema and is_new and connection.schema_name != get_public_schema_name():
raise Exception("Can't create tenant outside the public schema. "
"Current schema is %s." % connection.schema_name)
elif has_schema and not is_new and connection.schema_name not in (self.schema_name, get_public_schema_name()):
raise Exception("Can't update tenant outside it's own schema or "
"the public schema. Current schema is %s."
% connection.schema_name)
super().save(*args, **kwargs)
if has_schema and is_new and self.auto_create_schema:
try:
self.create_schema(check_if_exists=True, verbosity=verbosity)
post_schema_sync.send(sender=TenantMixin, tenant=self.serializable_fields())
except Exception:
# We failed creating the tenant, delete what we created and
# re-raise the exception
self.delete(force_drop=True)
raise
elif is_new:
# although we are not using the schema functions directly, the signal might be registered by a listener
schema_needs_to_be_sync.send(sender=TenantMixin, tenant=self.serializable_fields())
elif not is_new and self.auto_create_schema and not schema_exists(self.schema_name):
# Create schemas for existing models, deleting only the schema on failure
try:
self.create_schema(check_if_exists=True, verbosity=verbosity)
post_schema_sync.send(sender=TenantMixin, tenant=self.serializable_fields())
except Exception:
# We failed creating the schema, delete what we created and
# re-raise the exception
self._drop_schema()
raise
def serializable_fields(self):
""" in certain cases the user model isn't serializable so you may want to only send the id """
return self
def _drop_schema(self, force_drop=False):
""" Drops the schema"""
connection = connections[get_tenant_database_alias()]
has_schema = hasattr(connection, 'schema_name')
if has_schema and connection.schema_name not in (self.schema_name, get_public_schema_name()):
raise Exception("Can't delete tenant outside it's own schema or "
"the public schema. Current schema is %s."
% connection.schema_name)
if has_schema and schema_exists(self.schema_name) and (self.auto_drop_schema or force_drop):
self.pre_drop()
cursor = connection.cursor()
cursor.execute('DROP SCHEMA "%s" CASCADE' % self.schema_name)
def pre_drop(self):
"""
This is a routine which you could override to backup the tenant schema before dropping.
:return:
"""
def delete(self, force_drop=False, *args, **kwargs):
"""
Deletes this row. Drops the tenant's schema if the attribute
auto_drop_schema is set to True.
"""
self._drop_schema(force_drop)
super().delete(*args, **kwargs)
def create_schema(self, check_if_exists=False, sync_schema=True,
verbosity=1):
"""
Creates the schema 'schema_name' for this tenant. Optionally checks if
the schema already exists before creating it. Returns true if the
schema was created, false otherwise.
"""
# safety check
connection = connections[get_tenant_database_alias()]
_check_schema_name(self.schema_name)
cursor = connection.cursor()
if check_if_exists and schema_exists(self.schema_name):
return False
fake_migrations = get_creation_fakes_migrations()
if sync_schema:
if fake_migrations:
# copy tables and data from provided model schema
base_schema = get_tenant_base_schema()
clone_schema = CloneSchema()
clone_schema.clone_schema(base_schema, self.schema_name)
call_command('migrate_schemas',
tenant=True,
fake=True,
schema_name=self.schema_name,
interactive=False,
verbosity=verbosity)
else:
# create the schema
cursor.execute('CREATE SCHEMA "%s"' % self.schema_name)
call_command('migrate_schemas',
tenant=True,
schema_name=self.schema_name,
interactive=False,
verbosity=verbosity)
connection.set_schema_to_public()
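# Illustrative sketch (the concrete tenant model 'Client' is an assumption):
#   tenant = Client(schema_name='acme')
#   tenant.save()  # with auto_create_schema=True this creates the schema
#                  # and runs migrate_schemas before returning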
def get_primary_domain(self):
"""
Returns the primary domain of the tenant
"""
try:
domain = self.domains.get(is_primary=True)
return domain
except get_tenant_domain_model().DoesNotExist:
return None
def reverse(self, request, view_name):
"""
|
GbalsaC/bitnamiP
|
venv/lib/python2.7/site-packages/sympy/utilities/tests/test_lambdify.py
|
Python
|
agpl-3.0
| 11,262
| 0.010478
|
from sympy.utilities.pytest import XFAIL, raises
from sympy import (symbols, lambdify, sqrt, sin, cos, pi, atan, Rational, Float,
Matrix, Lambda, exp, Integral, oo, I)
from sympy.printing.lambdarepr import LambdaPrinter
from sympy import mpmath
from sympy.utilities.lambdify import implemented_function
import math, sympy
x,y,z = symbols('x,y,z')
#================== Test different arguments ==============
def test_no_args():
f = lambdify([], 1)
try:
f(-1)
assert False
except TypeError:
pass
assert f() == 1
def test_single_arg():
f = lambdify(x, 2*x)
assert f(1) == 2
def test_list_args():
f = lambdify([x,y], x+y)
assert f(1,2) == 3
def test_str_args():
f = lambdify('x,y,z', 'z,y,x')
assert f(3,2,1) == (1,2,3)
assert f(1.0,2.0,3.0) == (3.0,2.0,1.0)
# make sure correct number of args required
try:
f(0)
assert False
except TypeError:
pass
def test_own_namespace():
myfunc = lambda x:1
f = lambdify(x, sin(x), {"sin":myfunc})
assert f(0.1) == 1
assert f(100) == 1
def test_own_module():
f = lambdify(x, sin(x), math)
assert f(0)==0.0
f = lambdify(x, sympy.ceiling(x), math)
try:
f(4.5)
assert False
except NameError:
pass
def test_bad_args():
try:
# no vargs given
f = lambdify(1)
assert False
except TypeError:
pass
try:
# same with vector exprs
f = lambdify([1,2])
assert False
except TypeError:
pass
def test_atoms():
# Non-Symbol atoms should not be pulled out from the expression namespace
f = lambdify(x, pi + x, {"pi": 3.14})
assert f(0) == 3.14
f = lambdify(x, I + x, {"I": 1j})
assert f(1) == 1 + 1j
#================== Test different modules ================
# high-precision output of sin(0.2) is used to detect unwanted loss of precision
def test_sympy_lambda():
dps = mpmath.mp.dps
mpmath.mp.dps = 50
try:
sin02 = mpmath.mpf("0.19866933079506121545941262711838975037020672954020")
f = lambdify(x, sin(x), "sympy")
assert f(x) == sin(x)
prec = 1e-15
assert -prec < f(Rational(1,5)).evalf() - Float(str(sin02)) < prec
try:
# arctan is in numpy module and should not be available
f = lambdify(x, arctan(x), "sympy")
assert False
except NameError:
pass
finally:
mpmath.mp.dps = dps
def test_math_lambda():
dps = mpmath.mp.dps
mpmath.mp.dps = 50
try:
sin02 = mpmath.mpf("0.19866933079506121545941262711838975037020672954020")
f = lambdify(x, sin(x), "math")
prec = 1e-15
assert -prec < f(0.2) - sin02 < prec
try:
f(x) # if this succeeds, it can't be a python math function
assert False
except ValueError:
pass
finally:
mpmath.mp.dps = dps
def test_mpmath_lambda():
dps = mpmath.mp.dps
mpmath.mp.dps = 50
try:
sin02 = mpmath.mpf("0.19866933079506121545941262711838975037020672954020")
f = lambdify(x, sin(x), "mpmath")
prec = 1e-49 # mpmath precision is around 50 decimal places
assert -prec < f(mpmath.mpf("0.2")) - sin02 < prec
try:
f(x) # if this succeeds, it can't be a mpmath function
assert False
except TypeError:
pass
finally:
mpmath.mp.dps = dps
@XFAIL
def test_number_precision():
dps = mpmath.mp.dps
mpmath.mp.dps = 50
try:
sin02 = mpmath.mpf("0.19866933079506121545941262711838975037020672954020")
f = lambdify(x, sin02, "mpmath")
prec = 1e-49 # mpmath precision is around 50 decimal places
assert -prec < f(0) - sin02 < prec
finally:
mpmath.mp.dps = dps
#================== Test Translations =====================
# We can only check if all translated functions are valid. It has to be checked
# by hand if they are complete.
def test_math_transl():
from sympy.utilities.lambdify import MATH_TRANSLATIONS
for sym, mat in MATH_TRANSLATIONS.iteritems():
assert sym in sympy.__dict__
assert mat in math.__dict__
def test_mpmath_transl():
from sympy.utilities.lambdify import MPMATH_TRANSLATIONS
for sym, mat in MPMATH_TRANSLATIONS.iteritems():
assert sym in sympy.__dict__ or sym == 'Matrix'
assert mat in mpmath.__dict__
#================== Test some functions ===================
def test_exponentiation():
f = lambdify(x, x**2)
assert f(-1) == 1
assert f(0) == 0
assert f(1) == 1
assert f(-2) == 4
assert f(2) == 4
assert f(2.5) == 6.25
def test_sqrt():
f = lambdify(x, sqrt(x))
assert f(0) == 0.0
assert f(1) == 1.0
assert f(4) == 2.0
assert abs(f(2) - 1.414) < 0.001
assert f(6.25) == 2.5
try:
#FIXME-py3k: In Python 3, sqrt(-1) is a ValueError but (-1)**(1/2) isn't
#FIXME-py3k: (previously both were). Change the test, or check Py version?
f(-1)
assert False
except ValueError: pass
def test_trig():
f = lambdify([x], [cos(x),sin(x)])
d = f(pi)
prec = 1e-11
assert -prec < d[0]+1 < prec
assert -prec < d[1] < prec
d = f(3.14159)
prec = 1e-5
assert -prec < d[0]+1 < prec
assert -prec < d[1] < prec
#================== Test vectors ==========================
def test_vector_simple():
f = lambdify((x,y,z), (z,y,x))
assert f(3,2,1) == (1,2,3)
assert f(1.0,2.0,3.0) == (3.0,2.0,1.0)
# make sure correct number of args required
try:
f(0)
assert False
except TypeError: pass
def test_vector_discontinuous():
f = lambdify(x, (-1/x, 1/x))
try:
f(0)
assert False
except ZeroDivisionError: pass
assert f(1) == (-1.0, 1.0)
assert f(2) == (-0.5, 0.5)
assert f(-2) == (0.5, -0.5)
def test_trig_symbolic():
f = lambdify([x], [cos(x),sin(x)])
d = f(pi)
assert abs(d[0]+1) < 0.0001
assert abs(d[1]-0) < 0.0001
def test_trig_float():
f = lambdify([x], [cos(x),sin(x)])
d = f(3.14159)
assert abs(d[0]+1) < 0.0001
assert abs(d[1]-0) < 0.0001
def test_docs():
f = lambdify(x, x**2)
assert f(2) == 4
f = lambdify([x,y,z], [z,y,x])
assert f(1, 2, 3) == [3, 2, 1]
f = lambdify(x, sqrt(x))
assert f(4) == 2.0
f = lambdify((x,y), sin(x*y)**2)
assert f(0, 5) == 0
def test_math():
f = lambdify((x, y), sin(x), modules="math")
assert f(0, 5) == 0
def test_sin():
f = lambdify(x, sin(x)**2)
assert isinstance(f(2), float)
f = lambdify(x, sin(x)**2, modules="math")
assert isinstance(f(2), float)
def test_matrix():
A = Matrix([[x, x*y], [sin(z)+4, x**z]])
sol = Matrix([[1, 2], [sin(3)+4, 1]])
f = lambdify((x,y,z), A, modules="sympy")
assert f(1,2,3) == sol
f = lambdify((x,y,z), (A, [A]), modules="sympy")
assert f(1,2,3) == (sol,[sol])
def test_integral():
f = Lambda(x, exp(-x**2))
l = lambdify(x, Integral(f(x), (x, -oo, oo)), modules="sympy")
assert l(x) == Integral(exp(-x**2), (x, -oo, oo))
#########Test Symbolic###########
def test_sym_single_arg():
f = lambdify(x, x * y)
assert f(z) == z * y
def test_sym_list_args():
f = lambdify([x,y], x + y + z)
assert f(1,2) == 3 + z
def test_sym_integral():
f = Lambda(x, exp(-x**2))
l = lambdify(x, Integral(f(x), (x, -oo, oo)), modules="sympy")
assert l(y).doit() == sqrt(pi)
def test_namespace_order():
# lambdify had a bug, such that module dictionaries or cached module
# dictionaries would pull earlier namespaces into themselves.
# Because the module dictionaries form the namespace of the
# generated lambda, this meant that the behavior of a previously
# generated lambda function could change as a result of later calls
# to lambdify.
n1 = {'f': lambda x:'first f'}
n2 = {'f': lambda x:'second f',
'g': lambda x:'function g'}
f = sympy.Function('f')
g = sympy.Function('g')
if1 = lambdify(x, f(x), modules=(n1, "sympy"))
assert if1(1) == 'first f'
|
jeh/mopidy-gmusic
|
mopidy_gmusic/__init__.py
|
Python
|
apache-2.0
| 976
| 0
|
from __future__ import unicode_literals
import os
from mopidy import config, exceptions, ext
__version__ = '0.2.2'
class GMusicExtension(ext.Extension):
dist_name = 'Mopidy-GMusic'
ext_name = 'gmusic'
version = __version__
def get_default_config(self):
conf_file = os.path.join(os.path.dirname(__file__), 'ext.conf')
return config.read(conf_file)
def get_config_schema(self):
schema = super(GMusicExtension, self).get_config_schema()
schema['username'] = config.String()
schema['password'] = config.Secret()
schema['deviceid'] = config.String(optional=True)
return schema
def validate_environment(self):
try:
import gmusicapi # noqa
except ImportError as e:
raise exceptions.ExtensionError('gmusicapi library not found', e)
def get_backend_classes(self):
from .actor import GMusicBackend
return [GMusicBackend]
|
muxiaobai/CourseExercises
|
python/kaggle/competition/house-price/house.py
|
Python
|
gpl-2.0
| 4,162
| 0.010591
|
# coding: utf-8
# In[1]:
import numpy as np
import pandas as pd
# In[2]:
train_df = pd.read_csv('./input/train.csv', index_col=0)
test_df = pd.read_csv('./input/test.csv', index_col=0)
# In[4]:
train_df.head()
# In[6]:
# The label itself is not smooth. To help the classifier learn more accurately, we first "smooth" (normalize) the label with log1p.
import matplotlib.pyplot as plt
prices = pd.DataFrame({"price":train_df["SalePrice"], "log(price + 1)":np.log1p(train_df["SalePrice"])})
prices.hist()
plt.show()
# In[7]:
y_train = np.log1p(train_df.pop('SalePrice'))
# In[8]:
all_df = pd.concat((train_df, test_df), axis=0)
# In[19]:
all_df['MSSubClass'].dtypes
all_df['MSSubClass'].value_counts()
all_df['MSSubClass'] = all_df['MSSubClass'].astype(str)
pd.get_dummies(all_df['MSSubClass'], prefix='MSSubClass').head()
# In[20]:
all_dummy_df = pd.get_dummies(all_df)
all_dummy_df.head()
# In[21]:
all_dummy_df.isnull().sum().sort_values(ascending=False).head(10)
# In[22]:
mean_cols = all_dummy_df.mean()
mean_cols.head(10)
all_dummy_df = all_dummy_df.fillna(mean_cols)
all_dummy_df.isnull().sum().sum()
# In[23]:
dummy_train_df = all_dummy_df.loc[train_df.index]
dummy_test_df = all_dummy_df.loc[test_df.index]
# In[24]:
from sklearn.linear_model import Ridge
from sklearn.model_selection import cross_val_score
X_train = dummy_train_df.values
X_test = dummy_test_df.values
# ### Ridge
# In[25]:
alphas = np.logspace(-3, 2, 50)
test_scores = []
for alpha in alphas:
clf = Ridge(alpha)
test_score = np.sqrt(-cross_val_score(clf, X_train, y_train, cv=10, scoring='neg_mean_squared_error'))
test_scores.append(np.mean(test_score))
# In[27]:
plt.plot(alphas, test_scores)
plt.title("Alpha vs CV Error");
plt.show()
# alpha = 15 is best
#
# ### RandomForestRegressor
# In[28]:
from sklearn.ensemble import RandomForestRegressor
max_features = [.1, .3, .5, .7, .9, .99]
test_scores = []
for max_feat in max_features:
clf = RandomForestRegressor(n_estimators=200, max_features=max_feat)
test_score = np.sqrt(-cross_val_score(clf, X_train, y_train, cv=5, scoring='neg_mean_squared_error'))
test_scores.append(np.mean(test_score))
# In[29]:
plt.plot(max_features, test_scores)
plt.title("Max Features vs CV Error");
plt.show()
# ### xgboost
# In[36]:
from xgboost import XGBRegressor
params = [1,2,3,4,5,6]
test_scores = []
for param in params:
clf = XGBRegressor(max_depth=param)
test_score = np.sqrt(-cross_val_score(clf, X_train, y_train, cv=10, scoring='neg_mean_squared_error'))
test_scores.append(np.mean(test_score))
# In[ ]:
plt.plot(params, test_scores)
plt.title("max_depth vs CV Error");
plt.show()
# ### bagging
# In[38]:
from sklearn.ensemble import BaggingRegressor
params = [10, 15, 20, 25, 30, 35, 40, 45, 50]
test_scores = []
for param in params:
clf = BaggingRegressor(n_estimators=param)
test_score = np.sqrt(-cross_val_score(clf, X_train, y_train, cv=10, scoring='neg_mean_squared_error'))
test_scores.append(np.mean(test_score))
# In[39]:
plt.plot(params, test_scores)
plt.title("max_depth vs CV Error");
plt.show()
# ### Mostly ridge; it performs slightly better
# In[40]:
ridge = Ridge(15)
params = [1, 10, 15, 20, 25, 30, 40]
test_scores = []
for param in params:
clf = BaggingRegressor(n_estimators=param, base_estimator=ridge)
test_score = np.sqrt(-cross_val_score(clf, X_train, y_train, cv=10, scoring='neg_mean_squared_error'))
test_scores.append(np.mean(test_score))
# In[41]:
plt.plot(params, test_scores)
plt.title("max_depth vs CV Error");
plt.show()
# ### Ensemble
#
# In[30]:
ridge = Ridge(alpha=15)
rf = RandomForestRegressor(n_estimators=500, max_features=.3)
ridge.fit(X_train, y_train)
rf.fit(X_train, y_train)
# In[31]:
y_ridge = np.expm1(ridge.predict(X_test))
y_rf = np.expm1(rf.predict(X_test))
y_final = (y_ridge + y_rf) / 2
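# np.expm1 reverses the np.log1p applied to SalePrice earlier, so the two
# predictions are averaged back in the original price space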
# In[32]:
submission_df = pd.DataFrame(data= {'Id' : test_df.index, 'SalePrice': y_final})
# In[33]:
submission_df.head()
# In[37]:
submission_df.to_csv('submission20180316.csv',index = False,header = True,columns = ['Id','SalePrice'])
|
Turgon37/SMSShell
|
SMSShell/commands/help.py
|
Python
|
gpl-3.0
| 1,862
| 0.001611
|
# -*- coding: utf8 -*-
# This file is a part of SMSShell
#
# Copyright (c) 2016-2018 Pierre GINDRAUD
#
# SMSShell is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SMSShell is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SMSShell. If not, see <http://www.gnu.org/licenses/>.
"""Help command
This command returns a help string depending on the given input parameters
* If called without parameters: returns the list of all available commands
* If called with a command name as first parameter: returns the usage string of that command
In this case you can pass additional parameters that will be forwarded to the command's usage
"""
from . import AbstractCommand, CommandException
class Help(AbstractCommand):
"""Command class, see module docstring for help
"""
def usage(self, argv):
return 'help [COMMAND] [COMMAND ARGS]'
def description(self, argv):
return 'Show commands usage'
def main(self, argv):
# call usage function of the given command
if argv:
try:
return self.shell.getCommand(self.session, argv[0]).usage(argv[1:])
except CommandException as ex:
self.log.error("error during command execution : " + str(ex))
return 'command not available'
# return the list of available commands
else:
return ' '.join(self.shell.getAvailableCommands(self.session))
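# Illustrative session (the 'echo' command name is hypothetical):
#   "help"      -> "help echo state ..."
#   "help echo" -> the usage string returned by the echo command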
|
tfiers/arenberg-online
|
polls/migrations/0005_auto_20150428_0016.py
|
Python
|
mit
| 510
| 0.001961
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('polls', '0004_pollanswer_zaventemtransport'),
]
operations = [
migrations.AlterField(
model_name='zaventemtransport',
name='transport',
field=models.CharField(default=b'group', max_length=5, choices=[(b'group', '.. with the group'), (b'own', '.. by myself')]),
),
]
|
kervi/kervi
|
kervi-hal-win/kervi/platforms/windows/gpio.py
|
Python
|
mit
| 1,575
| 0.000635
|
from kervi.hal.gpio import IGPIODeviceDriver
class GPIODriver(IGPIODeviceDriver):
def __init__(self, gpio_id="generic_gpio"):
IGPIODeviceDriver.__init__(self, gpio_id)
pass
def _get_channel_type(self, channel):
from kervi.hal.gpio import CHANNEL_TYPE_GPIO, CHANNEL_TYPE_ANALOG_IN, CHANNEL_TYPE_ANALOG_OUT
if channel in ["GPIO1", "GPIO2", "GPIO3"]:
return CHANNEL_TYPE_GPIO
elif channel in ["DAC1", "DAC2"]:
return CHANNEL_TYPE_ANALOG_OUT
elif channel in ["ADC1", "ADC2"]:
return CHANNEL_TYPE_ANALOG_IN
def _get_channel_names(self):
return ["GPIO1", "GPIO2", "GPIO3", "DAC1", "DAC2", "ADC1", "ADC2"]
@property
def name(self):
return "Generic GPIO"
def define_as_input(self, pin, pullup=None, bounce_time=0):
print("define pin in")
def define_as_output(self, pin):
print("define pin out")
def define_as_pwm(self, pin, frequency, duty_cycle):
print("define pwm")
def set(self, pin, state):
print("set pin", state)
def get(self, pin):
print("get pin")
return 0
def pwm_start(self, channel, duty_cycle=None, frequency=None):
print("start pwm")
def pwm_stop(self, pin):
print("stop pwm")
def listen(self, pin, callback, bounce_time=0):
print("listen rising")
def listen_rising(self, pin, callback, bounce_time=0):
print("listen rising")
def listen_falling(self, pin, callback, bounce_time=0):
print("listen falling")
|
arizona-phonological-imaging-lab/autotres
|
a3/constants.py
|
Python
|
apache-2.0
| 43
| 0.046512
|
#!/usr/bin/env python3
_version = (0,4,0)
|
orviz/ooi
|
ooi/tests/fakes.py
|
Python
|
apache-2.0
| 17,912
| 0
|
# -*- coding: utf-8 -*-
# Copyright 2015 Spanish National Research Council
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import json
import re
import uuid
import webob.dec
import webob.exc
from ooi import utils
import ooi.wsgi
application_url = "https://foo.example.org:8774/ooiv1"
tenants = {
"foo": {"id": uuid.uuid4().hex,
"name": "foo"},
"bar": {"id": uuid.uuid4().hex,
"name": "bar"},
"baz": {"id": uuid.uuid4().hex,
"name": "baz"},
}
flavors = {
1: {
"id": 1,
"name": "foo",
"vcpus": 2,
"ram": 256,
"disk": 10,
},
2: {
"id": 2,
"name": "bar",
"vcpus": 4,
"ram": 2014,
"disk": 20,
}
}
images = {
"foo": {
"id": "foo",
"name": "foo",
},
"bar": {
"id": "bar",
"name": "bar",
}
}
volumes = {
tenants["foo"]["id"]: [
{
"id": uuid.uuid4().hex,
"displayName": "foo",
"size": 2,
"status": "available",
"attachments": [],
},
{
"id": uuid.uuid4().hex,
"displayName": "bar",
"size": 3,
"status": "available",
"attachments": [],
},
{
"id": uuid.uuid4().hex,
"displayName": "baz",
"size": 5,
"status": "available",
"attachments": [],
},
],
tenants["bar"]["id"]: [],
tenants["baz"]["id"]: [
{
"id": uuid.uuid4().hex,
"displayName": "volume",
"size": 5,
"status": "in-use",
},
],
}
pools = {
tenants["foo"]["id"]: [
{
"id": "foo",
"name": "foo",
},
{
"id": "bar",
"name": "bar",
}
],
tenants["bar"]["id"]: [],
tenants["baz"]["id"]: [
{
"id": "public",
"name": "public",
},
],
}
linked_vm_id = uuid.uuid4().hex
allocated_ip = "192.168.253.23"
floating_ips = {
tenants["foo"]["id"]: [],
tenants["bar"]["id"]: [],
tenants["baz"]["id"]: [
{
"fixed_ip": "10.0.0.2",
"id": uuid.uuid4().hex,
"instance_id": linked_vm_id,
"ip": "192.168.253.1",
"pool": pools[tenants["baz"]["id"]][0]["name"],
},
{
"fixed_ip": None,
"id": uuid.uuid4().hex,
"instance_id": None,
"ip": "192.168.253.2",
"pool": pools[tenants["baz"]["id"]][0]["name"],
},
],
}
servers = {
tenants["foo"]["id"]: [
{
"id": uuid.uuid4().hex,
"name": "foo",
"flavor": {"id": flavors[1]["id"]},
"image": {"id": images["foo"]["id"]},
"status": "ACTIVE",
},
{
"id": uuid.uuid4().hex,
"name": "bar",
"flavor": {"id": flavors[2]["id"]},
"image": {"id": images["bar"]["id"]},
"status": "SHUTOFF",
},
{
"id": uuid.uuid4().hex,
"name": "baz",
"flavor": {"id": flavors[1]["id"]},
"image": {"id": images["bar"]["id"]},
"status": "ERROR",
},
],
tenants["bar"]["id"]: [],
tenants["baz"]["id"]: [
{
"id": linked_vm_id,
"name": "withvolume",
"flavor": {"id": flavors[1]["id"]},
"image": {"id": images["bar"]["id"]},
"status": "ACTIVE",
"os-extended-volumes:volumes_attached": [
{"id": volumes[tenants["baz"]["id"]][0]["id"]}
],
"addresses": {
"private": [
{"addr": floating_ips[tenants["baz"]["id"]][0]["fixed_ip"],
"OS-EXT-IPS:type": "fixed",
"OS-EXT-IPS-MAC:mac_addr": "1234"},
{"addr": floating_ips[tenants["baz"]["id"]][0]["ip"],
"OS-EXT-IPS:type": "floating",
"OS-EXT-IPS-MAC:mac_addr": "1234"},
]
}
}
],
}
# avoid circular definition of attachments
volumes[tenants["baz"]["id"]][0]["attachments"] = [{
# how consistent can OpenStack be!
# depending on using /servers/os-volume_attachments
# or /os-volumes it will return different field names
"server_id": servers[tenants["baz"]["id"]][0]["id"],
"serverId": servers[tenants["baz"]["id"]][0]["id"],
"attachment_id": uuid.uuid4().hex,
"volumeId": volumes[tenants["baz"]["id"]][0]["id"],
"volume_id": volumes[tenants["baz"]["id"]][0]["id"],
"device": "/dev/vdb",
"id": volumes[tenants["baz"]["id"]][0]["id"],
}]
def fake_query_results():
cats = []
# OCCI Core
cats.append(
'link; '
'scheme="http://schemas.ogf.org/occi/core#"; '
'class="kind"; title="link"')
cats.append(
'resource; '
'scheme="http://schemas.ogf.org/occi/core#"; '
'class="kind"; title="resource"; '
'rel="http://schemas.ogf.org/occi/core#entity"')
cats.append(
'entity; '
'scheme="http://schemas.ogf.org/occi/core#"; '
'class="kind"; title="entity"')
# OCCI Infrastructure Compute
cats.append(
'compute; '
'scheme="http://schemas.ogf.org/occi/infrastructure#"; '
'class="kind"; title="compute resource"; '
'rel="http://schemas.ogf.org/occi/core#resource"')
cats.append(
'start; '
'scheme="http://schemas.ogf.org/occi/infrastructure/compute/action#"; '
'class="action"; title="start compute instance"')
cats.append(
'stop; '
'scheme="http://schemas.ogf.org/occi/infrastructure/compute/action#"; '
'class="action"; title="stop compute instance"')
cats.append(
'restart; '
'scheme="http://schemas.ogf.org/occi/infrastructure/compute/action#"; '
'class="action"; title="restart compute instance"')
cats.append(
'suspend; '
'scheme="http://schemas.ogf.org/occi/infrastructure/compute/action#"; '
'class="action"; title="suspend compute instance"')
# OCCI Templates
cats.append(
'os_tpl; '
'scheme="http://schemas.ogf.org/occi/infrastructure#"; '
'class="mixin"; title="OCCI OS Template"')
cats.append(
'resource_tpl; '
'scheme="http://schemas.ogf.org/occi/infrastructure#"; '
'class="mixin"; title="OCCI Resource Template"')
# OpenStack Images
cats.append(
'bar; '
'scheme="http://schemas.openstack.org/template/os#"; '
'class="mixin"; title="bar"; '
'rel="http://schemas.ogf.org/occi/infrastructure#os_tpl"')
cats.append(
'foo; '
'scheme="http://schemas.openstack.org/template/os#"; '
'class="mixin"; title="foo"; '
'rel="http://schemas.ogf.org/occi/infrastructure#os_tpl"')
# OpenStack Flavors
cats.append(
'1; '
'scheme="http://schemas.openstack.org/template/resource#"; '
'class="mixin"; title="Flavor: foo"; '
'rel="http://schemas.ogf.org/occi/infrastructure#resource_tpl"')
cats.append(
'2; '
'scheme="http://schemas.openstack.org/template/resource#"; '
'class="mixin"; title="Flavor: bar"; '
'rel="http://schemas.ogf.org/occi/infrastructure#resource_tpl"')
# OCCI Infrastructure Network
cats.append(
'network; '
'scheme="http://schemas.ogf.org/occi/infrastructure#"; '
'class="kind"; title="network resource"; '
|
Kaarel94/Ozobot-Python
|
ozopython/__init__.py
|
Python
|
mit
| 1,375
| 0.004364
|
from tkinter import ttk
from ozopython.colorLanguageTranslator import ColorLanguageTranslator
from .ozopython import *
from tkinter import *
def run(filename):
code = ozopython.compile(filename)
colorcode = ColorLanguageTranslator.translate(code)
def load(prog, prog_bar):
colormap = {
'K': "#000000",
'R': "#ff0000",
'G': "#00ff00",
'Y': "#ffff00",
'B': "#0000ff",
'M': "#ff00ff",
'C': "#00ffff",
'W': "#ffffff"
}
head, *tail = prog
canvas.itemconfig(circle, fill=colormap[head])
prog = tail
prog_bar["value"] = len(colorcode) - len(prog)
if len(prog) != 0:
canvas.after(50, lambda: load(prog, prog_bar))
window = Tk()
progress = ttk.Progressbar(window, orient="horizontal", length='5c', mode="determinate")
progress["value"] = 0
progress["maximum"] = len(colorcode)
button = Button(window, text="Load", command=lambda: load(colorcode, progress))
button.pack(pady=5)
exit = Button(window, text="Exit", command=lambda: quit())
exit.pack(side="bottom",pady=5)
progress.pack()
canvas = Canvas(window, height='6c', width='6c')
circle = canvas.create_oval('0.5c', '0.5c', '5.5c', '5.5c', fill="white")
canvas.pack()
window.mainloop()
|
odoousers2014/odoo_addons-2
|
clv_batch/history/__init__.py
|
Python
|
agpl-3.0
| 1,429
| 0.011896
|
# -*- encoding: utf-8 -*-
################################################################################
#
#
# Copyright (C) 2013-Today Carlos Eduardo Vercelino - CLVsol #
# #
# This program is free software: you can redistribute it and/or modify #
# it under the terms of the GNU Affero General Public License as published by #
# the Free Software Foundation, either version 3 of the License, or #
# (at your option) any later version. #
# #
# This program is distributed in the hope that it will be useful, #
# but WITHOUT ANY WARRANTY; without even the implied warranty of #
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
# GNU Affero General Public License for more details. #
# #
# You should have received a copy of the GNU Affero General Public License #
# along with this program. If not, see <http://www.gnu.org/licenses/>. #
################################################################################
import clv_batch_history
|
rcocetta/kano-profile
|
tools/print_needed_variables.py
|
Python
|
gpl-2.0
| 1,082
| 0.000924
|
#!/usr/bin/env python
# print_needed_variables.py
#
# Copyright (C) 2014, 2015 Kano Computing Ltd.
# License: http://www.gnu.org/licenses/gpl-2.0.txt GNU General Public License v2
#
import os
import sys
if __name__ == '__main__' and __package__ is None:
dir_path = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
if dir_path != '/usr':
sys.path.insert(1, dir_path)
from kano_profile.badges import load_badge_rules
from kano.utils import write_json, uniqify_list
all_rules = load_badge_rules()
variables_needed = dict()
for category, subcats in all_rules.iteritems():
for subcat, items in subcats.iteritems():
for item, rules in items.iteritems():
targets = rules['targets']
for target in targets:
app = target[0]
variable = target[1]
variables_needed.setdefault(app, list()).append(variable)
for key in variables_needed.iterkeys():
variables_needed[key] = uniqify_list(variables_needed[key])
write_json('variables_needed.json', variables_needed, False)
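# Example output shape (app and variable names are hypothetical):
#   variables_needed.json -> {"make-art": ["level"], "snake": ["high_score"]}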
|
borisbabic/browser_cookie3
|
setup.py
|
Python
|
lgpl-3.0
| 632
| 0.003165
|
from distutils.core import setup
setup(
name='browser-cookie3',
version='0.13.0',
packages=['browser_cookie3'],
# look for package contents in current directory
package_dir={'browser_cookie3': '.'},
author='Boris Babic',
author_email='boris.ivan.babic@gmail.com',
description='Loads cookies from your browser into a cookiejar object so can download with urllib and other libraries the same content you see in the web browser.',
url='https://github.com/borisbabic/browser_cookie3',
install_requires=['pyaes', 'pbkdf2', 'keyring', 'lz4', 'pycryptodome', 'SecretStorage'],
license='lgpl'
)
|
dana-i2cat/felix
|
ofam/src/src/ext/sfa/trust/gid.py
|
Python
|
apache-2.0
| 9,265
| 0.004425
|
#----------------------------------------------------------------------
# Copyright (c) 2008 Board of Trustees, Princeton University
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and/or hardware specification (the "Work") to
# deal in the Work without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Work, and to permit persons to whom the Work
# is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Work.
#
# THE WORK IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE WORK OR THE USE OR OTHER DEALINGS
# IN THE WORK.
#----------------------------------------------------------------------
##
# Implements SFA GID. GIDs are based on certificates, and the GID class is a
# descendant of the certificate class.
##
import xmlrpclib
import uuid
from sfa.trust.certificate import Certificate
from sfa.util.faults import *
from sfa.util.sfalogging import logger
from sfa.util.xrn import hrn_to_urn, urn_to_hrn, hrn_authfor_hrn
##
# Create a new uuid. Returns the UUID as a string.
def create_uuid():
return str(uuid.uuid4().int)
##
# GID is a tuple:
# (uuid, urn, public_key)
#
# UUID is a unique identifier and is created by the python uuid module
# (or the utility function create_uuid() in gid.py).
#
# HRN is a human readable name. It is a dotted form similar to a backward domain
# name. For example, planetlab.us.arizona.bakers.
#
# URN is a human readable identifier of form:
# "urn:publicid:IDN+toplevelauthority[:sub-auth.]*[\res. type]\ +object name"
# For example, urn:publicid:IDN+planetlab:us:arizona+user+bakers
#
# PUBLIC_KEY is the public key of the principal identified by the UUID/HRN.
# It is a Keypair object as defined in the cert.py module.
#
# It is expected that there is a one-to-one pairing between UUIDs and HRN,
# but it is uncertain how this would be enforced or if it needs to be enforced.
#
# These fields are encoded using xmlrpc into the subjectAltName field of the
# x509 certificate. Note: Call encode() once the fields have been filled in
# to perform this encoding.
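# Illustrative subjectAltName payload produced by encode() (values are
# hypothetical):
#   URI:urn:publicid:IDN+planetlab:us:arizona+user+bakers, URI:urn:uuid:<uuid>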
class GID(Certificate):
uuid = None
hrn = None
urn = None
##
# Create a new GID object
#
# @param create If true, create the X509 certificate
# @param subject If subject!=None, create the X509 cert and set the subject name
# @param string If string!=None, load the GID from a string
# @param filename If filename!=None, load the GID from a file
# @param lifeDays life of GID in days - default is 1825==5 years
def __init__(self, create=False, subject=None, string=None, filename=None, uuid=None, hrn=None, urn=None, lifeDays=1825):
Certificate.__init__(self, lifeDays, create, subject, string, filename)
if subject:
logger.debug("Creating GID for subject: %s" % subject)
if uuid:
self.uuid = int(uuid)
if hrn:
self.hrn = hrn
self.urn = hrn_to_urn(hrn, 'unknown')
if urn:
self.urn = urn
self.hrn, type = urn_to_hrn(urn)
def set_uuid(self, uuid):
if isinstance(uuid, str):
self.uuid = int(uuid)
else:
self.uuid = uuid
def get_uuid(self):
if not self.uuid:
self.decode()
return self.uuid
def set_hrn(self, hrn):
self.hrn = hrn
def get_hrn(self):
if not self.hrn:
self.decode()
return self.hrn
def set_urn(self, urn):
self.urn = urn
self.hrn, type = urn_to_hrn(urn)
def get_urn(self):
if not self.urn:
self.decode()
return self.urn
def get_type(self):
if not self.urn:
self.decode()
_, t = urn_to_hrn(self.urn)
return t
##
# Encode the GID fields and package them into the subject-alt-name field
# of the X509 certificate. This must be called prior to signing the
# certificate. It may only be called once per certificate.
def encode(self):
if self.urn:
urn = self.urn
else:
urn = hrn_to_urn(self.hrn, None)
str = "URI:" + urn
if self.uuid:
str += ", " + "URI:" + uuid.UUID(int=self.uuid).urn
self.set_data(str, 'subjectAltName')
##
# Decode the subject-alt-name field of the X509 certificate into the
# fields of the GID. This is automatically called by the various get_*()
# functions in this class.
def decode(self):
data = self.get_data('subjectAltName')
dict = {}
if data:
if data.lower().startswith('uri:http://<params>'):
dict = xmlrpclib.loads(data[11:])[0][0]
else:
spl = data.split(', ')
for val in spl:
if val.lower().startswith('uri:urn:uuid:'):
dict['uuid'] = uuid.UUID(val[4:]).int
elif val.lower().startswith('uri:urn:publicid:idn+'):
dict['urn'] = val[4:]
self.uuid = dict.get("uuid", None)
self.urn = dict.get("urn", None)
self.hrn = dict.get("hrn", None)
if self.urn:
self.hrn = urn_to_hrn(self.urn)[0]
##
# Dump the credential to stdout.
#
# @param indent specifies a number of spaces to indent the output
# @param dump_parents If true, also dump the parents of the GID
def dump(self, *args, **kwargs):
print self.dump_string(*args,**kwargs)
def dump_string(self, indent=0, dump_parents=False):
result=" "*(indent-2) + "GID\n"
result += " "*indent + "hrn:" + str(self.get_hrn()) +"\n"
result += " "*indent + "urn:" + str(self.get_urn()) +"\n"
result += " "*indent + "uuid:" + str(self.get_uuid()) + "\n"
filename=self.get_filename()
if filename: result += "Filename %s\n"%filename
if self.parent and dump_parents:
result += " "*indent + "parent:\n"
result += self.parent.dump_string(indent+4, dump_parents)
return result
##
# Verify the chain of authenticity of the GID. First perform the checks
# of the certificate class (verifying that each parent signs the child,
# etc). In addition, GIDs also confirm that the parent's HRN is a prefix
# of the child's HRN, and the parent is of type 'authority'.
#
# Verifying these prefixes prevents a rogue authority from signing a GID
# for a principal that is not a member of that authority. For example,
# planetlab.us.arizona cannot sign a GID for planetlab.us.princeton.foo.
def verify_chain(self, trusted_certs = None):
# do the normal certificate verification stuff
trusted_root = Certificate.verify_chain(self, trusted_certs)
if self.parent:
# make sure the parent's hrn is a prefix of the child's hrn
if not hrn_authfor_hrn(self.parent.get_hrn(), self.get_hrn()):
raise GidParentHrn("This cert HRN %s isn't in the namespace for parent HRN %s" % (self.get_hrn(), self.parent.get_hrn()))
# Parent must also be an authority (of some type) to sign a GID
# There are multiple types of authority - accept them all here
if not self.parent.get_type().find('authority') == 0:
raise GidInvalidParentHrn("This cert %s's parent %s is not an authority (is a %s)" % (self.get_hrn(), self.parent.get_hrn(), self.parent.get_type()))
|
UMD-DRASTIC/drastic
|
drastic/DrasticLoader/FileNameSource.py
|
Python
|
agpl-3.0
| 7,914
| 0.006444
|
# coding=utf-8
"""Ingest workflow management tool
FileNameSource Class
"""
__copyright__ = "Copyright (C) 2016 University of Maryland"
__license__ = "GNU AFFERO GENERAL PUBLIC LICENSE, Version 3"
import abc
import os
import sys
import psycopg2
class FileNameSource:
def __init__(self): pass
def __iter__(self): return self
@abc.abstractmethod
def next(self): pass
def confirm_completion(self, path):
return True
class FileList(FileNameSource):
def __init__(self, args, cfg):
FileNameSource.__init__(self)
src = args['<source_directory>']
self.fp = sys.stdin if src == '-' else open(src, 'rU')
self.prefix = args['--prefix']
self.offset = len(self.prefix)
def next(self):
v = self.fp.next().strip()
if not v.startswith(self.prefix):
print v, ' not in ', self.prefix, 'ignoring '
return
return decode_str(v[self.offset:])
class DirectoryWalk(FileNameSource):
def __init__(self, args, cfg):
FileNameSource.__init__(self)
src = args['<source_directory>']
if src == '-':
print ' Incompatible mode -- Cannot Walk stdin '
raise ValueError
self.prefix = args['--prefix']
self.offset = len(self.prefix)
self.walker = os.walk(src, topdown=True, followlinks=True)
self.dirname = None
self.files = None
def next(self):
while not self.dirname or not self.files:
self.dirname, _, self.files = self.walker.next()
return os.path.join(self.dirname[self.offset:], self.files.pop())
class DB:
def __init__(self, args, cfg):
defaults = (('user', 'drastic'), ('database', 'drastic'), ('password', 'drastic'), ('host', 'localhost'))
credentials = dict(user=cfg.get('postgres', 'user'),
database=cfg.get('postgres', 'database'),
password=cfg.get('postgres', 'password'),
host=cfg.get('postgres', 'host'))
for k, v in defaults:
if not credentials[k]: credentials[k] = v
self.credentials = credentials
self.cnx = psycopg2.connect(**credentials)
self.cs1 = self.cnx.cursor()
table = args.get('--dataset', 'resource')
if not table: table = 'resource'
self.tablename = table
### Do JIT set up of other queries....
self.update_status = False
self.db_initialized = False
def summary(self):
cmd = '''SELECT status,count(*) from "{0}" group by status order by status '''.format(self.tablename)
try:
self.cs1.execute(cmd)
for v in self.cs1: print '{0:-10s}\t{1:,}'.format(*v)
except Exception as e:
print e
def _setup_db(self, table):
cs = self.cnx.cursor()
# Create the status Enum
try:
cs.execute("CREATE TYPE resource_status AS ENUM ('READY','IN-PROGRESS','DONE','BROKEN','VERIFIED')")
except:
cs.connection.rollback()
#
cmds = [
'''CREATE TABLE IF NOT EXISTS "{0}" (
path TEXT PRIMARY KEY,
status resource_status DEFAULT 'READY',
started timestamp,
fs_sync boolean)''',
'''CREATE INDEX "IDX_{0}_01_status" ON "{0}" (status ) WHERE status <> 'DONE' ''',
'''CREATE INDEX "IDX_{0}_01_fs_sync" ON "{0}" (fs_sync) WHERE fs_sync is not True''']
for cmd in cmds:
try:
cs.execute(cmd.format(table))
cs.connection.commit()
except Exception as e:
cs.connection.rollback()
class DBPrepare(DB):
"""
Class to be used when preparing.
"""
def __init__(self, args, cfg):
DB.__init__(self, args, cfg)
self.prefix = (args['--prefix'])
self.offset = len(self.prefix)
self.cs = self.cnx.cursor('AB1', withhold=True)
self._setup_db(self.tablename)
cmd = '''PREPARE I1 ( text ) AS insert into "{0}" (path,status)
SELECT $1,'READY'::resource_status WHERE NOT EXISTS (SELECT TRUE FROM "{0}" where path = $1)'''
self.cs1.execute(cmd.format(self.tablename))
def prepare(self, path ):
self.cs1.execute("EXECUTE I1(%s); commit", [path])
return True
class DBQuery(FileNameSource, DB):
"""
Class to be used to get file names when injecting.
"""
def __init__(self, args, cfg):
DB.__init__(self,args,cfg)
FileNameSource.__init__(self)
self.prefix = (args['--prefix'])
self.offset = len(self.prefix)
self.fetch_cs = self.cnx.cursor()
cmd = '''PREPARE F1 (integer) AS SELECT path FROM "{0}" where status = 'READY' LIMIT $1 '''.format(self.tablename)
self.fetch_cs.execute(cmd)
self.fetch_cs.execute('EXECUTE F1 (1000)')
# And prepare the update status cmd
ucmd = '''PREPARE M1 (TEXT,resource_status) AS UPDATE "{0}" SET status='DONE' WHERE path = $1 and status <> $2 '''.format(
self.tablename)
self.cs1.execute(ucmd)
# And retrieve the values for the status
self.cs1.execute('''SELECT unnest(enum_range(NULL::resource_status))''')
self.status_values = set( ( k[0] for k in self.cs1.fetchall() ))
return
def confirm_completion(self, path, status = 'DONE'):
if status not in self.status_values :
if status == 'FAILED' : status = 'BROKEN'
else : raise ValueError("bad value for enum -- {} : should be {}".format(status,self.status_values) )
####
try:
self.cs1.execute('EXECUTE M1(%s,%s)', [path,status])
updates = self.cs1.rowcount
self.cs1.connection.commit()
return True
except Exception as e:
print 'failed to update status for ', path,'\n',e
self.cs1.connection.rollback()
return False
def next(self):
"""
:return: next path from DB that is ready...
This function will re-issue the Select when the current one is exhausted.
This attempts to avoid too many locks on too many records.
"""
k = self.fetch_cs.fetchone()
#
if not k:
self.fetch_cs.execute('EXECUTE F1 (1000)')
k = self.fetch_cs.fetchone()
#
if k: return k[0].decode('utf-8')
raise StopIteration
def CreateFileNameSource(args, cfg):
"""
use the parameters to prepare an iterator that will deliver all the (suitably normalized) files to be injected
:param args: command line args
:param cfg: global, persistent parameters
:return: iterator
"""
src = args['<source_directory>']
prefix = args['--prefix']
if not prefix:
prefix = '/data'
else:
prefix = prefix.rstrip('/')
if not src.startswith(prefix):
print src, ' must be a subdirectory of the host data directory (--prefix=', prefix, ')'
print 'If you did not specify it, please do so'
sys.exit(1)
#########
## Set up a source that gets list of files from a file
if args['--read'] : return FileList(args, cfg)
if args['--walk']: return DirectoryWalk(args, cfg)
if args['--postgres'] : return DBQuery(args, cfg)
if args['--sqlite3'] :
raise NotImplementedError
def decode_str(s):
"""
:param s: string to be converted to unicode
:return: unicode version
"""
if isinstance(s, unicode): return s
try:
return s.decode('utf8')
except UnicodeDecodeError:
try:
return s.decode('iso8859-1')
except UnicodeDecodeError:
s_ignore = s.decode('utf8', 'ignore')
return s_ignore
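# Illustrative driver loop (ingest() stands for a hypothetical processing step):
#   source = CreateFileNameSource(args, cfg)
#   for path in source:
#       ingest(path)
#       source.confirm_completion(path)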
|
ULHPC/modules
|
easybuild/easybuild-framework/test/framework/utilities.py
|
Python
|
mit
| 14,540
| 0.002682
|
##
# Copyright 2012-2015 Ghent University
#
# This file is part of EasyBuild,
# originally created by the HPC team of Ghent University (http://ugent.be/hpc/en),
# with support of Ghent University (http://ugent.be/hpc),
# the Flemish Supercomputer Centre (VSC) (https://vscentrum.be/nl/en),
# the Hercules foundation (http://www.herculesstichting.be/in_English)
# and the Department of Economy, Science and Innovation (EWI) (http://www.ewi-vlaanderen.be/en).
#
# http://github.com/hpcugent/easybuild
#
# EasyBuild is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation v2.
#
# EasyBuild is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with EasyBuild. If not, see <http://www.gnu.org/licenses/>.
##
"""
Various test utility functions.
@author: Kenneth Hoste (Ghent University)
"""
import copy
import fileinput
import os
import re
import shutil
import sys
import tempfile
from vsc.utils import fancylogger
from vsc.utils.patterns import Singleton
from vsc.utils.testing import EnhancedTestCase as _EnhancedTestCase
import easybuild.tools.build_log as eb_build_log
import easybuild.tools.options as eboptions
import easybuild.tools.toolchain.utilities as tc_utils
import easybuild.tools.module_naming_scheme.toolchain as mns_toolchain
from easybuild.framework.easyconfig import easyconfig
from easybuild.framework.easyblock import EasyBlock
from easybuild.main import main
from easybuild.tools import config
from easybuild.tools.config import module_classes, set_tmpdir
from easybuild.tools.environment import modify_env
from easybuild.tools.filetools import mkdir, read_file
from easybuild.tools.module_naming_scheme import GENERAL_CLASS
from easybuild.tools.modules import modules_tool
from easybuild.tools.options import CONFIG_ENV_VAR_PREFIX, EasyBuildOptions
# make sure tests are robust against any non-default configuration settings;
# involves ignoring any existing configuration files that are picked up, and cleaning the environment
# this is tackled here rather than in suite.py, to make sure this is also done when test modules are run separately
# clean up environment from unwanted $EASYBUILD_X env vars
for key in os.environ.keys():
if key.startswith('%s_' % CONFIG_ENV_VAR_PREFIX):
del os.environ[key]
# ignore any existing configuration files
go = EasyBuildOptions(go_useconfigfiles=False)
os.environ['EASYBUILD_IGNORECONFIGFILES'] = ','.join(go.options.configfiles)
# redefine $TEST_EASYBUILD_X env vars as $EASYBUILD_X
test_env_var_prefix = 'TEST_EASYBUILD_'
for key in os.environ.keys():
if key.startswith(test_env_var_prefix):
val = os.environ[key]
del os.environ[key]
newkey = '%s_%s' % (CONFIG_ENV_VAR_PREFIX, key[len(test_env_var_prefix):])
os.environ[newkey] = val
class EnhancedTestCase(_EnhancedTestCase):
"""Enhanced test case, provides extra functionality (e.g. an assertErrorRegex method)."""
def setUp(self):
"""Set up testcase."""
super(EnhancedTestCase, self).setUp()
# keep track of log handlers
log = fancylogger.getLogger(fname=False)
self.orig_log_handlers = log.handlers[:]
self.orig_tmpdir = tempfile.gettempdir()
# use a subdirectory for this test (which we can clean up easily after the test completes)
self.test_prefix = set_tmpdir()
self.log = fancylogger.getLogger(self.__class__.__name__, fname=False)
fd, self.logfile = tempfile.mkstemp(suffix='.log', prefix='eb-test-')
os.close(fd)
self.cwd = os.getcwd()
# keep track of original environment to restore
self.orig_environ = copy.deepcopy(os.environ)
# keep track of original environment/Python search path to restore
self.orig_sys_path = sys.path[:]
testdir = os.path.dirname(os.path.abspath(__file__))
self.test_sourcepath = os.path.join(testdir, 'sandbox', 'sources')
os.environ['EASYBUILD_SOURCEPATH'] = self.test_sourcepath
os.environ['EASYBUILD_PREFIX'] = self.test_prefix
self.test_buildpath = tempfile.mkdtemp()
os.environ['EASYBUILD_BUILDPATH'] = self.test_buildpath
self.test_installpath = tempfile.mkdtemp()
os.environ['EASYBUILD_INSTALLPATH'] = self.test_installpath
# make sure that the tests only pick up easyconfigs provided with the tests
os.environ['EASYBUILD_ROBOT_PATHS'] = os.path.join(testdir, 'easyconfigs')
# make sure no deprecated behaviour is being triggered (unless intended by the test)
# trip *all* log.deprecated statements by setting deprecation version ridiculously high
self.orig_current_version = eb_build_log.CURRENT_VERSION
os.environ['EASYBUILD_DEPRECATED'] = '10000000'
init_config()
# remove any entries in Python search path that seem to provide easyblocks
for path in sys.path[:]:
if os.path.exists(os.path.join(path, 'easybuild', 'easyblocks', '__init__.py')):
sys.path.remove(path)
# add test easyblocks to Python search path and (re)import and reload easybuild modules
import easybuild
sys.path.append(os.path.join(testdir, 'sandbox'))
reload(easybuild)
import easybuild.easyblocks
reload(easybuild.easyblocks)
import easybuild.easyblocks.generic
reload(easybuild.easyblocks.generic)
reload(easybuild.tools.module_naming_scheme) # required to run options unit tests stand-alone
modtool = modules_tool()
# purge out any loaded modules with original $MODULEPATH before running each test
modtool.purge()
self.reset_modulepath([os.path.join(testdir, 'modules')])
def tearDown(self):
"""Clean up after running testcase."""
super(EnhancedTestCase, self).tearDown()
# go back to where we were before
os.chdir(self.cwd)
# restore original environment
modify_env(os.environ, self.orig_environ)
# restore original Python search path
sys.path = self.orig_sys_path
# remove any log handlers that were added (so that log files can be effectively removed)
log = fancylogger.getLogger(fname=False)
new_log_handlers = [h for h in log.handlers if h not in self.orig_log_handlers]
for log_handler in new_log_handlers:
log_handler.close()
log.removeHandler(log_handler)
# cleanup test tmp dir
try:
shutil.rmtree(self.test_prefix)
except (OSError, IOError):
pass
# restore original 'parent' tmpdir
for var in ['TMPDIR', 'TEMP', 'TMP']:
os.environ[var] = self.orig_tmpdir
# reset to make sure tempfile picks up new temporary directory to use
tempfile.tempdir = None
def reset_modulepath(self, modpaths):
"""Reset $MODULEPATH with specified paths."""
modtool = modules_tool()
for modpath in os.environ.get('MODULEPATH', '').split(os.pathsep):
modtool.remove_module_path(modpath)
# make very sure $MODULEPATH is totally empty
# some paths may be left behind, e.g. when they contain environment variables
# example: "module unuse Modules/$MODULE_VERSION/modulefiles" may not yield the desired result
os.environ['MODULEPATH'] = ''
for modpath in modpaths:
modtool.add_module_path(modpath)
def eb_main(self, args, do_build=False, return_error=False, logfile=None, verbose=False, raise_error=False,
reset_env=True):
"""Helper method to call EasyBuild main function."""
cleanup()
myerr = False
if logfile is None:
logfile = self.logfile
# clear log file
if logfile:
f = open(logfile, 'w')
f.write('')
|
MoroGasper/client
|
client/plugins/ui/tk/animate.py
|
Python
|
gpl-3.0
| 1,581
| 0.003163
|
import io
import base64
import gevent
from Tkinter import Label
from PIL import ImageTk, Image
class AnimatedImgLabel(Label):
# http://stackoverflow.com/questions/7960600/python-tkinter-display-animated-gif-using-pil
def __init__(self, master, data, encoding='base64', **kwargs):
if encoding == 'base64':
data = base64.b64decode(data)
self.img = Image.open(io.BytesIO(data))
seq = list()
try:
while True:
seq.append(self.img.copy())
self.img.seek(len(seq)) # skip to next frame
except EOFError:
pass # we're done
try:
self.delay = float(self.img.info['duration'])/1000
except KeyError:
self.delay = 0.200
        self.frames = list()
for frame in seq:
#frame = frame.convert('RGBA')
self.frames.append(ImageTk.PhotoImage(frame))
self.idx = 0
self.first = self.frames[0]
Label.__init__(self, master, image=self.first, **kwargs)
self.greenlet = gevent.spawn_later(self.delay, self.play)
def destroy(self):
self.greenlet.kill()
Label.destroy(self)
def play(self):
try:
self.config(image=self.frames[self.idx])
self.master.update()
self.idx += 1
if self.idx == len(self.frames):
self.idx = 0
self.greenlet = gevent.spawn_later(self.delay, self.play)
except:
import traceback
traceback.print_exc()
raise
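# Usage sketch (hypothetical GIF file; assumes a running gevent hub so that
# spawn_later fires, and Tk imported separately -- not part of the original):
#
#     from Tkinter import Tk
#     root = Tk()
#     with open('spinner.gif', 'rb') as f:
#         label = AnimatedImgLabel(root, f.read(), encoding='raw')
#     label.pack()
#     root.mainloop()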
|
theno/fabsetup
|
fabsetup/fabfile/setup/powerline.py
|
Python
|
mit
| 5,053
| 0.00099
|
import os.path
from fabric.api import env
from fabsetup.fabutils import checkup_git_repo_legacy, needs_packages
from fabsetup.fabutils import needs_repo_fabsetup_custom, suggest_localhost
from fabsetup.fabutils import install_file_legacy, run, subtask, subsubtask, task
from fabsetup.utils import flo, update_or_append_line, comment_out_line
from fabsetup.utils import uncomment_or_update_or_append_line, query_yes_no
@task
@needs_repo_fabsetup_custom
@suggest_localhost
@needs_packages('python-pip')
def powerline():
'''Install and set up powerline for vim, bash, tmux, and i3.
    It uses pip (python2) and the most up-to-date powerline version (trunk) from
    the github repository.
    More info:
https://github.com/powerline/powerline
https://powerline.readthedocs.io/en/latest/installation.html
https://github.com/powerline/fonts
https://youtu.be/_D6RkmgShvU
http://www.tecmint.com/powerline-adds-powerful-statuslines-and-prompts-to-vim-and-bash/
'''
bindings_dir, scripts_dir = install_upgrade_powerline()
set_up_powerline_fonts()
set_up_powerline_daemon(scripts_dir)
powerline_for_vim(bindings_dir)
powerline_for_bash_or_powerline_shell(bindings_dir)
powerline_for_tmux(bindings_dir)
powerline_for_i3(bindings_dir)
    print('\nYou may have to reboot for the changes to take effect')
@subsubtask
def install_special_glyphs():
'''
    More info:
https://powerline.readthedocs.io/en/latest/installation/linux.html#fonts-installation
https://wiki.archlinux.org/index.php/Font_configuration
$XDG_CONFIG_HOME: http://superuser.com/a/365918
'''
from_dir = '~/repos/powerline/font'
run('mkdir -p ~/.local/share/fonts')
run(flo('cp {from_dir}/PowerlineSymbols.otf ~/.local/share/fonts'))
to_dir = '~/.config/fontconfig/conf.d/'
run(flo('mkdir -p {to_dir}'))
run(flo('cp {from_dir}/10-powerline-symbols.conf {to_dir}'))
@subtask
def install_upgrade_powerline():
'''
    More info:
https://powerline.readthedocs.io/en/latest/installation.html#pip-installation
'''
checkup_git_repo_legacy('https://github.com/powerline/powerline.git')
path_to_powerline = os.path.expanduser('~/repos/powerline')
run(flo('pip install --user --editable={path_to_powerline}'))
run('pip show powerline-status') # only for information
install_special_glyphs()
    bindings_dir = '~/repos/powerline/powerline/bindings'
scripts_dir = '~/repos/powerline/scripts'
return bindings_dir, scripts_dir
@subtask
def set_up_powerline_fonts():
checkup_git_repo_legacy('https://github.com/powerline/fonts.git',
name='powerline-fonts')
# install fonts into ~/.local/share/fonts
run('cd ~/repos/powerline-fonts && ./install.sh')
prefix = 'URxvt*font: '
from config import fontlist
line = prefix + fontlist
update_or_append_line(filename='~/.Xresources', prefix=prefix,
new_line=line)
if env.host_string == 'localhost':
run('xrdb ~/.Xresources')
@subtask
def set_up_powerline_daemon(scripts_dir):
bash_snippet = '~/.bashrc_powerline_daemon'
install_file_legacy(path=bash_snippet, scripts_dir=scripts_dir)
prefix = flo('if [ -f {bash_snippet} ]; ')
enabler = flo('if [ -f {bash_snippet} ]; then source {bash_snippet}; fi')
update_or_append_line(filename='~/.bashrc', prefix=prefix, new_line=enabler)
@subtask
def powerline_for_vim(bindings_dir):
pass # TODO
def powerline_for_bash_or_powerline_shell(bindings_dir):
'''Set up the bash extension of powerline or powerline_shell (another task).
'''
question = '\nSet up powerline-shell instead of powerline bash extension?'
if query_yes_no(question, default='yes'):
from setup import powerline_shell
powerline_shell()
# disable powerline bash extension if it has been set up
powerline_bash_enabler = 'if [ -f ~/.bashrc_powerline_bash ]; then ' \
'source ~/.bashrc_powerline_bash; fi'
comment_out_line(filename='~/.bashrc', line=powerline_bash_enabler)
else:
powerline_for_bash(bindings_dir)
# disable powerline_shell if it has been set up
powerline_shell_enabler = 'if [ -f ~/.bashrc_powerline_shell ]; then ' \
'source ~/.bashrc_powerline_shell; fi'
comment_out_line(filename='~/.bashrc', line=powerline_shell_enabler)
@subtask
def powerline_for_bash(bindings_dir):
bash_snippet = '~/.bashrc_powerline_bash'
install_file_legacy(path=bash_snippet, bindings_dir=bindings_dir)
prefix = flo('if [ -f {bash_snippet} ]; ')
enabler = flo('if [ -f {bash_snippet} ]; then source {bash_snippet}; fi')
uncomment_or_update_or_append_line(filename='~/.bashrc', prefix=prefix,
new_line=enabler, comment='#')
@subtask
def powerline_for_tmux(bindings_dir):
pass # TODO
@subtask
def powerline_for_i3(bindings_dir):
pass # TODO
|
janezhango/BigDataMachineLearning
|
py/testdir_ec2/test_rf_iris.py
|
Python
|
apache-2.0
| 2,017
| 0.011403
|
import unittest
import random, sys
sys.path.extend(['.','..','py'])
import h2o, h2o_cmd, h2o_rf, h2o_hosts, h2o_import as h2i
# RF train parameters
paramsTrainRF = {
'ntree' : 100,
'depth' : 300,
'bin_limit' : 20000,
'ignore' : None,
'stat_type' : 'ENTROPY',
'out_of_bag_error_estimate': 1,
'exclusive_split_limit': 0,
'timeoutSecs': 14800,
}
# RF test parameters
paramsTestRF = {
# scoring requires the response_variable. it defaults to last, so normally
    # we don't need to specify. But put this here and (above if used)
# in case a dataset doesn't use last col
'response_variable': None,
'out_of_bag_error_estimate': 0,
'timeoutSecs': 14800,
}
class Basic(unittest.TestCase):
def tearDown(self):
h2o.check_sandbox_for_errors()
@classmethod
def setUpClass(cls):
global localhost
localhost = h2o.decide_if_localhost()
if (localhost):
h2o.build_cloud(node_count=1)
else:
h2o_hosts.build_cloud_with_hosts(node_count=1)
@classmethod
def tearDownClass(cls):
h2o.tear_down_cloud()
def test_rf_iris(self):
# Train RF
trainParseResult = h2i.import_parse(bucket='smalldata', path='iris/iris2.csv', hex_key='train_iris2.hex', schema='put')
kwargs = paramsTrainRF.copy()
trainResult = h2o_rf.trainRF(trainParseResult, **kwargs)
scoreParseResult = h2i.import_parse(bucket='smalldata', path='iris/iris2.csv', hex_key='score_iris2.hex', schema='put')
kwargs = paramsTestRF.copy()
scoreResult = h2o_rf.scoreRF(scoreParseResult, trainResult, **kwargs)
print "\nTrain\n=========={0}".format(h2o_rf.pp_rf_result(trainResult))
print "\nScoring\n========={0}".format(h2o_rf.pp_rf_result(scoreResult))
if __name__ == '__main__':
h2o.unit_main()
|
kaplun/inspire-next
|
tests/integration/test_latex_exporting.py
|
Python
|
gpl-3.0
| 2,222
| 0.0027
|
# -*- coding: utf-8 -*-
#
# This file is part of INSPIRE.
# Copyright (C) 2014-2017 CERN.
#
# INSPIRE is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# INSPIRE is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with INSPIRE. If not, see <http://www.gnu.org/licenses/>.
#
# In applying this license, CERN does not waive the privileges and immunities
# granted to it by virtue of its status as an Intergovernmental Organization
# or submit itself to any jurisdiction.
from __future__ import absolute_import, division, print_function
from datetime import date
from inspirehep.utils.latex import Latex
from inspirehep.utils.record_getter import get_db_record
import pytest
@pytest.mark.xfail(reason='wrong output')
def test_format_latex_eu(app):
article = get_db_record('lit', 4328)
today = date.today().strftime('%d %b %Y')
expected = u'''%\cite{Glashow:1961tr}
\\bibitem{Glashow:1961tr}
S.~L.~Glashow,
%``Partial Symmetries of Weak Interactions,''
Nucl.\ Phys.\ {\\bf 22} (1961) 579.
doi:10.1016/0029-5582(61)90469-2
%%CITATION = doi:10.1016/0029-5582(61)90469-2;%%
%11 citations counted in INSPIRE as of ''' + today
result = Latex(article, 'latex_eu').format()
assert expected == result
@pytest.mark.xfail(reason='wrong output')
def test_format_latex_us(app):
article = get_db_record('lit', 4328)
today = date.today().strftime('%d %b %Y')
expected = u'''%\cite{Glashow:1961tr}
\\bibitem{Glashow:1961tr}
S.~L.~Glashow,
%``Partial Symmetries of Weak Interactions,''
Nucl.\ Phys.\ {\\bf 22}, 579 (1961).
doi:10.1016/0029-5582(61)90469-2
%%CITATION = doi:10.1016/0029-5582(61)90469-2;%%
%11 citations counted in INSPIRE as of ''' + today
result = Latex(article, 'latex_us').format()
assert expected == result
|
pvagner/orca
|
test/keystrokes/oowriter/ui_role_list_item.py
|
Python
|
lgpl-2.1
| 1,384
| 0.001445
|
#!/usr/bin/python
"""Test to verify presentation of selectable list items."""
from macaroon.playback import *
import utils
sequence = MacroSequence()
sequence.append(KeyComboAction("<Control><Shift>n"))
sequence.append(KeyComboAction("Tab"))
sequence.append(utils.StartRecordingAction())
sequence.append(KeyComboAction("Tab"))
sequence.append(utils.AssertPresentationAction(
"1. Tab to list item",
["KNOWN ISSUE: We are presenting nothing here",
""]))
sequence.append(utils.StartRecordingAction())
sequence.append(KeyComboAction("Right"))
sequence.append(utils.AssertPresentationAction(
"2. Right to next
|
list item",
["BRAILLE LINE: 'soffice application Template Manager frame Template Manager dialog Drawings page tab list
|
Presentation Backgrounds list item'",
" VISIBLE: 'Presentation Backgrounds list it', cursor=1",
"SPEECH OUTPUT: 'Presentation Backgrounds'"]))
sequence.append(utils.StartRecordingAction())
sequence.append(KeyComboAction("Left"))
sequence.append(utils.AssertPresentationAction(
"3. Left to previous list item",
["BRAILLE LINE: 'soffice application Template Manager frame Template Manager dialog Drawings page tab list My Templates list item'",
" VISIBLE: 'My Templates list item', cursor=1",
"SPEECH OUTPUT: 'My Templates'"]))
sequence.append(utils.AssertionSummaryAction())
sequence.start()
|
dougthor42/GDWCalc
|
archive/GDWCalc_Lite v1.3.py
|
Python
|
gpl-2.0
| 8,852
| 0.001695
|
"""
@name: GDWCalc_Lite.py
@vers: 1.3
@author: Douglas Thor
@created: 2013-04-19
@modified: 2013-10-08
@descr: Calculates Gross Die per Wafer (GDW), accounting for
wafer flat, edge exclusion, and front-side-scribe (FSS)
exclusion (also called flat exclusion).
Returns nothing.
This Lite version does not include the option to plot
a wafer map or generate an OWT mask file.
"""
import math
# Defined by SEMI M1-0302
FLAT_LENGTHS = {50: 15.88, 75: 22.22, 100: 32.5, 125: 42.5, 150: 57.5}
PROG_VERS = "1.3"
REL_DATE = "2013-10-08"
def round_to_multiple(x, multiple):
return int(multiple * round(float(x)/multiple))
def max_dist(center, size):
"""
    Calculates the largest distance from the origin for a rectangle of
size (x, y), where the center of the rectangle's coordinates are known.
If the rectangle's center is in the Q1, then the upper-right corner is
the farthest away from the origin. If in Q2, then the upper-left corner
is farthest away. Etc.
Returns the magnitude of the largest distance.
"""
halfX = size[0]/2.
halfY = size[1]/2.
if center[0] < 0: halfX = -halfX
if center[1] < 0: halfY = -halfY
dist = math.sqrt((center[0] + halfX)**2 + (center[1] + halfY)**2)
return dist
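# Worked example (illustration only, not in the original): a 10 x 10
# rectangle centered at (5, 5) in Q1 has its far corner at (10, 10), so
#
#     max_dist((5, 5), (10, 10))   # -> math.sqrt(200), about 14.142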
def progress_bar(n, size, barSize=10):
"""
A simple terminal progress bar.
Usage:
    Insert into the loop that you want to monitor progress on and once after
    the loop is completed (with n = size)
    n = iteration that you want to display.
    size = maximum number of iterations that the loop will go through
    barSize = Integer length of the progress bar.
Example:
size = 1000
n = 0
barSize = 17
for item in range(size):
time.sleep(.02)
progress_bar(n, size, barSize)
n += 1
progress_bar(n, size, barSize)
"""
barFill = int(n * barSize // float(size))
if barFill > barSize: barFill = barSize
if barFill < 0: barFill = 0
barText = "[" + "#" * barFill + " " * (barSize - barFill) + "] %d/%d\r"
print barText % (n, size),
def dieSizePrompt():
while True:
try:
dieX = float(raw_input("Die X size (mm): "))
if dieX > 1000 or dieX <= 0: raise(ValueError)
break
except ValueError:
print "Invalid entry. Please enter a number between 0 and 1000."
while True:
try:
dieY = float(raw_input("Die Y size (mm): "))
if dieY > 1000 or dieY <= 0: raise(ValueError)
break
except ValueError:
print "Invalid entry. Please enter a number between 0 and 1000."
return (dieX, dieY)
def waferSizePrompt():
while True:
default = 150.0
dia = raw_input("Wafer diameter (mm) [%dmm]: " % default)
if dia == "":
dia = float(default)
print "Using default value of %dmm." % default
break
else:
try:
dia = float(dia)
if dia <= 0 or dia > 500: raise(ValueError)
break
except ValueError:
print "Invalid entry. Please enter a number between 0 and 500."
return dia
def exclSizePrompt():
while True:
default = 5.0
exclSize = raw_input("Exclusion ring width (mm) [%dmm]: " % default)
if exclSize == "":
exclSize = float(default)
print "Using default value of %dmm." % default
break
else:
try:
exclSize = float(exclSize)
if exclSize < 0: raise(ValueError)
break
except ValueError:
print "Invalid entry. Please enter a number greater than 0."
return exclSize
def FSSExclPrompt():
""" Prompts user for Front-Side Scribe Exclusion width. Also called Flat
Exclusion """
while True:
default = 5.0
FSSExcl = raw_input("Front Side Scribe (Flat) Exclusion (mm) [%dmm]: " % default)
if FSSExcl == "":
FSSExcl = float(default)
print "Using default value of %dmm." % default
break
else:
try:
FSSExcl = float(FSSExcl)
if FSSExcl < 0: raise(ValueError)
break
except ValueError:
print "Invalid entry. Please enter a number greater than 0."
return FSSExcl
def gdw(dieSize, dia, centerType, excl, FSS_EXCLUSION):
"""
Calculates Gross Die per Wafer (GDW) for a given dieSize (X, Y),
wafer diameter dia, centerType (xType, yType), and exclusion width (mm).
Returns a list of tuples (X, Y, XCoord, YCoord, dieStatus)
"""
origin = (0, 0)
dieX = dieSize[0]
dieY = dieSize[1]
rad = 0.5 * dia
# assume that the reticle center is the wafer center
dieCenter = list(origin)
if centerType[0] == "even":
# offset the dieCenter by 1/2 the die size, X direction
dieCenter[0] = 0.5 * dieX
if centerType[1] == "even":
# offset the dieCenter by 1/2 the die size, Y direction
dieCenter[1] = 0.5 * dieY
# find out how many die we can fit on the wafer
nX = int(math.ceil(dia/dieX))
    nY = int(math.ceil(dia/dieY))
    # If we're centered on the wafer, we need to add one to the axis count
if centerType[0] == "odd": nX += 1
if centerType[1] == "odd": nY += 1
# make a list of (x, y) center coordinate pairs
centers = []
for i in range(nX):
for j in range(nY):
centers.append(((i-nX/2) * dieX + dieCenter[0],
(j-nY/2) * dieY + dieCenter[1]))
if dia in FLAT_LENGTHS:
# A flat is defined, so we draw it.
flatSize = FLAT_LENGTHS[dia]
x = flatSize/2
y = -math.sqrt(rad**2 - x**2)
else:
# A flat is not defined so...
y = -rad
yExcl = y + FSS_EXCLUSION
# Take only those that are within the wafer radius
dieList = []
n = 0
    # max(1, ...) guards the modulo below against dividing by zero on small lists
    updateValue = max(1, round_to_multiple(len(centers) // 100, 100))
listLen = len(centers)
print "Calculating GDW:"
for coord in centers:
if n % updateValue == 0:
progress_bar(n, listLen)
newCoords = (coord[0] - dieX/2, coord[1] - dieY/2)
if max_dist(coord, dieSize) > rad:
# it's off the wafer
status = "wafer"
elif coord[1] - dieY/2 < y:
# it's off the flat
status = "flat"
elif max_dist(coord, dieSize) > (rad - excl):
# it's outside of the exclusion
status = "excl"
elif coord[1] - dieY/2 < yExcl:
            # it's outside the flat exclusion
status = "flatExcl"
else:
# it's a good die, add it to the list
status = "probe"
# need to figure out how to get true RC numbers
dieList.append(("X column", "Y row", newCoords[0], newCoords[1], status))
n += 1
progress_bar(n, listLen)
print ""
return dieList
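# Sketch of consuming the result (hypothetical 5 x 5 mm die on a 150 mm
# wafer with 5 mm exclusions; the die status is the last tuple element):
#
#     dies = gdw((5.0, 5.0), 150, ("odd", "odd"), 5.0, 5.0)
#     gross = sum(1 for die in dies if die[-1] == "probe")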
def maxGDW(dieSize, dia, excl, fssExcl):
# list of available die shifts
ds = [("odd", "odd"),
("odd", "even"),
("even", "odd"),
("even", "even")]
j = (0, "")
probeList = []
for shift in ds:
probeCount = 0
edgeCount = 0
flatCount = 0
flatExclCount = 0
dieList = gdw(dieSize, dia, shift, excl, fssExcl)
for die in dieList:
if die[-1] == "probe":
probeCount += 1
elif die[-1] == "excl":
edgeCount += 1
elif die[-1] == "flat":
flatCount += 1
elif die[-1] == "flatExcl":
flatExclCount += 1
if probeCount > j[0]:
j = (probeCount, shift, edgeCount, flatCount, flatExclCount)
probeList = dieList
print ""
print "----------------------------------"
print "Maximum GDW: %d %s" % (j[0], j[1])
print "Die lost to Edge Exclusion: %d" % j[2]
prin
|
JulyKikuAkita/PythonPrac
|
cs15211/WallsandGates.py
|
Python
|
apache-2.0
| 8,470
| 0.002597
|
__source__ = 'https://leetcode.com/problems/walls-and-gates/description/'
# https://github.com/kamyu104/LeetCode/blob/master/Python/walls-and-gates.py
# Time: O(m * n)
# Space: O(g)
#
# Description: Leetcode # 286. Walls and Gates
#
# You are given a m x n 2D grid initialized with these three possible values.
#
# -1 - A wall or an obstacle.
# 0 - A gate.
# INF - Infinity means an empty room.
#
# We use the value 2**31 - 1 = 2147483647 to represent INF
# as you may assume that the distance to a gate is less than 2147483647.
# Fill each empty room with the distance to its nearest gate.
# If it is impossible to reach a gate, it should be filled with INF.
#
# For example, given the 2D grid:
# INF -1 0 INF
# INF INF INF -1
# INF -1 INF -1
# 0 -1 INF INF
# After running your function, the 2D grid should be:
# 3 -1 0 1
# 2 2 1 -1
# 1 -1 2 -1
# 0 -1 3 4
#
# Companies
# Google Facebook
# Related Topics
# Breadth-first Search
# Similar Questions
# Surrounded Regions Number of Islands Shortest Distance from All Buildings
import unittest
#BFS
class Solution(object):
def wallsAndGates(self, rooms):
"""
:type rooms: List[List[int]]
:rtype: void Do not return anything, modify rooms in-place instead.
"""
for i in xrange(len(rooms)):
for j in xrange(len(rooms[0])):
if rooms[i][j] == 0:
stack = [
(i+1, j, 1),
(i-1, j, 1),
(i, j+1, 1),
(i, j-1, 1)
]
while stack:
ii, jj, dist = stack.pop()
if ii < 0 or jj < 0 or ii >= len(rooms) or jj >= len(rooms[0]) or rooms[ii][jj] < dist:
continue
rooms[ii][jj] = dist
stack.append((ii+1, jj, dist + 1))
stack.append((ii-1, jj, dist + 1))
stack.append((ii, jj+1, dist + 1))
stack.append((ii, jj-1, dist + 1))
#BFS -2
class Solution2(object):
def wallsAndGates(self, rooms):
"""
:type rooms: List[List[int]]
:rtype: void Do not return anything, modify rooms in-place instead.
"""
if not rooms:
return
m = len(rooms)
n = len(rooms[0])
stack = []
for i in xrange(m):
for j in xrange(n):
if rooms[i][j] == 0:
stack.append([i*n +j, 0])
cube = [0, 1, 0, -1, 0]
while stack:
digit, dis = stack.pop()
x = digit / n
y = digit % n
for k in xrange(4):
p = x + cube[k]
q = y + cube[k+1]
if p >= 0 and p < m and q >= 0 and q < n and rooms[p][q] > dis + 1:
rooms[p][q] = dis + 1
stack.append([p*n+q, dis+1])
#DFS
class Solution3(object):
def wallsAndGates(self, rooms):
"""
:type rooms: List[List[int]]
:rtype: void Do not return anything, modify rooms in-place instead.
"""
if not rooms:
return
m = len(rooms)
n = len(rooms[0])
padding = [ 0, 1, 0, -1, 0]
for i in xrange(m):
for j in xrange(n):
if rooms[i][j] == 0:
self.dfs(rooms, m, n, i, j, padding)
def dfs(self, rooms, m, n, i, j, padding):
for k in xrange(4):
p = i + padding[k]
q = j + padding[k+1]
if p >= 0 and q >= 0 and p < m and q < n and rooms[p][q] > rooms[i][j] + 1:
rooms[p][q] = rooms[i][j] + 1
self.dfs(rooms, m, n, p, q, padding)
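# Driver sketch (assumed, not part of the original): all three solutions
# mutate the grid in place, so the example grid from the header applies.
#
#     INF = 2 ** 31 - 1
#     rooms = [[INF,  -1,   0, INF],
#              [INF, INF, INF,  -1],
#              [INF,  -1, INF,  -1],
#              [  0,  -1, INF, INF]]
#     Solution().wallsAndGates(rooms)   # rooms now holds gate distances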
class TestMethods(unittest.TestCase):
def test_Local(self):
self.assertEqual(1, 1)
if __name__ == '__main__':
unittest.main()
Java = '''
# Thought: https://leetcode.com/problems/walls-and-gates/solution/
# DFS
# 99.31% 4ms
class Solution {
public static final int[][] DIRECTIONS = new int[][] {{-1, 0}, {1, 0}, {0, -1}, {0, 1}};
public void wallsAndGates(int[][] rooms) {
int m = rooms.length;
int n = m == 0 ? 0 : rooms[0].length;
for (int i = 0; i < m; i++) {
for (int j = 0; j < n; j++) {
if (rooms[i][j] == 0) {
dfs(rooms, m, n, i, j, 1);
}
}
}
}
private void dfs(int[][] rooms, int m, int n, int i, int j, int steps) {
for (int[] direction : DIRECTIONS) {
int newI = i + direction[0];
int newJ = j + direction[1];
if (newI >= 0 && newI < m && newJ >= 0 && newJ < n && rooms[newI][newJ] > steps) {
rooms[newI][newJ] = steps;
dfs(rooms, m, n, newI, newJ, steps + 1);
}
}
}
}
# DFS
# 99.31% 4ms
public class Solution {
private static int[] dir = {0, 1, 0, -1, 0};
public void wallsAndGates(int[][] rooms) {
for (int i = 0; i < rooms.length; i++) {
for (int j = 0; j < rooms[0].length; j++) {
if (rooms[i][j] == 0) dfs(rooms, i, j);
}
}
}
public void dfs(int[][] rooms, int i, int j) {
for (int k = 0; k < 4; k++) {
int p = i + dir[k], q = j + dir[k+1];
if ( 0 <= p && p < rooms.length && 0 <= q && q < rooms[0].length && rooms[p][q] > rooms[i][j] + 1) {
rooms[p][q] = rooms[i][j] + 1;
dfs(rooms, p, q);
}
}
}
}
# BFS
# 59.16% 9ms
public class Solution {
//The Multi End BFS solution used is this
public static final int[] d = {0, 1, 0, -1, 0};
public void wallsAndGates(int[][] rooms) {
if (rooms.length == 0) return;
int m = rooms.length, n = rooms[0].length;
Deque<Integer> queue = new ArrayDeque<>();
for (int i = 0; i < m ; i++) {
for (int j = 0; j < n; j++) {
if (rooms[i][j] == 0){
queue.offer(i *n + j); }
}
}
while(!queue.isEmpty()){
int x = queue.poll();
int i = x / n, j = x % n;
for (int k = 0; k < 4; k++) {
int p = i + d[k], q = j + d[k+1];
if (0 <= p && p < m && 0 <= q && q < n && rooms[p][q] == Integer.MAX_VALUE) {
rooms[p][q] = rooms[i][j] + 1;
queue.offer(p * n + q);
}
}
}
}
private void bfs(int[][] rooms, int i, int j) {
int m = rooms.length, n = rooms[0].length;
Deque<Integer> queue = new ArrayDeque<>();
queue.offer(i * n + j); // Put gate in the queue
while (!queue.isEmpty()) {
int x = queue.poll();
i = x / n; j = x % n;
for (int k = 0; k < 4; ++k) {
int p = i + d[k], q = j + d[k+1];
if (0 <= p && p < m && 0 <= q && q < n && rooms[p][q] > rooms[i][j] + 1) {
rooms[p][q] = rooms[i][j] + 1;
queue.offer(p * n + q);
}
}
}
}
}
# BFS2
# 32.38% 13ms
class Solution {
//The Multi End BFS solution used is this
public static final int[] d = {0, 1, 0, -1, 0};
public void wallsAndGates(int[][] rooms) {
if (rooms.length == 0) return;
int m = rooms.length, n = rooms[0].length;
Deque<Integer> queue = new ArrayDeque<>();
for (int i = 0; i < m ; i++) {
for (int j = 0; j < n; j++) {
if (rooms[i][j] == 0){
queue.offer(i * n + j);
bfs(rooms, i, j); //naive BFS solution
}
}
}
}
private void bfs(int[][] rooms, int i, int j) {
int m = rooms.length, n = rooms[0].length;
Deque<Integer> queue = new ArrayDeque<>();
queue.offer(i * n + j); // Put gate in the queue
while (!queue.isEmpty()) {
int x = queue.poll();
i = x / n; j = x % n;
for (int k = 0; k < 4; +
|
doraemonext/DEOnlineJudge
|
api/account/urls.py
|
Python
|
mit
| 541
| 0.005545
|
# -*- coding: utf-8 -*-
from django.conf.urls import url
from api.account.views import RegistrationAPI, LoginAPI, LogoutAPI, UpdatePasswordAPI, UpdateProfileAPI
urlpatterns = [
    url(r'^registration/$', RegistrationAPI.as_view(), name='registration'),
url(r'^login/$', LoginAPI.as_view(), name='login'),
url(r'^logout/$', LogoutAPI.as_view(), name='logout'),
url(r'^update_password/$', UpdatePasswordAPI.as_view(), name='update_password'),
url(r'^update_profile/$', UpdateProfileAPI.as_view(), name='update_profile'),
]
|
nikdoof/test-auth
|
app/conf/celeryschedule.py
|
Python
|
bsd-3-clause
| 1,084
| 0
|
from datetime import timedelta
CELERYBEAT_SCHEDULE = {
"reddit-validations": {
"task": "reddit.tasks.process_validations",
"schedule": timedelta(minutes=10),
},
"eveapi-update": {
"task": "eve_api.tasks.account.queue_apikey_updates",
"schedule": timedelta(minutes=10),
},
"alliance-update": {
"task": "eve_api.tasks.alliance.import_alliance_details",
"schedule": timedelta(hours=6),
},
"api-log-clear": {
"task": "eve_proxy.tasks.clear_old_logs",
"schedule": timedelta(days=1),
},
"blacklist-check": {
"task": "hr.tasks.blacklist_check",
"schedule": timedelta(days=7),
},
"reddit-update": {
"task": "reddit.tasks.queue_account_updates",
"schedule": timedelta(minutes=15),
}
}
CELERY_ROUTES = {
"sso.tasks.update_service_groups": {'queue': 'bulk'},
"hr.tasks.blacklist_check": {'queue': 'bulk'},
"eve_api.tasks.import_apikey_result": {'queue': 'fastresponse'},
"sso.tasks.update_user_access": {'queue': 'fastresponse'},
}
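# Extending the beat schedule follows the same shape (hypothetical task name,
# shown only as a sketch):
#
#     CELERYBEAT_SCHEDULE["session-cleanup"] = {
#         "task": "sso.tasks.cleanup_sessions",
#         "schedule": timedelta(hours=1),
#     }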
|
sfinucane/deviceutils
|
deviceutils/action/query.py
|
Python
|
apache-2.0
| 2,510
| 0.004382
|
#!/usr/bin/env python
"""
"""
# Python 2.6 and newer support
from __future__ import (absolute_import, division,
print_function, unicode_literals)
from future.builtins import (
bytes, dict, int, list, object, range, str,
ascii, chr, hex, input, next, oct, open,
pow, round, super,
filter, map, zip)
try:
unicode()
except NameError:
unicode = str
import sys
__python_version__ = dict()
try:
__python_version__['major'] = sys.version_info.major
except AttributeError:
__python_version__['major'] = sys.version_info[0]
try:
__python_version__['minor'] = sys.version_info.minor
except AttributeError:
__python_version__['minor'] = sys.version_info[1]
from ..defaultencoding import DefaultEncoding
from ..import channel
class Query(object):
"""More than just a simple aggregate of a Command and a Response.
Executes a command, attempts to retrieve an IMMEDIATE response, all without
releasing the resource locks.
"""
def __init__(self, message, device=None, io=None,
send_encoding=DefaultEncoding(),
receive_encoding=DefaultEncoding(),
receive_count=-1):
object.__init__(self)
self.message = message
self.device = device
self.io = io
self.send_encoding = send_encoding
self.receive_encoding = receive_encoding
self.receive_count = receive_count
self.__response = None
@property
def value(self):
"""The most recently retrieved response.
"""
return self.__response
def __call__(self, *args, **kwargs):
"""Sends the command, fetches a response, stores and returns that response.
|
Any arguments and/or keyword arguments will be passed to ``format``,
which is called on the command message before sending.
"""
if isinstance(self.send_encoding, DefaultEncoding):
with channel(self.device, self.io) as dev:
dev.send(self.message.format(*args, **kwargs))
self.__response = dev.receive(count=self.receive_count)
else:
with channel(self.device, self.io) as dev:
dev.send(self.message.format(*args, **kwargs), encoding=self.send_encoding)
self.__response = dev.receive(count=self.receive_count, encoding=self.receive_encoding)
return self.value
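# Usage sketch (hypothetical SCPI-style device; channel() holds the resource
# locks across the send/receive pair, which is the point of this class):
#
#     idn = Query('*IDN?', device=my_device)   # my_device is assumed
#     response = idn()       # sends, fetches the immediate response
#     cached = idn.value     # last response, no new I/O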
|
hybrid-storage-dev/cinder-fs-111t-hybrid-cherry
|
volume/drivers/hitachi/hbsd_iscsi.py
|
Python
|
apache-2.0
| 16,385
| 0
|
# Copyright (C) 2014, Hitachi, Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
iSCSI Cinder volume driver for Hitachi storage.
"""
from contextlib import nested
import os
import threading
from oslo.config import cfg
import six
from cinder import exception
from cinder.i18n import _
from cinder.openstack.common import log as logging
from cinder import utils
import cinder.volume.driver
from cinder.volume.drivers.hitachi import hbsd_basiclib as basic_lib
from cinder.volume.drivers.hitachi import hbsd_common as common
LOG = logging.getLogger(__name__)
CHAP_METHOD = ('None', 'CHAP None', 'CHAP')
volume_opts = [
cfg.BoolOpt('hitachi_add_chap_user',
default=False,
help='Add CHAP user'),
cfg.StrOpt('hitachi_auth_method',
default=None,
help='iSCSI authentication method'),
cfg.StrOpt('hitachi_auth_user',
default='%sCHAP-user' % basic_lib.NAME_PREFIX,
help='iSCSI authentication username'),
cfg.StrOpt('hitachi_auth_password',
default='%sCHAP-password' % basic_lib.NAME_PREFIX,
help='iSCSI authentication password'),
]
CONF = cfg.CONF
CONF.register_opts(volume_opts)
class HBSDISCSIDriver(cinder.volume.driver.ISCSIDriver):
VERSION = common.VERSION
def __init__(self, *args, **kwargs):
os.environ['LANG'] = 'C'
super(HBSDISCSIDriver, self).__init__(*args, **kwargs)
self.db = kwargs.get('db')
self.common = None
self.configuration.append_config_values(common.volume_opts)
self._stats = {}
self.context = None
self.do_setup_status = threading.Event()
def _check_param(self):
self.configuration.append_config_values(volume_opts)
if (self.configuration.hitachi_auth_method and
self.configuration.hitachi_auth_method not in CHAP_METHOD):
msg = basic_lib.output_err(601, param='hitachi_auth_method')
raise exception.HBSDError(message=msg)
if self.configuration.hitachi_auth_method == 'None':
self.configuration.hitachi_auth_method = None
for opt in volume_opts:
getattr(self.configuration, opt.name)
def check_param(self):
try:
self.common.check_param()
self._check_param()
except exception.HBSDError:
raise
except Exception as ex:
msg = basic_lib.output_err(601, param=six.text_type(ex))
raise exception.HBSDError(message=msg)
def output_param_to_log(self):
lock = basic_lib.get_process_lock(self.common.system_lock_file)
with lock:
self.common.output_param_to_log('iSCSI')
for opt in volume_opts:
if not opt.secret:
value = getattr(self.configuration, opt.name)
LOG.info('\t%-35s%s' % (opt.name + ': ',
six.text_type(value)))
def _delete_lun_iscsi(self, hostgroups, ldev):
try:
self.common.command.comm_delete_lun_iscsi(hostgroups, ldev)
except exception.HBSDNotFound:
msg = basic_lib.set_msg(301, ldev=ldev)
LOG.warning(msg)
def _add_target(self, hostgroups, ldev):
self.common.add_lun('autargetmap', hostgroups, ldev)
def _add_initiator(self, hgs, port, gid, host_iqn):
self.common.command.comm_add_initiator(port, gid, host_iqn)
hgs.append({'port': port, 'gid': int(gid), 'detected': True})
LOG.debug("Create iSCSI target for %s" % hgs)
def _get_unused_gid_iscsi(self, port):
group_range = self.configuration.hitachi_group_range
if not group_range:
group_range = basic_lib.DEFAULT_GROUP_RANGE
return self.common.command.get_unused_gid_iscsi(group_range, port)
def _delete_iscsi_target(self, port, target_no, target_alias):
ret, _stdout, _stderr = self.common.command.delete_iscsi_target(
port, target_no, target_alias)
if ret:
msg = basic_lib.set_msg(
307, port=port, tno=target_no, alias=target_alias)
LOG.warning(msg)
def _delete_chap_user(self, port):
ret, _stdout, _stderr = self.common.command.delete_chap_user(port)
if ret:
msg = basic_lib.set_msg(
303, user=self.configuration.hitachi_auth_user)
LOG.warning(msg)
def _get_hostgroup_info_iscsi(self, hgs, host_iqn):
return self.common.command.comm_get_hostgroup_info_iscsi(
hgs, host_iqn, self.configuration.hitachi_target_ports)
def _discovery_iscsi_target(self, hostgroups):
for hostgroup in hostgroups:
ip_addr, ip_port = self.common.command.comm_get_iscsi_ip(
hostgroup['port'])
target_iqn = self.common.command.comm_get_target_iqn(
hostgroup['port'], hostgroup['gid'])
hostgroup['ip_addr'] = ip_addr
hostgroup['ip_port'] = ip_port
hostgroup['target_iqn'] = target_iqn
LOG.debug("ip_addr=%(addr)s ip_port=%(port)s target_iqn=%(iqn)s"
% {'addr': ip_addr, 'port': ip_port, 'iqn': target_iqn})
def _fill_groups(self, hgs, ports, target_iqn, target_alias, add_iqn):
for port in ports:
added_hostgroup = False
added_user = False
LOG.debug('Create target (hgs: %(hgs)s port: %(port)s '
'target_iqn: %(tiqn)s target_alias: %(alias)s '
'add_iqn: %(aiqn)s)' %
{'hgs': hgs, 'port': port, 'tiqn': target_iqn,
'alias': target_alias, 'aiqn': add_iqn})
gid = self.common.command.get_gid_from_targetiqn(
target_iqn, target_alias, port)
if gid is None:
for retry_cnt in basic_lib.DEFAULT_TRY_RANGE:
gid = None
try:
gid = self._get_unused_gid_iscsi(port)
self.common.command.comm_add_hostgrp_iscsi(
port, gid, target_alias, target_iqn)
added_hostgroup = True
except exception.HBSDNotFound:
msg = basic_lib.set_msg(312, resource='GID')
LOG.warning(msg)
continue
except Exception as ex:
msg = basic_lib.set_msg(
309, port=port, alias=target_alias,
reason=six.text_type(ex))
LOG.warning(msg)
break
else:
LOG.debug('Completed to add target'
'(port: %(port)s gid: %(gid)d)'
% {'port': port, 'gid': gid})
break
if gid is None:
LOG.error(_('Failed to add target(port: %s)') % port)
continue
try:
if added_hostgroup:
if self.configuration.hitachi_auth_method:
added_user = self.common.command.set_chap_authention(
port, gid)
self.common.command.comm_set_hostgrp_reportportal(
port, target_alias)
self._add_initiator(hgs, port, gid, add_iqn)
except Exception as ex:
msg = basic_lib.set_msg(
316, port=port, reason=six.text_type(ex))
LOG.warning(msg)
|
pmisik/buildbot
|
master/buildbot/test/util/changesource.py
|
Python
|
gpl-2.0
| 3,747
| 0.000267
|
# This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
from twisted.internet import defer
from twisted.internet import task
from buildbot.test.fake import fakemaster
class ChangeSourceMixin:
"""
This class is used for testing change sources, and handles a few things:
- starting and stopping a ChangeSource service
- a fake master with a data API implementation
"""
changesource = None
started = False
DUMMY_CHANGESOURCE_ID = 20
OTHER_MASTER_ID = 93
DEFAULT_NAME = "ChangeSource"
def setUpChangeSource(self):
"Set up the mixin - returns a deferred."
self.master = fakemaster.make_master(self, wantDb=True, wantData=True)
assert not hasattr(self.master, 'addChange') # just checking..
return defer.succeed(None)
@defer.inlineCallbacks
def tearDownChangeSource(self):
"Tear down the mixin - returns a deferred."
if not self.started:
return
        if self.changesource.running:
yield self.changesource.stopService()
yield self.changesource.disownServiceParent()
return
@defer.inlineCallbacks
def attachChangeSource(self, cs):
"Set up a change source for testing; sets its .master attribute"
self.changesource = cs
# FIXME some changesource does not have master property yet but
# mailchangesource has :-/
try:
            self.changesource.master = self.master
except AttributeError:
yield self.changesource.setServiceParent(self.master)
# configure the service to let secret manager render the secrets
try:
yield self.changesource.configureService()
except NotImplementedError: # non-reconfigurable change sources can't reconfig
pass
# also, now that changesources are ClusteredServices, setting up
# the clock here helps in the unit tests that check that behavior
self.changesource.clock = task.Clock()
return cs
def startChangeSource(self):
"start the change source as a service"
self.started = True
return self.changesource.startService()
@defer.inlineCallbacks
def stopChangeSource(self):
"stop the change source again; returns a deferred"
yield self.changesource.stopService()
self.started = False
def setChangeSourceToMaster(self, otherMaster):
# some tests build the CS late, so for those tests we will require that
# they use the default name in order to run tests that require master
# assignments
if self.changesource is not None:
name = self.changesource.name
else:
name = self.DEFAULT_NAME
self.master.data.updates.changesourceIds[
name] = self.DUMMY_CHANGESOURCE_ID
if otherMaster:
self.master.data.updates.changesourceMasters[
self.DUMMY_CHANGESOURCE_ID] = otherMaster
else:
del self.master.data.updates.changesourceMasters[
self.DUMMY_CHANGESOURCE_ID]
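# Sketch of a concrete test built on this mixin (assuming twisted.trial's
# TestCase, which understands the deferreds returned above):
#
#     from twisted.trial import unittest
#
#     class TestMySource(ChangeSourceMixin, unittest.TestCase):
#         def setUp(self):
#             return self.setUpChangeSource()
#
#         def tearDown(self):
#             return self.tearDownChangeSource()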
|
jyi/ITSP
|
prophet-gpl/tools/libtiff-prepare-test.py
|
Python
|
mit
| 1,700
| 0.018235
|
# Copyright (C) 2016 Fan Long, Martin Rianrd and MIT CSAIL
# Prophet
#
# This file is part of Prophet.
#
# Prophet is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Prophet is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Prophet. If not, see <http://www.gnu.org/licenses/>.
#!/usr/bin/env python
from os import system, chdir, getcwd
from sys import argv
import subprocess
build_cmd = argv[1];
dep_dir = argv[2];
src_dir = argv[3];
test_dir = argv[4];
rev = argv[5];
if (len(argv) < 7):
out_dir = test_dir + "-" + rev;
else:
out_dir = argv[6];
work_dir = "__tmp" + rev;
system("cp -rf " + src_dir + " " + work_dir);
ori_dir = getcwd();
chdir(work_dir);
system("git checkout -f
|
" + rev);
system("git clean -f -d");
chdir(ori_dir);
system(build_cmd + " -p " + dep_dir + " " + work_dir);
system("mv " + work_dir + "/test " + work_dir+"/ori_test");
system("cp -rf " + test_dir + " " + work_dir + "/test");
chdir(work_dir + "/test");
system("GENEXPOUT=1 CMPEXPOUT=0 make check");
chdir(ori_dir);
print "Goint to generate testdir for revision " + rev + " case: " + out_dir;
system("cp -rf " + test_dir + " " + out_dir);
system("cp -rf " + work_dir + "/test/*.exp " + work_dir + "/test/*.tol " + out_dir+"/");
system("rm -rf " + work_dir);
|
UpYou/relay
|
usrp_transmit_path.py
|
Python
|
gpl-3.0
| 3,809
| 0.006563
|
#
# Copyright 2009 Free Software Foundation, Inc.
#
# This file is part of GNU Radio
#
# GNU Radio is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# GNU Radio is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with GNU Radio; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street,
# Boston, MA 02110-1301, USA.
#
import sys

from gnuradio import gr
import usrp_options
import transmit_path
from pick_bitrate import pick_tx_bitrate
from gnuradio import eng_notation
def add_freq_option(parser):
"""
Hackery that has the -f / --freq option set both tx_freq and rx_freq
"""
def freq_callback(option, opt_str, value, parser):
parser.values.rx_freq = value
parser.values.tx_freq = value
if not parser.has_option('--freq'):
parser.add_option('-f', '--freq', type="eng_float",
action="callback", callback=freq_callback,
help="set Tx and/or Rx frequency to FREQ [default=%default]",
metavar="FREQ")
def add_options(parser, expert):
add_freq_option(parser)
usrp_options.add_tx_options(parser)
transmit_path.transmit_path.add_options(parser, expert)
expert.add_option("", "--tx-freq", type="eng_float", default=None,
help="set transmit frequency to FREQ [default=%default]", metavar="FREQ")
parser.add_option("-v", "--verbose", action="store_true", default=False)
class usrp_transmit_path(gr.hier_block2):
def __init__(self, modulator_class, options):
'''
See below for what options should hold
'''
gr.hier_block2.__init__(self, "usrp_transmit_path",
gr.io_signature(0, 0, 0), # Input signature
gr.io_signature(0, 0, 0)) # Output signature
if options.tx_freq is None:
sys.stderr.write("-f FREQ or --freq FREQ or --tx-freq FREQ must be specified\n")
raise SystemExit
tx_path = transmit_path.transmit_path(modulator_class, options)
for attr in dir(tx_path): #forward the methods
if not attr.startswith('_') and not hasattr(self, attr):
setattr(self, attr, getattr(tx_path, attr))
#setup usrp
self._modulator_class = modulator_class
self._setup_usrp_sink(options)
#connect
self.connect(tx_path, self.u)
def _setup_usrp_sink(self, options):
"""
Creates a USRP sink, determines the settings for best bitrate,
and attaches to the transmitter's subdevice.
"""
self.u = usrp_options.create_usrp_sink(options)
dac_rate = self.u.dac_rate()
if options.verbose:
print 'USRP Sink:', self.u
(self._bitrate, self._samples_per_symbol, self._interp) = \
pick_tx_bitrate(options.bitrate, self._modulator_class.bits_per_symbol(), \
options.samples_per_symbol, options.interp, dac_rate, \
self.u.get_interp_rates())
self.u.set_interp(self._interp)
self.u.set_auto_tr(True)
if not self.u.set_center_freq(options.tx_freq):
print "Failed to set Rx frequency to %s" % (eng_notation.num_to_str(options.tx_freq))
raise ValueError, eng_notation.num_to_str(options.tx_freq)
|
b3yond/ticketfrei
|
session.py
|
Python
|
isc
| 1,063
| 0.001881
|
from bottle import redirect, request, abort, response
from db import db
from functools import wraps
from inspect import Signature
from user import User
class SessionPlugin(object):
name = 'SessionPlugin'
keyword = 'user'
api = 2
def __init__(self, loginpage):
self.loginpage = loginpage
def apply(self, callback, route):
        if self.keyword in Signature.from_callable(route.callback).parameters:
@wraps(callback)
def wrapper(*args, **kwargs):
uid = request.get_cookie('uid', secret=db.get_secret())
if uid is None:
return redirect(self.loginpage)
kwargs[self.keyword] = User(uid)
if request.method == 'POST':
                    if request.forms['csrf'] != request.get_cookie('csrf', secret=db.get_secret()):
abort(400)
return callback(*args, **kwargs)
return wrapper
else:
return callback
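# Installation sketch (assumed bottle app; the plugin only wraps routes whose
# callback declares the 'user' keyword, as checked in apply() above):
#
#     from bottle import Bottle
#     app = Bottle()
#     app.install(SessionPlugin('/login'))
#
#     @app.route('/settings')
#     def settings(user):   # 'user' here triggers the cookie/CSRF checks
#         return 'logged in as uid %s' % user.uid   # hypothetical attribute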
|
jkonecny12/anaconda
|
pyanaconda/modules/payloads/payload/dnf/initialization.py
|
Python
|
gpl-2.0
| 1,604
| 0.000623
|
#
# Copyright (C) 2020 Red Hat, Inc.
#
# This copyrighted material is made available to anyone wishing to use,
# modify, copy, or redistribute it subject to the terms and conditions of
# the GNU General Public License v.2, or (at your option) any later version.
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY expressed or implied, including the implied warranties of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
# Public License for more details. You should have received a copy of the
# GNU General Public License along with this program; if not, write to the
# Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA. Any Red Hat trademarks that are incorporated in the
# source code or documentation are not subject to the GNU General Public
# License and may only be used or replicated with the express permission of
# Red Hat, Inc.
#
import dnf.logging
import logging
import libdnf
DNF_LIBREPO_LOG = "/tmp/dnf.librepo.log"
DNF_LOGGER = "dnf"
def configure_dnf_logging():
"""Configure the DNF logging."""
# Set up librepo.
# This is still required even when the librepo has a separate logger because
# DNF needs to have callbacks that the librepo log is written to be able to
# process that log.
libdnf.repo.LibrepoLog.removeAllHandlers()
    libdnf.repo.LibrepoLog.addHandler(DNF_LIBREPO_LOG)
# Set up DNF. Increase the log level to the custom DDEBUG level.
dnf_logger = logging.getLogger(DNF_LOGGER)
dnf_logger.setLevel(dnf.logging.DDEBUG)
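# Call-site sketch (assumption: invoked once, before any dnf.Base object is
# created, so librepo's handler is registered ahead of the first repo call):
#
#     configure_dnf_logging()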
|
rtts/qqq
|
qqq/templatetags/customfilters.py
|
Python
|
gpl-3.0
| 807
| 0.032218
|
from django import template
register = template.Library()
@register.filter
def multiplyby(value, arg):
    return int(value * arg)
@register.filter
def subtractfrom(value, arg):
return arg - value
@register.filter
def plus(value, arg):
return value + arg
@register.filter
def appears_in(value, arg):
for name in arg:
if name == value: return True
return False
@register.filter
def length(value):
return len(value)
@register.filter
def user_can_downvote(votes, id):
if id not in votes: return True
if votes[id].is_downvote(): return False
return True
@register.filter
def user_can_upvote(votes, id):
if id not in votes: return True
if votes[id].is_upvote(): return False
return True
@register.filter
def stripnewlines(str):
return str.replace('\n', ' ').replace('\r', ' ')
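# Template usage sketch (hypothetical context variables):
#
#     {{ score|multiplyby:10 }}            {# -> int(score * 10) #}
#     {{ name|appears_in:winners }}        {# True if name is in winners #}
#     {{ votes|user_can_upvote:item.id }}  {# False once already upvoted #}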
|
sajuptpm/neutron-ipam
|
neutron/tests/unit/services/loadbalancer/agent/test_api.py
|
Python
|
apache-2.0
| 4,901
| 0
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
#
# Copyright 2013 New Dream Network, LLC (DreamHost)
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# @author: Mark McClain, DreamHost
import mock
from neutron.services.loadbalancer.agent import agent_api as api
from neutron.tests import base
class TestApiCache(base.BaseTestCase):
def setUp(self):
super(TestApiCache, self).setUp()
self.api = api.LbaasAgentApi('topic', mock.sentinel.context, 'host')
self.make_msg = mock.patch.object(self.api, 'make_msg').start()
self.mock_call = mock.patch.object(self.api, 'call').start()
def test_init(self):
self.assertEqual(self.api.host, 'host')
self.assertEqual(self.api.context, mock.sentinel.context)
def test_get_ready_devices(self):
self.assertEqual(
self.api.get_ready_devices(),
self.mock_call.return_value
)
self.make_msg.assert_called_once_with('get_ready_devices', host='host')
self.mock_call.assert_called_once_with(
mock.sentinel.context,
self.make_msg.return_value,
topic='topic'
)
    def test_get_logical_device(self):
self.assertEqual(
self.api.get_logical_device('pool_id'),
self.mock_call.return_value
)
self.make_msg.assert_called_once_with(
'get_logical_device',
pool_id='pool_id')
self.mock_call.assert_called_once_with(
mock.sentinel.context,
            self.make_msg.return_value,
topic='topic'
)
def test_pool_destroyed(self):
self.assertEqual(
self.api.pool_destroyed('pool_id'),
self.mock_call.return_value
)
self.make_msg.assert_called_once_with(
'pool_destroyed',
pool_id='pool_id')
self.mock_call.assert_called_once_with(
mock.sentinel.context,
self.make_msg.return_value,
topic='topic'
)
def test_pool_deployed(self):
self.assertEqual(
self.api.pool_deployed('pool_id'),
self.mock_call.return_value
)
self.make_msg.assert_called_once_with(
'pool_deployed',
pool_id='pool_id')
self.mock_call.assert_called_once_with(
mock.sentinel.context,
self.make_msg.return_value,
topic='topic'
)
def test_update_status(self):
self.assertEqual(
self.api.update_status('pool', 'pool_id', 'ACTIVE'),
self.mock_call.return_value
)
self.make_msg.assert_called_once_with(
'update_status',
obj_type='pool',
obj_id='pool_id',
status='ACTIVE')
self.mock_call.assert_called_once_with(
mock.sentinel.context,
self.make_msg.return_value,
topic='topic'
)
def test_plug_vip_port(self):
self.assertEqual(
self.api.plug_vip_port('port_id'),
self.mock_call.return_value
)
self.make_msg.assert_called_once_with(
'plug_vip_port',
port_id='port_id',
host='host')
self.mock_call.assert_called_once_with(
mock.sentinel.context,
self.make_msg.return_value,
topic='topic'
)
def test_unplug_vip_port(self):
self.assertEqual(
self.api.unplug_vip_port('port_id'),
self.mock_call.return_value
)
self.make_msg.assert_called_once_with(
'unplug_vip_port',
port_id='port_id',
host='host')
self.mock_call.assert_called_once_with(
mock.sentinel.context,
self.make_msg.return_value,
topic='topic'
)
def test_update_pool_stats(self):
self.assertEqual(
self.api.update_pool_stats('pool_id', {'stat': 'stat'}),
self.mock_call.return_value
)
self.make_msg.assert_called_once_with(
'update_pool_stats',
pool_id='pool_id',
stats={'stat': 'stat'},
host='host')
self.mock_call.assert_called_once_with(
mock.sentinel.context,
self.make_msg.return_value,
topic='topic'
)
|
redondomarco/useradm
|
src/models/unificada.py
|
Python
|
gpl-3.0
| 16,421
| 0.015982
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import MySQLdb, sys
# for ide
if False:
from gluon import *
def clumusuario(email):
"""consulto usuario tabla clave unificada"""
dbmysql = MySQLdb.connect(
host=myconf.take('datos.clum_srv'),
port=int(myconf.take('datos.clum_port')),
user=myconf.take('datos.clum_user'),
passwd=myconf.take('datos.clum_pass'),
db=myconf.take('datos.clum_db'))
cursor = dbmysql.cursor()
cursor.execute("""select username from auth_user where email='%s';"""%(email))
registro=cursor.fetchall()
log("usuario: "+str(registro))
dbmysql.close()
if not registro:
salida='no configurado'
elif registro[0][0]=='':
salida='no configurado'
else:
salida=str(registro[0][0])
return salida
def consulta_id(usuario):
"""consulto email tabla clave unificada"""
dbmysql = MySQLdb.connect(
host=myconf.take('datos.clum_srv'),
port=int(myconf.take('datos.clum_port')),
user=myconf.take('datos.clum_user'),
passwd=myconf.take('datos.clum_pass'),
db=myconf.take('datos.clum_db'))
cursor = dbmysql.cursor()
cursor.execute("""select id from auth_user where username='%s';"""%(usuario))
registro=cursor.fetchall()
log("id: "+str(registro))
dbmysql.close()
if not registro:
salida='no creado'
elif registro[0][0]=='':
salida='no configurado'
else:
salida=int(registro[0][0])
return salida
def consulta_emailalt(usuario):
"""consulto email tabla clave unificada"""
try:
dbmysql = MySQLdb.connect(
host=myconf.take('datos.clum_srv'),
port=int(myconf.take('datos.clum_port')),
user=myconf.take('datos.clum_user'),
passwd=myconf.take('datos.clum_pass'),
db=myconf.take('datos.clum_db'))
cursor = dbmysql.cursor()
cursor.execute("""select email from auth_user where username='%s';"""%(usuario))
registro=cursor.fetchall()
#log("mailalt: "+str(registro))
dbmysql.close()
except Exception as e:
return ['error',e.args]
    if not registro:
salida=['error','no creado']
elif registro[0][0]=='':
salida=['error','no configurado']
else:
salida=['ok',str(registro[0][0])]
return salida
def consulta_autogestion(usuario):
"""consulto email tabla clave unificada"""
try:
dbmysql = MySQLdb.connect(
host=myconf.take('datos.clum_srv'),
port=int(myconf.take('datos.clum_port')),
user=myconf.take('datos.clum_user'),
passwd=myconf.take('datos.clum_pass'),
db=myconf.take('datos.clum_db'))
cursor = dbmysql.cursor()
cursor.execute("""select * from auth_user where username='%s';"""%(usuario))
registro=cursor.fetchall()
#log("mailalt: "+str(registro))
dbmysql.close()
except Exception as e:
return ['error',e.args]
if not registro:
salida=['error','no creado']
elif registro[0][0]=='':
salida=['error','no configurado']
else:
#return registro
aux={
"id":registro[0][0],
"first_name":registro[0][1],
"last_name":registro[0][2],
"email":registro[0][3],
"username":registro[0][4],
"password":registro[0][5],
"registration_key":registro[0][6],
"reset_password_key":registro[0][7],
"registration_id":registro[0][8],
"is_active":registro[0][9],
"created_on":registro[0][10],
"created_by":registro[0][11],
"modified_on":registro[0][12],
"modified_by":registro[0][13]
}
salida=['ok', aux]
return salida
def todos_autogestion():
"""consulto email tabla clave unificada"""
try:
dbmysql = MySQLdb.connect(
host=myconf.take('datos.clum_srv'),
port=int(myconf.take('datos.clum_port')),
user=myconf.take('datos.clum_user'),
passwd=myconf.take('datos.clum_pass'),
db=myconf.take('datos.clum_db'))
cursor = dbmysql.cursor()
cursor.execute("""select first_name,last_name,email,username,created_on,modified_on from auth_user;""")
registro=cursor.fetchall()
#log("mailalt: "+str(registro))
dbmysql.close()
except Exception as e:
return ['error',e.args]
#return registro
resultado={}
for i in registro:
resultado[i[3]]={
'nombre':i[0],
'apellido':i[1],
'mailalt':i[2],
'usuario':i[3],
'fcreado':i[4],
'fmodificado':i[5]
}
return ['ok',resultado]
def agrego_autogestion(username,nombre,apellido,correo,creadopor):
"""agrego usuario a autogestion"""
#consulto que el usuario exista en seguusua
log("intento agregar a clum: "+str(username)+" "+str(nombre)+" "+str(apellido)+" "+str(correo)+" "+str(creadopor))
consulta=seguusua(username)
if consulta[0]=='error':
log('no existe en seguusua')
return ['error',consulta[1]]
email=str(correo)
usuarioclum=clumusuario(email)
if usuarioclum!='no configurado':
return ['error',str(correo)+" utilizado por "+str(usuarioclum)+". No se agrega "+str(username)+" en autogestion."]
    # make sure it does not already exist in the database; if it exists but is unconfigured, delete it to avoid a duplicate record
usuario_clum=consulta_emailalt(username)[1]
if usuario_clum=='no configurado':
elimino_autogestion(username)
creador=consulta_id(creadopor)
    # only create it if it does not exist yet
if usuario_clum=='no creado':
dbmysql = MySQLdb.connect(
host=myconf.take('datos.clum_srv'),
port=int(myconf.take('datos.clum_port')),
user=myconf.take('datos.clum_user_insert'),
passwd=myconf.take('datos.clum_pass_insert'),
db=myconf.take('datos.clum_db'))
cursor = dbmysql.cursor()
modeloclum="(first_name,last_name,email,username,registration_id,is_active,created_on,created_by,respuesta,pregunta,tyc)"
valores="""'%s','%s','%s','%s','%s','T','%s','%s','Ninguna','Ninguna','T'"""%(nombre,apellido,correo,username,username,datetime.datetime.now(),creador)
#log("valores"+str(valores))
sqladd="""insert into auth_user %s values (%s);"""%(modeloclum,valores)
cursor.execute(sqladd)
dbmysql.commit()
registro=cursor.fetchall()
log("fetch: "+str(registro))
dbmysql.close()
log("agregado a clum: "+str(valores))
retorno="agregado ("+str(valores)+") fetch: "+str(registro)
return ['ok',retorno]
else:
return ['error','usuario ya existe en autogestion']
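# A parameterized variant of the INSERT above (illustrative sketch, not the
# original code) would avoid the quoting and injection risks of %-formatting:
#
#   cursor.execute(
#       "insert into auth_user (first_name,last_name,email,username,"
#       "registration_id,is_active,created_on,created_by,respuesta,pregunta,tyc) "
#       "values (%s,%s,%s,%s,%s,'T',%s,%s,'Ninguna','Ninguna','T')",
#       (nombre, apellido, correo, username, username,
#        datetime.datetime.now(), creador))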
def elimino_autogestion(username):
log("intento borrar de clum: "+str(username))
consulta=seguusua(username)
if consulta[0]=='error':
log('no existe en seguusua')
return ['error',consulta[1]]
usuario_clum=consulta_emailalt(username)[1]
    if usuario_clum!='no creado':
dbmysql = MySQLdb.connect(
host=myconf.take('datos.clum_srv'),
port=int(myconf.take('datos.clum_port')),
user=myconf.take('datos.clum_user_insert'),
passwd=myconf.take('datos.clum_pass_insert'),
db=myconf.take('datos.clum_db'))
cursor = dbmysql.cursor()
sqldelete="""delete from auth_user where username='%s';"""%(username)
log(sqldelete)
cursor.execute(sqldelete)
dbmysql.commit()
usuario_clum=consulta_emailalt(username)[1]
    if usuario_clum=='no creado':
return['ok','borrado']
else:
return['error','no borrado']
def clave_unificada(usuario, id_clave, **kwargs):
    # the password is held temporarily in memory (redis).
    # check that all services are available; the user must exist in kerberos and have an mr branch; sw is optional
    # store the password with
# redis_server.setex("new"+session.sesiong,base64.b64encode(session.sesiong+request.vars.newpass),tiem
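    # A hedged sketch (not from the source) of the store/retrieve round-trip
    # described above; redis_server, session.sesiong and the TTL value are
    # assumed from the surrounding application context:
    #
    #   redis_server.setex("new" + session.sesiong,
    #                      base64.b64encode(session.sesiong + request.vars.newpass),
    #                      300)  # illustrative TTL in seconds
    #   raw = redis_server.get("new" + session.sesiong)
    #   clave = base64.b64decode(raw)[len(session.sesiong):]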
|
jgreener64/pdb-benchmarks
|
checkwholepdb/checkwholepdb.py
|
Python
|
mit
| 2,107
| 0.004271
|
# Test which PDB entries error on PDB/mmCIF parsers
# Writes output to a file labelled with the week
import os
from datetime import datetime
from math import ceil
from Bio.PDB import PDBList
from Bio.PDB.PDBParser import PDBParser
from Bio.PDB.MMCIFParser import MMCIFParser
start = datetime.now()
basedir = "."
pdbl = PDBList()
pdblist = pdbl.get_all_entries()
outstrs = ["Checking all PDB entries at {}".format(start.isoformat()),
"Checking {} entries".format(len(pdblist))]
pdb_parser = PDBParser()
mmcif_parser = MMCIFParser()
for pu in sorted(pdblist):
p = pu.lower()
try:
pdbl.retrieve_pdb_file(p, pdir=basedir, file_format="pdb")
except:
# Not having a PDB file is acceptable, though a failure to download an
# available file may hide an error in parsing
try:
os.remove("{}/pdb{}.ent".format(basedir, p))
except:
pass
    if os.path.isfile("{}/pdb{}.ent".format(basedir, p)):
try:
s = pdb_parser.get_structure("", "{}/pdb{}.ent".format(basedir, p))
except:
outstrs.append("{} - PDB parsing error".format(pu))
os.remove("{}/pdb{}.ent".format(basedir, p))
try:
pdbl.retrieve_pdb_file(p, pdir=basedir, file_format="mmCif")
except:
try:
os.remove("{}/{}.cif".format(basedir, p))
except:
pass
        outstrs.append("{} - no mmCIF download".format(pu))
if os.path.isfile("{}/{}.cif".format(basedir, p)):
try:
s = mmcif_parser.get_structure("", "{}/{}.cif".format(basedir, p))
except:
outstrs.append("{} - mmCIF parsing error".format(pu))
os.remove("{}/{}.cif".format(basedir, p))
if len(outstrs) == 2:
outstrs.append("All entries read fine")
end = datetime.now()
outstrs.append("Time taken - {} minute(s)".format(int(ceil((end - start).seconds / 60))))
datestr = str(end.date()).replace("-", "")
# This overwrites any existing file
with open("{}/wholepdb_py_{}.txt".format(basedir, datestr), "w") as f:
for l in outstrs:
f.write(l + "\n")
|
DigitalMockingbird/EULAThingy
|
eulathingy/urls.py
|
Python
|
mit
| 637
| 0.006279
|
from django.conf import settings
from django.conf.urls import patterns, include, url
from django.conf.urls.static import static
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
    url(r'^$', include('dashboard.urls', namespace='dashboard')),
url(r'^admin/', include(admin.site.urls)),
url(r'^dashboard/', include('dashboard.urls', namespace='dashboard')),
# url(r'^uploads/', include('uploads.urls', namespace='uploads')),
) + static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
|
heathseals/CouchPotatoServer
|
libs/pyutil/PickleSaver.py
|
Python
|
gpl-3.0
| 8,932
| 0.002799
|
# Copyright (c) 2001 Autonomous Zone Industries
# Copyright (c) 2002-2009 Zooko Wilcox-O'Hearn
# This file is part of pyutil; see README.rst for licensing terms.
"""
An object that makes some of the attributes of your class persistent, pickling
them and lazily writing them to a file.
"""
# from the Python Standard Library
import os
import cPickle as pickle
import warnings
# from the pyutil library
import fileutil
import nummedobj
import twistedutil
# from the Twisted library
from twisted.python import log
class PickleSaver(nummedobj.NummedObj):
"""
This makes some of the attributes of your class persistent, saving
them in a pickle and saving them lazily.
The general idea: You are going to tell PickleSaver which of your
attributes ought to be persistently saved, and the name of a file to
save them in. Those attributes will get saved to disk, and when
your object is instantiated those attributes will get set to the
values loaded from the file.
Usage: inherit from PickleSaver and call PickleSaver.__init__() in your
constructor. You will pass arguments to PickleSaver.__init__()
telling it which attributes to save, which file to save them in, and
what values they should have if there is no value stored for them in
the file.
Note: do *not* assign values to your persistent attributes in your
constructor, because you might thus overwrite their persistent
values.
Then whenever you change one of the persistent attributes, call
self.lazy_save() (it won't *really* save -- it'll just schedule a
save for DELAY minutes later.) If you update an attribute and
forget to call self.lazy_save() then the change will not be saved,
unless you later call self.lazy_save() before you shut down.
Data could be lost if the Python interpreter were to die
unexpectedly (for example, due to a segfault in a compiled machine
code module or due to the Python process being killed without
warning via SIGKILL) before the delay passes. However if the Python
interpreter shuts down cleanly (i.e., if it garbage collects and
invokes the __del__ methods of the collected objects), then the data
will be saved at that time (unless your class has the "not-collectable"
problem: http://python.org/doc/current/lib/module-gc.html -- search
in text for "uncollectable").
Note: you can pass DELAY=0 to make PickleSaver a not-so-lazy saver.
The advantage of laziness is that you don't touch the disk as
often -- touching disk is a performance cost.
To cleanly shutdown, invoke shutdown(). Further operations after that
will result in exceptions.
"""
class ExtRes:
"""
This is for holding things (external resources) that PickleSaver needs
to finalize after PickleSaver is killed. (post-mortem finalization)
In particular, this holds the names and values of all attributes
that have been changed, so that after the PickleSaver is
garbage-collected those values will be saved to the persistent file.
"""
def __init__(self, fname, objname):
self.fname = fname
self.objname = objname
self.dirty = False # True iff the attrs have been changed and need to be saved to disk; When you change this flag from False to True, you schedule a save task for 10 minutes later. When the save task goes off it changes the flag from True to False.
self.savertask = None
self.valstr = None # the pickled (serialized, string) contents of the attributes that should be saved
def _save_to_disk(self):
if self.valstr is not None:
log.msg("%s._save_to_disk(): fname: %s" % (self.objname, self.fname,))
                of = open(self.fname + ".tmp", "wb")
of.write(self.valstr)
of.flush()
of.close()
of = None
fileutil.remove_if_possible(self.fname)
fileutil.rename(self.fname + ".tmp", self.fname)
log.msg("%s._save_to_disk(): now, having finished write(), os.path.isfile(%s): %s" % (self, self.fname, os.path.isfile(self.fname),))
self.valstr = None
self.dirty = False
try:
self.savertask.callId.cancel()
except:
pass
self.savertask = None
def shutdown(self):
if self.dirty:
self._save_to_disk()
if self.savertask:
try:
self.savertask.callId.cancel()
except:
pass
self.savertask = None
def __del__(self):
self.shutdown()
def __init__(self, fname, attrs, DELAY=60*60, savecb=None):
"""
@param attrs: a dict whose keys are the names of all the attributes to be persistently stored and whose values are the initial default value that the attribute gets set to the first time it is ever used; After this first initialization, the value will be persistent so the initial default value will never be used again.
@param savecb: if not None, then it is a callable that will be called after each save completes (useful for unit tests) (savecb doesn't get called after a shutdown-save, only after a scheduled save)
"""
warnings.warn("deprecated", DeprecationWarning)
nummedobj.NummedObj.__init__(self)
self._DELAY = DELAY
self._attrnames = attrs.keys()
self._extres = PickleSaver.ExtRes(fname=fname, objname=self.__repr__())
self._savecb = savecb
for attrname, defaultval in attrs.items():
setattr(self, attrname, defaultval)
try:
attrdict = pickle.loads(open(self._extres.fname, "rb").read())
for attrname, attrval in attrdict.items():
if not hasattr(self, attrname):
log.msg("WARNING: %s has no attribute named %s on load from disk, value: %s." % (self, attrname, attrval,))
setattr(self, attrname, attrval)
except (pickle.UnpicklingError, IOError, EOFError,), le:
try:
attrdict = pickle.loads(open(self._extres.fname + ".tmp", "rb").read())
for attrname, attrval in attrdict.items():
if not hasattr(self, attrname):
log.msg("WARNING: %s has no attribute named %s on load from disk, value: %s." % (self, attrname, attrval,))
setattr(self, attrname, attrval)
except (pickle.UnpicklingError, IOError, EOFError,), le2:
log.msg("Got exception attempting to load attrs. (This is normal if this is the first time you've used this persistent %s object.) fname: %s, le: %s, le2: %s" % (self.__class__, self._extres.fname, le, le2,))
self.lazy_save()
def _store_attrs_in_extres(self):
d = {}
for attrname in self._attrnames:
d[attrname] = getattr(self, attrname)
# log.msg("%s._store_attrs_in_extres: attrname: %s, val: %s" % (self, attrname, getattr(self, attrname),))
# pickle the attrs now, to ensure that there are no reference cycles
self._extres.valstr = pickle.dumps(d, True)
# log.msg("%s._store_attrs_in_extres: valstr: %s" % (self, self._extres.valstr,))
self._extres.dirty = True
def _save_to_disk(self):
log.msg("%s._save_to_disk()" % (self,))
self._extres._save_to_disk()
if self._savecb:
self._savecb()
def _lazy_save(self, delay=None):
""" @deprecated: use lazy_save() instead """
return self.lazy_save(delay)
def lazy_save(self, delay=None):
"""
@param delay: how long from now before the data gets saved to disk, or `None' in order to use the default value provided in the constructor
"""
if delay is None:
delay=self._DELAY
# copy the values into extres so that if `self' gets garbage-collected the values will be written to disk during post-mort
|
Tatsh-ansible/ansible
|
lib/ansible/modules/cloud/google/gce_instance_template.py
|
Python
|
gpl-3.0
| 19,433
| 0.000412
|
#!/usr/bin/python
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: gce_instance_template
version_added: "2.3"
short_description: create or destroy instance templates on Google Compute Engine.
description:
    - Creates or destroys Google Compute Engine instance templates
      on Google Cloud Platform.
options:
state:
description:
- The desired state for the instance template.
default: "present"
choices: ["present", "absent"]
name:
description:
- The name of the GCE instance template.
required: true
default: null
size:
description:
- The desired machine type for the instance template.
default: "f1-micro"
source:
description:
- A source disk to attach to the instance.
Cannot specify both I(image) and I(source).
default: null
image:
description:
- The image to use to create the instance.
      Cannot specify both I(image) and I(source).
default: null
image_family:
description:
- The image family to use to create the instance.
If I(image) has been used I(image_family) is ignored.
Cannot specify both I(image) and I(source).
default: null
disk_type:
description:
- Specify a C(pd-standard) disk or C(pd-ssd)
for an SSD disk.
default: pd-standard
disk_auto_delete:
description:
- Indicate that the boot disk should be
deleted when the Node is deleted.
default: true
network:
description:
- The network to associate with the instance.
default: "default"
subnetwork:
description:
- The Subnetwork resource name for this instance.
default: null
can_ip_forward:
description:
- Set to True to allow instance to
send/receive non-matching src/dst packets.
default: false
external_ip:
description:
- The external IP address to use.
If C(ephemeral), a new non-static address will be
used. If C(None), then no external address will
be used. To use an existing static IP address
specify address name.
default: "ephemeral"
service_account_email:
description:
- service account email
default: null
service_account_permissions:
description:
- service account permissions (see
U(https://cloud.google.com/sdk/gcloud/reference/compute/instances/create),
--scopes section for detailed information)
default: null
choices: [
"bigquery", "cloud-platform", "compute-ro", "compute-rw",
"useraccounts-ro", "useraccounts-rw", "datastore", "logging-write",
"monitoring", "sql-admin", "storage-full", "storage-ro",
"storage-rw", "taskqueue", "userinfo-email"
]
automatic_restart:
description:
- Defines whether the instance should be
automatically restarted when it is
terminated by Compute Engine.
default: null
preemptible:
description:
- Defines whether the instance is preemptible.
default: null
tags:
description:
- a comma-separated list of tags to associate with the instance
default: null
metadata:
description:
- a hash/dictionary of custom data for the instance;
'{"key":"value", ...}'
default: null
description:
description:
- description of instance template
default: null
disks:
description:
- a list of persistent disks to attach to the instance; a string value
gives the name of the disk; alternatively, a dictionary value can
define 'name' and 'mode' ('READ_ONLY' or 'READ_WRITE'). The first entry
will be the boot disk (which must be READ_WRITE).
default: null
nic_gce_struct:
description:
- Support passing in the GCE-specific
formatted networkInterfaces[] structure.
default: null
disks_gce_struct:
description:
- Support passing in the GCE-specific
      formatted disks[] structure. Case sensitive.
see U(https://cloud.google.com/compute/docs/reference/latest/instanceTemplates#resource) for detailed information
default: null
version_added: "2.4"
project_id:
description:
- your GCE project ID
default: null
pem_file:
description:
- path to the pem file associated with the service account email
This option is deprecated. Use 'credentials_file'.
default: null
credentials_file:
description:
- path to the JSON file associated with the service account email
default: null
subnetwork_region:
version_added: "2.4"
description:
- Region that subnetwork resides in. (Required for subnetwork to successfully complete)
default: null
requirements:
- "python >= 2.6"
- "apache-libcloud >= 0.13.3, >= 0.17.0 if using JSON credentials,
>= 0.20.0 if using preemptible option"
notes:
- JSON credentials strongly preferred.
author: "Gwenael Pellen (@GwenaelPellenArkeup) <gwenael.pellen@arkeup.com>"
'''
EXAMPLES = '''
# Usage
- name: create instance template named foo
gce_instance_template:
name: foo
size: n1-standard-1
image_family: ubuntu-1604-lts
state: present
project_id: "your-project-name"
credentials_file: "/path/to/your-key.json"
service_account_email: "your-sa@your-project-name.iam.gserviceaccount.com"
# Example Playbook
- name: Compute Engine Instance Template Examples
hosts: localhost
vars:
service_account_email: "your-sa@your-project-name.iam.gserviceaccount.com"
credentials_file: "/path/to/your-key.json"
project_id: "your-project-name"
tasks:
- name: create instance template
gce_instance_template:
name: my-test-instance-template
size: n1-standard-1
image_family: ubuntu-1604-lts
state: present
project_id: "{{ project_id }}"
credentials_file: "{{ credentials_file }}"
service_account_email: "{{ service_account_email }}"
- name: delete instance template
gce_instance_template:
name: my-test-instance-template
size: n1-standard-1
image_family: ubuntu-1604-lts
state: absent
project_id: "{{ project_id }}"
credentials_file: "{{ credentials_file }}"
service_account_email: "{{ service_account_email }}"
# Example playbook using disks_gce_struct
- name: Compute Engine Instance Template Examples
hosts: localhost
vars:
    service_account_email: "your-sa@your-project-name.iam.gserviceaccount.com"
credentials_file: "/path/to/your-key.json"
project_id: "your-project-name"
tasks:
- name: create instance template
gce_instance_template:
name: foo
size: n1-standard-1
state: present
project_id: "{{ project_id }}"
credentials_file: "{{ credentials_file }}"
service_account_email: "{{ service_account_email }}"
disks_gce_struct:
- device_name: /dev/sda
boot: true
autoDelete: true
initializeParams:
diskSizeGb: 30
diskType: pd-ssd
sourceImage: projects/debian-cloud/global/images/family/debian-8
'''
RETURN = '''
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.gce import gce_connect
try:
import libcloud
from libcloud.compute.types import Provider
from libcloud.compute.providers import get_driver
from libcloud.
|
arnavd96/Cinemiezer
|
myvenv/lib/python3.4/site-packages/music21/romanText/rtObjects.py
|
Python
|
mit
| 48,702
| 0.004312
|
# -*- coding: utf-8 -*-
#-------------------------------------------------------------------------------
# Name: romanText/rtObjects.py
# Purpose: music21 objects for processing roman numeral analysis text files
#
# Authors: Christopher Ariza
# Michael Scott Cuthbert
#
# Copyright: Copyright © 2011-2012 Michael Scott Cuthbert and the music21 Project
# License: LGPL or BSD, see license.txt
#-------------------------------------------------------------------------------
'''
Objects for processing roman numeral analysis text files, as defined and
demonstrated by Dmitri Tymoczko.
'''
#from __future__ import unicode_literals
from fractions import Fraction
import io
import re
import unittest
from music21 import common
from music21 import exceptions21
from music21 import environment
from music21 import key
_MOD = 'romanText.rtObjects.py'
environLocal = environment.Environment(_MOD)
# alternate endings might end with a, b, c; everything after
# the first number may appear zero or more times
reMeasureTag = re.compile(r'm[0-9]+[a-b]*-*[0-9]*[a-b]*')
reVariant = re.compile(r'var[0-9]+')
reVariantLetter = re.compile(r'var([A-Z]+)')
reNoteTag = re.compile(r'[Nn]ote:')
reOptKeyOpenAtom = re.compile(r'\?\([A-Ga-g]+[b#]*:')
reOptKeyCloseAtom = re.compile(r'\?\)[A-Ga-g]+[b#]*:?')
# ?g:( ?
reKeyAtom = re.compile('[A-Ga-g]+[b#]*;:')
reAnalyticKeyAtom = re.compile('[A-Ga-g]+[b#]*:')
reKeySignatureAtom = re.compile(r'KS\-?[0-7]')
# must distinguish b3 from bVII; there may be b1.66.5
reBeatAtom = re.compile(r'b[1-9.]+')
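# Illustrative matches (not from the source): reBeatAtom.match('b1.66.5')
# succeeds, while reBeatAtom.match('bVII') is None because no digit or dot
# follows the 'b'.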
reRepeatStartAtom = re.compile(r'\|\|\:')
reRepeatStopAtom = re.compile(r'\:\|\|')
reNoChordAtom = re.compile('NC')
#-------------------------------------------------------------------------------
class RomanTextException(exceptions21.Music21Exception):
pass
class RTTokenException(exceptions21.Music21Exception):
pass
class RTHandlerException(exceptions21.Music21Exception):
pass
class RTFileException(exceptions21.Music21Exception):
pass
#-------------------------------------------------------------------------------
class RTToken(object):
'''Stores each linear, logical entity of a RomanText.
A multi-pass parsing procedure is likely necessary, as RomanText permits
    a variety of groupings and markings.
>>> rtt = romanText.rtObjects.RTToken('||:')
>>> rtt
<RTToken '||:'>
A standard RTToken returns `False` for all of the following.
>>> rtt.isComposer() or rtt.isTitle() or rtt.isPiece()
False
>>> rtt.isAnalyst() or rtt.isProofreader()
False
>>> rtt.isTimeSignature() or rtt.isKeySignature() or rtt.isNote()
False
>>> rtt.isForm() or rtt.isPedal() or rtt.isMeasure() or rtt.isWork()
False
>>> rtt.isMovement() or rtt.isAtom()
False
'''
def __init__(self, src=u''):
self.src = src # store source character sequence
self.lineNumber = 0
def __repr__(self):
return '<RTToken %r>' % self.src
def isComposer(self):
return False
def isTitle(self):
return False
def isPiece(self):
return False
def isAnalyst(self):
return False
def isProofreader(self):
return False
def isTimeSignature(self):
return False
def isKeySignature(self):
return False
def isNote(self):
return False
def isForm(self):
'''Occasionally found in header.
'''
return False
def isMeasure(self):
return False
def isPedal(self):
return False
def isWork(self):
return False
def isMovement(self):
return False
def isAtom(self):
'''Atoms are any untagged data; generally only found inside of a
measure definition.
'''
return False
class RTTagged(RTToken):
    '''In RomanText, some data elements are tags: a tag name, a colon,
    optional whitespace, and then the data. In non-RTTagged elements, there is
    just data.
All tagged tokens are subclasses of this class. Examples are:
Title: Die Jahrzeiten
Composer: Fanny Mendelssohn
>>> rttag = romanText.rtObjects.RTTagged('Title: Die Jahrzeiten')
>>> rttag.tag
'Title'
>>> rttag.data
'Die Jahrzeiten'
>>> rttag.isTitle()
True
>>> rttag.isComposer()
False
'''
def __init__(self, src =u''):
RTToken.__init__(self, src)
# try to split off tag from data
self.tag = ''
self.data = ''
if ':' in src:
iFirst = src.find(':') # first index found at
self.tag = src[:iFirst].strip()
# add one to skip colon
self.data = src[iFirst+1:].strip()
else: # we do not have a clear tag; perhaps store all as data
self.data = src
def __repr__(self):
return '<RTTagged %r>' % self.src
def isComposer(self):
        '''True if the tag represents a composer.
>>> rth = romanText.rtObjects.RTTagged('Composer: Claudio Monteverdi')
>>> rth.isComposer()
True
>>> rth.isTitle()
False
>>> rth.isWork()
False
>>> rth.data
'Claudio Monteverdi'
'''
if self.tag.lower() in ['composer']:
return True
return False
def isTitle(self):
'''True if tag represents a title, otherwise False.
>>> tag = romanText.rtObjects.RTTagged('Title: This is a title.')
>>> tag.isTitle()
True
>>> tag = romanText.rtObjects.RTTagged('Nothing: Nothing at all.')
>>> tag.isTitle()
False
'''
if self.tag.lower() in ['title']:
return True
return False
def isPiece(self):
'''
True if tag represents a piece, otherwise False.
>>> tag = romanText.rtObjects.RTTagged('Piece: This is a piece.')
>>> tag.isPiece()
True
>>> tag = romanText.rtObjects.RTTagged('Nothing: Nothing at all.')
>>> tag.isPiece()
False
'''
if self.tag.lower() in ['piece']:
return True
return False
def isAnalyst(self):
        '''True if tag represents an analyst, otherwise False.
>>> tag = romanText.rtObjects.RTTagged('Analyst: This is an analyst.')
>>> tag.isAnalyst()
True
>>> tag = romanText.rtObjects.RTTagged('Nothing: Nothing at all.')
>>> tag.isAnalyst()
False
'''
if self.tag.lower() in ['analyst']:
return True
return False
def isProofreader(self):
'''True if tag represents a proofreader, otherwise False.
>>> tag = romanText.rtObjects.RTTagged('Proofreader: This is a proofreader.')
>>> tag.isProofreader()
True
>>> tag = romanText.rtObjects.RTTagged('Nothing: Nothing at all.')
>>> tag.isProofreader()
False
'''
if self.tag.lower() in ['proofreader', 'proof reader']:
return True
return False
def isTimeSignature(self):
'''True if tag represents a time signature, otherwise False.
>>> tag = romanText.rtObjects.RTTagged('TimeSignature: This is a time signature.')
>>> tag.isTimeSignature()
True
>>> tag = romanText.rtObjects.RTTagged('Nothing: Nothing at all.')
>>> tag.isTimeSignature()
False
TimeSignature header data can be found intermingled with measures.
'''
if self.tag.lower() in ['timesignature', 'time signature']:
return True
return False
def isKeySignature(self):
'''True if tag represents a key signature, otherwise False.
        >>> tag = romanText.rtObjects.RTTagged('KeySignature: This is a key signature.')
>>> tag.isKeySignature()
True
>>> tag = romanText.rtObjects.RTTagged('Nothing: Nothing at all.')
>>> tag.isKeySignature()
False
KeySignatures are a type of tagged data found outside of measures,
such as "Key Signature: Bb," meaning one flat.
Note: this is not the same as a key de
|
lmazuel/azure-sdk-for-python
|
azure-mgmt-monitor/azure/mgmt/monitor/models/webhook_receiver_py3.py
|
Python
|
mit
| 1,347
| 0
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class WebhookReceiver(Model):
"""A webhook receiver.
All required parameters must be populated in order to send to Azure.
    :param name: Required. The name of the webhook receiver. Names must be
     unique across all receivers within an action group.
:type name: str
:param service_uri: Required. The URI where webhooks should be sent.
:type service_uri: str
"""
_validation = {
'name': {'required': True},
'service_uri': {'required': True},
}
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'service_uri': {'key': 'serviceUri', 'type': 'str'},
}
def __init__(self, *, name: str, service_uri: str, **kwargs) -> None:
super(WebhookReceiver, self).__init__(**kwargs)
self.name = name
self.service_uri = service_uri
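# A minimal construction sketch (the name and URI below are illustrative):
#
#   receiver = WebhookReceiver(name="ops-hook",
#                              service_uri="https://contoso.example/alert")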
|
bbreslauer/PySciPlot
|
src/ui/Ui_ExportData.py
|
Python
|
gpl-3.0
| 13,612
| 0.004187
|
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'Ui_ExportData.ui'
#
# Created: Sat May 28 00:16:57 2011
# by: PyQt4 UI code generator 4.8.4
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore, QtGui
try:
_fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
_fromUtf8 = lambda s: s
class Ui_ExportData(object):
def setupUi(self, ExportData):
ExportData.setObjectName(_fromUtf8("ExportData"))
ExportData.resize(354, 527)
self.verticalLayout_5 = QtGui.QVBoxLayout(ExportData)
self.verticalLayout_5.setObjectName(_fromUtf8("verticalLayout_5"))
self.groupBox_2 = QtGui.QGroupBox(ExportData)
self.groupBox_2.setObjectName(_fromUtf8("groupBox_2"))
self.gridLayout = QtGui.QGridLayout(self.groupBox_2)
self.gridLayout.setObjectName(_fromUtf8("gridLayout"))
self.label = QtGui.QLabel(self.groupBox_2)
self.label.setObjectName(_fromUtf8("label"))
self.gridLayout.addWidget(self.label, 0, 0, 1, 1)
self.fileName = QtGui.QLineEdit(self.groupBox_2)
self.fileName.setObjectName(_fromUtf8("fileName"))
self.gridLayout.addWidget(self.fileName, 0, 1, 1, 1)
self.label_2 = QtGui.QLabel(self.groupBox_2)
self.label_2.setObjectName(_fromUtf8("label_2"))
self.gridLayout.addWidget(self.label_2, 1, 0, 1, 1)
self.outputType = QtGui.QComboBox(self.groupBox_2)
self.outputType.setObjectName(_fromUtf8("outputType"))
self.outputType.addItem(_fromUtf8(""))
self.gridLayout.addWidget(self.outputType, 1, 1, 1, 2)
self.stackedWidget = QtGui.QStackedWidget(self.groupBox_2)
self.stackedWidget.setObjectName(_fromUtf8("stackedWidget"))
self.delimitedStackedWidget = QtGui.QWidget()
self.delimitedStackedWidget.setObjectName(_fromUtf8("delimitedStackedWidget"))
self.gridLayout_2 = QtGui.QGridLayout(self.delimitedStackedWidget)
self.gridLayout_2.setObjectName(_fromUtf8("gridLayout_2"))
self.label_3 = QtGui.QLabel(self.delimitedStackedWidget)
self.label_3.setObjectName(_fromUtf8("label_3"))
self.gridLayout_2.addWidget(self.label_3, 0, 0, 1, 1)
self.delimitedDelimiterGroupBox = QtGui.QGroupBox(self.delimitedStackedWidget)
self.delimitedDelimiterGroupBox.setTitle(_fromUtf8(""))
self.delimitedDelimiterGroupBox.setObjectName(_fromUtf8("delimitedDelimiterGroupBox"))
self.horizontalLayout = QtGui.QHBoxLayout(self.delimitedDelimiterGroupBox)
self.horizontalLayout.setContentsMargins(2, 0, 0, 0)
self.horizontalLayout.setObjectName(_fromUtf8("horizontalLayout"))
self.delimitedCommaRadio = QtGui.QRadioButton(self.delimitedDelimiterGroupBox)
self.delimitedCommaRadio.setChecked(True)
self.delimitedCommaRadio.setObjectName(_fromUtf8("delimitedCommaRadio"))
self.delimiterButtonGroup = QtGui.QButtonGroup(ExportData)
self.delimiterButtonGroup.setObjectName(_fromUtf8("delimiterButtonGroup"))
        self.delimiterButtonGroup.addButton(self.delimitedCommaRadio)
self.horizontalLayout.addWidget(self.delimitedCommaRadio)
self.delimitedTabRadio = QtGui.QRadioButton(self.delimitedDelimiterGroupBox)
self.delimitedTabRadio.setObjectName(_fromUtf8("delimitedTabRadio"))
self.delimiterButtonGroup.addButton(self.delimitedTabRadio)
self.horizontalLayout.addWidget(self.delimitedTabRadio)
self.delimitedOtherRadio = QtGui.QRadioButton(self.delimitedDelimiterGroupBox)
self.delimitedOtherRadio.setObjectName(_fromUtf8("delimitedOtherRadio"))
self.delimiterButtonGroup.addButton(self.delimitedOtherRadio)
self.horizontalLayout.addWidget(self.delimitedOtherRadio)
self.delimitedOtherDelimiter = QtGui.QLineEdit(self.delimitedDelimiterGroupBox)
self.delimitedOtherDelimiter.setEnabled(False)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Fixed, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.delimitedOtherDelimiter.sizePolicy().hasHeightForWidth())
self.delimitedOtherDelimiter.setSizePolicy(sizePolicy)
self.delimitedOtherDelimiter.setMaximumSize(QtCore.QSize(20, 16777215))
self.delimitedOtherDelimiter.setBaseSize(QtCore.QSize(0, 0))
font = QtGui.QFont()
font.setPointSize(12)
self.delimitedOtherDelimiter.setFont(font)
self.delimitedOtherDelimiter.setMaxLength(1)
self.delimitedOtherDelimiter.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignTop)
self.delimitedOtherDelimiter.setObjectName(_fromUtf8("delimitedOtherDelimiter"))
self.horizontalLayout.addWidget(self.delimitedOtherDelimiter)
self.horizontalLayout.setStretch(0, 5)
self.horizontalLayout.setStretch(1, 5)
self.gridLayout_2.addWidget(self.delimitedDelimiterGroupBox, 0, 1, 1, 1)
self.label_4 = QtGui.QLabel(self.delimitedStackedWidget)
self.label_4.setObjectName(_fromUtf8("label_4"))
self.gridLayout_2.addWidget(self.label_4, 1, 0, 1, 1)
self.delimitedDataDirectionGroupBox = QtGui.QGroupBox(self.delimitedStackedWidget)
self.delimitedDataDirectionGroupBox.setTitle(_fromUtf8(""))
self.delimitedDataDirectionGroupBox.setObjectName(_fromUtf8("delimitedDataDirectionGroupBox"))
self.horizontalLayout_3 = QtGui.QHBoxLayout(self.delimitedDataDirectionGroupBox)
self.horizontalLayout_3.setContentsMargins(2, 0, 0, 0)
self.horizontalLayout_3.setObjectName(_fromUtf8("horizontalLayout_3"))
self.dataDirectionColumns = QtGui.QRadioButton(self.delimitedDataDirectionGroupBox)
self.dataDirectionColumns.setChecked(True)
self.dataDirectionColumns.setObjectName(_fromUtf8("dataDirectionColumns"))
self.dataDirectionButtonGroup = QtGui.QButtonGroup(ExportData)
self.dataDirectionButtonGroup.setObjectName(_fromUtf8("dataDirectionButtonGroup"))
self.dataDirectionButtonGroup.addButton(self.dataDirectionColumns)
self.horizontalLayout_3.addWidget(self.dataDirectionColumns)
self.dataDirectionRows = QtGui.QRadioButton(self.delimitedDataDirectionGroupBox)
self.dataDirectionRows.setChecked(False)
self.dataDirectionRows.setObjectName(_fromUtf8("dataDirectionRows"))
self.dataDirectionButtonGroup.addButton(self.dataDirectionRows)
self.horizontalLayout_3.addWidget(self.dataDirectionRows)
self.gridLayout_2.addWidget(self.delimitedDataDirectionGroupBox, 1, 1, 1, 1)
self.stackedWidget.addWidget(self.delimitedStackedWidget)
self.page_2 = QtGui.QWidget()
self.page_2.setObjectName(_fromUtf8("page_2"))
self.stackedWidget.addWidget(self.page_2)
self.gridLayout.addWidget(self.stackedWidget, 2, 0, 1, 3)
self.fileNameButton = QtGui.QPushButton(self.groupBox_2)
self.fileNameButton.setObjectName(_fromUtf8("fileNameButton"))
self.gridLayout.addWidget(self.fileNameButton, 0, 2, 1, 1)
self.verticalLayout_5.addWidget(self.groupBox_2)
self.groupBox_3 = QtGui.QGroupBox(ExportData)
self.groupBox_3.setObjectName(_fromUtf8("groupBox_3"))
self.horizontalLayout_2 = QtGui.QHBoxLayout(self.groupBox_3)
self.horizontalLayout_2.setObjectName(_fromUtf8("horizontalLayout_2"))
self.verticalLayout = QtGui.QVBoxLayout()
self.verticalLayout.setObjectName(_fromUtf8("verticalLayout"))
self.label_6 = QtGui.QLabel(self.groupBox_3)
self.label_6.setObjectName(_fromUtf8("label_6"))
self.verticalLayout.addWidget(self.label_6)
self.allWavesListView = QtGui.QListView(self.groupBox_3)
self.allWavesListView.setSelectionMode(QtGui.QAbstractItemView.ExtendedSelection)
self.allWavesListView.setObjectName(_fromUtf8("allWavesListView"))
self.verticalLayout.addWidget(self.allWavesListView)
self.horizontalLayout_2.addLayout(self.verticalLayout)
self.verticalLayout_2 = QtGui.QVBoxLayo
|
vileopratama/vitech
|
src/addons/point_of_sale/report/pos_receipt.py
|
Python
|
mit
| 2,154
| 0.003714
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
import time
from openerp.osv import osv
from openerp.report import report_sxw
def titlize(journal_name):
words = journal_name.split()
while words.pop() != 'journal':
continue
return ' '.join(words)
class order(report_sxw.rml_parse):
def __init__(self, cr, uid, name, context):
super(order, self).__init__(cr, uid, name, context=context)
user = self.pool['res.users'].browse(cr, uid, uid, context=context)
partner = user.company_id.partner_id
self.localcontext.update({
'time': time,
'disc': self.discount,
'net': self.netamount,
'get_journal_amt': self._get_journal_amt,
'address': partner or False,
'titlize': titlize
})
def netamount(self, order_line_id):
sql = 'select (qty*price_unit) as net_price from pos_order_line where id = %s'
self.cr.execute(sql, (order_line_id,))
res = self.cr.fetchone()
return res[0]
def discount(self, order_id):
sql = 'select discount, price_unit, qty from pos_order_line where order_id = %s '
self.cr.execute(sql, (order_id,))
res = self.cr.fetchall()
dsum = 0
for line in res:
if line[0] != 0:
dsum = dsum +(line[2] * (line[0]*line[1]/100))
return dsum
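        # Worked example (illustrative values): a line with qty=2,
        # price_unit=10.0 and discount=25 contributes
        # 2 * (25 * 10.0 / 100) = 5.0 to dsum.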
def _get_journal_amt(self, order_id):
data={}
sql = """ select aj.name,absl.amount as amt from account_bank_statement as abs
LEFT JOIN account_bank_statement_line as absl ON abs.id = absl.statement_id
LEFT JOIN account_journal as aj ON aj.id = abs.journal_id
WHERE absl.pos_statement_id =%d"""%(order_id)
self.cr.execute(sql)
data = self.cr.dictfetchall()
return data
class report_order_receipt(osv.AbstractModel):
_name = 'report.point_of_sale.report_receipt'
_inherit = 'report.abstract_report'
_template = 'point_of_sale.report_receipt'
_wrapped_report_class = order
|
|
Oreder/PythonSelfStudy
|
TestModule/Tmin.py
|
Python
|
mit
| 122
| 0
|
def Tmin(arg0, *args):
_min = arg0
for arg in args:
if arg < _min:
            _min = arg
return _min
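# Usage sketch (illustrative): Tmin(3, 1, 2) returns 1; Tmin('b', 'a') returns 'a'.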
|
netkicorp/addressimo
|
addressimo/config.py
|
Python
|
bsd-3-clause
| 2,378
| 0.002103
|
__author__ = 'mdavid'
from attrdict import AttrDict
import os
# Addressimo Configuration
config = AttrDict()
# General Setup
config.site_url = 'addressimo.netki.com'
config.cache_loader_process_pool_size = 4
config.cache_loader_blocktx_pool_size = 15
config.bip32_enabled = True
config.bip70_enabled = True
config.bip70_default_amount = 0
config.bip70_default_expiration = 900
config.bip72_compatability = True
config.bip70_audit_log = True
config.bip70_payment_expiration_days = 61
config.ir_expiration_days = 30
config.rpr_expiration_days = 16
config.ir_nonce_allowable = 5
config.ir_nonce_db_maxkeys = 100000000
config.old_nonce_cleanup_size = 1000
config.paymentprotocol_message_expiration_days = 7
# Operational Modes
config.store_and_forward_only = True
# Presigned Payment Request
config.presigned_pr_limit = 100
# Path Configuration
config.home_dir = '/Users/frank/PycharmProjects/addressimo/addressimo'
config.plugin_directories = [
'logger',
'resolvers',
'signer'
]
redis_uri = 'redis://localhost:6379'
if 'ADDRESSIMO_REDIS_URI' in os.environ:
redis_uri = os.environ['ADDRESSIMO_REDIS_URI']
# Redis Setup
config.redis_id_obj_uri = '%s/1' % redis_uri
config.redis_tx_map_uri = '%s/2' % redis_uri
config.redis_tx_uri = '%s/3' % redis_uri
config.redis_pr_store = '%s/3' % redis_uri
config.redis_payment_store = '%s/4' % redis_uri
config.redis_logdb_uri = '%s/6' % redis_uri
config.redis_address_branch_uri = '%s/13' % redis_uri
config.redis_addr_cache_uri = '%s/14' % redis_uri
config.redis_ratelimit_uri = '%s/15' % redis_uri
# Object Configuration
config.resolver_type = 'REDIS'
config.signer_type = 'LOCAL'
# Logging Plugin Setup
config.logger_type = 'LOCAL'
config.logger_api_endpoint = 'https://auditor.mydomain.com/log'
# Bitcoin Setup
config.bitcoin_user = 'bitcoinrpc'
config.bitcoin_pass = '03fd3f1cba637e40e984611b50bed238'
config.cache_blockheight_threshold = 2
config.payment_submit_tx_retries = 5
# Admin public key for authenticating signatures for signed requests to get_branches endpoint (hex encoded).
# That endpoint is used for HD wallets to retrieve which branches Addressimo has served addresses for
config.admin_public_key = 'ac79cd6b0ac5f2a6234996595cb2d91fceaa0b9d9a6495f12f1161c074587bd19ae86928bddea635c930c09ea9c7de1a6a9c468f9afd18fbaeed45d09564ded6'
#config.signer_api_endpoint = 'https://signer.mydomain.com/sign'
|
sauloal/cnidaria
|
scripts/venv/lib/python2.7/site-packages/cogent/app/ilm.py
|
Python
|
mit
| 3,567
| 0.017101
|
#!/usr/bin/env python
from cogent.app.util import CommandLineApplication,\
CommandLineAppResult, ResultPath
from cogent.app.parameters import Parameter,ValuedParameter,Parameters
__author__ = "Shandy Wikman"
__copyright__ = "Copyright 2007-2012, The Cogent Project"
__contributors__ = ["Shandy Wikman"]
__license__ = "GPL"
__version__ = "1.5.3"
__maintainer__ = "Shandy Wikman"
__email__ = "ens01svn@cs.umu.se"
__status__ = "Development"
class ILM(CommandLineApplication):
"""Application controller ILM application
Predict a secondary structure given a score matrix
Main options:
-L l: minimum loop length (default=3)
-V v: minimum virtual loop length (default=3)
-H h: minimum helix length (default=3)
-N n: number of helices selected per iteration (default=1)
-I i: number of iterations before termination(default=unlimited)
"""
_parameters = {
'-L':ValuedParameter(Prefix='-',Name='L',Delimiter=' '),
        '-V':ValuedParameter(Prefix='-',Name='V',Delimiter=' '),
'-H':ValuedParameter(Prefix='-',Name='H',Delimiter=' '),
'-N':ValuedParameter(Prefix='-',Name='N',Delimiter=' '),
'-I':ValuedParameter(Prefix='-',Name='I',Delimiter=' ')}
_command = 'ilm'
_input_handler = '_input_as_string'
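# A hedged usage sketch (assumes PyCogent's app-controller calling convention;
# 'scores.mat' is an illustrative input file, not part of this module):
#
#   ilm = ILM()
#   ilm.Parameters['-L'].on(4)   # minimum loop length, per the options above
#   result = ilm('scores.mat')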
class hlxplot(CommandLineApplication):
"""Application controller hlxplot application
Compute a helix plot score matrix from a sequence alignment
Options:
-b B: Set bad pair penalty to B
(Default = 2)
-g G: Set good pair score to G
(Default = 1)
-h H: Set minimum helix length to H
(Default = 2)
-l L: Set minimum loop length to L
(Default = 3)
-s S: Set helix length score to S
(Default = 2.0)
-t : Write output in text format
(Default = Binary format)
-x X: Set paired gap penalty to X
(Default = 3)
"""
_parameters = {
'-b':ValuedParameter(Prefix='-',Name='b',Delimiter=' '),
'-g':ValuedParameter(Prefix='-',Name='g',Delimiter=' '),
'-h':ValuedParameter(Prefix='-',Name='h',Delimiter=' '),
'-l':ValuedParameter(Prefix='-',Name='l',Delimiter=' '),
'-s':ValuedParameter(Prefix='-',Name='s',Delimiter=' '),
'-t':ValuedParameter(Prefix='-',Name='t',Delimiter=' '),
'-x':ValuedParameter(Prefix='-',Name='x',Delimiter=' ')}
_command = 'hlxplot'
_input_handler = '_input_as_string'
class xhlxplot(CommandLineApplication):
"""Application controller xhlxplot application
Compute an extended helix plot score matrix from a single sequence
Options:
-b B: Set bad pair penalty to B
(Default = 200)
-h H: Set minimum helix length to H
(Default = 2)
-l L: Set minimum loop length to L
(Default = 3)
-x X: Set paired gap penalty to X
(Default = 500)
-t : Write output in text format
(Default = Binary format)
-c : No Closing GU
(Default = allows closing GU)
"""
_parameters = {
'-b':ValuedParameter(Prefix='-',Name='b',Delimiter=' '),
'-h':ValuedParameter(Prefix='-',Name='h',Delimiter=' '),
'-l':ValuedParameter(Prefix='-',Name='l',Delimiter=' '),
'-x':ValuedParameter(Prefix='-',Name='x',Delimiter=' '),
'-t':ValuedParameter(Prefix='-',Name='t',Delimiter=' '),
'-c':ValuedParameter(Prefix='-',Name='c',Delimiter=' ')}
_command = 'xhlxplot'
_input_handler = '_input_as_string'
|
ankurjimmy/catawampus
|
tr/vendor/tornado/maint/appengine/py27/cgi_runtests.py
|
Python
|
apache-2.0
| 25
| 0.04
|
../common/cgi_runtests.py
|