repo_name stringlengths 5 100 | path stringlengths 4 231 | language stringclasses 1 value | license stringclasses 15 values | size int64 6 947k | score float64 0 0.34 | prefix stringlengths 0 8.16k | middle stringlengths 3 512 | suffix stringlengths 0 8.17k |
|---|---|---|---|---|---|---|---|---|
jlhg/bdorpy | bdorpy/idfix.py | Python | mit | 912 | 0.004386 | #!/usr/bin/env python3
#
# idfix.py
#
# Copyright (C) 2013, Jian-Long Huang
# Licensed under The MIT License
# http://opensource.org/licenses/MIT
#
# Author: Jian-Long Huang (jianlong@ntu.edu.tw)
# Version: 0.1
# Created: 2013.1.7
#
# Usage: idfix.py <idmap> <fasta> <output>
import sys
def | main():
with open(sys.argv[1], 'r') as fin, open(sys.argv[2], 'r') as ffa, open( | sys.argv[3], 'w') as fo:
full_id = {}
for line in ffa:
if line[0] != '>':
continue
full_id.update({line.split()[0].split('|')[-2]: line.split()[0].split('>')[1]})
for line in fin:
if line.split('\t')[1].rstrip() in full_id:
fo.write(line.split('\t')[0] + '\t' + full_id[line.split('\t')[1].rstrip()] + '\n')
fo.flush()
else:
sys.exit('Id not found, stop!')
if __name__ == '__main__':
main()
|
pyrocko/pyrocko | test/base/test_response_plot.py | Python | gpl-3.0 | 2,397 | 0 | from __future__ import absolute_import
import unittest
import tempfile
import shutil
import os
import numpy as num
from matplotlib import image, pyplot as plt
from pyrocko import util
from pyrocko.plot import response
from .. import common
noshow = True
class ResponsePlotTestCase(unittest.TestCase):
def setUp(self):
self.tempdir = tempfile.mkdtemp()
def tearDown(self):
shutil.rmtree(self.tempdir)
def fpath(self, fn):
return os.path.join(self.tempdir, fn)
def fpath_ref(self, fn):
try:
return common.test_data_file | (fn)
except util.DownloadError:
return common.test_data_file_no_download(fn)
def compare_with_ref(self, fname, tolerance, show=False):
fpath = self.fpath(fname)
fpath_ref = self.fpath_ref(fname)
if not os.path.exists(fpath_ref):
shutil.copy(fpath, fpath_ref)
img = image.imread(fpath)
im | g_ref = image.imread(fpath_ref)
self.assertEqual(img.shape, img_ref.shape)
d = num.abs(img - img_ref)
merr = num.mean(d)
if (merr > tolerance or show) and not noshow:
fig = plt.figure()
axes1 = fig.add_subplot(1, 3, 1, aspect=1.)
axes2 = fig.add_subplot(1, 3, 2, aspect=1.)
axes3 = fig.add_subplot(1, 3, 3, aspect=1.)
axes1.imshow(img)
axes1.set_title('Candidate')
axes2.imshow(img_ref)
axes2.set_title('Reference')
axes3.imshow(d)
axes3.set_title('Mean abs difference: %g' % merr)
plt.show()
plt.close(fig)
assert merr <= tolerance
def test_response_plot(self):
for fn, format in [
('test1.resp', 'resp'),
('test1.sacpz', 'sacpz')]:
fpath_resp = common.test_data_file(fn)
fname = 'test_response_plot_%s.png' % fn
fpath_png = self.fpath(fname)
resps, labels = response.load_response_information(
fpath_resp, format)
labels = [lab[len(fpath_resp)+1:] or 'dummy' for lab in labels]
response.plot(
responses=resps, labels=labels, filename=fpath_png, dpi=50)
# self.compare_with_ref(fname, 0.01)
if __name__ == "__main__":
util.setup_logging('test_response_plot', 'warning')
unittest.main()
|
Alberto-Beralix/Beralix | i386-squashfs-root/usr/lib/python2.7/dist-packages/Crypto/SelfTest/Cipher/common.py | Python | gpl-3.0 | 65 | 0.015385 | ../../../../. | ./../share/pyshared/Crypto/SelfTest/Ci | pher/common.py |
MauHernandez/cyclope | cyclope/apps/medialibrary/admin.py | Python | gpl-3.0 | 6,248 | 0.001761 | #!/usr/bin/env | python
# -*- coding: utf-8 -*-
#
# Copyright 2010-2013 Código Sur Sociedad Civil.
# All rights reserved.
#
# This file is part of Cyclope.
#
# Cyclope is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Cyclope is distribute | d in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import os
from django.conf import settings
from django.contrib import admin
from django import forms
from django.db import models
from cyclope import settings as cyc_settings
from cyclope.core.collections.admin import CollectibleAdmin
from cyclope.admin import BaseContentAdmin
from models import *
from filebrowser.fields import FileBrowseField
from filebrowser.base import FileObject
from filebrowser.functions import handle_file_upload, convert_filename
# This is a standard ClearableFileInput.
# We just need to "translate" some data from the FileBrowseField
class CustomFileInput(forms.widgets.ClearableFileInput):
def render(self, name, value, attrs=None):
# FileBrowseField has no url attribute so we set url to url_full
if type(value) == FileObject:
value.url = value.url_full
return super(CustomFileInput, self).render(name, value, attrs)
class MediaAdmin(CollectibleAdmin, BaseContentAdmin):
inlines = CollectibleAdmin.inlines + BaseContentAdmin.inlines
search_fields = ('name', 'description', )
list_filter = CollectibleAdmin.list_filter + ('creation_date',)
def get_form(self, request, obj=None, **kwargs):
media_file_field = self.model.media_file_field
image_file_field = self.model.image_file_field
form = super(MediaAdmin, self).get_form(request, obj, **kwargs)
simple_widgets = False
if not request.user.is_superuser:
simple_widgets = True
form.base_fields[media_file_field].widget = CustomFileInput()
if image_file_field:
form.base_fields[image_file_field].widget = CustomFileInput()
form.simple = simple_widgets
if obj:
form.media_file_initial = getattr(obj, media_file_field)
# This is a hack; if the field is required it will fail validation
# when the user does not upload a file.
# TODO(nicoechaniz): implement proper validation for this case
form.base_fields[media_file_field].required = False
if image_file_field:
form.image_file_initial = getattr(obj, image_file_field)
form.base_fields[image_file_field].required = False
return form
has_thumbnail = [Picture, MovieClip, FlashMovie]
def media_admin_factory(media_model):
class MediaLibraryForm(forms.ModelForm):
def __init__(self, *args, **kwargs):
super(MediaLibraryForm, self).__init__(*args, **kwargs)
author_choices = [('', '------')]
for author in Author.objects.all():
if media_model in [ctype.model_class()
for ctype in author.content_types.all()]:
author_choices.append((author.id, author.name))
self.fields['author'].choices = author_choices
def save(self, *args, **kwargs):
# We override the standard behavior because we've overriden the FileBrowseField
# with a simple ClearableFileInput
if self.simple:
abs_paths = {}
instance = super(MediaLibraryForm, self).save(commit=False)
image_file_field = instance.image_file_field
file_fields = [ instance.media_file_field ]
if image_file_field:
file_fields.append(image_file_field)
for f_field in file_fields:
folder = media_model._meta.get_field_by_name(f_field)[0].directory
abs_paths[f_field] = os.path.join(
settings.MEDIA_ROOT, settings.FILEBROWSER_DIRECTORY, folder
)
if f_field in self.files.keys():
f = self.files[f_field]
f.name = convert_filename(f.name)
name = handle_file_upload(abs_paths[f_field], f)
setattr(instance, f_field, name)
else:
# TODO(nicoechaniz): this is ugly! refactor
if f_field in ["image", "still"]:
if hasattr(self, "image_file_initial"):
setattr(instance, f_field, self.image_file_initial)
else:
if hasattr(self, "media_file_initial"):
setattr(instance, f_field, self.media_file_initial)
instance.save()
return instance
else:
return super(MediaLibraryForm, self).save(*args, **kwargs)
class Meta:
model = media_model
if media_model in has_thumbnail:
list_display = ['name', 'published', 'thumbnail']
else:
list_display = ['name', 'published']
list_display += CollectibleAdmin.list_display
return type('%sAdmin' % media_model.__name__,
(MediaAdmin,),
{'form': MediaLibraryForm, 'list_display': list_display})
admin.site.register(Picture, media_admin_factory(Picture))
admin.site.register(SoundTrack, media_admin_factory(SoundTrack))
admin.site.register(MovieClip, media_admin_factory(MovieClip))
admin.site.register(Document, media_admin_factory(Document))
admin.site.register(FlashMovie, media_admin_factory(FlashMovie))
admin.site.register(RegularFile, media_admin_factory(RegularFile))
admin.site.register(ExternalContent, media_admin_factory(ExternalContent))
|
ProfessorX/Config | .PyCharm30/system/python_stubs/-1247972723/PyQt4/QtGui/__init__/QGradient.py | Python | gpl-2.0 | 2,965 | 0.009106 | # encoding: utf-8
# module PyQt4.QtGui
# from /usr/lib/python2.7/dist-packages/PyQt4/QtGui.so
# by generator 1.135
# no doc
# i | mports
import PyQt4.QtCore as __PyQt4_QtCore
class QGradient(): # skipped bases: <type 'sip.simplewrapper'>
"""
QGradient()
QGradient(QGradient)
"""
def coordinateMode(self): # real si | gnature unknown; restored from __doc__
""" QGradient.coordinateMode() -> QGradient.CoordinateMode """
pass
def setColorAt(self, p_float, QColor): # real signature unknown; restored from __doc__
""" QGradient.setColorAt(float, QColor) """
pass
def setCoordinateMode(self, QGradient_CoordinateMode): # real signature unknown; restored from __doc__
""" QGradient.setCoordinateMode(QGradient.CoordinateMode) """
pass
def setSpread(self, QGradient_Spread): # real signature unknown; restored from __doc__
""" QGradient.setSpread(QGradient.Spread) """
pass
def setStops(self, list_of_tuple_of_float_QColor): # real signature unknown; restored from __doc__
""" QGradient.setStops(list-of-tuple-of-float-QColor) """
pass
def spread(self): # real signature unknown; restored from __doc__
""" QGradient.spread() -> QGradient.Spread """
pass
def stops(self): # real signature unknown; restored from __doc__
""" QGradient.stops() -> list-of-tuple-of-float-QColor """
pass
def type(self): # real signature unknown; restored from __doc__
""" QGradient.type() -> QGradient.Type """
pass
def __eq__(self, y): # real signature unknown; restored from __doc__
""" x.__eq__(y) <==> x==y """
pass
def __ge__(self, y): # real signature unknown; restored from __doc__
""" x.__ge__(y) <==> x>=y """
pass
def __gt__(self, y): # real signature unknown; restored from __doc__
""" x.__gt__(y) <==> x>y """
pass
def __init__(self, QGradient=None): # real signature unknown; restored from __doc__ with multiple overloads
pass
def __le__(self, y): # real signature unknown; restored from __doc__
""" x.__le__(y) <==> x<=y """
pass
def __lt__(self, y): # real signature unknown; restored from __doc__
""" x.__lt__(y) <==> x<y """
pass
def __ne__(self, y): # real signature unknown; restored from __doc__
""" x.__ne__(y) <==> x!=y """
pass
__weakref__ = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""list of weak references to the object (if defined)"""
ConicalGradient = 2
CoordinateMode = None # (!) real value is ''
LinearGradient = 0
LogicalMode = 0
NoGradient = 3
ObjectBoundingMode = 2
PadSpread = 0
RadialGradient = 1
ReflectSpread = 1
RepeatSpread = 2
Spread = None # (!) real value is ''
StretchToDeviceMode = 1
Type = None # (!) real value is ''
|
mzmmoazam/DrawBot | nlu.py | Python | apache-2.0 | 2,221 | 0.010806 | import json
import apiai
CLIENT_ACCESS_TOKEN = 'api key'
def nlu(mytext):
ai = apiai.ApiAI(CLIENT_ACCESS_TOKEN)
request = ai.text_request()
request.lang = 'en' # optional, default value equal 'en'
# request.session_id = "<SESSION ID, UBIQUE FOR EACH USER>"
request.query = mytext
response = request.getresponse()
string = response.read().decode('utf-8')
json_obj = json.loads(string)
# {'hexagon': '', 'rhombus': '', 'circle': '', 'circle_direction': '', 'rect_direction': 'down', 'rhombus_direction': '', 'triangle': 'Triangle', 'triangle_direction': '', 'square_direction': '', 'square': '', 'hexagon_direction': '', 'rect': '', 'number': [10]}
# if json_obj['result']['action']=='x direction y by z units'
# for i in json_obj:
# print(i,"\t",json_obj[i])
# print(json_obj['result']['metadata']['intentName'])
# pr | int(json_obj['result']['actionIncomplete'])
# print(json_obj['result']['parameters'])
if json_obj['result']['metadata']['intentName'] != 'x direction y by z units':
return json.dumps({"action":json_obj['result']['action'],"message":json_obj['result']['fulfillment']['speech'],'two_figures':False})
else:
a=[]
di=json_obj['result']['parameters']
for i in di :
if di[i]=='':
a.append(i)
for i in a:
| del di[i]
# di["two_figures"]=True
result={}
result['by'] = di['number'][0]
del di['number']
# {'number': [10], 'triangle': 'Triangle', 'rect_direction': 'down'}
for i in ['rect_direction','circle_direction','triangle_direction','rhombus_direction','hexagon_direction','square_direction']:
if i in di:
result['second']=i.split('_')[0].lower()
result['direction']=di[i]
del di[i]
for i in di:
result['first']=di[i]
result["two_figures"] = True
print(result)
return json.dumps(result)
# {'two_figures': True, 'first': 'rect', 'second': 'Triangle', 'by': 10, 'direction': 'down'}
# nlu("rectangle below triangle by 10 units")
|
kevin-brown/drf-nested-viewsets | rest_nested_viewsets/routers.py | Python | mit | 1,254 | 0 | from rest_framework import routers
class NestedRouter(routers.DefaultRouter):
"""
Custom router which hides nested URLs as they cannot be reversed.
"""
def get_api_root_view(self):
"""
Return a view to use as the API root.
Ignores any URLs which do not reverse correctly.
"""
from django.core.urlresolvers import NoReverseMatch
from rest_framework import views
from rest_framework.response import Response
from rest_framework.urlresolvers import reverse
api_root_dict = {}
list_name = self.routes[0].name
for prefix, viewset, basename in self.registry:
api_root_dict[prefix] = list_name.format(basename=basename)
class APIRoot(views.APIView):
_ignore_model_permissions = True
def get(self, request, format=None):
ret = {}
for key, url_name in api_root_dict.items():
try:
ret[key] = reverse(url_name, request=request,
format=fo | rmat)
| except NoReverseMatch:
pass
return Response(ret)
return APIRoot.as_view()
|
SergeySatskiy/codimension | codimension/ui/parsererrors.py | Python | gpl-3.0 | 3,321 | 0 | # -*- coding: utf-8 -*-
#
# codimension - graphics python two-way code editor and analyzer
# Copyright (C) 2010-2017 Sergey Satskiy <sergey.satskiy@gmail.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
"""Python code parser errors dialog"""
from os.path import exists, basename
from utils.globals import GlobalData
from utils.fileutils import isPythonFile
from utils.colorfont import getZoomedMonoFont
from .qt import (Qt, QDialog, QTextEdit, QDialogButtonBox, QVBoxLayout,
QSizePolicy)
from .labels import FitLabel
class ParserErrorsDialog(QDialog):
"""Python code parser errors dialog implementation"""
def __init__(self, fileName, info=None, parent=None):
QDialog.__init__(self, parent)
if info is None:
if not exists(fileName):
raise Exception('Cannot open ' + fileName)
if not isPythonFile(fileName):
raise Exception('Unexpected file type (' + fileName +
'). A python file is expected.')
self.__createLayout(fileName, info)
self.setWindowTitle('Lexer/parser errors: ' + basename(fileName))
self.show()
def __createLayout(self, fileName, info):
"""C | reates the dialog layout"""
self.resize | (600, 220)
self.setSizeGripEnabled(True)
verticalLayout = QVBoxLayout(self)
# Info label
infoLabel = FitLabel(self)
sizePolicy = QSizePolicy(QSizePolicy.Minimum, QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(
infoLabel.sizePolicy().hasHeightForWidth())
infoLabel.setSizePolicy(sizePolicy)
infoLabel.setText('Lexer/parser errors for ' + fileName)
verticalLayout.addWidget(infoLabel)
# Result window
resultEdit = QTextEdit(self)
resultEdit.setTabChangesFocus(False)
resultEdit.setAcceptRichText(False)
resultEdit.setReadOnly(True)
resultEdit.setFont(getZoomedMonoFont())
if info is not None:
modInfo = info
else:
modInfo = GlobalData().briefModinfoCache.get(fileName)
if modInfo.isOK:
resultEdit.setText('No errors found')
else:
resultEdit.setText('\n'.join(modInfo.lexerErrors +
modInfo.errors))
verticalLayout.addWidget(resultEdit)
# Buttons
buttonBox = QDialogButtonBox(self)
buttonBox.setOrientation(Qt.Horizontal)
buttonBox.setStandardButtons(QDialogButtonBox.Close)
verticalLayout.addWidget(buttonBox)
buttonBox.rejected.connect(self.close)
|
fbacchella/ovirtcmd | samples/single_node_cluster.py | Python | apache-2.0 | 3,574 | 0.005316 | import ovlib
from ovlib.eventslib import event_waiter, EventsCode
mac_pool = context.macpools.get("Default")
try:
dc = context.datacenters.get(host_name)
except ovlib.OVLibErrorNotFound:
dc = context.datacenters.add(data_center={'name': host_name, 'local': True, 'storage_format': 'V4', 'mac_pool': mac_pool})
dc_net_vlan = set()
dc_nets_id = set()
for net in dc.networks.list():
if net.vlan is not None:
dc_net_vlan.add(net.vlan.id)
if net.name == "ovirtmgmt" and net.mtu != 9000:
net.update(network={'mtu': 9000})
dc_nets_id.add(net.id)
try:
cluster = context.clusters.get(host_name)
except ovlib.OVLibErrorNotFound:
cluster = context.clusters.add(cluster={'name': host_name, 'cpu': {'type': 'AMD Opteron G3'}, 'data_center': dc, 'mac_pool': mac_pool})
futurs = []
for (name, vlan) in (("VLAN1", 1), ("VLAN2", 2), ('VLAN3', 3)):
if not vlan in dc_net_vlan:
new_net = context.networks.add(network={'name': name, 'vlan': {'id': "%d" % vlan} , 'mtu': 9000, 'data_center': dc, 'usages': ['VM'],
}, wait= False)
futurs.append(new_net)
for f in futurs:
network = f.wait()
dc_nets_id.add(network.id)
cluster_nets_id = set()
for net in cluster.networks.list():
cluster_nets_id.add(net.id)
futurs = []
for missing in dc_nets_id - cluster_nets_id:
futurs.append(cluster.networks.add(network={'id': missing, 'required': False}, wait=False))
try:
host = context.hosts.get(host_name)
excep | t o | vlib.OVLibErrorNotFound:
events_returned = []
waiting_events = [EventsCode.VDS_DETECTED]
with event_waiter(context, "host.name=%s" % host_name, events_returned, verbose=True, break_on=waiting_events):
host = context.hosts.add(host={
'name': host_name, 'address': host_name,
'cluster': cluster,
'override_iptables': False,
'ssh': {'authentication_method': 'PUBLICKEY'},
'power_management': {
'enabled': True,
'kdump_detection': False,
'pm_proxies': [{'type': 'CLUSTER'}, {'type': 'DC'}, {'type': 'OTHER_DC'}]
}
}, wait=True)
host.refresh()
storages = host.storage.list()
if len(storages) == 1 and storages[0].type.name == 'FCP' and storages[0].type.name == 'FCP' and storages[0].logical_units[0].status.name == 'FREE':
lu = {'id': storages[0].id}
vg = {'logical_units': [lu]}
storage = {'type': 'FCP', 'volume_group': vg}
sd = {'name': host_name, 'type': 'DATA', 'data_center': dc, 'host': host, 'storage': storage}
sd = context.storagedomains.add(storage_domain={'name': host_name, 'type': 'DATA', 'data_center': dc, 'host': host, 'storage': storage})
else:
sd = context.storagedomains.get(host_name)
events_returned = []
waiting_events = [EventsCode.VDS_DETECTED]
if(host.status.name != 'UP'):
with event_waiter(context, "host.name=%s" % host_name, events_returned, verbose=True, break_on=waiting_events):
print(events_returned)
if sd.status is not None and sd.status.value == 'unattached':
futurs.append(dc.storage_domains.add(storage_domain=sd, wait=False))
if host.hardware_information.product_name is not None and len(list(host.fence_agents.list())) == 0:
fence_agent_type={'ProLiant DL185 G5': 'ipmilan'}[host.hardware_information.product_name]
futurs.append(host.fence_agents.add(agent={'address': host_name + "-ilo", 'username': 'admin', 'password': 'password', 'type': fence_agent_type, 'order': 1}, wait=False))
for f in futurs:
pf.wait()
|
MOA-2011/enigma2-plugin-extensions-openwebif | plugin/controllers/views/mobile/movies.py | Python | gpl-2.0 | 8,464 | 0.014532 | #!/usr/bin/env python
##################################################
## DEPENDENCIES
import sys
import os
import os.path
try:
import builtins as builtin
except ImportError:
import __builtin__ as builtin
from os.path import getmtime, exists
import time
import types
from Cheetah.Version import MinCompatibleVersion as RequiredCheetahVersion
from Cheetah.Version import MinCompatibleVersionTuple as RequiredCheetahVersionTuple
from Cheetah.Template import Template
from Cheetah.DummyTransaction import *
from Cheetah.NameMapper import NotFound, valueForName, valueFromSearchList, valueFromFrameOrSearchList
from Cheetah.CacheRegion import CacheRegion
import Cheetah.Filters as Filters
import Cheetah.ErrorCatchers as ErrorCatchers
from Plugins.Extensions.OpenWebif.local import tstrings
##################################################
## MODULE CONSTANTS
VFFSL=valueFromFrameOrSearchList
VFSL=valueFromSearchList
VFN=valueForName
currentTime=time.time
__CHEETAH_version__ = '2.4.4'
__CHEETAH_versionTuple__ = (2, 4, 4, 'development', 0)
__CHEETAH_genTime__ = 1406885498.426455
__CHEETAH_genTimestamp__ = 'Fri Aug 1 18:31:38 2014'
__CHEETAH_src__ = '/home/wslee2/models/5-wo/force1plus/openpli3.0/build-force1plus/tmp/work/mips32el-oe-linux/enigma2-plugin-extensions-openwebif-1+git5+3c0c4fbdb28d7153bf2140459b553b3d5cdd4149-r0/git/plugin/controllers/views/mobile/movies.tmpl'
__CHEETAH_srcLastModified__ = 'Fri Aug 1 18:30:05 2014'
__CHEETAH_docstring__ = 'Autogenerated by Cheetah: The Python-Powered Template Engine'
if __CHEETAH_versionTuple__ < RequiredCheetahVersionTuple:
raise AssertionError(
'This template was compiled with Cheetah version'
' %s. Templates compiled before version %s must be recompiled.'%(
__CHEETAH_version__, RequiredCheetahVersion))
##################################################
## CLASSES
| class movies(Template):
##################################################
## CHEETAH GENERATED METHODS
def __init__(self, *args, **KWs):
super(movies, self).__init__(*args, **KWs)
if not self._CHEETAH__instanceInitialized:
cheetahKWArgs = {}
allowedKWs = 'searchList namespaces filter filtersLib errorCatcher'.s | plit()
for k,v in KWs.items():
if k in allowedKWs: cheetahKWArgs[k] = v
self._initCheetahInstance(**cheetahKWArgs)
def respond(self, trans=None):
## CHEETAH: main method generated for this template
if (not trans and not self._CHEETAH__isBuffering and not callable(self.transaction)):
trans = self.transaction # is None unless self.awake() was called
if not trans:
trans = DummyTransaction()
_dummyTrans = True
else: _dummyTrans = False
write = trans.response().write
SL = self._CHEETAH__searchList
_filter = self._CHEETAH__currentFilter
########################################
## START - generated method body
write(u'''<html>\r
<head>\r
\t<title>OpenWebif</title>\r
\t<meta http-equiv="Content-Type" content="text/html; charset=utf-8" />\r
\t<meta name="viewport" content="user-scalable=no, width=device-width"/>\r
\t<meta name="apple-mobile-web-app-capable" content="yes" />\r
\t<link rel="stylesheet" type="text/css" href="/css/jquery.mobile-1.0.min.css" media="screen"/>\r
\t<link rel="stylesheet" type="text/css" href="/css/iphone.css" media="screen"/>\r
\t<script src="/js/jquery-1.6.2.min.js"></script>\r
\t<script src="/js/jquery.mobile-1.0.min.js"></script>\r
</head>\r
<body> \r
\t<div data-role="page">\r
\r
\t\t<div id="header">\r
\t\t\t<div class="button" onClick="history.back()">''')
_v = VFFSL(SL,"tstrings",True)['back'] # u"$tstrings['back']" on line 17, col 49
if _v is not None: write(_filter(_v, rawExpr=u"$tstrings['back']")) # from line 17, col 49.
write(u'''</div>\r
\t\t\t<h1><a style="color:#FFF;text-decoration:none;" href=\'/mobile\'>OpenWebif</a></h1>
\t\t\t<h2>''')
_v = VFFSL(SL,"tstrings",True)['movies'] # u"$tstrings['movies']" on line 19, col 8
if _v is not None: write(_filter(_v, rawExpr=u"$tstrings['movies']")) # from line 19, col 8.
write(u'''</h2>\r
\t\t</div>\r
\r
\t\t<div data-role="fieldcontain">\r
\t\t <select name="select-choice-1" id="select-choice-moviedir" onChange="window.location.href=\'/mobile/movies?dirname=\'+escape(options[selectedIndex].value);">\r
\t\t\t <option value="''')
_v = VFFSL(SL,"directory",True) # u'$directory' on line 24, col 21
if _v is not None: write(_filter(_v, rawExpr=u'$directory')) # from line 24, col 21.
write(u'''">''')
_v = VFFSL(SL,"directory",True) # u'$directory' on line 24, col 33
if _v is not None: write(_filter(_v, rawExpr=u'$directory')) # from line 24, col 33.
write(u'''</option>\r
''')
for bookmark in VFFSL(SL,"bookmarks",True): # generated from line 25, col 6
write(u'''\t\t\t <option value="''')
_v = VFFSL(SL,"bookmark",True) # u'$bookmark' on line 26, col 21
if _v is not None: write(_filter(_v, rawExpr=u'$bookmark')) # from line 26, col 21.
write(u'''">''')
_v = VFFSL(SL,"bookmark",True) # u'$bookmark' on line 26, col 32
if _v is not None: write(_filter(_v, rawExpr=u'$bookmark')) # from line 26, col 32.
write(u'''</option>\r
''')
write(u'''\t\t </select>\r
\t\t</div>\r
\r
\t\t<div id="contentContainer">\r
\t\t\t<ul data-role="listview" data-inset="true" data-theme="d">\r
\t\t\t\t<li data-role="list-divider" role="heading" data-theme="b">''')
_v = VFFSL(SL,"tstrings",True)['movies'] # u"$tstrings['movies']" on line 33, col 64
if _v is not None: write(_filter(_v, rawExpr=u"$tstrings['movies']")) # from line 33, col 64.
write(u'''</li>\r
''')
for movie in VFFSL(SL,"movies",True): # generated from line 34, col 5
if VFFSL(SL,"movie.eventname",True) != "": # generated from line 35, col 5
write(u'''\t\t\t\t<li>''')
_v = VFFSL(SL,"movie.eventname",True) # u'$movie.eventname' on line 36, col 9
if _v is not None: write(_filter(_v, rawExpr=u'$movie.eventname')) # from line 36, col 9.
write(u'''</li>\r
''')
else: # generated from line 37, col 5
write(u'''\t\t\t\t<li>''')
_v = VFFSL(SL,"movie.filename",True) # u'$movie.filename' on line 38, col 9
if _v is not None: write(_filter(_v, rawExpr=u'$movie.filename')) # from line 38, col 9.
write(u'''</li>\r
''')
write(u'''\t\t\t</ul>\r
\t\t</div>\r
\r
\t\t<div id="footer">\r
\t\t\t<p>OpenWebif Mobile</p>\r
\t\t\t<a onclick="document.location.href=\'/index?mode=fullpage\';return false;" href="#">''')
_v = VFFSL(SL,"tstrings",True)['show_full_openwebif'] # u"$tstrings['show_full_openwebif']" on line 46, col 86
if _v is not None: write(_filter(_v, rawExpr=u"$tstrings['show_full_openwebif']")) # from line 46, col 86.
write(u'''</a>\r
\t\t</div>\r
\t\t\r
\t</div>\r
</body>\r
</html>\r
''')
########################################
## END - generated method body
return _dummyTrans and trans.response().getvalue() or ""
##################################################
## CHEETAH GENERATED ATTRIBUTES
_CHEETAH__instanceInitialized = False
_CHEETAH_version = __CHEETAH_version__
_CHEETAH_versionTuple = __CHEETAH_versionTuple__
_CHEETAH_genTime = __CHEETAH_genTime__
_CHEETAH_genTimestamp = __CHEETAH_genTimestamp__
_CHEETAH_src = __CHEETAH_src__
_CHEETAH_srcLastModified = __CHEETAH_srcLastModified__
_mainCheetahMethod_for_movies= 'respond'
## END CLASS DEFINITION
if not hasattr(movies, '_initCheetahAttributes'):
templateAPIClass = getattr(movies, '_CHEETAH_templateClass', Template)
templateAPIClass._addCheetahPlumbingCodeToClass(movies)
# CHEETAH was developed by Tavis Rudd and Mike Orr
# with code, advice and input from many other vo |
kornai/4lang | src/fourlang/lemmatizer.py | Python | mit | 5,587 | 0.000716 | import logging
import os
import sys
from nltk.corpus import stopwords as nltk_stopwords
from hunmisc.utils.huntool_wrapper import Hundisambig, Ocamorph, OcamorphAnalyzer, MorphAnalyzer # nopep8
from stemming.porter2 import stem as porter_stem
from utils import get_cfg
class Lemmatizer():
def __init__(self, cfg):
self.cfg = cfg
self.analyzer, self.morph_analyzer = self.get_analyzer()
self.stopwords = set(nltk_stopwords.words('english'))
self.stopwords.add('as') # TODO
self.stopwords.add('root') # TODO
self.read_cache()
self.oov = set()
def clear_cache(self):
self.cache = {}
self.oov = set()
def _analyze(self, word):
stem = porter_stem(word)
lemma = list(self.analyzer.analyze(
[[word]]))[0][0][1].split('||')[0].split('<')[0]
cand_krs = self.morph_analyzer.analyze([[word]]).next().next()
candidates = [cand.split('||')[0].split('<')[0] for cand in cand_krs]
| se | lf.cache[word] = (stem, lemma, candidates)
def _lemmatize_with_stopwords(self, word, uppercase):
if word == 'have':
return 'HAS'
elif not uppercase:
return word
elif word in self.stopwords:
return word.upper()
else:
return word
def lemmatize(self, word, defined=None, stem_first=False, uppercase=False,
debug=False):
# if 'defined' is provided, will refuse to return lemmas not in it
# if the word is defined, we just return it
if defined is not None and word in defined:
return self._lemmatize_with_stopwords(word, uppercase)
# if the word is not in our cache, we run all analyses
if word not in self.cache:
self._analyze(word)
stem, lemma, candidates = self.cache[word]
# if stem_first flag is on, we rerun lemmatize on the stem
# and return the result unless it doesn't exist
if stem_first:
if defined is None:
logging.warning("stem_first=True and defined=None, \
'lemmatize' is now a blind Porter stemmer")
stemmed_lemma = self.lemmatize(
stem, defined=defined, stem_first=False, uppercase=uppercase)
if stemmed_lemma is not None:
return self._lemmatize_with_stopwords(stemmed_lemma, uppercase)
# we return the lemma unless it's not in defined
if defined is None or lemma in defined:
return self._lemmatize_with_stopwords(lemma, uppercase)
# we go over the other candidates as a last resort
for cand in candidates:
if cand in defined:
return self._lemmatize_with_stopwords(cand, uppercase)
# last resort is the porter stem:
if stem in defined:
return self._lemmatize_with_stopwords(stem, uppercase)
# if that doesn't work either, we return None
return None
def get_analyzer(self):
hunmorph_path = self.cfg.get('lemmatizer', 'hunmorph_path')
ocamorph_fn = os.path.join(hunmorph_path, "ocamorph")
morphdb_model_fn = os.path.join(hunmorph_path, "morphdb_en.bin")
hundisambig_fn = os.path.join(hunmorph_path, "hundisambig")
hunpos_model_fn = os.path.join(hunmorph_path, "en_wsj.model")
logging.warning('loading hunmorph using binaries in {0}'.format(hunmorph_path))
for fn in (ocamorph_fn, morphdb_model_fn, hundisambig_fn,
hunpos_model_fn):
if not os.path.exists(fn):
raise Exception("can't find hunmorph resource: {0}".format(fn))
ocamorph = Ocamorph(ocamorph_fn, morphdb_model_fn)
ocamorph_analyzer = OcamorphAnalyzer(ocamorph)
hundisambig = Hundisambig(hundisambig_fn, hunpos_model_fn)
morph_analyzer = MorphAnalyzer(ocamorph, hundisambig)
return morph_analyzer, ocamorph_analyzer
def read_cache(self):
self.clear_cache()
cache_fn = self.cfg.get('lemmatizer', 'cache_file')
if not os.path.exists(cache_fn):
return
logging.info('reading hunmorph cache...')
with open(cache_fn) as f_obj:
for line in f_obj:
try:
fields = line.decode('utf-8').strip().split('\t')
except (ValueError, UnicodeDecodeError), e:
raise Exception('error parsing line in tok2lemma file: \
{0}\n{1}'.format(e, line))
word, stem, lemma = fields[:3]
candidates = fields[3:]
self.cache[word] = (stem, lemma, candidates)
logging.info('done!')
def write_cache(self):
cache_fn = self.cfg.get('lemmatizer', 'cache_file')
logging.info('writing hunmorph cache...')
with open(cache_fn, 'w') as f_obj:
for word, (stem, lemma, candidates) in self.cache.iteritems():
f_obj.write(u"{0}\t{1}\t{2}\t{3}\n".format(
word, stem, lemma, "\t".join(candidates)).encode('utf-8'))
logging.info('done!')
def main():
    """Interactive loop: read words from stdin and print their lemmas."""
    logging.basicConfig(
        level=logging.INFO,
        format="%(asctime)s : " +
        "%(module)s (%(lineno)s) - %(levelname)s - %(message)s")
    # Optional first CLI argument: path to a config file.
    cfg_file = sys.argv[1] if len(sys.argv) > 1 else None
    cfg = get_cfg(cfg_file)
    lemmatizer = Lemmatizer(cfg)
    while True:
        word = raw_input('> ')
        print lemmatizer.lemmatize(word)
if __name__ == "__main__":
main()
|
uclouvain/OSIS-Louvain | learning_unit/views/learning_unit_class/tutor_repartition.py | Python | agpl-3.0 | 6,168 | 0.001459 | ##############################################################################
#
# OSIS stands for Open Student Information System. It's an application
# designed to manage the core business of higher education institutions,
# such as universities, faculties, institutes and professional schools.
# The core business involves the administration of students, teachers,
# courses, programs and so on.
#
# Copyright (C) 2015-2021 Université catholique de Louvain (http://www.uclouvain.be)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# A copy of this license - GNU General Public License - is available
# at the root of the source code of this program. If not,
# see http://www.gnu.org/licenses/.
#
##############################################################################
from django.shortcuts import render
from django.utils.functional import cached_property
from django.utils.translation import gettext_lazy as _
from django.views.generic import FormView
from base.ddd.utils.business_validator import MultipleBusinessExceptions
from base.views.common import display_success_messages, display_error_messages
from base.views.mixins import AjaxTemplateMixin
from ddd.logic.effective_class_repartition.commands import SearchAttributionCommand, \
SearchTutorsDistributedToClassCommand
from ddd.logic.effective_class_repartition.dtos import TutorAttributionToLearningUnitDTO, TutorClassRepartitionDTO
from infrastructure.messages_bus import message_bus_instance
from learning_unit.forms.classes.tutor_repartition import ClassTutorRepartitionForm, ClassRemoveTutorRepartitionForm, \
ClassEditTutorRepartitionForm
from learning_unit.views.learning_unit_class.common import CommonClassView
class TutorRepartitionView(CommonClassView, AjaxTemplateMixin, FormView):
    """Ajax view adding a tutor repartition to an effective class."""

    template_name = "class/add_charge_repartition_inner.html"
    permission_required = 'attribution.can_change_class_repartition'
    form_class = ClassTutorRepartitionForm

    def get_context_data(self, **kwargs):
        """Expose the learning unit / class objects and the add permission."""
        context = super().get_context_data(**kwargs)
        context.update(
            {
                'learning_unit': self.learning_unit,
                'learning_unit_year': self.learning_unit_year,
                'effective_class': self.effective_class,
                'can_add_charge_repartition': self.request.user.has_perm(
                    "attribution.can_change_class_repartition", self.get_permission_object()
                )
            }
        )
        return context

    @cached_property
    def tutor(self) -> 'TutorAttributionToLearningUnitDTO':
        """Fetch the attribution DTO matching the URL's attribution uuid."""
        cmd = SearchAttributionCommand(
            learning_unit_attribution_uuid=self.kwargs['attribution_uuid'],
            learning_unit_year=self.effective_class.entity_id.learning_unit_identity.year,
            learning_unit_code=self.effective_class.entity_id.learning_unit_identity.code
        )
        return message_bus_instance.invoke(cmd)

    def get_form_kwargs(self):
        kwargs = super().get_form_kwargs()
        kwargs['effective_class'] = self.effective_class
        kwargs['tutor'] = self.tutor
        kwargs['user'] = self.request.user
        return kwargs

    def post(self, request, *args, **kwargs):
        """Save the repartition; show business errors, else a success toast."""
        form = self.form_class(
            request.POST,
            user=request.user,
            tutor=self.tutor,
            effective_class=self.effective_class
        )
        try:
            form.save()
        except MultipleBusinessExceptions as e:
            display_error_messages(request, [exc.message for exc in e.exceptions])
        if not form.errors:
            display_success_messages(request, self.get_success_msg())
            # Restored from corrupted source token ("_ajax_res | ponse").
            return self._ajax_response()
        return render(request, self.template_name, {
            "form": form,
        })

    def get_success_url(self):
        # Restored from corrupted source token ("get_success_ur | l").
        return self.common_url_tabs()['url_class_tutors']

    def get_success_msg(self) -> str:
        return _("Repartition added for %(tutor)s (%(function)s)") % {
            'tutor': self.tutor.full_name,
            'function': self.tutor.function_text
        }
class TutorRepartitionRemoveView(TutorRepartitionView):
    """Ajax view removing a tutor repartition from an effective class."""

    template_name = "class/remove_charge_repartition_inner.html"
    permission_required = 'attribution.can_delete_class_repartition'
    form_class = ClassRemoveTutorRepartitionForm

    # NOTE: the previous get_form_kwargs override was removed as redundant:
    # it only re-set 'tutor' and 'user', which the inherited
    # TutorRepartitionView.get_form_kwargs (reached via super()) already
    # provides with the same values.

    def get_success_msg(self) -> str:
        return _("Repartition deleted for %(tutor)s (%(function)s)") % {
            'tutor': self.tutor.full_name,
            'function': self.tutor.function_text
        }
class TutorRepartitionEditView(TutorRepartitionView):
    """Ajax view editing an existing tutor repartition of an effective class."""

    template_name = "class/add_charge_repartition_inner.html"
    permission_required = 'attribution.can_change_class_repartition'
    form_class = ClassEditTutorRepartitionForm

    @cached_property
    def tutor(self) -> 'TutorClassRepartitionDTO':
        """Return the distributed tutor matching the URL's attribution uuid."""
        command = SearchTutorsDistributedToClassCommand(
            learning_unit_code=self.learning_unit.code,
            learning_unit_year=self.learning_unit.year,
            class_code=self.effective_class.class_code,
        )
        wanted_uuid = str(self.kwargs['attribution_uuid'])
        # None when no distributed tutor matches (same as falling off the loop).
        return next(
            (tutor for tutor in message_bus_instance.invoke(command)
             if str(tutor.attribution_uuid) == wanted_uuid),
            None,
        )

    def get_success_msg(self) -> str:
        message_context = {
            'tutor': self.tutor.full_name,
            'function': self.tutor.function_text,
        }
        return _("Repartition edited for %(tutor)s (%(function)s)") % message_context
|
matiboy/generator-django | app/templates/tasks/local/pip.py | Python | mit | 1,694 | 0.015348 | import fabric.api
import fabric.tasks
import fabric.colors
import os
class PipFreeze(fabric.tasks.Task):
    """Write ``pip freeze`` output to requirements.txt and optionally commit it."""

    name = 'pip_freeze'

    def run(self, commit=True):
        fabric.api.local("pip freeze > requirements.txt")
        if commit:
            fabric.api.local("git add requirements.txt")
            # warn_only: the commit fails harmlessly when nothing changed.
            with fabric.api.settings(warn_only=True):
                # Restored corrupted command string ("git comm | it ...").
                fabric.api.local('git commit -m "New requirements"')
class PipFreezeAlias(PipFreeze):
    # Short alias: `fab lpf` behaves like `fab pip_freeze`.
    name = 'lpf'
class PipInstall(fabric.tasks.Task):
    """pip-install a package locally and pin its version in the
    per-environment requirements file under ``requirements/``."""

    name = 'pip_install'

    def run(self, package, environment=None):
        # Restored corrupted default ("environment=Non | e").
        # Normalize environment aliases: optional 'save-' prefix and the
        # 'dev' shorthand for 'development'; default is 'common'.
        if environment is None:
            environment = 'common'
        else:
            environment = environment.replace('save-', '')
        if environment == 'dev':
            environment = 'development'
        valid_envs = ('development', 'production', 'common', 'testing',)
        if environment not in valid_envs:
            print(fabric.colors.red('\tEnvironment must be one of {}'.format(', '.join(valid_envs))))
            return
        # Run pip install locally.
        fabric.api.local("pip install {}".format(package))
        # Run pip freeze and capture the output to find the pinned version.
        frozen = fabric.api.local("pip freeze", capture=True)
        for line in frozen.split('\n'):
            # pip freeze pins with '==', so partition splits name/version.
            datpackage, _, version = line.partition('==')
            if datpackage == package:
                print(fabric.colors.green('\tAdding package {} (version {}) to {} requirements'.format(datpackage, version, environment)))
                with open('requirements/{}'.format(environment), 'a') as f:
                    # Bug fix: append a trailing newline so successive
                    # installs do not run together on a single line.
                    f.write(line + '\n')
class PipInstallAlias(PipInstall):
    # Short alias: `fab lpi:<package>` behaves like `fab pip_install:<package>`.
    name = 'lpi'
|
scream7/leetcode | algorithms/python/125.py | Python | apache-2.0 | 534 | 0 | class Solution(object):
def isPalindrome(self, s):
"""
:type | s: str
:rtype: bool
"""
i = 0
j = len(s) - 1
while i <= j:
if not (s[i].isalpha() or s[i].isdigit()):
i += 1
elif not (s[j].isalpha() or s[j].isdigit()):
j -= 1
else:
if s[i].lower() != s[j].lower():
return False
else:
i += 1
j -= 1
return Tru | e
|
openstack/watcher | watcher/tests/decision_engine/strategy/strategies/test_saving_energy.py | Python | apache-2.0 | 8,589 | 0 | # -*- encoding: utf-8 -*-
# Copyright (c) 2017 ZTE
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from unittest import mock
from watcher.common import clients
from watcher.common import utils
from watcher.decision_engine.strategy import strategies
from watcher.tests.decision_engine.strategy.strategies.test_base \
import TestBaseStrategy
class TestSavingEnergy(TestBaseStrategy):
    def setUp(self):
        """Wire mocked ironic/nova clients and a scenario model into the
        SavingEnergy strategy under test."""
        super(TestSavingEnergy, self).setUp()
        mock_node1_dict = {
            'uuid': '922d4762-0bc5-4b30-9cb9-48ab644dd861'}
        mock_node2_dict = {
            'uuid': '922d4762-0bc5-4b30-9cb9-48ab644dd862'}
        mock_node1 = mock.Mock(**mock_node1_dict)
        mock_node2 = mock.Mock(**mock_node2_dict)
        self.fake_nodes = [mock_node1, mock_node2]
        # Patch the OpenStack client factories so no real API is touched.
        p_ironic = mock.patch.object(
            clients.OpenStackClients, 'ironic')
        self.m_ironic = p_ironic.start()
        self.addCleanup(p_ironic.stop)
        p_nova = mock.patch.object(
            clients.OpenStackClients, 'nova')
        self.m_nova = p_nova.start()
        self.addCleanup(p_nova.stop)
        self.m_ironic.node.list.return_value = self.fake_nodes
        self.m_c_model.return_value = self.fake_c_cluster.generate_scenario_1()
        # Strategy under test, with both thresholds set explicitly.
        self.strategy = strategies.SavingEnergy(
            config=mock.Mock())
        self.strategy.input_parameters = utils.Struct()
        self.strategy.input_parameters.update(
            {'free_used_percent': 10.0,
             'min_free_hosts_num': 1})
        self.strategy.free_used_percent = 10.0
        self.strategy.min_free_hosts_num = 1
        self.strategy._ironic_client = self.m_ironic
        self.strategy._nova_client = self.m_nova
    def test_get_hosts_pool_with_vms_node_pool(self):
        """Hypervisors running VMs land in with_vms_node_pool only."""
        mock_node1_dict = {
            'extra': {'compute_node_id': 1},
            'power_state': 'power on'}
        mock_node2_dict = {
            'extra': {'compute_node_id': 2},
            'power_state': 'power off'}
        mock_node1 = mock.Mock(**mock_node1_dict)
        mock_node2 = mock.Mock(**mock_node2_dict)
        self.m_ironic.node.get.side_effect = [mock_node1, mock_node2]
        mock_hyper1 = mock.Mock()
        mock_hyper2 = mock.Mock()
        # Both hypervisors report running VMs, so power state is irrelevant.
        mock_hyper1.to_dict.return_value = {
            'running_vms': 2, 'service': {'host': 'hostname_0'}, 'state': 'up'}
        mock_hyper2.to_dict.return_value = {
            'running_vms': 2, 'service': {'host': 'hostname_1'}, 'state': 'up'}
        self.m_nova.hypervisors.get.side_effect = [mock_hyper1, mock_hyper2]
        self.strategy.get_hosts_pool()
        self.assertEqual(len(self.strategy.with_vms_node_pool), 2)
        self.assertEqual(len(self.strategy.free_poweron_node_pool), 0)
        self.assertEqual(len(self.strategy.free_poweroff_node_pool), 0)
    def test_get_hosts_pool_free_poweron_node_pool(self):
        """Powered-on hypervisors with no VMs land in free_poweron pool."""
        mock_node1_dict = {
            'extra': {'compute_node_id': 1},
            'power_state': 'power on'}
        mock_node2_dict = {
            'extra': {'compute_node_id': 2},
            'power_state': 'power on'}
        mock_node1 = mock.Mock(**mock_node1_dict)
        mock_node2 = mock.Mock(**mock_node2_dict)
        self.m_ironic.node.get.side_effect = [mock_node1, mock_node2]
        mock_hyper1 = mock.Mock()
        mock_hyper2 = mock.Mock()
        mock_hyper1.to_dict.return_value = {
            'running_vms': 0, 'service': {'host': 'hostname_0'}, 'state': 'up'}
        mock_hyper2.to_dict.return_value = {
            'running_vms': 0, 'service': {'host': 'hostname_1'}, 'state': 'up'}
        self.m_nova.hypervisors.get.side_effect = [mock_hyper1, mock_hyper2]
        self.strategy.get_hosts_pool()
        self.assertEqual(len(self.strategy.with_vms_node_pool), 0)
        self.assertEqual(len(self.strategy.free_poweron_node_pool), 2)
        self.assertEqual(len(self.strategy.free_poweroff_node_pool), 0)
def test_get_hosts_pool_free_poweroff_node_pool(self):
mock_node1_dict = {
'extra': {'compute_node_id': 1},
'power_state': 'power off'}
mock_node2_dict = {
'extra': {'compute_node_id': 2},
'power_state': 'power off'}
moc | k_node1 = mock.Mock(**mock_node1_dict)
mock_node2 = mock.Mock(**mock_node2_dict)
self.m_ironic.node.get.side_effect = [mock_node1, mock_node2]
mock_hyper1 = mock.Mock()
mock_hyper2 = mock.Mock()
mock_hyper1.to_dict.return_value = {
'running_vms': 0, 'service': {'host': 'ho | stname_0'}, 'state': 'up'}
mock_hyper2.to_dict.return_value = {
'running_vms': 0, 'service': {'host': 'hostname_1'}, 'state': 'up'}
self.m_nova.hypervisors.get.side_effect = [mock_hyper1, mock_hyper2]
self.strategy.get_hosts_pool()
self.assertEqual(len(self.strategy.with_vms_node_pool), 0)
self.assertEqual(len(self.strategy.free_poweron_node_pool), 0)
self.assertEqual(len(self.strategy.free_poweroff_node_pool), 2)
    def test_get_hosts_pool_with_node_out_model(self):
        """Hypervisors whose host is absent from the cluster model are
        skipped (only hostname_0 is in the model, so one node is pooled)."""
        mock_node1_dict = {
            'extra': {'compute_node_id': 1},
            'power_state': 'power off'}
        mock_node2_dict = {
            'extra': {'compute_node_id': 2},
            'power_state': 'power off'}
        mock_node1 = mock.Mock(**mock_node1_dict)
        mock_node2 = mock.Mock(**mock_node2_dict)
        self.m_ironic.node.get.side_effect = [mock_node1, mock_node2]
        mock_hyper1 = mock.Mock()
        mock_hyper2 = mock.Mock()
        mock_hyper1.to_dict.return_value = {
            'running_vms': 0, 'service': {'host': 'hostname_0'},
            'state': 'up'}
        # hostname_10 is not part of the generated scenario model.
        mock_hyper2.to_dict.return_value = {
            'running_vms': 0, 'service': {'host': 'hostname_10'},
            'state': 'up'}
        self.m_nova.hypervisors.get.side_effect = [mock_hyper1, mock_hyper2]
        self.strategy.get_hosts_pool()
        self.assertEqual(len(self.strategy.with_vms_node_pool), 0)
        self.assertEqual(len(self.strategy.free_poweron_node_pool), 0)
        self.assertEqual(len(self.strategy.free_poweroff_node_pool), 1)
def test_save_energy_poweron(self):
self.strategy.free_poweroff_node_pool = [
mock.Mock(uuid='922d4762-0bc5-4b30-9cb9-48ab644dd861'),
mock.Mock(uuid='922d4762-0bc5-4b30-9cb9-48ab644dd862')
]
self.strategy.save_energy()
self.assertEqual(len(self.strategy.solution.actions), 1)
action = self.strategy.solution.actions[0]
self.assertEqual(action.get('input_parameters').get('state'), 'on')
def test_save_energy_poweroff(self):
self.strategy.free_poweron_node_pool = [
mock.Mock(uuid='922d4762-0bc5-4b30-9cb9-48ab644dd861'),
mock.Mock(uuid='922d4762-0bc5-4b30-9cb9-48ab644dd862')
]
self.strategy.save_energy()
self.assertEqual(len(self.strategy.solution.actions), 1)
action = self.strategy.solution.actions[0]
self.assertEqual(action.get('input_parameters').get('state'), 'off')
def test_execute(self):
mock_node1_dict = {
'extra': {'compute_node_id': 1},
'power_state': 'power on'}
mock_node2_dict = {
'extra': {'compute_node_id': 2},
'power_state': 'power on'}
mock_node1 = mock.Mock(**mock_node1_dict)
mock_node2 = mock.Mock(**mock_node2_dict)
self.m_ironic.node.get.side_effect = [mock_node1, mock_node2]
mock_hyper1 = mock.Mock()
mock_hyper2 = mock.Mock()
mock_hyper1.to_dict.return_value = {
'running_vms': 0, 'service': {'host': 'hostname_0'}, 'state |
devassistant/dapp | test/communicator_test_case.py | Python | gpl-2.0 | 3,911 | 0.005369 | import copy
import pytest
from dapp import protocol_version, DAPPBadMsgType, DAPPTimeOut
class CommunicatorTestCase(object):
    """Shared fixtures and tests for DAPP client<->server communicators.

    Concrete test cases are expected to provide ``self.c`` (the
    communicator under test) plus ``self.lfd``/``self.wfd`` (file objects
    backing its read and write channels).
    """

    pv = 'dapp_protocol_version: {0}'.format(protocol_version).encode('utf8')
    # a general confirmation message
    msg_received_lines = [b'START', b'msg_type: msg_received', pv, b'STOP']
    msg_received_dict = {'msg_type': 'msg_received', 'dapp_protocol_version': protocol_version}
    # a custom message that can be sent by either client or server
    some_msg_lines = [b'START', b'ctxt:', b' foo: bar', b'spam: spam', pv,
                      b'msg_type: type', b'STOP']
    some_msg_dict = {'ctxt': {'foo': 'bar'}, 'spam': 'spam', 'msg_type': 'type',
                     'dapp_protocol_version': protocol_version}
    # client calling a command
    c_call_msg_lines = [b'START', b'ctxt:', b' spam: spam', b'msg_type: call_command', pv,
                        b'command_type: foo', b'command_input: bar', b'STOP']
    c_call_msg_dict = {'ctxt': {'spam': 'spam'}, 'msg_type': 'call_command',
                       'command_type': 'foo', 'command_input': 'bar',
                       'dapp_protocol_version': protocol_version}
    # the 3 below are various responses to c_call_msg_lines
    s_no_such_cmd_msg_lines = [b'START', b'ctxt:', b' foo: bar', pv,
                               b'msg_type: no_such_command', b'STOP']
    s_cmd_exc_msg_lines = [b'START', b'ctxt:', b' foo: bar', pv,
                           b'msg_type: command_exception', b'exception: problem', b'STOP']
    s_cmd_ok_msg_lines = [b'START', b'ctxt:', b' spam: spam', b' foo: bar', pv,
                          b'msg_type: command_result', b'lres: True', b'res: result', b'STOP']
    # server telling client to start
    s_run_msg_lines = [b'START', b'ctxt:', b' spam: spam', b'msg_type: run', pv, b'STOP']
    # client saying that it finished successfully
    c_ok_msg_dict = {'ctxt': {'foo': 'bar', 'spam': 'spam'}, 'msg_type': 'finished',
                     'lres': True, 'res': 'success', 'dapp_protocol_version': protocol_version}

    def _read_sent_msg(self, from_pos=0, nbytes=-1):
        """Read what the communicator wrote, preserving the file position."""
        where = self.wfd.tell()
        self.wfd.seek(from_pos)
        b = self.wfd.read(nbytes)
        self.wfd.seek(where)
        return b

    def _write_msg(self, msg, seek='where', msg_number=1):
        """Inject *msg* into the read channel, inserting a msg_number line."""
        # deepcopy msg so that we don't alter it
        msg = copy.deepcopy(msg)
        if not isinstance(msg, list):
            msg = list(msg.splitlines())
        msg.insert(1, b'msg_number: ' + str(msg_number).encode('utf8'))
        msg = b'\n'.join(msg)
        where = self.lfd.tell()
        if not msg.endswith(b'\n'):
            msg = msg + b'\n'
        self.lfd.write(msg)
        if seek == 'where':
            self.lfd.seek(where, 0)
        elif seek == 'start':
            self.lfd.seek(0, 0)
        else:  # end
            self.lfd.seek(0, 2)

    def _write_msg_received(self, seek='where', msg_number=1):
        self._write_msg(self.msg_received_lines, seek=seek, msg_number=msg_number)

    def assert_msg_dict(self, expected, actual, msg_number=1):
        expected['msg_number'] = msg_number
        assert expected == actual

    def test_send_msg(self):
        self._write_msg_received(seek='start')
        self.c.send_msg('type', ctxt={'foo': 'bar'}, data={'spam': 'spam'})
        msg = self._read_sent_msg()
        assert set(msg.splitlines()) == set(self.some_msg_lines) | set([b'msg_number: 1'])

    def test_recv_msg(self):
        self._write_msg(self.some_msg_lines)
        msg = self.c.recv_msg()
        self.assert_msg_dict(self.some_msg_dict, msg)

    def test_recv_msg_wrong_type(self):
        # we don't test various malformed messages here; they're checked
        # by test_check_loaded_msg in test_general
        self._write_msg(self.some_msg_lines)
        with pytest.raises(DAPPBadMsgType):
            self.c.recv_msg(allowed_types=['foo'])

    def test_recv_msg_timeout(self):
        with pytest.raises(DAPPTimeOut):
            # Restored corrupted line from source ("| self.c. | recv_msg").
            self.c.recv_msg(timeout=1)
|
cnamejj/PyProc | regentest/self_status.py | Python | gpl-2.0 | 4,347 | 0.002761 | #!/usr/bin/env python
"""Handle records from /proc/self/status data files"""
import regentest as RG
import ProcHandlers as PH
PFC = PH.ProcFieldConstants
PDC = PH.ProcDataConstants
# ---
# pylint: disable=R0914
def re_self_status(inprecs):
    """Iterate through parsed records and re-generate data file"""

    # Output templates mirroring the layout of /proc/self/status; the
    # backslash-continued string lines must start at column 0 so that no
    # extra whitespace leaks into the generated file.
    __nametemp = "Name:\t{prog:s}"
    __grlisttemp = "{acc:s}{gr:d} "
    __tasktemp = "State:\t{st:s}\n\
Tgid:\t{tgid:d}\n\
{ngid:s}\
Pid:\t{pid:d}\n\
PPid:\t{ppid:d}\n\
TracerPid:\t{trpid:d}\n\
Uid:\t{uid:d}\t{euid:d}\t{suid:d}\t{fsuid:d}\n\
Gid:\t{gid:d}\t{egid:d}\t{sgid:d}\t{fsgid:d}\n\
FDSize:\t{fdsize:d}\n\
Groups:\t{grlist:s}"
    __memtemp = "VmPeak:\t{peak:8d} kB\n\
VmSize:\t{sz:8d} kB\n\
VmLck:\t{lock:8d} kB\n\
VmPin:\t{pin:8d} kB\n\
VmHWM:\t{hwm:8d} kB\n\
VmRSS:\t{rss:8d} kB\n\
VmData:\t{dat:8d} kB\n\
VmStk:\t{stack:8d} kB\n\
VmExe:\t{exe:8d} kB\n\
VmLib:\t{lib:8d} kB\n\
VmPTE:\t{pte:8d} kB\n\
VmSwap:\t{swap:8d} kB"
    __sigtemp = "Threads:\t{thr:d}\n\
SigQ:\t{squeue:s}\n\
SigPnd:\t{pend:s}\n\
ShdPnd:\t{shpend:s}\n\
SigBlk:\t{block:s}\n\
SigIgn:\t{ign:s}\n\
SigCgt:\t{caught:s}"
    __captemp = "CapInh:\t{inh:s}\n\
CapPrm:\t{perm:s}\n\
CapEff:\t{eff:s}\n\
CapBnd:\t{bset:s}"
    __seccomptemp = "Seccomp:\t{sec:d}"
    __ngidtemp = "Ngid:\t{ngid:d}\n"
    __cpustemp = "Cpus_allowed:\t{cpus:s}\n\
Cpus_allowed_list:\t{cpus_list:s}\n\
Mems_allowed:\t{mems:s}\n\
Mems_allowed_list:\t{mems_list:s}"
    __cswitchtemp = "voluntary_ctxt_switches:\t{vol:d}\n\
nonvoluntary_ctxt_switches:\t{nonvol:d}"

    __first = True
    __has_seccomp = False
    __has_ngid = False
    __ngid = ""

    for __hilit in inprecs:
        __ff = inprecs.field

        # On the first record, inspect which optional fields (Seccomp,
        # Ngid) were present in the parsed input so they can be re-emitted.
        if __first:
            __first = False
            __hits = inprecs.hit_order
            for __seq in __hits:
                if __hits[__seq] == PFC.F_SEC_COMP:
                    __has_seccomp = True
                elif __hits[__seq] == PFC.F_NUMA_GID:
                    __has_ngid = True

        print __nametemp.format(prog=__ff[PFC.F_PROG_NAME])

        # Build the space-terminated supplementary-group list, skipping
        # placeholder NO_GID entries.
        __grlist = ""
        __grs = __ff[PFC.F_GROUPS]
        for __off in range(0, len(__grs)):
            if __grs[__off] != PDC.NO_GID:
                __grlist = __grlisttemp.format(acc=__grlist, gr=__grs[__off])

        if __has_ngid:
            __ngid = __ngidtemp.format(ngid=__ff[PFC.F_NUMA_GID])

        print __tasktemp.format(st=__ff[PFC.F_RUNSTATUS],
                tgid=__ff[PFC.F_THREAD_GID], ngid=__ngid, pid=__ff[PFC.F_PID],
                ppid=__ff[PFC.F_PPID], trpid=__ff[PFC.F_TRACER_PID],
                uid=__ff[PFC.F_UID], euid=__ff[PFC.F_EUID],
                suid=__ff[PFC.F_SUID], fsuid=__ff[PFC.F_FSUID],
                gid=__ff[PFC.F_GID], egid=__ff[PFC.F_EGID],
                sgid=__ff[PFC.F_SGID], fsgid=__ff[PFC.F_FSGID],
                fdsize=__ff[PFC.F_FDSIZE], grlist=__grlist)

        print __memtemp.format(peak=__ff[PFC.F_VM_PEAK], sz=__ff[PFC.F_VM_SIZE],
                lock=__ff[PFC.F_VM_LOCK], pin=__ff[PFC.F_VM_PIN],
                hwm=__ff[PFC.F_VM_HWM], rss=__ff[PFC.F_VM_RSS],
                dat=__ff[PFC.F_VM_DATA], stack=__ff[PFC.F_VM_STACK],
                exe=__ff[PFC.F_VM_EXE], lib=__ff[PFC.F_VM_LIB],
                pte=__ff[PFC.F_VM_PTE], swap=__ff[PFC.F_VM_SWAP])

        # NOTE(review): both 'pend' and 'shpend' are filled from
        # F_SIG_PEND -- confirm there is no separate shared-pending field.
        print __sigtemp.format(thr=__ff[PFC.F_THREADS],
                squeue=__ff[PFC.F_SIG_QUEUE], pend=__ff[PFC.F_SIG_PEND],
                shpend=__ff[PFC.F_SIG_PEND], block=__ff[PFC.F_SIG_BLOCK],
                ign=__ff[PFC.F_SIG_IGN], caught=__ff[PFC.F_SIG_CAUGHT])

        print __captemp.format(inh=__ff[PFC.F_CAP_INHERIT],
                perm=__ff[PFC.F_CAP_PERM], eff=__ff[PFC.F_CAP_EFF],
                bset=__ff[PFC.F_CAP_BSET])

        if __has_seccomp:
            print __seccomptemp.format(sec=__ff[PFC.F_SEC_COMP])

        print __cpustemp.format(cpus=__ff[PFC.F_CPU_ALLOW_MASK],
                cpus_list=__ff[PFC.F_CPU_ALLOW_LIST],
                mems=__ff[PFC.F_MEM_ALLOW_MASK],
                mems_list=__ff[PFC.F_MEM_ALLOW_LIST])

        print __cswitchtemp.format(vol=__ff[PFC.F_CSWITCH_VOL],
                nonvol=__ff[PFC.F_CSWITCH_NONVOL])
# pylint: enable=R0914
| #...+....1.. | ..+....2....+....3....+....4....+....5....+....6....+....7....+....8
RG.RECREATOR[PH.GET_HANDLER("/proc/self/status")] = re_self_status
|
hoangmle/crave-bundle-2015-07-22 | metaSMT/bindings/python/examples/prime_test.py | Python | mit | 1,487 | 0.036315 | #!/usr/bin/python
from metasmt.core import *
from metasmt.operators import *
import random
default_bitwidth = 24
def prime_test(number, solver=boolector_solver(), bitwidth=default_bitwidth):
    """Try to factor *number* with an SMT solver.

    Returns an (a, b) pair with a != 1 and b != 1 such that a * b == number,
    or None when no such pair exists (i.e. *number* is prime).

    NOTE(review): the default `solver` is created once at import time and
    shared between calls -- pass a fresh solver for independent runs.
    """
    a = new_bitvector(bitwidth)
    b = new_bitvector(bitwidth)
    ext = lambda x: zero_extend(bitwidth, x)
    # a*b = number, with operands zero-extended to 2*bitwidth so the
    # multiplication cannot overflow.
    solver.assertion(
        logic_equal(
            bv_mul(ext(a), ext(b)),
            bv_uint(number)[2*bitwidth]
        )
    )
    # Exclude the trivial factorization 1 * number.
    solver.assertion(logic_nequal(a, bv_uint(1)[bitwidth]))
    solver.assertion(logic_nequal(b, bv_uint(1)[bitwidth]))
    if solver.solve():
        # Restored corrupted tokens ("read | _value", "r | eturn None").
        return (
            solver.read_value(a),
            solver.read_value(b),
        )
    else:
        return None
def prime_test_operators(number, solver=boolector_solver(), bitwidth=default_bitwidth):
    """Operator-overloading variant of prime_test.

    Returns (a, b) with 1 < a <= b and a * b == number, or None when
    *number* is prime. Uses metaSMT's `&=` assertion sugar.
    """
    a = new_bitvector( bitwidth )
    b = new_bitvector( bitwidth )
    ext = lambda x: zero_extend( bitwidth, x)
    ## a*b=c (avoiding overflows)
    solver &= ext(a) * ext(b) == bv_uint(number)[2*bitwidth]
    solver &= a != bv_uint(1)[bitwidth]
    solver &= b != bv_uint(1)[bitwidth]
    # Symmetry break: only consider ordered factor pairs.
    solver &= a <= b
    if solver.solve():
        return ( solver[a], solver[b] )
    else:
        return None
def main():
    """Factor random numbers with the SMT solver until a prime turns up."""
    while True:
        rand = random.randint(2, 2**default_bitwidth-1)
        # Trailing comma: keep the factorization result on the same line.
        print "%8d" % rand,
        result = prime_test_operators( rand )
        if result is None:
            print "is prime"
            break;
        else:
            print "can be factorized into %8d * %8d" % result
|
danjac/ownblock | ownblock/ownblock/apps/complaints/urls.py | Python | mit | 192 | 0 | from rest_ | framework import routers
from . import views
router = routers.DefaultRouter(trailing_slash=False)
router.register(r'complaints', views.ComplaintVi | ewSet)
urlpatterns = router.urls
|
DavidAndreev/indico | indico/util/suggestions.py | Python | gpl-3.0 | 6,250 | 0.0032 | # This file is part of Indico.
# Copyright (C) 2002 - 2016 European Organization for Nuclear Research (CERN).
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# Indico is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Indico; if not, see <http://www.gnu.org/licenses/>.
from __future__ import division, unicode_literals
from collections import defaultdict
from datetime import date, timedelta
from sqlalchemy.orm import joinedload, load_only
from indico.modules.events import Event
from indico.modules.events.contributions.util import get_events_with_linked_contributions
from indico.modules.events.registration.util import get_events_registered
from indico.modules.events.surveys.util import get_events_with_submitted_surveys
from indico.util.date_time import now_utc, utc_to_server
from indico.util.redis import avatar_links
from indico.util.struct.iterables import window
def _get_blocks(events, attended):
blocks = []
block = []
for event in events:
if event not in attended:
if block:
blocks.append(block)
block = []
continue
block.append(event)
if block:
blocks.append(block)
return blocks
def _query_categ_events(categ, start_dt, end_dt):
    """Query events of *categ* overlapping [start_dt, end_dt], loading only
    the columns needed for scoring."""
    query = Event.query.with_parent(categ)
    query = query.filter(Event.happens_between(start_dt, end_dt))
    return query.options(load_only('id', 'start_dt', 'end_dt'))
def _get_category_score(user, categ, attended_events, debug=False):
if debug:
print repr(categ)
# We care about events in the whole timespan where the user attended some events.
# However, this might result in some missed events e.g. if the user was not working for
# a year and then returned. So we throw away old blocks (or rather adjust the start time
# to the start time of the newest block)
first_event_date = attended_events[0].start_dt.replace(hour=0, minute=0)
last_event_date = attended_events[-1].start_dt.replace(hour=0, minute=0) + timedelta(days=1)
blocks = _get_blocks(_query_categ_events(categ, first_event_date, last_event_date), attended_events)
for a, b in window(blocks):
# More than 3 months between blocks? Ignore the old block!
if b[0].start_dt - a[-1].start_dt > timedelta(weeks=12):
first_event_date = b[0].start_dt.replace(hour=0, minute=0)
# Favorite categories get a higher base score
score = int(categ in user.favorite_categories)
if debug:
print '{0:+.3f} - initial'.format(score)
# Attendance percentage goes to the score directly. If the attendance is high chances are good that the user
# is either very interested in whatever goes on in the category or it's something he has to attend regularily.
total = _query_categ_events(categ, first_event_date, last_event_date).count()
if total:
attended_block_event_count = sum(1 for e in attended_events if e.start_dt >= first_event_date)
score += attended_block_event_count / total
if debug:
print '{0:+.3f} - attendance'.format(score)
# If there are lots/few unattended events after the last attended one we also update the score with that
total_after = _query_categ_events(categ, last_event_date + timedelta(days=1), None).count()
if total_after < total * 0.05:
score += 0.25
elif total_after > total * 0.25:
score -= 0.5
if debug:
print '{0:+.3f} - unattended new events'.format(score)
# Lower the score based on how long ago the last attended event was if there are no future events
# We start applying this modifier only if the event has been more than 40 days in the past to avoid
# it from happening in case of monthly events that are not created early enough.
days_since_last_event = (date.today() - last_event_date.date()).days
if days_since_last_event > 40:
score -= 0.025 * days_since_last_event
if debug:
print '{0:+.3f} - days since last event'.format(score)
# For events in the future however we raise the score
now_local = utc_to_server(now_utc())
attending_future = (_query_categ_events(categ, now_local, last_event_date)
| .filter(Event.id.in_(e.id for e in attended_events))
.all())
if attending_future:
score += 0.25 * len(attending_future)
if debug:
print '{0:+.3f} - future event count'.format(score)
days_to_future_event = (attending_future[0].start_dt.date() - date.today()).days
score += max(0.1, -(max(0, days_to_future_event - 2) / 4) ** (1 / 3) + 2.5)
if debug:
| print '{0:+.3f} - days to next future event'.format(score)
return score
def get_category_scores(user, debug=False):
    """Return a {category: score} mapping over all categories in which the
    user has some recorded participation (abstracts, contributions,
    registrations or survey submissions)."""
    # XXX: check if we can add some more roles such as 'contributor' to assume attendance
    event_ids = set()
    event_ids.update(int(id_)
                     for id_, roles in avatar_links.get_links(user).iteritems()
                     if 'abstract_submitter' in roles)
    event_ids.update(id_
                     for id_, roles in get_events_with_linked_contributions(user).iteritems()
                     if 'contribution_submission' in roles)
    event_ids |= get_events_registered(user)
    event_ids |= get_events_with_submitted_surveys(user)
    # All attended (non-deleted) events, chronologically, with categories
    # eagerly loaded since they are grouped on below.
    attended = (Event.query
                .filter(Event.id.in_(event_ids), ~Event.is_deleted)
                .options(joinedload('category'))
                .order_by(Event.start_dt, Event.id)
                .all())
    categ_events = defaultdict(list)
    for event in attended:
        categ_events[event.category].append(event)
    return dict((categ, _get_category_score(user, categ, events, debug))
                for categ, events in categ_events.iteritems())
|
stackforge/networking-bagpipe-l2 | networking_bagpipe/tests/unit/agent/base.py | Python | apache-2.0 | 12,322 | 0 | # Copyright (c) 2015 Orange.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
from unittest import mock
from oslo_utils import uuidutils
from networking_bagpipe.agent import bagpipe_bgp_agent
# Restored import line corrupted in source ("fro | m ... as bgpv | pn_const").
from networking_bagpipe.agent.bgpvpn import constants as bgpvpn_const
from networking_bagpipe.bagpipe_bgp import constants as bbgp_const
from neutron.plugins.ml2.drivers.linuxbridge.agent.common \
import constants as lnx_agt_constants
from neutron.plugins.ml2.drivers.linuxbridge.agent \
import linuxbridge_neutron_agent as lnx_agt
from neutron.plugins.ml2.drivers.openvswitch.agent.common \
import constants as ovs_agt_constants
from neutron.plugins.ml2.drivers.openvswitch.agent \
import ovs_agent_extension_api as ovs_ext_agt
from neutron.plugins.ml2.drivers.openvswitch.agent import vlanmanager
from neutron.tests import base
from neutron.tests.unit.plugins.ml2.drivers.openvswitch.agent \
import ovs_test_base
# Fixture data shared by the test cases: two ports on each of two
# networks, plus a router.
PORT10_ID = uuidutils.generate_uuid()
PORT10 = {'id': PORT10_ID,
          'mac_address': '00:00:de:ad:be:ef',
          'ip_address': '10.0.0.2'}

PORT11 = {'id': uuidutils.generate_uuid(),
          'mac_address': '00:00:de:ad:f0:0d',
          'ip_address': '10.0.0.3'}

NETWORK1 = {'id': uuidutils.generate_uuid(),
            'gateway_ip': '10.0.0.1',
            'segmentation_id': '101'}

PORT20 = {'id': uuidutils.generate_uuid(),
          'mac_address': '00:00:de:ad:be:ef',
          'ip_address': '20.0.0.2'}

PORT21 = {'id': uuidutils.generate_uuid(),
          'mac_address': '00:00:de:ad:f0:0d',
          'ip_address': '20.0.0.3'}

NETWORK2 = {'id': uuidutils.generate_uuid(),
            'gateway_ip': '20.0.0.1',
            'segmentation_id': '202'}

ROUTER1 = {'id': uuidutils.generate_uuid()}

# Reverse map: port id -> owning network fixture.
port_2_net = {
    PORT10['id']: NETWORK1,
    PORT11['id']: NETWORK1,
    PORT20['id']: NETWORK2,
    PORT21['id']: NETWORK2,
}

# Network id -> local VLAN tag.
LOCAL_VLAN_MAP = {
    NETWORK1['id']: 31,
    NETWORK2['id']: 52
}

# Route-target fixtures for L2 (EVPN) and L3 (IPVPN) BGPVPNs.
BGPVPN_L2_RT10 = {'route_targets': ['BGPVPN_L2:10'],
                  'import_targets': [],
                  'export_targets': []
                  }

BGPVPN_L2_RT20 = {'route_targets': ['BGPVPN_L2:20'],
                  'import_targets': [],
                  'export_targets': []
                  }

BGPVPN_L3_RT100 = {'route_targets': ['BGPVPN_L3:100'],
                   'import_targets': [],
                   'export_targets': []
                   }

BGPVPN_L3_RT200 = {'route_targets': ['BGPVPN_L3:200'],
                   'import_targets': [],
                   'export_targets': []
                   }
class DummyPort(object):
def __init__(self, network, port, bgpvpn_port=False,
evpn=None, ipvpn=None):
self.id = port['id']
self.network_id = network['id']
self.mac_address = port['mac_address']
self.ip_address = port['ip_address']
self.gateway_ip = network['gateway_ip']
if bgpvpn_port:
if evpn:
self.l2vpn = copy.deepcopy(evpn)
if ipvpn:
self.l3vpn = copy.deepcopy(ipvpn)
else:
if evpn:
self.evpn = copy.deepcopy(evpn)
if ipvpn:
self.ipvpn = copy.deepcopy(ipvpn)
class DummyVif(object):
def __init__(self, ofport, port_name):
self.ofport = ofport
self.port_name = port_name
class DummyBGPVPN(object):
def __init__(self, network, l2vpn=None, l3vpn=None, gateway_mac=None):
self.id = uuidutils.generate_uuid()
self.network_id = network['id']
if l2vpn:
self.l2vpn = copy.deepcopy(l2vpn)
if l3vpn:
self.l3vpn = copy.deepcopy(l3vpn)
if gateway_mac:
self.gateway_mac = gateway_mac
class RTList(list):
def __eq__(self, other):
return set(self) == set(other)
class BaseTestAgentExtension(object):
agent_extension_class = None
DUMMY_VIF10 = None
DUMMY_VIF11 = None
DUMMY_VIF20 = None
DUMMY_VIF21 = None
def setUp(self):
self.mocked_bagpipe_agent = mock.Mock(
spec=bagpipe_bgp_agent.BaGPipeBGPAgent
)
self.mocked_bagpipe_agent.do_port_plug = mock.Mock()
self.mocked_bagpipe_agent.do_port_plug_refresh = mock.Mock()
patcher = mock.patch('networking_bagpipe.agent.bagpipe_bgp_agent.'
'BaGPipeBGPAgent.get_instance',
return_value=self.mocked_bagpipe_agent)
patcher.start()
self.addCleanup(patcher.stop)
self.agent_ext = self.agent_extension_class()
self.connection = mock.Mock()
def _port_data(self, port, delete=False, admin_state_up=True):
data = {
'port_id': port['id']
}
if not delete:
data.update({
'port_id': port['id'],
'admin_state_up': admin_state_up,
'network_id': port_2_net[port['id']]['id'],
'segmentation_id': port_2_net[port['id']]['segmentation_id'],
'network_type': 'vxlan',
'device_owner': 'compute:None',
'mac_address': port['mac_address'],
'fixed_ips': [
{
'ip_address': port['ip_address'],
}
]
})
return data
def _get_expected_local_port(self, bbgp_vpn_type, network_id, port_id,
detach=False):
raise NotImplementedError
def _check_network_info(self, network_id, expected_size,
vpn_type=None, vpn_rts=None):
if expected_size == 0:
self.assertNotIn(network_id, self.agent_ext.networks_info,
"Network %s expected to have no ports left"
% network_id)
else:
self.assertIn(network_id, self.agent_ext.networks_info)
network_info = self.agent_ext.networks_info[network_id]
self.assertEqual(len(network_info.ports), expected_size,
"Network ports size not as expected")
class BaseTestLinuxBridgeAgentExtension(base.BaseTestCase,
BaseTestAgentExtension):
driver_type = lnx_agt_constants.EXTENSION_DRIVER_TYPE
def setUp(self):
base.BaseTestCase.setUp(self)
BaseTestAgentExtension.setUp(self)
agent_extension_api = mock.Mock()
self.agent_ext.consume_api(agent_extension_api)
self.agent_ext.initialize(self.connection,
lnx_agt_constants.EXTENSION_DRIVER_TYPE)
patcher = mock.patch('neutron.agent.linux.ip_lib.device_exists',
return_value=True)
patcher.start()
self.addCleanup(patcher.stop)
def _get_expected_local_port(self, bbgp_vpn_type, network_id, port_id,
detach=False):
linuxbr = lnx_agt.LinuxBridgeManager.get_bridge_name(network_id)
if bbgp_vpn_type == bbgp_const.EVPN:
r = {
'linuxbr': linuxbr,
'local_port': {
'linuxif': lnx_agt.LinuxBridgeManager.get_tap_device_name(
port_id)
}
}
if detach:
del r['linuxbr']
return r
else: # if bbgp_const.IPVPN:
return {
'local_port': {
'linuxif': linuxbr
}
}
PATCH_INT_TO_MPLS = 5
PATCH_INT_TO_TUN = 7
PATCH_TUN_TO_MPLS = 1
PATCH |
domino14/Webolith | djAerolith/flashcards/urls.py | Python | gpl-3.0 | 157 | 0 | from django.conf.urls import url
from flashcards.views import main, new_quiz
urlpatterns = [
url(r'^$', main),
url(r'^api/new_quiz$', new_quiz), |
]
| |
ZeitOnline/zeit.content.text | src/zeit/content/text/testing.py | Python | bsd-3-clause | 140 | 0 | import zeit.cms.testing
ZCML_LAYER = zeit.cms.testing.ZCMLL | ayer(
'ftesting.zcml', product_config=zeit.cms.testing.c | ms_product_config)
|
pmrowla/p101stat | tests/conftest.py | Python | bsd-3-clause | 969 | 0 | # -*- coding: utf-8 -*-
"""Defines fixtures available to all tests."""
import py | test
from webtest import TestApp
from p101stat.app import create_app
from p101stat.database import db as _db
from p101stat.settings import TestConfig
from .factories import IdolFactory
@pytest.yield_fixture(scope='functio | n')
def app():
"""An application for the tests."""
_app = create_app(TestConfig)
ctx = _app.test_request_context()
ctx.push()
yield _app
ctx.pop()
@pytest.fixture(scope='function')
def testapp(app):
"""A Webtest app."""
return TestApp(app)
@pytest.yield_fixture(scope='function')
def db(app):
"""A database for the tests."""
_db.app = app
with app.app_context():
_db.create_all()
yield _db
# Explicitly close DB connection
_db.session.close()
_db.drop_all()
@pytest.fixture
def idol(db):
"""A idol for the tests."""
idol = IdolFactory()
db.session.commit()
return idol
|
Avocarrot/i2o | manage.py | Python | mit | 246 | 0 | #!/ | usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "i2o.settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
| |
nuigroup/pymt-widgets | pymt/ui/window/win_glut.py | Python | lgpl-3.0 | 4,290 | 0.000932 | '''
Window GLUT: windowing provider based on GLUT
'''
__all__ = ('MTWindowGlut', )
import sys
import os
from pymt.ui.window import BaseWindow
from pymt.logger import pymt_logger
from pymt.base import stopTouchApp, getEventLoop
from OpenGL.GLUT import GLUT_RGBA, GLUT_DOUBLE, GLUT_ALPHA, GLUT_DEPTH, \
GLUT_MULTISAMPLE, GLUT_STENCIL, GLUT_ACCUM, GLUT_RIGHT_BUTTON, \
GLUT_DOWN, GLUT_ACTIVE_CTRL, GLUT_ACTIVE_ALT, GLUT_ACTIVE_SHIFT, \
glutInitDisplayMode, glutInit, glutCreateWindow, glutReshapeWindow, \
glutMouseFunc, glutMouseFunc, glutKeyboardFunc, glu | tShowWindow, \
glutFullScreen, glutDestroyWindow, glutReshapeFunc, glutDisplayFunc, \
glutMotionFunc, glutGetModifiers, glutSwapBuffers, glutPostRedisplay, \
glutMainLoop
class MTWindowGlut(BaseWindow):
__glut_window = None
def create_window(self, params):
if self.__gl | ut_window is None:
# init GLUT !
pymt_logger.debug('WinGlut: GLUT initialization')
glutInit('')
if 'PYMT_GLUT_UNITTEST' in os.environ:
glutInitDisplayMode(GLUT_RGBA | GLUT_DOUBLE)
else:
glutInitDisplayMode(
GLUT_RGBA | GLUT_DOUBLE | GLUT_ALPHA | GLUT_DEPTH |
GLUT_MULTISAMPLE | GLUT_STENCIL | GLUT_ACCUM)
# create the window
self.__glut_window = glutCreateWindow('pymt')
# register all callbcaks
glutReshapeFunc(self._glut_reshape)
glutMouseFunc(self._glut_mouse)
glutMotionFunc(self._glut_mouse_motion)
glutKeyboardFunc(self._glut_keyboard)
# update window size
glutShowWindow()
self.size = params['width'], params['height']
if params['fullscreen']:
pymt_logger.debug('WinGlut: Set window to fullscreen mode')
glutFullScreen()
super(MTWindowGlut, self).create_window(params)
def close(self):
if self.__glut_window:
glutDestroyWindow(self.__glut_window)
self.__glut_window = None
super(MTWindowGlut, self).close()
def on_keyboard(self, key, scancode=None, unicode=None):
self._glut_update_modifiers()
if ord(key) == 27:
stopTouchApp()
return True
super(MTWindowGlut, self).on_keyboard(key, scancode, unicode)
def _set_size(self, size):
if super(MTWindowGlut, self)._set_size(size):
glutReshapeWindow(*size)
return True
size = property(BaseWindow._get_size, _set_size)
def flip(self):
glutSwapBuffers()
super(MTWindowGlut, self).flip()
def mainloop(self):
'''Main loop is done by GLUT itself.'''
# callback for ticking
def _glut_redisplay():
evloop = getEventLoop()
# hack, glut seem can't handle the leaving on the mainloop
# so... leave with sys.exit() :[
try:
evloop.idle()
except KeyboardInterrupt:
evloop.quit = True
if evloop.quit:
sys.exit(0)
glutPostRedisplay()
# install handler
glutDisplayFunc(_glut_redisplay)
# run main loop
glutMainLoop()
#
# GLUT callbacks
#
def _glut_reshape(self, w, h):
self.size = w, h
def _glut_mouse(self, button, state, x, y):
self._glut_update_modifiers()
btn = 'left'
if button == GLUT_RIGHT_BUTTON:
btn = 'right'
if state == GLUT_DOWN:
self.dispatch_event('on_mouse_down', x, y, btn, self.modifiers)
else:
self.dispatch_event('on_mouse_up', x, y, btn, self.modifiers)
def _glut_mouse_motion(self, x, y):
self.dispatch_event('on_mouse_move', x, y, self.modifiers)
def _glut_keyboard(self, key, x, y):
self.dispatch_event('on_keyboard', key, None, None)
def _glut_update_modifiers(self):
self._modifiers = []
mods = glutGetModifiers()
if mods & GLUT_ACTIVE_SHIFT:
self._modifiers.append('shift')
if mods & GLUT_ACTIVE_ALT:
self._modifiers.append('alt')
if mods & GLUT_ACTIVE_CTRL:
self._modifiers.append('ctrl')
|
NeuralEnsemble/NeuroTools | test/test_plotting.py | Python | gpl-2.0 | 5,059 | 0.008697 | """
Unit tests for the NeuroTools.plotting module
"""
import matplotlib
matplotlib.use('Agg')
import unittest
from NeuroTools import plotting
import pylab
import os
class PylabParamsTest(unittest.TestCase):
def runTest(self):
# define arbitrary values
fig_width_pt = 123.4
ratio = 0.1234
text_fontsize = 10
tick_labelsize = 8
useTex = False
inches_per_pt = 1.0/72.27 # Convert pt to inch
fig_width = fig_width_pt*inches_per_pt # width in inches
fig_height = fig_width*ratio # height in inches
testDict = {
'axes.labelsize' : text_fontsize,
'font.size' : text_fontsize,
'xtick.labelsize' : tick_labelsize,
'ytick.labelsize' : tick_labelsize,
'text.usetex' : useTex,
'figure.figsize' : [fig_width, fig_height]}
plotting.set_pylab_params(fig_width_pt=fig_width_pt, ratio=ratio,
text_fontsize=text_fontsize,
tick_labelsize=tick_labelsize, useTex=useTex)
for k in testDict.keys():
assert pylab.rcParams.has_key(k)
assert pylab.rcParams[k] == testDict[k]
class GetDisplayTest(unittest.TestCase):
def runTest(self):
a = plotting.get_display(True)
assert a != None
a = plotting.get_display(False)
assert a == None
a = plotting.get_display(1234)
assert a == 1234
class ProgressBarTest(unittest.TestCase):
def runTest(self):
import time
print '\nINFO: Testing progress bar...'
for i in range(100):
plotting.progress_bar(i/100.)
time.sleep(.01)
print '\n'
class Save2DImageTest(unittest.TestCase):
def runTest(self):
import numpy
mat = numpy.random.random([50,50])
filename = 'deleteme.png'
if os.path.exists(filename): os.remove(filename)
plotting.save_2D_image(mat, filename)
assert os.path.exists(filename)
os.remove(filename)
class Save2DMovieTest(unittest.TestCase):
def runTest(self):
import numpy
frames = []
duration = 0.1
for i in rang | e(10):
frames.append(numpy.random.randint(0,255,[10,10]))
filename = 'deleteme.zip'
if os.path.exists(filename): os.remove(filename)
plotting.save_2D_movie(frames, filename, duration)
| assert os.path.exists(filename)
os.remove(filename)
class SetLabelsTest(unittest.TestCase):
def runTest(self):
f = plotting.get_display(True)
x = range(10)
p = pylab.plot(x)
plotting.set_labels(pylab, 'the x axis', 'the y axis')
# set up a SimpleMultiplot with arbitrary values
self.nrows = 1
self.ncolumns = 1
title = 'testMultiplot'
xlabel = 'testXlabel'
ylabel = 'testYlabel'
scaling = ('linear','log')
self.smt = plotting.SimpleMultiplot(nrows=self.nrows, ncolumns=self.ncolumns, title=title, xlabel=xlabel, ylabel=ylabel, scaling=scaling)
plotting.set_labels(self.smt.panel(0), 'the x axis', 'the y axis')
class SetAxisLimitsTest(unittest.TestCase):
def runTest(self):
f = plotting.get_display(True)
x = range(10)
pylab.plot(x)
plotting.set_axis_limits(pylab, 0., 123., -123., 456.)
# set up a SimpleMultiplot with arbitrary values
self.nrows = 1
self.ncolumns = 1
title = 'testMultiplot'
xlabel = 'testXlabel'
ylabel = 'testYlabel'
scaling = ('linear','log')
self.smt = plotting.SimpleMultiplot(nrows=self.nrows, ncolumns=self.ncolumns, title=title, xlabel=xlabel, ylabel=ylabel, scaling=scaling)
plotting.set_axis_limits(self.smt.panel(0), 0., 123., -123., 456.)
class SimpleMultiplotTest(unittest.TestCase):
def setUp(self):
# define arbitrary values
self.nrows = 4
self.ncolumns = 5
title = 'testMultiplot'
xlabel = 'testXlabel'
ylabel = 'testYlabel'
scaling = ('linear','log')
self.smt = plotting.SimpleMultiplot(nrows=self.nrows, ncolumns=self.ncolumns, title=title, xlabel=xlabel, ylabel=ylabel, scaling=scaling)
class SimpleMultiplotSaveTest(SimpleMultiplotTest):
def runTest(self):
filename = "deleteme.png"
if os.path.exists(filename): os.remove(filename)
self.smt.save(filename)
assert os.path.exists(filename)
os.remove(filename)
class SimpleMultiplotSetFrameTest(SimpleMultiplotTest):
def runTest(self):
numPanels = self.nrows * self.ncolumns
boollist = [True,False,False,True]
for i in range(numPanels):
ax_indexed = self.smt.panel(i)
ax_next = self.smt.next_panel()
assert ax_indexed == ax_next
self.smt.set_frame(ax_indexed,boollist,linewidth=4)
if __name__ == "__main__":
unittest.main()
|
miltonsarria/dsp-python | informatica/class_python/ejemplo_qt_server.py | Python | mit | 2,390 | 0.017573 | import os
import sys
from PyQt4.QtCore import *
from PyQt4.QtGui import *
import numpy as np
import threading
import time
##########################################################
##########################################################
##########################################################
########### main window frame ###########################
##########################################################
class ServForm(QMainWindow):
def __init__(self, paren | t=None):
QMainWindow.__init__(self, parent)
self.setWindowTitle('Main: ventana principal')
self.create_main_frame | ()
#function when close event.... ask yes or no?
def closeEvent(self, event):
reply = QMessageBox.question(self, 'Message',
"Exit?", QMessageBox.Yes |
QMessageBox.No, QMessageBox.No)
if reply == QMessageBox.Yes:
event.accept()
else:
event.ignore()
#function apply layout, crear el entorno completo (lo visual)
def create_main_frame(self):
self.main_frame = QWidget()
self.label_port = QLabel("Puerto: ")
self.edit_port = QLineEdit()
self.start_button = QPushButton("Iniciar servidor")
self.dialog =QTextEdit()
self.edit_line = QLineEdit()
self.send_button = QPushButton("Enviar")
hbox1 = QHBoxLayout()
for w in [ self.label_port, self.edit_port, self.start_button]:
hbox1.addWidget(w)
hbox1.setAlignment(w, Qt.AlignVCenter)
hbox2 = QHBoxLayout()
for w in [ self.edit_line, self.send_button]:
hbox2.addWidget(w)
hbox2.setAlignment(w, Qt.AlignVCenter)
vbox = QVBoxLayout()
vbox.addLayout(hbox1)
vbox.addWidget(self.dialog)
vbox.addLayout(hbox2)
self.main_frame.setLayout(vbox)
self.setCentralWidget(self.main_frame)
##########################################################
##########################################################
def main():
app = QApplication(sys.argv)
form = ServForm()
form.show()
sys.exit(app.exec_())
if __name__ == "__main__":
main()
#self.axes2.spines['right'].set_visible(False)
|
Summerotter/furryyellowpages | app/__init__.py | Python | mit | 1,090 | 0.00367 | from flask import Flask
from flask.ext.bootstrap import Bootstrap
from flask.ext.mail import Mail
from flask.ext.moment import Moment
from flask.ext.sqlalchemy import SQLAlchemy
from flask.ext.login import LoginManager
from flask.ext.pagedown import PageDown
from config import config
import os
bootstrap = Bootstrap()
mail = Mail()
moment = Moment()
db = SQLAlchemy()
pagedown = PageDown()
login_manager = LoginManager()
login_manager.session_protection = 'strong'
login_manager.login_view = 'auth.login'
def create_app(config_name):
app = Flask(__name__)
app.config.from_object(config[config_name])
confi | g[config_name].init_app(app)
bootstrap.init_app(app)
mail.init_app(app)
moment.init_app(app)
db.init_app(app)
login_manager.init_app(app)
pagedown.init_app(app)
from .main import main as | main_blueprint
app.register_blueprint(main_blueprint)
from .auth import auth as auth_blueprint
app.register_blueprint(auth_blueprint, url_prefix='/auth')
return app
#
#app = create_app(os.getenv('FLASK_CONFIG') or 'default') |
marqsm/LED-bot | LEDBot/emojiHandler.py | Python | mit | 1,723 | 0.002322 | import pickle
from os.path import dirname, exists, join
import re
HERE = dirname(__file__)
class Emoji():
def __init__(self):
self.emoji_directory = dict()
self._pickle_path = join(HERE, 'emoji-dict.pickle')
self.init()
def init(self):
if exists(self._pickle_path) and self.load(self._pickle_path):
print("Emoji dictionary re | ady.")
e | lse:
self.create_dict(self.load_emoji_names())
self.dump(self._pickle_path)
print("Created new pickle file with emoji dictionary.")
def load_emoji_names(self):
with open(join(HERE, 'emoji.txt')) as f:
return f.read().splitlines()
def create_dict(self, emoji_names):
emoji_d = dict()
for emoji in emoji_names:
emoji_img = emoji.strip(':') + ".png"
emoji_d[emoji] = join(HERE, 'emoji',emoji_img)
self.emoji_directory = emoji_d
def load(self, filename):
with open(filename, "rb") as f:
try:
self.emoji_directory = pickle.load(f)
print("Pickle load was successful.")
return True
except:
print("Loading emoji dictionary failed.")
return False
def dump(self, filename):
try:
with open(filename, "w+") as f:
pickle.dump(self.emoji_directory, f)
print("Pickle dump was successful.")
return True
except:
print("Could not dump.")
return False
def check_emoji(self, emoji_name):
# pattern to match emoji
pattern = "^:[a-zA-Z0-9_]*:$"
return re.match(pattern, emoji_name)
|
mythkiven/python | demo/CET查询/learn_images.py | Python | mit | 1,312 | 0 | # coding: utf-8
"""
labeled_images文件夹中:
1. 包含的文件夹名为标记名
2. 标记名下的文件夹中包含了学习图片
"""
import os
from sklearn import svm
from PIL import Image
from numpy import array
from utils import *
clf = None
def get_image_fit_data(dir_name):
"""读取labeled_images文件夹的图片,返回图片的特征矩阵及相应标记"""
X = []
Y = []
name_list = os.listdir(dir_name)
for name in name_list:
if not os.path.isdir(os.path.join(dir_name, name)):
continue
image_files = os.listdir(os.path.join(dir_name, name))
for img in image_files: |
i = Image.open(os.path.join(dir_name, name, img))
X.append(array(i).flatten())
Y.append(name)
return X, Y
def get_classifier_from_learn():
"""学习数据获取分类器"""
global clf
if not clf:
clf = svm.SVC()
X, Y = get_image_fit_data("labeled_images")
clf.fit(X, Y)
return clf
def main():
clf = get_class | ifier_from_learn()
print(clf)
PX, PY = get_image_fit_data("predict_images")
for x, y in zip(PX, PY):
r = clf.predict(x.reshape(1, -1))
print(r, y)
if __name__ == '__main__':
main()
|
reimandlab/ActiveDriverDB | website/search/gene.py | Python | lgpl-2.1 | 9,332 | 0.000107 | from abc import ABC, abstractmethod
from collections import defaultdict
from Levenshtein import distance
from sqlalchemy import and_
from werkzeug.utils import cached_property
from models import Protein, UniprotEntry, ProteinReferences
from models import Gene
from database import db
class GeneMatch:
def __init__(self, gene=None, scored_matches=None, matched_isoforms=None):
self.gene = gene
self.matches = scored_matches or {}
self.matched_isoforms = matched_isoforms or []
@classmethod
def from_feature(cls, gene, matched_feature, match_score, matched_isoforms=None):
return cls(gene, {matched_feature: match_score}, matched_isoforms)
@property
def best_score(self):
"""Score is based on edit distance. Less is better."""
return min(self.matches.values())
def __iadd__(self, other):
if not self.gene:
self.gene = other.gene
assert self.gene == other.gene
for feature, score in other.matches.items():
if feature in self.matches:
self.matches[feature] = min(self.matches[feature], score)
else:
self.matches[feature] = score
self.matched_isoforms.extend(other.matched_isoforms)
return self
def __getattr__(self, key):
return getattr(self.gene, key)
class GeneOrProteinSearch(ABC):
def __init__(self, options=None):
self.options = options
@property
@abstractmethod
def name(self):
"""Internal name; also a base for pretty name shown to the user."""
pass
@cached_property
def pretty_name(self):
return self.name.replace('_', ' ').title()
@abstractmethod
def search(self, phrase, sql_filters=None, limit=None):
pass
@property
def base_query(self):
return Gene.query
@property
def query(self):
query = self.base_query
if self.options:
query = query.options(self.options)
return query
class GeneSearch(GeneOrProteinSearch):
@property
@abstractmethod
def feature(self):
"""Name of the feature analysed by this GeneSearch."""
return ''
def get_feature(self, gene):
return getattr(gene, self.feature)
def search(self, phrase, sql_filters=None, limit=None):
"""Perform look up for a gene using provided phrase.
The default implementation uses `get_feature`
to perform search using the defined feature.
If isoform-level filters are applied, these will
be executed on the preferred_isoform of gene.
"""
feature = self.get_feature(Gene)
filters = [feature.like(phrase.strip() + '%')]
if sql_filters:
filters += sql_filters
orm_query = (
self.query
.join(Protein, Gene.preferred_isoform) # to allow PTM filter
.filter(and_(*filters))
)
if limit:
orm_query = orm_query.limit(limit)
return [
GeneMatch.from_feature(gene, self, self.sort_key(gene, phrase))
for gene in orm_query
]
def sort_key(self, gene, phrase):
return distance(self.get_feature(gene), phrase)
class SymbolGeneSearch(GeneSearch):
"""Look up a gene by HGNC symbol
Targets: Gene.name
Example:
search for "TP53" should return TP53 (among others)
"""
name = 'gene_symbol'
feature = 'name'
class GeneNameSearch(GeneSearch):
"""Look up a gene by full name, defined by HGNC
Targets: Gene.full_name
Example:
search for "tumour protein" should return TP53 (among others)
"""
name = 'gene_name'
feature = 'full_name'
class ProteinSearch(GeneOrProteinSearch):
"""Looks up a gene, based on a feature of its isoforms.
The matched isoforms are recorded in GeneMatch object.
"""
def create_query(
self, limit, filters, sql_filters, entities=(Gene, Protein),
add_joins=lambda query: query
):
if sql_filters:
filters += sql_filters
genes = (
add_joins(
self.query
.join(Protein, Gene.isoforms)
)
.filter(and_(*filters))
.group_by(Gene)
)
if limit:
genes = genes.limit(limit)
genes = genes.subquery('genes')
query = (
add_joins(
db.session.query(*entities)
.select_from(Gene)
.join(Protein, Gene.isoforms)
)
.filter(and_(*filters))
.filter(Gene.id == genes.c.id)
)
if self.options:
query = query.options(self.options)
return query
@staticmethod
@abstractmethod
def sort_key(result, phrase):
pass
def parse_matches(self, query, phrase):
matches = []
# aggregate by genes
isoforms_by_gene = defaultdict(set)
for gene, isoform in query:
isoforms_by_gene[gene].add(isoform)
for gene, isoforms in isoforms_by_gene.items():
match = GeneMatch.from_feature(
gene,
self,
self.best_score(isoforms, phrase),
matched_isoforms=isoforms
)
matches.append(match)
return matches
def best_score(self, results, phrase):
return min(
self.sort_key(isoform, phrase)
for isoform in results
)
class ProteinNameSearch(ProteinSearch):
name = 'protein_name'
def search(self, phrase, sql_filters=None, limit=None):
filters = [Protein.full_name.ilike(phrase + '%')]
query = self.create_query(limit, filters, sql_filters)
return self.parse_matche | s(query, phrase)
@staticmethod
def sort_key(isoform, phrase):
return distance(isoform.full_name, phrase)
class RefseqGeneSearch(ProteinSearch):
"""Look up a gene by isoforms RefSeq.
Only numeric phrases and phrases starting with:
"NM_" or "nm_" will be evaluated. |
Targets: Protein.refseq
Example:
search for "NM_00054" should return: TP53 [with matched
isoforms = Protein(refseq=NM_000546)] (among others)
"""
name = 'refseq'
pretty_name = 'RefSeq'
def search(self, phrase, sql_filters=None, limit=None):
if phrase.isnumeric():
phrase = 'NM_' + phrase
if not (phrase.startswith('NM_') or phrase.startswith('nm_')):
return []
filters = [Protein.refseq.like(phrase + '%')]
query = self.create_query(limit, filters, sql_filters)
return self.parse_matches(query, phrase)
@staticmethod
def sort_key(isoform, phrase):
return distance(isoform.refseq, phrase)
class SummarySearch(ProteinSearch):
"""Look up a gene by summary of isoforms.
This is full-text search and may be expensive.
Targets: Protein.summary
"""
name = 'summary'
def __init__(self, options=None, minimal_length=3):
super().__init__(options)
self.minimal_length = minimal_length
def search(self, phrase, sql_filters=None, limit=None):
if len(phrase) < self.minimal_length:
return []
filters = [Protein.summary.ilike('%' + phrase + '%')]
query = self.create_query(limit, filters, sql_filters)
return self.parse_matches(query, phrase)
@staticmethod
def sort_key(isoform, phrase):
return distance(isoform.summary, phrase)
class UniprotSearch(ProteinSearch):
"""Look up a gene by isoforms Uniprot accession.
Only phrases longer than 2 characters will be evaluated.
Targets: Protein.external_references.uniprot_entries
"""
name = 'uniprot'
def search(self, phrase, sql_filters=None, limit=None):
if len(phrase) < 3:
return []
filters = [UniprotEntry.accession.like(phrase + '%')]
def add_joins(q):
return (
q
.join(ProteinReferences)
.join(ProteinReferences.uniprot_as |
klmitch/jenkins-job-builder | tests/modules/test_helpers.py | Python | apache-2.0 | 2,542 | 0 | #
# Copyright (c) 2016 Kien Ha <kienha9922@gmail.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import testtools
from testtools.matchers import Equals
import xml.etree.ElementTree as XML
import yaml
from jenkins_jobs.errors import MissingAttributeError
from jenkins_jobs.modules.helpers import convert_mapping_to_xml
from tests.base import LoggingFixture
class TestCaseTestHelpers(LoggingFixture, testtools.TestCase):
def test_convert_mapping_to_xml(self):
"""
Tests the test_convert_mapping_to_xml_fail_required function
"""
# Test default values
default_root = XML.Element('testdefault')
default_data = yaml.load("string: hello")
default_mappings = [('default-string', 'defaultString', 'default')]
convert_mapping_to_xml(
default_root,
default_data,
default_mappings,
fail_required=True)
result = default_root.find('defaultString').text
self.assertThat(result, Equals('default'))
# Test user input
user_input_root = XML.Element('testUserInput')
user_input_data | = yaml.load("user-input-string: hello")
user_input_mappings = [('user-input-string', 'userInputString',
'user-input')]
convert_mapping_to_xml(
user_input_root,
user_input_data,
user_input_mappings,
fail_required=True)
result = user_input_root.find('userInputString').text
self.assertThat( | result, Equals('hello'))
# Test missing required input
required_root = XML.Element('testrequired')
required_data = yaml.load("string: hello")
required_mappings = [('required-string', 'requiredString', None)]
self.assertRaises(MissingAttributeError,
convert_mapping_to_xml,
required_root,
required_data,
required_mappings,
fail_required=True)
|
SINGROUP/pycp2k | pycp2k/classes/_each323.py | Python | lgpl-3.0 | 1,114 | 0.001795 | from pycp2k.inputsection import InputSection
class _each323(InputSection):
def __init__(self):
InputSection.__init__(self)
self.Just_energy = None
self.Powell_opt = None
self.Qs_scf = None
self.Xas_scf = None
self.Md = None
self.Pint = None
self.Metadynamics = None
self.Geo_opt = None
self.Rot_opt = None
self.Cell_opt = Non | e
self.Band = None
self.Ep_lin_solver = None
self.Spline_find_coeffs = None
self.Replica_eval = None
self.Bsse = None
self.Shell_opt = None
self.Tddft_scf = None
self._name = "EACH"
self._keywords = {'Bsse': 'BSSE', 'Cell_opt': 'CELL_OPT', 'Just_energy': ' | JUST_ENERGY', 'Band': 'BAND', 'Xas_scf': 'XAS_SCF', 'Rot_opt': 'ROT_OPT', 'Replica_eval': 'REPLICA_EVAL', 'Tddft_scf': 'TDDFT_SCF', 'Shell_opt': 'SHELL_OPT', 'Md': 'MD', 'Pint': 'PINT', 'Metadynamics': 'METADYNAMICS', 'Geo_opt': 'GEO_OPT', 'Spline_find_coeffs': 'SPLINE_FIND_COEFFS', 'Powell_opt': 'POWELL_OPT', 'Qs_scf': 'QS_SCF', 'Ep_lin_solver': 'EP_LIN_SOLVER'}
|
crate/crate-python | src/crate/client/sqlalchemy/tests/create_table_test.py | Python | apache-2.0 | 6,206 | 0 | # -*- coding: utf-8; -*-
#
# Licensed to CRATE Technology GmbH ("Crate") under one or more contributor
# license agreements. See the NOTICE file distributed with this work for
# additional information regarding copyright ownership. Crate licenses
# this file | to you under the Apach | e License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License. You may
# obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# However, if you have executed another commercial license agreement
# with Crate these terms will supersede the license and you may use the
# software solely pursuant to the terms of the relevant commercial agreement.
import sqlalchemy as sa
from sqlalchemy.ext.declarative import declarative_base
from crate.client.sqlalchemy.types import Object, ObjectArray
from crate.client.cursor import Cursor
from unittest import TestCase
from unittest.mock import patch, MagicMock
fake_cursor = MagicMock(name='fake_cursor')
FakeCursor = MagicMock(name='FakeCursor', spec=Cursor)
FakeCursor.return_value = fake_cursor
@patch('crate.client.connection.Cursor', FakeCursor)
class CreateTableTest(TestCase):
def setUp(self):
self.engine = sa.create_engine('crate://')
self.Base = declarative_base(bind=self.engine)
def test_create_table_with_basic_types(self):
class User(self.Base):
__tablename__ = 'users'
string_col = sa.Column(sa.String, primary_key=True)
unicode_col = sa.Column(sa.Unicode)
text_col = sa.Column(sa.Text)
int_col = sa.Column(sa.Integer)
long_col1 = sa.Column(sa.BigInteger)
long_col2 = sa.Column(sa.NUMERIC)
bool_col = sa.Column(sa.Boolean)
short_col = sa.Column(sa.SmallInteger)
datetime_col = sa.Column(sa.DateTime)
date_col = sa.Column(sa.Date)
float_col = sa.Column(sa.Float)
double_col = sa.Column(sa.DECIMAL)
self.Base.metadata.create_all()
fake_cursor.execute.assert_called_with(
('\nCREATE TABLE users (\n\tstring_col STRING, '
'\n\tunicode_col STRING, \n\ttext_col STRING, \n\tint_col INT, '
'\n\tlong_col1 LONG, \n\tlong_col2 LONG, '
'\n\tbool_col BOOLEAN, '
'\n\tshort_col SHORT, '
'\n\tdatetime_col TIMESTAMP, \n\tdate_col TIMESTAMP, '
'\n\tfloat_col FLOAT, \n\tdouble_col DOUBLE, '
'\n\tPRIMARY KEY (string_col)\n)\n\n'),
())
def test_with_obj_column(self):
class DummyTable(self.Base):
__tablename__ = 'dummy'
pk = sa.Column(sa.String, primary_key=True)
obj_col = sa.Column(Object)
self.Base.metadata.create_all()
fake_cursor.execute.assert_called_with(
('\nCREATE TABLE dummy (\n\tpk STRING, \n\tobj_col OBJECT, '
'\n\tPRIMARY KEY (pk)\n)\n\n'),
())
def test_with_clustered_by(self):
class DummyTable(self.Base):
__tablename__ = 't'
__table_args__ = {
'crate_clustered_by': 'p'
}
pk = sa.Column(sa.String, primary_key=True)
p = sa.Column(sa.String)
self.Base.metadata.create_all()
fake_cursor.execute.assert_called_with(
('\nCREATE TABLE t (\n\t'
'pk STRING, \n\t'
'p STRING, \n\t'
'PRIMARY KEY (pk)\n'
') CLUSTERED BY (p)\n\n'),
())
def test_with_partitioned_by(self):
class DummyTable(self.Base):
__tablename__ = 't'
__table_args__ = {
'crate_partitioned_by': 'p',
'invalid_option': 1
}
pk = sa.Column(sa.String, primary_key=True)
p = sa.Column(sa.String)
self.Base.metadata.create_all()
fake_cursor.execute.assert_called_with(
('\nCREATE TABLE t (\n\t'
'pk STRING, \n\t'
'p STRING, \n\t'
'PRIMARY KEY (pk)\n'
') PARTITIONED BY (p)\n\n'),
())
    def test_with_number_of_shards_and_replicas(self):
        """Shard/replica table args render the CLUSTERED INTO ... WITH clause."""
        class DummyTable(self.Base):
            __tablename__ = 't'
            __table_args__ = {
                'crate_number_of_replicas': '2',
                'crate_number_of_shards': 3
            }
            pk = sa.Column(sa.String, primary_key=True)
        self.Base.metadata.create_all()
        fake_cursor.execute.assert_called_with(
            ('\nCREATE TABLE t (\n\t'
             'pk STRING, \n\t'
             'PRIMARY KEY (pk)\n'
             ') CLUSTERED INTO 3 SHARDS WITH (NUMBER_OF_REPLICAS = 2)\n\n'),
            ())
    def test_with_clustered_by_and_number_of_shards(self):
        """Combining clustered_by and number_of_shards renders both, and a
        composite primary key keeps the column declaration order."""
        class DummyTable(self.Base):
            __tablename__ = 't'
            __table_args__ = {
                'crate_clustered_by': 'p',
                'crate_number_of_shards': 3
            }
            pk = sa.Column(sa.String, primary_key=True)
            p = sa.Column(sa.String, primary_key=True)
        self.Base.metadata.create_all()
        fake_cursor.execute.assert_called_with(
            ('\nCREATE TABLE t (\n\t'
             'pk STRING, \n\t'
             'p STRING, \n\t'
             'PRIMARY KEY (pk, p)\n'
             ') CLUSTERED BY (p) INTO 3 SHARDS\n\n'),
            ())
    def test_table_with_object_array(self):
        """An ``ObjectArray`` column is rendered as ARRAY(OBJECT)."""
        class DummyTable(self.Base):
            __tablename__ = 't'
            pk = sa.Column(sa.String, primary_key=True)
            tags = sa.Column(ObjectArray)
        self.Base.metadata.create_all()
        fake_cursor.execute.assert_called_with(
            ('\nCREATE TABLE t (\n\t'
             'pk STRING, \n\t'
             'tags ARRAY(OBJECT), \n\t'
             'PRIMARY KEY (pk)\n)\n\n'), ())
|
amartinez1/nonymous | likes/views.py | Python | mit | 4,991 | 0.00581 | # Create your views here.
from django.http import HttpResponse
from django.db.models import Sum
from confess.models import Post
from .middleware import retrieve_token
from .models import Like, MtmLike2Conf
from django.views.generic import TemplateView
import logging
from django.core import serializers
import json
# NOTE(review): basicConfig() at import time configures the root logger as a
# side effect; this is normally left to the application entry point.
logging.basicConfig()
logger = logging.getLogger(__name__)

# Weight stored in Like.vote when a post is liked (0 when unliked).
VOTE = +1
''' This Class handles the like feature, if an user likes first time it creates a token, if an user has alredy liked a Post
it wil be unliked or downvoted '''
class like(TemplateView):
    """Toggle a like on a confession post for the cookie-identified user.

    GET params:
        confession_id -- pk of the ``Post`` to (un)like.

    First request from a user creates a ``Like`` row; later requests toggle
    between liked (vote=VOTE) and unliked (vote=0), keeping
    ``Post.total_likes`` in sync.  Responds with JSON
    ``{count, label, user_token}``.

    Fixes: misspelled 'aplication/json' content types, debug ``print``
    statements removed, local variable no longer shadows the class name,
    dead ``like = like.id`` assignment dropped.
    """

    def get(self, request, *args, **kwargs):
        logger.info('Entry')
        count = 0
        label = 'Unlike'
        post_id = request.GET['confession_id']
        user_token = retrieve_token(request)
        posts = Post.objects.get(id=int(post_id))
        # Has this user already liked (or unliked) the post?
        if Like.objects.filter(post=posts, user_token=user_token).exists():
            user_like = Like.objects.get(post=posts, user_token=user_token)
            # NOTE(review): this condition is always true given the query
            # above; if it were ever false the view would return None.
            if int(user_like.post.id) == int(post_id) and user_like.user_token == user_token:
                if user_like.liked == False:
                    # Re-like a previously unliked post.
                    user_like.vote = VOTE
                    user_like.liked = True
                    user_like.label = label
                    user_like.save()
                    posts.liked = True
                    posts.total_likes = posts.total_likes + 1
                    posts.save()
                    data = {
                        'count': posts.total_likes,
                        'label': label,
                        'user_token': user_token
                    }
                    result = json.dumps(data)
                    return HttpResponse(result, content_type='application/json')
                else:
                    # Currently liked -> withdraw the like.
                    label = 'Like'
                    user_like.label = label
                    user_like.save()
                    count = unlike(request, posts, user_token)
                    data = {
                        'count': count,
                        'label': label,
                        'user_token': user_token
                    }
                    result = json.dumps(data)
                    return HttpResponse(result, content_type='application/json')
        else:
            # First interaction: create the Like and its m2m link row.
            new_like = Like(post=Post.objects.get(id=post_id),
                            user_token=user_token, vote=VOTE, liked=True)
            new_like.label = label
            new_like.save()
            posts.liked = True
            posts.total_likes = posts.total_likes + 1
            posts.save()
            mtm_like_2_conf = MtmLike2Conf(confession=posts, like=new_like)
            mtm_like_2_conf.save()
            count = posts.total_likes
            data = {
                'count': count,
                'label': label,
                'user_token': user_token
            }
            result = json.dumps(data)
            return HttpResponse(result, content_type='application/json')
def unlike(request, posts, user_token):
    """Withdraw *user_token*'s like on *posts* and return the new count.

    Decrements ``Post.total_likes`` and resets the Like row to
    vote=0 / liked=False.  (Dead commented-out code and the redundant
    intermediate ``count`` variable were removed.)
    """
    down_vote = 0
    # Re-fetch so we mutate a fresh copy of the Post row.
    post_obj = Post.objects.get(id=posts.id)
    post_obj.total_likes = post_obj.total_likes - 1
    post_obj.save()
    like = Like.objects.get(post=post_obj, user_token=user_token)
    like.vote = down_vote
    like.liked = False
    like.save()
    return post_obj.total_likes
def like_count(request, post):
    """Recompute *post*'s like total from the sum of Like.vote and persist it.

    ``aggregate`` on an empty queryset yields ``None``; ``or 0`` covers that,
    so the previous duplicated exists()/else branches collapse to one path.
    """
    likes = Like.objects.filter(post=post).aggregate(Sum('vote'))['vote__sum'] or 0
    post.total_likes = likes
    post.save()
    return likes
class fill_modal(TemplateView):
    """Return one serialized Post (by ?id=) as JSON for the detail modal."""

    def get(self, request, *args, **kwargs):
        post = Post.objects.get(id=request.GET['id'])
        serialized = serializers.serialize(
            'json', [post, ],
            fields=('pk', 'title', 'text', 'posted', 'total_likes'))
        # Django's serializer wraps records in a list; unwrap the single one.
        first_record = json.loads(serialized)[0]
        return HttpResponse(json.dumps(first_record),
                            content_type='application/json')
class user_like(TemplateView):
    """Report the like-button label for the current user and ?id= post.

    Fixes the misspelled 'aplication/json' content type.
    """

    def get(self, request, *args, **kwargs):
        token = retrieve_token(request)
        post_id = request.GET['id']
        like_obj = Like.objects.get(post=post_id, user_token=token)
        data = {'label': like_obj.label}
        result = json.dumps(data)
        return HttpResponse(result, content_type='application/json')
|
stevenewey/wagtail | wagtail/wagtailimages/views/frontend.py | Python | bsd-3-clause | 977 | 0.002047 | from wsgiref.util import FileWrapper
import | imghdr
from django.shortcuts import get_object_or_404
from django.http import HttpResponse
from django.core.exceptions import PermissionDenied
from wagtail.wagtailimages.models import get_image_model
from wagtail.wagtailimages.utils import verify_signature
from wagtail.wagtailimages.exceptions import InvalidFilterSpecError
def serve(request, signature, image_id, filter_spec):
image = | get_object_or_404(get_image_model(), id=image_id)
if not verify_signature(signature.encode(), image_id, filter_spec):
raise PermissionDenied
try:
rendition = image.get_rendition(filter_spec)
rendition.file.open('rb')
image_format = imghdr.what(rendition.file)
return HttpResponse(FileWrapper(rendition.file), content_type='image/' + image_format)
except InvalidFilterSpecError:
return HttpResponse("Invalid filter spec: " + filter_spec, content_type='text/plain', status=400)
|
hachreak/invenio-demosite | invenio_demosite/base/fixtures/oai_harvest.py | Python | gpl-2.0 | 1,982 | 0.001514 | # -*- coding: utf-8 -*-
#
# This file is part of Invenio Demosite.
# Copyright (C) 2013 CERN.
#
# Invenio Demosite is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Invenio Demosite is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Invenio; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
from datetime import datetime
from fixture import DataSet
class OaiREPOSITORYData(DataSet):
class OaiREPOSITORY_2:
f1 = u'reportnumber'
f2 = u'division'
f3 = u''
setRecList = None
setDefinition = u'c=;p1=CERN;f1=reportnumber;m1=a;p2=(EP|PPE);f2=division;m2=r;p3=;f3=;m3=;'
last_updated = datetime.now()
id = 2
setSpec = u'cern:experiment'
setDescription = u''
p3 = u''
p1 = u'CERN'
setName = u'CERN experimental papers'
setCollection = u''
p2 = u'(EP|PPE)'
m1 = u'a'
m3 = u''
m2 = u'r'
class OaiREPOSITORY_3:
f1 = u'reportnumber'
f2 = u'd | ivision'
f3 = u''
setRecList = None
setDefinition = u'c=;p1=CERN;f1=reportnumber;m1=a;p2=TH;f2=division;m2=e;p3=;f3=;m3=;'
last_updated = datetime.now()
id = 3
setSpec = u'cern: | theory'
setDescription = u''
p3 = u''
p1 = u'CERN'
setName = u'CERN theoretical papers'
setCollection = u''
p2 = u'TH'
m1 = u'a'
m3 = u''
m2 = u'e'
__all__ = ('OaiREPOSITORYData', )
|
spulec/moto | moto/instance_metadata/urls.py | Python | apache-2.0 | 207 | 0 | from .respo | nses import InstanceMetadataResponse
# The EC2 instance-metadata service lives at the AWS link-local address.
url_bases = ["http://169.254.169.254"]

# One shared responder instance handles every metadata request.
instance_metadata = InstanceMetadataResponse()

# Route every sub-path under the base URL to the metadata handler.
url_paths = {"{0}/(?P<path>.+)": instance_metadata.metadata_response}
|
callowayproject/django-massmedia | massmedia/urls.py | Python | apache-2.0 | 3,236 | 0.001236 | from django.conf.urls import patterns, url
from django.views.generic.detail import DetailView
from django.views.generic.list import ListView
from django.views.generic.base import TemplateView
from django.http import HttpResponseNotFound
from models import Collection, Image, Video, Audio, Flash, Document, Embed
# Maps the ``mediatype`` URL segment to the model's public queryset and
# meta; used by MediaIndexView and generic_wrapper to dispatch views.
media_dict = {
    'collection': {
        'queryset': Collection.objects.public(), 'meta': Collection._meta},
    'image': {
        'queryset': Image.objects.public(), 'meta': Image._meta},
    'audio': {
        'queryset': Audio.objects.public(), 'meta': Audio._meta},
    'video': {
        'queryset': Video.objects.public(), 'meta': Video._meta},
    'flash': {
        'queryset': Flash.objects.public(), 'meta': Flash._meta},
    'document': {
        'queryset': Document.objects.public(), 'meta': Document._meta},
    'embed': {
        'queryset': Embed.objects.public(), 'meta': Embed._meta},
}
class MediaIndexView(TemplateView):
    """Landing page listing every media type with its public queryset."""

    template_name = 'massmedia/index.html'

    def get_context_data(self, **kwargs):
        # Expose the mediatype dispatch table to the template as 'media'.
        kwargs.update(media=media_dict)
        return super(MediaIndexView, self).get_context_data(**kwargs)
def generic_wrapper(request, *args, **kwargs):
    """
    This allows us to get the mediatype variable from the url and pass the
    correct queryset to the generic view
    """
    # Unknown media types fall through to the 404 at the bottom.
    if 'mediatype' in kwargs and kwargs['mediatype'] in media_dict:
        mediatype = kwargs.pop('mediatype')
        queryset = media_dict[mediatype]['queryset']
        # Make the media type available to templates via extra_context.
        if 'extra_context' in kwargs:
            kwargs['extra_context'].update({'mediatype': mediatype})
        else:
            kwargs['extra_context'] = {'mediatype': mediatype}
        # The 'enlarge' URL variant swaps in the enlarge template.
        if 'enlarge' in kwargs:
            kwargs.pop('enlarge')
            kwargs['template_name'] = 'massmedia/enlarge_%s_detail.html' % mediatype
        # slug/object_id present -> detail view, otherwise list view.
        # NOTE(review): remaining kwargs are forwarded to the view call;
        # presumably DetailView/ListView accept them here -- confirm.
        if 'slug' in kwargs or 'object_id' in kwargs:
            return DetailView.as_view(queryset=queryset)(request, *args, **kwargs)
        if 'template_name' not in kwargs:
            kwargs['template_name'] = 'massmedia/list.html'
        return ListView.as_view(queryset=queryset)(request, *args, **kwargs)
    return HttpResponseNotFound()
# All index/list/detail/enlarge variants funnel through generic_wrapper,
# which resolves the queryset from the <mediatype> URL segment.
urlpatterns = patterns('',
    url(
        r'^$',
        MediaIndexView.as_view(),
        name="massmedia_index"),
    url(
        r'^(?P<enlarge>enlarge)/(?P<mediatype>\w+)/(?P<slug>[-\w]+)/$',
        generic_wrapper,
        name="massmedia_enlarge_detail"),
    url(
        r'^(?P<enlarge>enlarge)/(?P<mediatype>\w+)/(?P<object_id>\d+)/$',
        generic_wrapper,
        name="massmedia_enlarge_detail_pk"),
    url(
        r'^(?P<mediatype>\w+)/$',
        generic_wrapper,
        kwargs={'paginate_by': 15, },
        name='massmediatype_index'),
    url(
        r'^(?P<mediatype>\w+)/(?P<slug>[-\w]+)/$',
        generic_wrapper,
        name="massmedia_detail"),
    url(
        r'^(?P<mediatype>\w+)/(?P<object_id>\d+)/$',
        generic_wrapper,
        name="massmedia_detail_pk"),
    url(
        r'^collection/(?P<slug>[-\w]+)/(?P<type>[-\w]+)/$',
        'massmedia.views.list_by_collection_by_type',
        name="massmedia_collection_by_type"),
    (r'^widget/(?P<id>\d+)/(?P<type>[-\w]+)/$', 'massmedia.views.widget'),
)
|
beernarrd/gramps | gramps/gen/filters/rules/person/_ismorethannthgenerationancestorof.py | Python | gpl-2.0 | 2,942 | 0.006798 | #
# Gramps - a GTK+/GNOME based genealogy program
#
# Copyright (C) 2002-2006 Donald N. Allingham
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
#-------------------------------------------------------------------------
#
# Standard Python modules
#
#-------------------------------------------------------------------------
from ....const import GRAMPS_LOCALE as glocale
_ = glocale.translation.gettext
#-------------------------------------------------------------------------
#
# Gramps modules
#
#-------------------------------------------------------------------------
from .. import Rule
#-------------------------------------------------------------------------
#
# IsMoreThanNthGenerationAncestorOf
#
#-------------------------------------------------------------------------
class IsMoreThanNthGenerationAncestorOf(Rule):
    """Rule that checks for a person that is an ancestor of a specified person
    at least N generations away"""

    labels = [ _('ID:'), _('Number of generations:') ]
    name = _('Ancestors of <person> at least <N> generations away')
    category = _("Ancestral filters")
    description = _("Matches people that are ancestors "
                    "of a specified person at least N generations away")

    def prepare(self, db, user):
        """Build the set of ancestor handles >= N generations from the root.

        self.list[0] is the root person's gramps ID, self.list[1] is N.
        """
        self.db = db
        self.map = set()
        # A bare ``except: pass`` previously hid *every* error here.  Only
        # the expected failure -- an unknown gramps ID, where the lookup
        # returns None -- is swallowed now; real bugs propagate.
        try:
            root_handle = db.get_person_from_gramps_id(self.list[0]).get_handle()
        except AttributeError:
            return
        self.init_ancestor_list(root_handle, 0)

    def reset(self):
        """Clear state gathered by prepare()."""
        self.map.clear()

    def apply(self, db, person):
        """Return True when *person* was collected as a distant ancestor."""
        return person.handle in self.map

    def init_ancestor_list(self, handle, gen):
        """Recursively walk parents, recording handles once gen >= N."""
        if not handle:
            return
        if gen >= int(self.list[1]):
            self.map.add(handle)
        p = self.db.get_person_from_handle(handle)
        fam_id = p.get_main_parents_family_handle()
        fam = self.db.get_family_from_handle(fam_id)
        if fam:
            f_id = fam.get_father_handle()
            m_id = fam.get_mother_handle()
            # NOTE: recursion depth equals tree depth; very deep pedigrees
            # could in principle hit the interpreter recursion limit.
            if f_id:
                self.init_ancestor_list(f_id, gen+1)
            if m_id:
                self.init_ancestor_list(m_id, gen+1)
|
cpodlesny/lisbon | src/gallery/views.py | Python | mit | 7,355 | 0.000136 | from django.contrib import messages
from django.core.paginator import Paginator, PageNotAnInteger, EmptyPage
from django.core.urlresolvers import reverse
from django.shortcuts import render, redirect, get_object_or_404
from django.utils.translation import ugettext_lazy as _
from helpers.models import Helpers
from offer.models import OfferCategory
from tours.models import Category
from .forms import GalleryForm
from .models import Gallery
def get_lang(request):
    """Return the active language code carried by the request."""
    return request.LANGUAGE_CODE
def get_company():
    """Company display name read from the singleton Helpers row (id=1)."""
    helpers = Helpers.objects.get(id=1)
    return helpers.company_name
def gallery_list(request):
    """Paginated list of all galleries (6 per page) with shared nav/footer.

    A ?q= parameter redirects to the site-wide search instead.
    """
    query = request.GET.get('q')
    if query:
        return redirect(reverse('search') + '?q=' + query)
    # Fetch the Helpers singleton once; it used to be queried per field.
    helpers = Helpers.objects.get(id=1)
    footer = {
        'pt': helpers.about_footer_PT,
        'en': helpers.about_footer_EN,
        'de': helpers.about_footer_DE
    }
    lang = get_lang(request)
    queryset_list = Gallery.objects.all()
    breadcrumbs = [
        {'url': '/', 'name': _('Home')},
        {'url': '#', 'name': _('Gallery'), 'active': True}
    ]
    paginator = Paginator(queryset_list, 6)
    page_request_var = 'page'
    page = request.GET.get(page_request_var)
    try:
        queryset = paginator.page(page)
    except PageNotAnInteger:
        # Non-numeric page -> first page.
        queryset = paginator.page(1)
    except EmptyPage:
        # Out-of-range page -> last page.
        queryset = paginator.page(paginator.num_pages)
    context = {
        'footer': {
            'about': footer[lang],
            'icon': helpers.footer_icon
        },
        'nav': {
            'tour_categories_list': Category.objects.all(),
            'offer_categories_list': OfferCategory.objects.all(),
        },
        'company': get_company(),
        'title': _('Gallery'),
        'breadcrumbs': breadcrumbs,
        'object_list': queryset,
        'page_request_var': page_request_var,
    }
    return render(request, 'partials/gallery.html', context)
def gallery_detail(request, pk=None):
    """Show a single gallery with its localized title and description."""
    query = request.GET.get('q')
    if query:
        return redirect(reverse('search') + '?q=' + query)
    gallery = Gallery.objects.get(pk=pk)
    lang = get_lang(request)
    # Fetch the Helpers singleton once; it used to be queried per field.
    helpers = Helpers.objects.get(id=1)
    footer = {
        'pt': helpers.about_footer_PT,
        'en': helpers.about_footer_EN,
        'de': helpers.about_footer_DE
    }
    gallery_title = {
        'pt': gallery.title_PT,
        'en': gallery.title_EN,
        'de': gallery.title_DE
    }
    gallery_description = {
        'pt': gallery.description_PT,
        'en': gallery.description_EN,
        'de': gallery.description_DE
    }
    breadcrumbs = [
        {'url': '/', 'name': _('Home')},
        {'url': '/gallery', 'name': _('Gallery')},
        {'url': '#', 'name': gallery_title[lang], 'active': True}
    ]
    # Flattened, language-resolved view of the gallery for the template.
    gallery_current = {
        'title': gallery_title[lang],
        'description': gallery_description[lang],
        'id': gallery.id,
        'video': gallery.video,
        'img': gallery.img,
        'img1': gallery.img_1,
        'img2': gallery.img_2,
        'img3': gallery.img_3,
    }
    context = {
        'footer': {
            'about': footer[lang],
            'icon': helpers.footer_icon
        },
        'nav': {
            'tour_categories_list': Category.objects.all(),
            'offer_categories_list': OfferCategory.objects.all(),
        },
        'company': get_company(),
        'breadcrumbs': breadcrumbs,
        'title': gallery_title[lang],
        'object': gallery_current,
    }
    return render(request, 'templates/_gallery_details.html', context)
def gallery_update(request, pk=None):
    """Staff-only edit form for an existing gallery.

    Permission is now checked before any database work, the Helpers
    singleton is fetched once, and the duplicate ``get_lang`` call is gone.
    """
    query = request.GET.get('q')
    if query:
        return redirect(reverse('search') + '?q=' + query)
    if not request.user.is_staff or not request.user.is_superuser:
        return redirect('accounts:signup')
    helpers = Helpers.objects.get(id=1)
    footer = {
        'pt': helpers.about_footer_PT,
        'en': helpers.about_footer_EN,
        'de': helpers.about_footer_DE
    }
    lang = get_lang(request)
    gallery = get_object_or_404(Gallery, pk=pk)
    gallery_title = {
        'pt': gallery.title_PT,
        'en': gallery.title_EN,
        'de': gallery.title_DE
    }
    breadcrumbs = [
        {'url': '/', 'name': _('Home')},
        {'url': '/gallery', 'name': _('Gallery')},
        {'url': '#', 'name': gallery_title[lang], 'active': True}
    ]
    form = GalleryForm(request.POST or None, request.FILES or None, instance=gallery)
    if form.is_valid():
        gallery = form.save(commit=False)
        gallery.save()
        messages.success(request, _('Gallery edited'))
        return redirect('gallery:list')
    context = {
        'footer': {
            'about': footer[lang],
            'icon': helpers.footer_icon
        },
        'nav': {
            'tour_categories_list': Category.objects.all(),
            'offer_categories_list': OfferCategory.objects.all(),
        },
        'company': get_company(),
        'title': _('Gallery edit'),
        'breadcrumbs': breadcrumbs,
        'instance': gallery,
        'form': form,
        'value': _('Add'),
    }
    return render(request, 'templates/_form.html', context)
def gallery_create(request):
    """Staff-only creation form for a new gallery.

    Permission is checked before any database work and the Helpers
    singleton is fetched once instead of once per footer field.
    """
    query = request.GET.get('q')
    if query:
        return redirect(reverse('search') + '?q=' + query)
    if not request.user.is_staff or not request.user.is_superuser:
        return redirect('accounts:signup')
    lang = get_lang(request)
    helpers = Helpers.objects.get(id=1)
    footer = {
        'pt': helpers.about_footer_PT,
        'en': helpers.about_footer_EN,
        'de': helpers.about_footer_DE
    }
    form = GalleryForm(request.POST or None, request.FILES or None)
    breadcrumbs = [
        {'url': '/', 'name': _('Home')},
        {'url': '/gallery', 'name': _('Gallery')},
        {'url': '#', 'name': _('Create Gallery'), 'active': True}
    ]
    if form.is_valid():
        instance = form.save(commit=False)
        # Record who created the gallery.
        instance.user = request.user
        instance.save()
        messages.success(request, _('Gallery created'))
        return redirect('gallery:list')
    context = {
        'footer': {
            'about': footer[lang],
            'icon': helpers.footer_icon
        },
        'nav': {
            'tour_categories_list': Category.objects.all(),
            'offer_categories_list': OfferCategory.objects.all(),
        },
        'company': get_company(),
        'title': _('Create Gallery'),
        'breadcrumbs': breadcrumbs,
        'value': _('Add'),
        'form': form
    }
    return render(request, 'templates/_form.html', context)
def gallery_delete(request, pk=None):
    """Staff-only removal of a gallery, then back to the list view."""
    if not request.user.is_staff or not request.user.is_superuser:
        return redirect('accounts:signup')
    gallery = get_object_or_404(Gallery, pk=pk)
    gallery.delete()
    messages.success(request, _('Gallery deleted'))
    return redirect('gallery:list')
|
SeldonIO/seldon-server | python/seldon/vw.py | Python | apache-2.0 | 12,228 | 0.011286 | import sys
from fileutil import *
import json
from wabbit_wappa import *
from subprocess import call
import numpy as np
import random
from socket import *
import threading, Queue, subprocess
import time
import psutil
import pandas as pd
from seldon.pipeline.pandas_pipelines import BasePandasEstimator
from sklearn.utils import check_X_y
from sklearn.utils import check_array
from sklearn.base import BaseEstimator,ClassifierMixin
import logging
logger = logging.getLogger(__name__)
class VWClassifier(BasePandasEstimator,BaseEstimator,ClassifierMixin):
"""
Wrapper for Vowpall Wabbit classifier with pandas support
Parameters
----------
target : str
Target column
target_readable : str
More descriptive version of target variable
included : list str, optional
columns to include
excluded : list str, optional
columns to exclude
id_map : dict (int,str), optional
map of class ids to high level names
num_iterations : int
number of iterations over data to run vw
raw_predictions_file : str
file to push raw predictions from vw to
model_file : str
model filename
pid_file : str
file to store pid of vw server so we can terminate it
vw_args : optional dict
extra args to pass to vw
"""
def __init__(self, target=None, target_readable=None,included=None,excluded=None,id_map={},num_iterations=1, raw_predictions_file="/tmp/raw_predictions",model_file="/tmp/model",pid_file='/tmp/vw_pid_file',**vw_args):
super(VWClassifier, self).__init__(target,target_readable,included,excluded,id_map)
self.clf = None
self.num_iterations = num_iterations
self.model_file="/tmp/model"
self.param_suffix="_params"
self.model_suffix="_model"
self.raw_predictions_file=raw_predictions_file
self.raw_predictions_thread_running = False
self.tailq = Queue.Queue(maxsize=1000)
self.vw = None
self.vw_mode = None
self.pid_file = pid_file
self.vw_args = vw_args
self.model = None
self.model_saved = False
def __getstate__(self):
"""
Remove things that should not be pickled
"""
result = self.__dict__.copy()
del result['model_saved']
del result['vw']
del result['tailq']
del result['raw_predictions_thread_running']
return result
    def __setstate__(self, dict):
        """
        Add thread based variables when creating
        """
        # NOTE: the parameter shadows the builtin ``dict``; kept as-is for
        # pickle compatibility.
        self.__dict__ = dict
        self.model_saved = False
        self.vw = None
        self.tailq = Queue.Queue(maxsize=1000)
        self.raw_predictions_thread_running = False
        # If a trained model travelled with the pickle, restart vw in
        # prediction ("test") mode so the instance is usable immediately.
        if not self.model is None:
            self._start_vw_if_needed("test")
def _wait_model_saved(self,fname):
"""
| Hack to wait for vw model to finish saving. It creates a file <model>.writing during this process
"""
logger.info("waiting for %s",fname)
time.sleep(1)
while os.path.isfile(fname):
logger.info("sleeping until model is saved")
time.sleep(1)
def _save_model(self,fname):
"""
Save vw model from running vw instance
"""
self.vw.save_model(fname)
self._wait_model_saved(fname+".writing")
with open(fname, mode='rb') as file: # b is important -> binary
self.model = file.read()
    def _write_model(self):
        """
        Write the vw model to file
        """
        # Dump the in-memory model bytes back to disk for vw to load.
        with open(self.model_file, mode='wb') as modelfile:  # b is important -> binary
            modelfile.write(self.model)
        self.model_saved = True
@staticmethod
def _is_number(s):
try:
float(s)
return True
except ValueError:
return False
    def _get_feature(self, name, val):
        """
        Create a vw feature from name and value

        Returns either a ``(name, weight)`` tuple for numeric values, a
        categorical token string for non-numeric strings, or None when the
        value should be skipped (empty string / NaN).
        """
        if isinstance(val, basestring):
            if len(val) > 0:
                if self._is_number(val):
                    # Numeric string -> weighted feature.
                    return (name, float(val))
                else:
                    # Categorical: "name_value", or the bare value when the
                    # feature has no name (list elements).
                    if len(name) > 0:
                        return (name + "_" + val)
                    else:
                        return (val)
        else:
            # Assumed numeric scalar; NaNs are dropped.
            if not np.isnan(val):
                return (name, float(val))
    def _convert_row(self, row, tag=None):
        """Convert a dataframe row into a vw line

        Plain string columns go into vw's default namespace; dict and list
        valued columns each become their own namespace named after the
        column.
        """
        ns = {}
        ns["def"] = []
        for col in row.index.values:
            if not col == self.target:
                val = row[col]
                feature = None
                if isinstance(val, basestring):
                    feature = self._get_feature(col, val)
                    if not feature is None:
                        ns["def"].append(feature)
                elif isinstance(val, dict):
                    for key in val:
                        feature = self._get_feature(key, val[key])
                        if not feature is None:
                            if not col in ns:
                                ns[col] = []
                            ns[col].append(feature)
                elif isinstance(val, list):
                    for v in val:
                        # List elements get no name -> bare tokens.
                        feature = self._get_feature("", v)
                        if not feature is None:
                            if not col in ns:
                                ns[col] = []
                            ns[col].append(feature)
                # NOTE(review): numeric scalar columns fall through all the
                # branches above and are dropped -- confirm this is intended.
        if self.target in row:
            target = row[self.target]
            target = int(target)
            # NOTE(review): ``self.zero_based`` is never set in __init__;
            # presumably assigned elsewhere (e.g. during fit) -- confirm.
            if self.zero_based:
                target += 1
        else:
            target = None
        namespaces = []
        for k in ns:
            if not k == 'def':
                namespaces.append(Namespace(name=k, features=ns[k]))
        return self.vw.make_line(response=target, features=ns['def'], namespaces=namespaces)
@staticmethod
def _sigmoid(x):
return 1 / (1 + math.exp(-x))
@staticmethod
def _normalize( predictions ):
s = sum( predictions )
normalized = []
for p in predictions:
normalized.append( p / s )
return normalized
    def _start_raw_predictions(self):
        """Start a thread to tail the raw predictions file
        """
        if not self.raw_predictions_thread_running:
            # Daemon thread so it cannot keep the interpreter alive on exit.
            thread = threading.Thread(target=self._tail_forever, args=(self.raw_predictions_file,))
            thread.setDaemon(True)
            thread.start()
            self.raw_predictions_thread_running = True
def close(self):
"""Shutdown the vw process
"""
if not self.vw is None:
f=open(self.pid_file)
for line in f:
logger.info("terminating pid %s",line)
p = psutil.Process(int(line))
p.terminate()
self.vw.close()
self.vw = None
    def _tail_forever(self, fn):
        """Tail the raw predictions file so we can get class probabilities when doing predictions
        """
        # ``tail -f`` follows the file; each emitted line is pushed onto the
        # bounded queue drained by _get_full_scores (put() blocks when full).
        p = subprocess.Popen(["tail", "-f", fn], stdout=subprocess.PIPE)
        while 1:
            line = p.stdout.readline()
            self.tailq.put(line)
            if not line:
                break
def _get_full_scores(self):
"""Get the predictions from the vw raw predictions and normalise them
"""
rawLine = self.tailq.get()
parts = rawLine.split(' ')
tagScores = parts[len(parts)-1].rstrip()
scores = []
for score in parts:
(classId,score) = score.split(':')
scores.append(self._sigmoid(float(score)))
nscores = self._normalize(scores)
fscores = []
c = 1
for nscore in nscores:
fscores.append(nscore)
c = c + 1
return np.array(fscores)
def _exclude_include_features(self,df):
if not self.included is None:
df = df[list(set(self.included+[self.target]).intersection(df.columns))]
if not self.excluded is None:
|
GNOME/caribou | tools/convert_cldr.py | Python | lgpl-2.1 | 3,381 | 0.000592 | #!/usr/bin/python
from gi.repository import Gdk
from xml.etree.ElementTree import ElementTree, Element
import re
ESCAPE_PATTERN = re.compile(r'\\u\{([0-9A-Fa-f]+?)\}')
ISO_PATTERN = re.compile(r'[A-E]([0-9]+)')
def parse_single_key(value):
    """Build a <key> element for *value*, resolving ``\\u{...}`` escapes.

    Multi-character values are stored as literal text; the key's ``name``
    attribute is always derived from the first character's keyval.
    """
    key = Element('key')

    def unescape(m):
        # Both legacy branches of the old hasattr(__builtins__, 'unichr')
        # check defined the same chr()-based function, so the split was
        # dead code and has been collapsed.  (Vestigial ``uc = 0`` removed.)
        return chr(int(m.group(1), 16))

    value = ESCAPE_PATTERN.sub(unescape, value)
    if len(value) > 1:
        key.set('text', value)
    uc = ord(value[0])
    keyval = Gdk.unicode_to_keyval(uc)
    name = Gdk.keyval_name(keyval)
    key.set('name', name)
    return key
def convert(source, tree):
    """Translate a parsed CLDR keyboard *tree* into a caribou <layout>.

    Each CLDR <keyMap> becomes one <level>; keys are bucketed into rows by
    the letter of their ISO position (E..A, top to bottom).
    """
    root = Element('layout')
    for index, keymap in enumerate(tree.iter('keyMap')):
        level = Element('level')
        rows = {}
        root.append(level)
        level.set('name', 'level%d' % (index+1))
        # FIXME: heuristics here
        # Map CLDR modifier sets onto caribou level modes: no modifiers ->
        # default; shift/lock -> latched; anything else -> locked.
        modifiers = keymap.get('modifiers')
        if not modifiers:
            mode = 'default'
        elif 'shift' in modifiers.split(' ') or 'lock' in modifiers.split(' '):
            mode = 'latched'
        else:
            mode = 'locked'
        level.set('mode', mode)
        for _map in keymap.iter('map'):
            value = _map.get('to')
            key = parse_single_key(value)
            iso = _map.get('iso')
            # NOTE(review): ``sys`` is only imported inside the __main__
            # guard, so this write would NameError if convert() were called
            # after a plain module import -- confirm.
            if not ISO_PATTERN.match(iso):
                sys.stderr.write('invalid ISO key name: %s\n' % iso)
                continue
            # Bucket by row letter; keep the column number for sorting.
            if not iso[0] in rows:
                rows[iso[0]] = []
            rows[iso[0]].append((int(iso[1:]), key))
            # add attribute to certain keys
            name = key.get('name')
            if name == 'space':
                key.set('align', 'center')
                key.set('width', '6.0')
            if name in ('space', 'BackSpace'):
                key.set('repeatable', 'yes')
            # add subkeys
            longPress = _map.get('longPress')
            if longPress:
                for value in longPress.split(' '):
                    subkey = parse_single_key(value)
                    key.append(subkey)
        # Rows sorted by letter descending (E row first), keys by column.
        # NOTE(review): sorting the (column, Element) tuples compares the
        # Element on equal columns, which Python 3 cannot order -- assumes
        # column numbers are unique within a row.
        for k, v in sorted(list(rows.items()), key=lambda x: x[0], reverse=True):
            row = Element('row')
            for key in sorted(v, key=lambda x: x):
                row.append(key[1])
            level.append(row)
    return root
def indent(elem, level=0):
    """Recursively add whitespace so serialized XML comes out indented.

    Mutates ``text``/``tail`` in place; whitespace-only values are
    overwritten, meaningful text is left untouched.  The loop variable no
    longer shadows the ``elem`` parameter (the old code reused the name,
    which worked only by accident of the final-child fixup).
    """
    i = "\n" + level*" "
    if len(elem):
        if not elem.text or not elem.text.strip():
            elem.text = i + " "
        if not elem.tail or not elem.tail.strip():
            elem.tail = i
        for child in elem:
            indent(child, level+1)
        # ``child`` is the last child here; pull its tail back to this
        # element's indent so the closing tag lines up.
        if not child.tail or not child.tail.strip():
            child.tail = i
    else:
        if level and (not elem.tail or not elem.tail.strip()):
            elem.tail = i
if __name__ == "__main__":
    import sys
    # Usage: convert_cldr.py <cldr-keyboard-xml>; result goes to stdout.
    if len(sys.argv) != 2:
        print("supply a CLDR keyboard file")
        sys.exit(1)
    source = sys.argv[-1]
    itree = ElementTree()
    itree.parse(source)
    root = convert(source, itree)
    indent(root)
    otree = ElementTree(root)
    # Python 3 needs the underlying binary buffer for encoded XML output;
    # Python 2's stdout accepts bytes directly.
    if hasattr(sys.stdout, 'buffer'):
        out = sys.stdout.buffer
    else:
        out = sys.stdout
    otree.write(out, xml_declaration=True, encoding='UTF-8')
|
aashish-jain/BLUED-events | plot_ipeak.py | Python | unlicense | 1,306 | 0.029862 | import csv
import matplotlib.pyplot as plt
import numpy as np
from os import listdir
directory='/media/aashish/0C0091CC0091BCE0/BLUED-D1-1/events'
files=listdir(directory)
count=0
completed=False
while(completed==False):
try:
for f in files:
plt.close('all')
print(count,f)
data = np.loadtxt(directory+'/'+f, | delimiter=',')
count+=1
t,i1,i2,v=[],[],[],[]
i = 0
while(True):
t.append(max(data[i:i+200,0]))
i1.append(max(data[i:i+200,1]))
| i2.append(max(data[i:i+200,2]))
v.append(max(data[i:i+200,3]))
i+=200
if (i == 59800):
break
print(max(i1),max(i2),max(v),max(t))
print(len(t),len(i1),len(i2),len(v))
_f, axarr = plt.subplots(3, sharex=True)
axarr[0].plot(t,i1)
axarr[0].set_title('I1')
axarr[1].plot(t,i2)
axarr[1].set_title('I2')
axarr[2].plot(t,v)
axarr[2].set_title('V')
_f.subplots_adjust(hspace=0.15,wspace=0.15,top=0.96,bottom=0.03,right=0.96,left=0.03)
plt.draw()
plt.show()
completed=True
except:
print("Some error")
|
Likegram/study_run | server/app/views.py | Python | mit | 1,803 | 0.001109 | from django.shortcuts import render
# Create your views here.
from .models import Course, Student, StudentCourse
from .serializers import CourseSerializer, StudentSerialiser
from rest_framework import viewsets
from rest_framework.decorators import detail_route, list_route
from rest_framework.response import Response
class StudentViewSet(viewsets.ModelViewSet):
    """CRUD endpoint for students plus a GET /make convenience route."""

    queryset = Student.objects.all()
    serializer_class = StudentSerialiser

    @list_route(methods=['GET'])
    def make(self, request):
        """Idempotently create a student for ?username=...; always succeeds."""
        username = request.GET.get('username')
        if username:
            Student.objects.get_or_create(nickname=username)
        return Response({'success': True})
class CourseViewSet(viewsets.ModelViewSet):
    """CRUD endpoint for courses with optional per-user active filtering
    and a /start route that marks one course active for a user."""

    queryset = Course.objects.all()
    serializer_class = CourseSerializer

    def get_queryset(self):
        """With ?username=...&active=1, narrow to that user's active courses."""
        result = super(CourseViewSet, self).get_queryset()
        username = self.request.GET.get('username', None)
        active = self.request.GET.get('active', None)
        if not username or active != '1':
            return result
        user = Student.objects.get(nickname=username)
        courses_ids = StudentCourse.objects.filter(
            student=user, active=True).values_list('course_id', flat=True)
        return result.filter(id__in=courses_ids)

    @detail_route(methods=['GET'])
    def start(self, request, pk=None):
        """Make course *pk* the single active course for ?username=...."""
        username = request.GET.get('username', None)
        user = Student.objects.get(nickname=username)
        course = Course.objects.get(id=pk)
        # The ``created`` flag from get_or_create was unused; discard it.
        student_course, _ = StudentCourse.objects.get_or_create(
            student=user, course=course)
        # Deactivate everything first, then re-activate the chosen course.
        StudentCourse.objects.filter(student=user).update(active=False)
        student_course.active = True
        student_course.save()
        return Response({'success': True})
|
tensorflow/datasets | tensorflow_datasets/audio/gtzan/gtzan.py | Python | apache-2.0 | 3,225 | 0.003411 | # codin | g=utf-8
# Copyright 2022 The TensorFlow Datasets Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless | required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""GZTAN dataset."""
import os
import tensorflow as tf
import tensorflow_datasets.public_api as tfds
_CITATION = """
@misc{tzanetakis_essl_cook_2001,
author = "Tzanetakis, George and Essl, Georg and Cook, Perry",
title = "Automatic Musical Genre Classification Of Audio Signals",
url = "http://ismir2001.ismir.net/pdf/tzanetakis.pdf",
publisher = "The International Society for Music Information Retrieval",
year = "2001"
}
"""
_DESCRIPTION = """
The dataset consists of 1000 audio tracks each 30 seconds long.
It contains 10 genres, each represented by 100 tracks.
The tracks are all 22050Hz Mono 16-bit audio files in .wav format.
The genres are:
* blues
* classical
* country
* disco
* hiphop
* jazz
* metal
* pop
* reggae
* rock
"""
_DOWNLOAD_URL = "http://opihi.cs.uvic.ca/sound/genres.tar.gz"
_HOMEPAGE_URL = "http://marsyas.info/index.html"
_CLASS_LABELS = [
"blues", "classical", "country", "disco", "hiphop", "jazz", "metal", "pop",
"reggae", "rock"
]
class GTZAN(tfds.core.GeneratorBasedBuilder):
"""GTZAN Dataset."""
VERSION = tfds.core.Version("1.0.0")
def _info(self):
return tfds.core.DatasetInfo(
builder=self,
description=_DESCRIPTION,
features=tfds.features.FeaturesDict({
"audio": tfds.features.Audio(file_format="wav", sample_rate=22050),
"label": tfds.features.ClassLabel(names=_CLASS_LABELS),
"audio/filename": tfds.features.Text(),
}),
supervised_keys=("audio", "label"),
homepage=_HOMEPAGE_URL,
citation=_CITATION,
)
def _split_generators(self, dl_manager):
"""Returns SplitGenerators."""
dl_paths = dl_manager.download_and_extract({"genres": _DOWNLOAD_URL})
path = os.path.join(dl_paths["genres"], "genres")
# There is no predefined train/val/test split for this dataset.
return [
tfds.core.SplitGenerator(
name=tfds.Split.TRAIN, gen_kwargs={"path": path}),
]
def _generate_examples(self, path):
"""Yields examples.
Args:
path: Path of the downloaded and extracted directory
Yields:
Next examples
"""
for root, _, file_name in tf.io.gfile.walk(path):
for fname in file_name:
if fname.endswith(".wav"): # select only .wav files
# Each .wav file has name in the format of <genre>.<number>.wav
label = fname.split(".")[0]
key = fname
example = {
"audio": os.path.join(root, fname),
"label": label,
"audio/filename": fname,
}
yield key, example
|
invicnaper/MWF | Utils/t/form_test.py | Python | gpl-2.0 | 10,150 | 0.033718 | #!/usr/bin/env python
# coding=utf-8
#
# vim: tabstop=4 expandtab shiftwidth=4 softtabstop=4
#
import httplib
import sys
def test(name,A,B):
if A != B:
print "Error :" + name
print "-----Actual--"
print A,"---Expected--"
print B,"-------------"
sys.exit(1)
else:
print "Ok:"+name
h=httplib.HTTPConnection('localhost:8080');
h.request('GET','/test')
r=h.getresponse()
body=r.read();
ref_body = \
"""\
non loaded<br>
<form action="/test" method="post" >
<p>text <span class="cppcms_form_input"><input type="text" name="_1" ></span></p>
<p>textarea <span class="cppcms_form_input"><textarea name="_2" ></textarea></span></p>
<p>int <span class="cppcms_form_input"><input type="text" name="_3" value="" ></span></p>
<p>double <span class="cppcms_form_input"><input type="text" name="_4" value="" ></span></p>
<p>pass <span class="cppcms_form_input"><input type="password" name="_5" ></span></p>
<p>pass2 <span class="cppcms_form_input"><input type="password" name="_6" ></span></p>
<p>yes or not <span class="cppcms_form_input"><input type="text" name="_7" ></span></p>
<p>E-Mail <span class="cppcms_form_input"><input type="text" name="_8" ></span></p>
<p>Checkbox <span class="cppcms_form_input"><input type="checkbox" name="_9" value="y" ></span></p>
<p>Select Multiple <span class="cppcms_form_input"><select multiple name="_10" >
<option value="0" selected >a</option>
<option value="1" selected >b</option>
<option value="2" >c</option>
<option value="id1" >tr1</option>
</select></span></p>
<p>Select <span class="cppcms_form_input"><select name="_11" >
<option value="0" >a</option>
<option value="1" >b</option>
<option value="2" >c</option>
<option value="id2" selected >tr2</option>
</select></span></p>
<p>Radio <span class="cppcms_form_input"><div class="cppcms_radio" >
<input type="radio" value="0" name="_12" checked > x<br>
<input type="radio" value="1" name="_12" > y<br>
<input type="radio" value="id3" name="_12" > tr3<br>
</div></span></p>
<p>Submit <span class="cppcms_form_input"><input type="submit" name="_13" value="Button" ></span></p>
</form>
"""
test("/test",body,ref_body)
def test_valid(name,params,ans,url='/non_empty'):
h=httplib.HTTPConnection('localhost:8080');
h.request('GET','/test' + url + '?' + params)
r=h.getresponse()
test(name,r.read()[:len(ans)],ans)
test_valid('non_empty1','_1=1&_2=1&_3=1&_4=1&_5=1&_6=1&_7=yes&_8=a@a&_9=10&_10=1&_11=1&_12=1&_13=1','valid')
test_valid('non_empty2','_1=&_2=1&_3=1&_4=1&_5=1&_6=1&_7=yes&_8=a@a&_9=10&_10=1&_11=1&_12=1&_13=1','invalid')
test_valid('non_empty3','_1=1&_2=&_3=1&_4=1&_5=1&_6=1&_7=yes&_8=a@a&_9=10&_10=1&_11=1&_12=1&_13=1','invalid')
test_valid('non_empty4','_1=1&_2=1&_3=&_4=1&_5=1&_6=1&_7=yes&_8=a@a&_9=10&_10=1&_11=1&_12=1&_13=1','invalid')
test_valid('non_empty5','_1=1&_2=1&_3=1&_4=1&_5=&_6=1&_7=yes&_8=a@a&_9=10&_10=1&_11=1&_12=1&_13=1','invalid')
test_valid('non_empty6','_1=1&_2=1&_3=1&_4=1&_5=1&_6=&_7=yes&_8=a@a&_9=10&_10=1&_11=1&_12=1&_13=1','invalid')
test_valid('non_empty7','_1=1&_2=1&_3=1&_4=1&_5=1&_6=1&_7=&_8=a@a&_9=10&_10=1&_11=1&_12=1&_13=1','invalid')
test_valid('non_empty8','_1=1&_2=1&_3=1&_4=1&_5=1&_6=1&_7=yes&_8=&_9=10&_10=1&_11=1&_12=1&_13=1','invalid')
test_valid('non_empty9','_1=1&_2=1&_3=1&_4=1&_5=1&_6=1&_7=yes&_8=a@a&_9=&_10=1&_11=1&_12=1&_13=1','valid') # checkbox ok
test_valid('non_empty10','_1=1&_2=1&_3=1&_4=1&_5=1&_6=1&_7=yes&_8=a@a&_9=10&_10=&_11=1&_12=1&_13=1','invalid')
test_valid('non_empty11','_1=1&_2=1&_3=1&_4=1&_5=1&_6=1&_7=yes&_8=a@a&_9=10&_10=1&_11=&_12=1&_13=1','invalid')
test_valid('non_empty12','_1=1&_2=1&_3=1&_4=1&_5=1&_6=1&_7=yes&_8=a@a&_9=10&_10=1&_11=1&_12=&_13=1','invalid')
test_valid('non_empty12','_1=1&_2=1&_3=1&_4=1&_5=1&_6=1&_7=yes&_8=a@a&_9=10&_10=1&_11=1&_12=1&_13=','valid') # Submit ok
test_valid('empty','_1=&_2=&_3=&_4=&_5=&_6=&_7=yes&_8=a@a&_9=&_10=&_11=&_12=&_13=','valid','') # Empty ok only regex, email fails
test_valid('empty1','_1=&_2=&_3=&_4=&_5=&_6=&_7=yes&_8=&_9=&_10=&_11=&_12=&_13=','invalid','') # Empty ok only regex, email fails
test_valid('empty2','_1=&_2=&_3=&_4=&_5=&_6=&_7=&_8=a@a&_9=&_10=&_11=&_12=&_13=','invalid','') # Empty ok only regex, email fails
h=httplib.HTTPConnection('localhost:8080');
h.request('GET','/test/sub')
r=h.getresponse()
body=r.read();
ref_body = \
"""\
<p>pass <span class="cppcms_form_input"><input type="password" name="_5" ></span></p>
<p>pass2 <span class="cppcms_form_input"><input type="password" name="_6" ></span></p>
<p>yes or not <span class="cppcms_form_input"><input type="text" name="_7" ></span></p>
<p>E-Mail <span class="cppcms_form_input"><input type="text" name="_8" ></span></p>
<p>Checkbox <span class="cppcms_form_input"><input type="checkbox" name="_9" value="y" ></span></p>
<p>Select Multiple <span class="cppcms_form_input"><select multiple name="_10" >
<option value="0" selected >a</option>
<option value="1" selected >b</option>
<option value="2" >c</option>
<option value="id1" >tr1</option>
</select></span></p>
"""
test("subset",body,ref_body)
def test_valid(name,url,params,ans):
def get():
h=httplib.HTTPConnection('localhost:8080');
h.request('GET','/test' + url + '?' + params)
r=h.getresponse()
test(name+' GET',r.read(),ans)
def post():
h=httplib.HTTPConnection('localhost:8080');
headers = {"Content-type": "application/x-www-form-urlencoded"}
h.request('POST','/test' + url,params,headers)
r=h.getresponse()
test(name+' POST',r.read(),ans)
get()
post()
test_valid('text','/text','_1=','invalid\n')
test_valid('text1','/text','_1=x','invalid\nx')
test_valid('text2','/text','_1=xx','valid\nxx')
test_valid('text3','/text','_1=xxxxx','valid\nxxxxx')
test_valid('text4','/text','_1 | =xxxxxx','invalid\nxxxxxx') |
test_valid('text5','/text','_1=%d7%a9%d6%b8%d7%9c%d7%95%d7%9d','valid\nשָלום')
test_valid('text6','/text','_1=%d7%a9%d7%9c','valid\nשל')
test_valid('text7','/text','_1=%FF%FF','invalid\n\xFF\xFF')
test_valid('text8','/text','_1=%01%01','invalid\n\x01\x01')
test_valid('text9.1','/text','_1=xx%DF%7F','invalid\nxx\xDF\x7F')
test_valid('text9.2','/text','_1=xx%C2%7F','invalid\nxx\xC2\x7F')
test_valid('text9.3','/text','_1=xx%e0%7F%80','invalid\nxx\xe0\x7F\x80')
test_valid('text9.4','/text','_1=xx%f0%7F%80%80','invalid\nxx\xf0\x7F\x80\x80')
test_valid('number','/number','_1=','invalid\n')
test_valid('number1','/number','_1=10','valid\n10')
test_valid('number2','/number','_1=10.0','valid\n10')
test_valid('number3','/number','_1=10.0e+','invalid\n')
test_valid('number5','/number','_1=10.0e1','valid\n100')
test_valid('number6','/number','_1=10.0x','invalid\n')
test_valid('number7','/number','_1=A10.0','invalid\n')
test_valid('number8','/number','_1=0','invalid\n0')
test_valid('number9','/number','_1=1000','invalid\n1000')
test_valid('number10','/number','_1=10A','invalid\n')
test_valid('pass1','/pass','_1=&_2=','invalid\n')
test_valid('pass2','/pass','_1=x&_2=x','valid\n')
test_valid('pass3','/pass','_1=x1&_2=x2','invalid\n')
test_valid('checkbox1','/checkbox','_1=n','valid\n0')
test_valid('checkbox2','/checkbox','_1=y','valid\n1')
test_valid('sm1','/sm','foo=bar','invalid\n0 0 0 0 \n\n')
test_valid('sm2','/sm','_1=1&_1=0','valid\n1 1 0 0 \n0 1 \n')
test_valid('sm3','/sm','_1=1&_1=id1','valid\n0 1 0 1 \n1 id1 \n')
test_valid('sm4','/sm','_1=0&_1=1&_1=2','invalid\n1 1 1 0 \n0 1 2 \n')
test_valid('select1','/select','foo=bar','invalid\n-1 ')
test_valid('select2','/select','_1=0','valid\n0 0')
test_valid('select3','/select','_1=0&_1=1','invalid\n-1 ')
test_valid('select4','/select','_1=10','invalid\n-1 ')
test_valid('radio1','/radio','foo=bar','invalid\n-1 ')
test_valid('radio2','/radio','_1=0','valid\n0 0')
test_valid('radio3','/radio','_1=0&_1=1','invalid\n-1 ')
test_valid('radio4','/radio','_1=10','invalid\n-1 ')
test_valid('submit1','/submit','_1=1','valid\n1')
test_valid('submit2','/submit','_2=1','valid\n0')
body='<p><label for="submit_id">message</label> <span class="cppcms_form_error">error</sp |
AlperSakarya/AWS-Import-Export-Manifest-Generator | forms.py | Python | bsd-2-clause | 4,728 | 0.007191 | from flask.ext.wtf import Form
from wtforms import TextField, TextAreaField, SubmitField, PasswordField, validators, ValidationError, Sel | ectField
class AWSIEFormFields(Form):
deviceId = TextField(u'Device ID', validators=[validators.required()])
notificationEmail = TextField("Notification Email", [validators.email | ("Please enter 7 characters to search")])
region = SelectField(u'Region', choices=[('us-east-1', 'us-east-1'), ('us-west-1', 'us-west-1'),
('us-west-2', 'us-west-2'), ('eu-west-1 ', 'eu-west-1'), ('ap-southeast-1', 'ap-southeast-1')])
acl = TextField("ACL", [validators.email("Please enter an ACL if you like")])
bucket = TextField("Import Bucket", [validators.required("Please enter the bucket name for importing files")])
logPrefix = TextField("Log Prefix", [validators.required("Please enter a log prefix")])
prefix = TextField("Prefix", [validators.required("Please enter a prefix")])
substitutions = TextField("Replace box1 with box2")
substitutionsb = TextField("Ignore directories")
substitutions2 = TextField("Replace box1 with box2")
substitutions2b = TextField("Ignore directories")
logBucket = TextField("Log Bucket", [validators.required("Please enter the bucket name for job logs")])
trueCryptPassword = TextField("TrueCrypt Password")
pinCode = TextField("Pin Code")
cacheControl = TextField("Cache Control")
contentDisposition = TextField("Content Disposition")
contentLanguage = TextField("Content Language")
contentTypes = SelectField(u'Map Content types', choices=[('', ''), ('yes', 'yes')])
diskTimestampMetadataKey = TextField("Disk Time Stamp Metadata Key")
expires = TextField("Expires")
ignore = SelectField(u'Exclude Lost+Found', choices=[('', ''), ('yes', 'yes')])
ignore2 = SelectField(u'Exclude Recycle Bin', choices=[('', ''), ('yes', 'yes')])
ignore3 = SelectField(u'Exclude ~ and .swp files', choices=[('', ''), ('yes', 'yes')])
ignore4 = TextField("Ignore directories")
ignore5 = TextField("Ignore directories")
ignore6 = TextField("Ignore files with specific extension")
ignore7 = TextField("Ignore files with specific extension")
setContentEncodingForGzFiles = SelectField(u'Set Encoding for .gz files', choices=[('', ''), ('yes', 'Yes')])
staticMetadata = TextField("Static Metadata")
storageClass = SelectField(u'Select Storage Class', choices=[('', ''), ('REDUCED_REDUNDANCY', 'REDUCED_REDUNDANCY')])
serviceLevel = SelectField(u'Expedite return shipping', choices=[('', ''), ('expeditedShipping', 'Expedited Shipping'), ('standard', 'standard Shipping')])
name = TextField("Name", [validators.required("Please enter your name, it's required")])
company = TextField("Company")
street1 = TextField("Street1", [validators.required("Please enter your street, it's required")])
street2 = TextField("Street2")
street3 = TextField("Street3")
city = TextField("City", [validators.required("Please enter your city, it's required")])
stateOrProvince = TextField("State or Province", [validators.required("Please enter your state or province, it's required")])
postalCode = TextField("Postal Code", [validators.required("Please enter your postal code, it's required")])
phoneNumber = TextField("Phone Number", [validators.required("Please enter your phone number, it's required")])
country = TextField("Country", [validators.required("Please enter your country, it's required")])
dataDescription = TextField("Description of The Data", [validators.required("Please enter a description, it's required")])
encryptedData = SelectField(u'Encrypted Data', choices=[('', ''), ('Yes', 'Yes'), ('No', 'No')])
exportCertifierName = TextField("Shipper Name", [validators.required("Please enter a name, it's required")])
requiresExportLicense = SelectField(u'Requires Export License', choices=[('', ''), ('Yes', 'Yes'), ('No', 'No')])
deviceValue = TextField("Device Value", [validators.required("Please enter a value, it's required")])
deviceCountryOfOrigin = TextField("Drive Manufacture Country", [validators.required("Please a country, it's required")])
deviceType = SelectField(u'Device Type', choices=[('', ''), ('externalStorageDevice', 'externalStorageDevice'), ('usbFlashDrive', 'usbFlashDrive'), ('sataDrive', 'sataDrive')])
typeOfExport = SelectField(u'Type of Export', choices=[('', ''), ('return', 'return'), ('permanent', 'permanent'), ('temporary', 'temporary')])
archivecomment = TextField("Archive Comment")
fileSystem = SelectField(u'File System', choices=[('', ''), ('NTFS', 'NTFS'), ('EXT4', 'EXT4')])
submit = SubmitField("Generate")
|
tnewman/PIoT | readsensors.py | Python | mit | 987 | 0.009119 | #!/usr/bin/env python3
""" Reads all sensors configured in the PIoT sensor directory every 30
seconds.
"""
running = True
def signal_handler(signum, frame):
global running
running = False
# Signal handling must occur before SQLAlchemy imports
# in the service layer, or the signal will not be
# handled properly when the program first starts.
import signal
signal.signal(signal.SIGTERM, signal_handler)
signal.signal(signal.SIGINT, signal_handler)
import logging, schedule, sys, time
from piot.service import SensorReadingSchedulingService
logging.basicConfig(stream=sys.stdout, level=logging.INFO)
scheduling_service=SensorReadingSchedulingService()
def run_jobs():
""" Runs the sensor reading job every 30 seconds.
"""
schedule.every(30).seconds.do(scheduling_service.sensor_reading_job)
schedulin | g_service.sensor_reading_job()
while running:
schedule.run_pending()
time.sleep(1)
if __name__ = | = '__main__':
run_jobs()
|
zenoss/ZenPacks.community.NetWare | ZenPacks/community/NetWare/modeler/plugins/NetWareFileSystemMap.py | Python | gpl-2.0 | 2,147 | 0.011178 | __doc__="""NetWareFileSystemMap
FileSystemMap maps the interface and ip tables to interface objects
$Id: NetWareFileSystemMap.py,v 0.9 2009/03/09 efeldman"""
__version__ = '$Revision: 0.9 $'[11:-2]
import re
from Products.DataCollector.plugins.CollectorPlugin import SnmpPlugin, GetTableMap, GetMap
from Products.DataCollector.plugins.DataMaps import ObjectMap
import Globals
class NetWareFileSystemMap(SnmpPlugin):
maptype = "FileSystemMap"
compname = "os"
relname = "filesystems"
modname = "Products.ZenModel.FileSystem"
deviceProperties = \
SnmpPlugin.deviceProperties + ('zFileSystemMapIgnoreNames',)
columns = {
'.1': 'nwVolId',
'.2': 'nwVolPhysicalName',
'.3': 'nwVolSize',
'.4': 'nwVolFree',
'.5': 'nwVolFreeable',
'.6': 'nwVolNonFreeable',
'.7': 'nwVolBlockSize',
'.8': 'nwVolMounted',
'.15': 'nwVolFileSystemID',
}
snmpGetTableMaps = (
GetTableMap('nwFSTable', '.1.3.6.1.4.1.23.2.28.2.14.1', columns),
)
nwVolFileSystemID = {
3: 'netware',
5: 'nss',
}
def process(self, device, results, log):
"""collect snmp information from this device"""
getdata, tabledata = results
fstable = tabledata.get( "nwFSTable" )
skipfsnames = getattr(device, 'zFileSystemMapIgnoreNames', None)
maps = []
rm = self.relMap()
| for fs in fstable.values():
if not fs.has_key("nwVolSize"): continue
if not self.checkColumns(fs, self.columns, log): continue
if fs['nwVolSize'] > 0 and (not skipfsnames or not re.search(skipfsnames,fs['nwVolPhysicalName'])):
om = self.objectMap()
| om.id = self.prepId(fs['nwVolPhysicalName'])
om.snmpindex = fs['nwVolId']
om.mount = fs['nwVolPhysicalName']
om.type = fs['nwVolFileSystemID']
om.blockSize = 1024
om.totalBlocks = fs['nwVolSize']
rm.append(om)
maps.append(rm)
return maps
|
petems/ansible-json | callback_plugins/json_logs.py | Python | mit | 1,719 | 0.018034 | # adapted from https://gist.github.com/cliffano/9868180
import os
import time
import json
from json import JSONEncoder
def json_log(res, host):
if type(res) == type(dict()):
if 'verbose_override' not in res:
res.update({"host": host})
combined_json = JSONEncoder().encode(res)
print(combined_json)
class CallbackModule(object):
def on_any(self, *args, **kwargs):
pass
def runner_on_failed(self, host, res, ignore_errors=False):
json_log(res, host)
def runner_on_ok(self, host, res):
json_log(res, host)
def runner_on_error(self, host, msg):
pass
def runner_on_skipped(self, host, item=None):
pass
def runner_on_unreachable(self, host, res):
json_log(res, host)
def runner_on_no_hosts(self):
pass
def runner_on_async_poll(self, host, res, jid, clock):
json_log(res, host)
def runner_on_async_ok(self, host, res, jid):
json_log(res, host)
def runner_on_async_failed(self, host, res, jid):
json_log(res, host)
def playbook_on_start(self):
pass
def playbook_on_notify(self, host, handler):
pass
def playbook_on_no_hosts_matched(self):
pass
def playbook_on_no_hosts_remaining(self):
pass
def playbook_on_task_start(self, name, is_conditional):
pass
def playbook_on_vars_prompt(self, varname, private=True, prompt=None, encrypt=None, confirm=False, salt_size=None, salt=None, default=None):
pass
def playbook_on_setup(self):
pass
def playbook_on_import_for_host(self, host, imported_file):
pa | ss
def playbook_on_not_import_for_host(self, host, missing_file):
pass
def playbook_on_play_ | start(self, pattern):
pass
def playbook_on_stats(self, stats):
pass |
grimfang/lext | core/level/builder.py | Python | mit | 26,314 | 0.00456 | #!/usr/bin/python
# System imports
# Panda Engine imports
from direct.showbase.DirectObject import DirectObject
from panda3d.core import *
from pandac.PandaModules import *
from panda3d.bullet import *
from direct.task.Task import Task
from direct.interval.IntervalGlobal import *
# Game imports
#----------------------------------------------------------------------#
# Blender Tagging:
# levelName = The level name duh
# levelDesc = short description
# ground = makes collision plane; Type = t-mesh
# box = static cube/box
# physic_box = dynamic cube
# physic_sensor = switches, doors, traps/hidden paths should glow up your gun or aura
# light = | create light (may need sub tag for light type, but for now its just poin | tlights)
# pickup = pickable object
# exit = exit point
# start = spawn point
# wall = basic wall visual
# col_wall = collision shape
# physic_lift = physics based object that moves up or down
# physic_door
# complex_object
# size = Boxes have sizes s,m,l
# Make a proper bitmask list
# 0x01 = w/e...
class LevelBuilder():
def __init__(self, _level):
self.level = _level
self.eventMgr = self.level.game.eventMgr
# Object types in levels
self.objectTypes = {"ground": self.buildGround,
"box": self.buildBox,
"light": self.buildLight,
"pickup": self.buildPickupObject,
"exit": self.buildExitPoint,
"start": self.buildStartPoint,
"wall": self.buildWall,
"col_wall": self.buildColWall,
"physic_box": self.buildPhysicBox,
"physic_fuse": self.buildPhysicFuse,
"physic_sensor": self.buildPhysicSensor,
"level_name": self.setLevelName,
"level_desc": self.setLevelDesc,
"physic_lift": self.buildLift,
"physic_door":self.buildPhysicDoor,
"complex_object": self.buildComplexObject
}
## Event Method
def parseEggFile(self, _levelFileName):
# parse the level for setup
self.levelModel = loader.loadModel(_levelFileName)
# Find all objects
self.objects = self.levelModel.findAllMatches('**')
for object in self.objects:
for type in self.objectTypes:
if object.hasTag(type):
self.buildLevel(object, type, self.levelModel)
#print type, object
def buildLevel(self, _object, _type, _levelRoot):
## Actual Level construction
self.objectTypes[_type](_object, _levelRoot)
def setLevelName(self, _object, _levelRoot):
self.level.levelName = _object.getTag("level_name")
def setLevelDesc(self, _object, _levelRoot):
self.level.levelDesc = _object.getTag("level_desc")
## Builder Methods
def buildGround(self, _object, _levelRoot):
groundType = _object.getTag("type")
if groundType == "t-mesh":
tmpMesh = BulletTriangleMesh()
node = _object.node()
if node.isGeomNode():
tmpMesh.addGeom(node.getGeom(0))
else:
return
body = BulletRigidBodyNode(_object.getTag("ground"))
body.addShape(BulletTriangleMeshShape(tmpMesh, dynamic=False))
body.setMass(0)
np = self.level.game.physicsParentNode.attachNewNode(body)
np.setCollideMask(BitMask32.allOn())
np.setScale(_object.getScale(_levelRoot))
np.setPos(_object.getPos(_levelRoot))
np.setHpr(_object.getHpr(_levelRoot))
self.level.game.physicsMgr.physicsWorld.attachRigidBody(body)
## Set the visual
_object.reparentTo(self.level.game.levelParentNode)
_object.setPos(_object.getPos(_levelRoot))
_object.setScale(_object.getScale(_levelRoot))
_object.setHpr(_object.getHpr(_levelRoot))
self.level.avoidObjects.append(_object.getTag("ground"))
else:
shape = BulletPlaneShape(Vec3(0, 0, 0.1), 1)
node = BulletRigidBodyNode(_object.getTag("ground"))
node.addShape(shape)
np = self.level.game.physicsParentNode.attachNewNode(node)
np.setPos(0, 0, -1)
np.setCollideMask(BitMask32.allOn())
self.level.game.physicsMgr.physicsWorld.attachRigidBody(node)
_object.reparentTo(self.level.game.levelParentNode)
_object.setPos(0, 0, 0)
#_object.setDepthOffset(1)
self.level.avoidObjects.append(_object.getTag("ground"))
# These are static boxes.
def buildBox(self, _object, _levelRoot):
size = _object.getTag("size")
if size == "s":
shape = BulletBoxShape(Vec3(.2, .2, .2))
node = BulletRigidBodyNode('box')
node.addShape(shape)
np = self.level.game.physicsParentNode.attachNewNode(node)
np.setCollideMask(BitMask32.allOn())
np.setPos(_object.getPos(_levelRoot))
np.setHpr(_object.getHpr())
_object.reparentTo(self.level.game.levelParentNode)
_object.setScale(.2, .2, .2)
self.level.game.physicsMgr.physicsWorld.attachRigidBody(node)
if size == "m":
shape = BulletBoxShape(Vec3(.5, .5, .5))
node = BulletRigidBodyNode('box')
node.addShape(shape)
np = self.level.game.physicsParentNode.attachNewNode(node)
np.setCollideMask(BitMask32.allOn())
np.setPos(_object.getPos(_levelRoot))
np.setHpr(_object.getHpr())
_object.reparentTo(self.level.game.levelParentNode)
_object.setScale(.5, .5, .5)
self.level.game.physicsMgr.physicsWorld.attachRigidBody(node)
if size == "l":
shape = BulletBoxShape(Vec3(1, 1, 1))
node = BulletRigidBodyNode('box')
node.addShape(shape)
np = self.level.game.physicsParentNode.attachNewNode(node)
np.setCollideMask(BitMask32.allOn())
np.setPos(_object.getPos(_levelRoot))
np.setHpr(_object.getHpr())
_object.reparentTo(self.level.game.levelParentNode)
_object.setScale(1, 1, 1)
self.level.game.physicsMgr.physicsWorld.attachRigidBody(node)
def buildPhysicBox(self, _object, _levelRoot):
# Make this more custom. for custom sizes.. or make a new method for handling those types
# Large 1,1,1 Medium .5,.5,.5 Small .1,.1,.1
# Get object size
size = _object.getTag("size")
if size == "s":
shape = BulletBoxShape(Vec3(.2, .2, .2))
node = BulletRigidBodyNode(_object.getTag("physic_box")+str(self.level.physicObjCount))
node.addShape(shape)
if _object.getTag("mass"):
node.setMass(int(_object.getTag("mass")))
else:
node.setMass(1)
np = self.level.game.physicsParentNode.attachNewNode(node)
np.setCollideMask(BitMask32.allOn())
np.setPos(_object.getPos())
self.level.game.physicsMgr.physicsWorld.attachRigidBody(node)
## Set the visual
_object.reparentTo(np)
_object.setPos(0, 0, 0)
_object.setScale(.2)
_object.setHpr(0, 0, 0)
## Add the physic_box to the physicObjects list for gravity handling
self.level.physicObjects[_object.getTag("physic_box")+str(self.level.physicObjCount)] = np
self.level.physicObjCount += 1
if size == "m":
shape = BulletBoxShape(Vec3(1, 1, 1))
node = BulletRigidBodyNode(_object.getTag("physic_box")+str(self.l |
pyokagan/pyglreg | setup.py | Python | mit | 1,068 | 0 | from setuptools | import setup
setup(name='glreg',
version='0.9.0a3',
| description='OpenGL XML API registry parser',
long_description=open('README.rst').read(),
url='https://github.com/pyokagan/pyglreg',
author='Paul Tan',
author_email='pyokagan@gmail.com',
license='MIT',
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Topic :: Software Development :: Code Generators',
'Topic :: Software Development :: Libraries :: Python Modules',
],
keywords='opengl',
py_modules=['glreg'],
entry_points={
'console_scripts': [
'glreg=glreg:main'
]
})
|
eort/OpenSesame | libopensesame/sketchpad_elements/_noise.py | Python | gpl-3.0 | 1,670 | 0.021557 | #-*- coding:utf-8 -*-
"""
This file is part of OpenSesame.
OpenSesame is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
OpenSesame is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with OpenSesame. If not, see <http://www.gnu.org/licenses/>.
"""
from libopensesame.py3compat import *
from libopensesame.sketchpad_elements._base_element import base_element
class noise(base_element):
"""
desc:
A gabor-patch element for the sketchpad.
"""
def __init__(self, sketchpad, string):
"""
desc:
Constructor.
arguments:
sketchpad: A sketchpad object.
string: A definition string.
"""
defaults = [
(u'x', None),
(u'y', None),
(u'env', u'gaussian'),
(u'size', 96),
(u'stdev', 12),
(u'color1', u'white'),
(u'color2', u'black'),
(u'bgmode', u'avg')
]
super(noise, self).__init__(sketchpad, string, defaults=defaults)
def draw(self):
"""
desc:
Draws the element to the canvas of | the sketchpad.
"""
properties = self.eval_properties()
return self.canvas.no | ise_patch(properties[u'x'], properties[u'y'],
env=properties[u'env'], size=properties[u'size'],
stdev=properties[u'stdev'], col1=properties[u'color1'],
col2=properties[u'color2'], bgmode=properties[u'bgmode'])
|
RoboticaBrasil/-ComputerVision | camera.py | Python | apache-2.0 | 336 | 0 | From SimpleCV import Camera
# Initialize the camera
cam = Camera()
# Loop to cont | inuo | usly get images
while True:
# Get Image from camera
img = cam.getImage()
# Make image black and white
img = img.binarize()
# Draw the text "Hello World" on image
img.drawText("Hello World!")
# Show the image
img.show()
|
thorgate/django-esteid | esteid/mobileid/types.py | Python | bsd-3-clause | 2,083 | 0.00096 | import re
from collections import namedtuple
from typing import Optional
from esteid import settings
from esteid.constants import Languages
from esteid.exceptions import InvalidIdCode, InvalidParameter
from esteid.signing.types import InterimSessionData
from esteid.types import PredictableDict
from esteid.validators import id_code_ee_is_valid
| PHONE_NUMBER_REGEXP = settings.MOBILE_ID_PHONE_NUMBER_REGEXP
AuthenticateResult = namedtuple(
| "AuthenticateResult",
[
"session_id",
"hash_type",
"hash_value",
"verification_code",
"hash_value_b64",
],
)
AuthenticateStatusResult = namedtuple(
"AuthenticateStatusResult",
[
"certificate", # DER-encoded certificate
"certificate_b64", # Base64-encoded DER-encoded certificate
],
)
SignResult = namedtuple(
"SignResult",
[
"session_id",
"digest",
"verification_code",
],
)
# Note: MobileID doesn't return a certificate for SignStatus. It is set from a previous call to `/certificate`
SignStatusResult = namedtuple(
"SignStatusResult",
[
"signature",
"signature_algorithm",
"certificate",
],
)
class UserInput(PredictableDict):
phone_number: str
id_code: str
language: Optional[str]
def is_valid(self, raise_exception=True):
result = super().is_valid(raise_exception=raise_exception)
if result:
if not self.phone_number or PHONE_NUMBER_REGEXP and not re.match(PHONE_NUMBER_REGEXP, self.phone_number):
if not raise_exception:
return False
raise InvalidParameter(param="phone_number")
if not id_code_ee_is_valid(self.id_code):
if not raise_exception:
return False
raise InvalidIdCode
if not (self.get("language") and self.language in Languages.ALL):
self.language = settings.MOBILE_ID_DEFAULT_LANGUAGE
return result
class MobileIdSessionData(InterimSessionData):
session_id: str
|
comparemetrics/GoogleAppsAccountManager | src/GoogleAppsAccountManager/group/data.py | Python | gpl-2.0 | 1,308 | 0.005352 | # -*- coding: utf-8 -*-
#
# GoogleAppsAccountManager: group/data
# Copyright (C) 2012-2013 KAMEI Yutaka
#
# License: GNU General Public License version 2 or later
# Date: 2012-12-28, since 2012-12-28
#
from GoogleAppsAccountManager import XMLNS_ATOM, XMLNS_APPS
from GoogleAppsAccountManager import data, errors
class GroupFeed(data.Feed):
def __init__(self, status, xml_string):
data.Feed.__init__(self, status, xml_string, GroupEntry)
class GroupEntry(data.Entry):
def getGroupId(self):
group_id = self.getValueFromPropertyElement("groupId")
return group_id.encode("utf-8")
def getGroupName(self):
group_name = self.getValueFromPropertyElement("groupName")
return group_name.encode("utf-8")
def getGroupDescription(self):
group_description = self.getValueFromPropertyElement("description")
return group_description.encode("utf-8")
def setGroupName(self, new_name_utf8):
return self.setValueToPropertyElement(
"groupName"
, new_name_utf8.decode("utf-8")
)
def setGroupD | escription(self, new_description_utf8):
return self.setValueToPropertyElement(
"description"
, new_description_utf8.decod | e("utf-8")
)
|
wardi/ckanext-bcgov | ckanext/bcgov/plugin.py | Python | agpl-3.0 | 17,626 | 0.007886 | # Copyright 2015, Province of British Columbia
# License: https://github.com/bcgov/ckanext-bcgov/blob/master/license
from ckan.common import c, _
import pylons.config as config
import ckan.plugins as plugins
import ckan.plugins.toolkit as toolkit
from routes.mapper import SubMapper
from ckanext.bcgov.util.util import (get_edc_tags,
edc_type_label,
get_state_values,
get_username,
get_user_orgs,
get_user_role_orgs,
get_user_orgs_id,
get_user_toporgs,
get_organization_branches,
get_all_orgs
)
from ckanext.bcgov.util.helpers import (get_suborg_sector,
get_user_dataset_num,
get_package_data,
is_license_open,
get_record_type_label,
get_suborgs,
record_is_viewable,
get_facets_selected,
get_facets_unselected,
get_sectors_list,
get_dataset_type,
get_organizations,
get_organization_title,
get_espg_id,
get_edc_org,
get_iso_topic_values,
get_eas_login_url,
get_fqdn,
get_environment_name,
get_version,
get_bcgov_commit_id,
resource_prefix,
)
class SchemaPlugin(plugins.SingletonPlugin):
plugins.implements(plugins.IConfigurer)
plugins.implements(plugins.IRoutes, inherit=True)
plugins.implements(plugins.ITemplateHelpers, inherit=False)
plugins.implements(plugins.IPackageController, inherit=True)
plugins.implements(plugins.IFacets, inherit=True)
plugins.implements(plugins.IActions, inherit=True)
def get_helpers(self):
return {
"dataset_type" : get_dataset_type,
"edc_tags" : get_edc_tags,
"edc_orgs" : get_organizations,
"edc_org_branches" : get_organization_branches,
"edc_org_title" : get_organization_title,
"edc_type_label" : edc_type_label,
"edc_state_values" : get_state_values,
"edc_username": get_username,
"get_sector" : get_suborg_sector,
"get_user_orgs" : get_user_orgs,
"get_user_orgs_id" : get_user_orgs_id,
| "get_user_toporgs": get_user_toporgs,
"get_suborg_sector" : get_suborg_sector,
"get_user_dataset_num" : get_user_dataset_num,
"get_edc_package" : get_package_data,
"is_license_open" : is_license_open,
"record_type_label" : get_record_type_label,
"get_suborgs": get_suborgs,
"record_is_viewable": record_is_vi | ewable,
"get_espg_id" : get_espg_id,
"get_user_role_orgs" : get_user_role_orgs,
"get_all_orgs" : get_all_orgs,
"get_facets_selected": get_facets_selected,
"get_facets_unselected" : get_facets_unselected,
"get_sectors_list": get_sectors_list,
"get_edc_org" : get_edc_org,
"get_iso_topic_values" : get_iso_topic_values,
"get_eas_login_url": get_eas_login_url,
"get_fqdn": get_fqdn,
"get_environment_name": get_environment_name,
"get_version": get_version,
"get_bcgov_commit_id": get_bcgov_commit_id,
"googleanalytics_resource_prefix": resource_prefix,
}
def update_config(self, config):
toolkit.add_public_directory(config, 'public')
toolkit.add_template_directory(config, 'templates')
toolkit.add_resource('fanstatic', 'edc_resource')
toolkit.add_resource('public/scripts', 'theme_scripts')
#Customizing action mapping
def before_map(self, map):
from routes.mapper import SubMapper
package_controller = 'ckanext.bcgov.controllers.package:EDCPackageController'
user_controller = 'ckanext.bcgov.controllers.user:EDCUserController'
org_controller = 'ckanext.bcgov.controllers.organization:EDCOrganizationController'
site_map_controller = 'ckanext.bcgov.controllers.site_map:GsaSitemapController'
api_controller = 'ckanext.bcgov.controllers.api:EDCApiController'
# map.redirect('/', '/dataset')
map.connect('package_index', '/', controller=package_controller, action='index')
map.connect('/dataset/add', controller=package_controller, action='typeSelect')
with SubMapper(map, controller=package_controller) as m:
m.connect('add dataset', '/dataset/new', action='new')
#m.connect('dataset_edit', '/dataset/edit/{id}', action='edc_edit',ckan_icon='edit')
m.connect('search', '/dataset', action='search', highlight_actions='index search')
m.connect('dataset_read', '/dataset/{id}', action='read', ckan_icon='sitemap')
m.connect('duplicate', '/dataset/duplicate/{id}', action='duplicate')
m.connect('/dataset/{id}/resource/{resource_id}', action='resource_read')
m.connect('/dataset/{id}/resource_delete/{resource_id}', action='resource_delete')
m.connect('/authorization-error', action='auth_error')
m.connect('resource_edit', '/dataset/{id}/resource_edit/{resource_id}', action='resource_edit', ckan_icon='edit')
m.connect('new_resource', '/dataset/new_resource/{id}', action='new_resource')
with SubMapper(map, controller=user_controller) as m:
m.connect('user_dashboard_unpublished', '/dashboard/unpublished',
action='dashboard_unpublished', ckan_icon='group')
m.connect('/user/edit', action='edit')
m.connect('/user/activity/{id}/{offset}', action='activity')
m.connect('user_activity_stream', '/user/activity/{id}',
action='activity', ckan_icon='time')
m.connect('user_dashboard', '/dashboard', action='dashboard',
ckan_icon='list')
m.connect('user_dashboard_datasets', '/dashboard/datasets',
action='dashboard_datasets', ckan_icon='sitemap')
m.connect('user_dashboard_organizations', '/dashboard/organizations',
action='dashboard_organizations', ckan_icon='building')
m.connect('/dashboard/{offset}', action='dashboard')
m.connect('user_follow', '/user/follow/{id}', action='follow')
m.connect('/user/unfollow/{id}', action='unfollow')
m.connect('user_followers', '/user/followers/{id:.*}',
action='followers', ckan_icon='group')
m.connect('user_edit', '/user/edit/{id:.*}', action='edit',
ckan_icon='cog')
m.connect('user_delete', '/user/delete/{id}', action='delete')
m.connect('/user/reset/{id:.*}', action='perform_reset')
m.connect('register', '/user/register', action='register')
m.connect('login', '/user/login', action='login')
m.connect('/user/ |
francislpx/myblog | blog/urls.py | Python | gpl-3.0 | 423 | 0.002364 | from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^$', views.index, name='index'),
url(r'^post/(?P<pk>[0-9]+)/$', views.detail, name='detail'),
ur | l(r'^archive/(?P<year>[0-9]{4})/(?P<month>[0-9]{1,2})/$', views.archives, name='archive'),
url(r'^category/(?P<pk>[0-9]+)/$', views.categories, name='category'),
url(r'^tag/(?P<pk>[0-9]+)/$', views.get_posts_by_tag, | name='tag'),
]
|
hlzz/dotfiles | graphics/VTK-7.0.0/Imaging/Core/Testing/Python/TestROIStencil.py | Python | bsd-3-clause | 3,470 | 0.010663 | #!/usr/bin/env python
import vtk
from vtk.test import Testing
from vtk.util.misc import vtkGetDataRoot
VTK_DATA_ROOT = vtkGetDataRoot()
# A script to test the vtkROIStencilSource
reader = vtk.vtkPNGReader()
reader.SetDataSpacing(0.8,0.8,1.5)
reader.SetDataOrigin(0.0,0.0,0.0)
reader.SetFileName("" + str(VTK_DATA_ROOT) + "/Data/fullhead15.png")
shiftScale = vtk.vtkImageShiftScale()
shiftScale.SetInputConnection(reader.GetOutputPort())
shiftScale.SetScale(0.2)
shiftScale.Update()
roiStencil1 = vtk.vtkROIStencilSource()
roiStencil1.SetShapeToEllipsoid()
roiStencil1.SetBounds(20,300,80,150,0,0)
roiStencil1.SetInformationInput(reader.GetOutput())
roiStencil2 = vtk.vtkROIStencilSource()
roiStencil2.SetShapeToCylinderX()
roiStencil2.SetBounds(20,300,80,150,0,0)
roiStencil2.SetInformationInput(reader.GetOutput())
roiStencil3 = vtk.vtkROIStencilSource()
roiStencil3.SetShapeToCylinderZ()
roiStencil3.SetBounds(20,300,80,150,0,0)
roiStencil3.SetInformationInput(reader.GetOutput())
roiStencil4 = vtk.vtkROIStencilSource()
roiStencil4.SetShapeToBox()
roiStencil4.SetBounds(20,300,80,150,0,0)
roiStencil4.SetInformationInput(reader.GetOutput())
stencil1 = vtk.vtkImageStencil()
stencil1.SetInputConnection(reader.GetOutputPort())
stencil1.SetBackgroundInputData(shiftScale.GetOutput())
stencil1.SetStencilConnection(roiStencil1.GetOutputPort())
stencil2 = vtk.vtkImageStencil()
stencil2.SetInputConnection(reader.GetOutputPort())
stencil2.SetBackgroundInputData(shiftScale.GetOutput())
stencil2.SetStencilConnection(roiStencil2.GetOutputPort())
stencil3 = vtk.vtkImageStencil()
stencil3.SetInputConnection(reader.GetOutputPort())
stencil3.SetBackgroundInputData(shiftScale.GetOutput())
stencil3.SetStencilConnection(roiStencil3.GetOutputPort())
stencil4 = vtk.vtkImageStencil()
stencil4.SetInputConnection(reader.GetOutputPort())
stencil4.SetBackgroundInputData(shiftScale.GetOutput())
stencil4.SetStencilConnection(roiStencil4.GetOutputPort())
mapper1 = vtk.vtkImageMapper()
mapper1.SetInputConnection(stencil1.GetOutputPort())
mapper1.SetColorWindow(2000)
mapper1.SetColorLevel(1000)
mapper1.SetZSlice(0)
mapper2 = vtk.vtkImageMapper()
mapper2.SetInputConnection(stencil2.GetOutputPort())
mapper2.SetColorWindow(2000)
mapper2.SetColorLevel(1000)
mapper2.SetZSlice(0)
mapper3 = vtk.vtkImageMapper()
mapper3.SetInputConnection(stencil3.GetOutputPort())
mapper3.SetColorWindow(2000)
mapper3.SetColorLevel(1000)
mapper3.SetZSlice(0)
mapper4 = vtk.vtkImageMapper()
mapper4.SetInputConnection(stencil4.GetOutputPort())
mapper4.SetColorWindow(2000)
mapper4.SetColorLevel(1000)
mapper4.SetZSlice(0)
actor1 = vtk.vtkActor2D()
actor1.SetMapper(mapper1)
actor2 = vtk.vtkActor2D()
actor2.SetMapper(mapper2)
actor3 = vtk.vtkActor2D()
actor3.SetMapper(mapper3)
actor4 = vtk.vtkActor2D()
actor4.SetMapper(mapper4)
imager1 = vtk.vtkRenderer()
imager1.AddActor2D(actor1)
imager1.SetViewport(0.5,0.0,1.0,0.5)
imager2 = vtk.vtkRenderer()
imager2.AddActor2D(actor2)
imager2.SetViewport(0.0,0.0,0.5,0.5)
imager3 = vtk.vtkRenderer()
imager3.AddActor2D(actor3)
imager3.SetViewport(0.5,0.5,1.0,1.0)
imager4 = vtk.vtkRenderer()
imager4.AddActor2D(actor4)
imager4.SetViewport(0.0,0.5,0.5,1.0)
imgWin = vtk.vtkRenderWindow()
imgWin.AddRen | derer(imager1)
imgWin.AddRenderer(imager2)
imgWin.AddRenderer(imager3)
imgWin.AddRenderer(imager4)
imgWin.SetSize(512,512)
imgWin.Render()
# --- end of scri | pt --
|
ilikesounds/jt_portfolio | jt_portfolio/wsgi.py | Python | mit | 582 | 0.003436 | """
WSGI config for jt_portfolio project.
It exposes the WSGI callable | as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangopr | oject.com/en/1.10/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "jt_portfolio.settings")
application = get_wsgi_application()
from django.core.wsgi import get_wsgi_application
from whitenoise.django import DjangoWhiteNoise
application = get_wsgi_application()
application = DjangoWhiteNoise(application)
|
prometx/shivuxbot | shivuxbot-scratch_pub.py | Python | mit | 725 | 0.005517 | #!/usr/bin/python3.5
import requests
import urllib.request
import urllib.parse
import json
import pprint
from bittrex import Bittrex
"""
MARKET_SET = {
'getopenorders',
'cancel',
'sellmarket',
'selllimit',
'buymarket',
'buylimit'
}
ACCOUNT_SET = {
'getbalances',
'getbalance',
'getdepositaddress',
| 'withdraw',
'getorderhistory',
'getorder',
'getdeposithistory',
'getwithdrawalhistory'
}
"""
api_key = '<bittrex api key>'
api_secret = '<bittrex api secret>'
#url = 'https://bittrex.com/api/v1.1/account/getbalances'
bittrex = Bittrex(api_key,api_secret)
call_result = call.api_query( | 'account','getopenbalances')
json_data = requests.get(call_result).json()
|
frappe/frappe | frappe/tests/test_base_document.py | Python | mit | 476 | 0.021008 | import unittest
from frappe.model.base_document import BaseDocument
class TestBaseDocument(unittest.TestCase):
def test_docstatus(self):
doc = BaseDocument({"docstatus": 0})
self.assertTrue(doc | .docstatus.is_draft())
self.assertEquals(doc.docstatus, 0)
doc.docstatus = 1
self.assertTrue(doc.docstatus.is_submitted())
self.assertEquals(doc.docstatus, 1)
| doc.docstatus = 2
self.assertTrue(doc.docstatus.is_cancelled())
self.assertEquals(doc.docstatus, 2)
|
gnulinooks/sympy | sympy/test_external/test_numpy.py | Python | bsd-3-clause | 7,620 | 0.015879 | # This testfile tests SymPy <-> NumPy compatibility
# Don't test any SymPy features here. Just pure interaction with NumPy.
# Always write regular SymPy tests for anything, that can be tested in pure
# Python (without numpy). Here we test everything, that a user may need when
# using SymPy with NumPy
try:
from numpy import array, matrix, ndarray
import numpy
except ImportError:
#py.test will not execute any tests now
disabled = True
from sympy import Rational, Symbol, list2numpy, sin, Real, Matrix, lambdify
import sympy
from sympy import mpmath
mpmath.mp.dps = 16
sin02 = mpmath.mpf("0.198669330795061215459412627")
# first, systematically check, that all operations are implemented and don't
# raise and exception
def test_systematic_basic():
def s(sympy_object, numpy_array):
x = sympy_object + numpy_array
x = numpy_array + sympy_object
x = sympy_object - numpy_array
x = numpy_array - sympy_object
x = sympy_object * numpy_array
x = numpy_array * sympy_object
x = sympy_object / numpy_array
x = numpy_array / sympy_object
x = sympy_object ** numpy_array
x = numpy_array ** sympy_object
x = Symbol("x")
y = Symbol("y")
sympy_objs = [
Rational(2),
Real("1.3"),
x,
y,
pow(x,y)*y,
5,
5.5,
]
numpy_objs = [
array([1]),
array([3, 8, -1]),
array([x, x**2, Rational(5)]),
array([x/y*sin(y), 5, Rational(5)]),
]
for x in sympy_objs:
for y in numpy_objs:
s(x,y)
# now some random tests, that test particular problems and that also
# check that the results of the operations are correct
def test_basics():
one = Rational(1)
zero = Rational(0)
x = Symbol("x")
assert array(1) == array(one)
assert array([one]) == array([one])
assert array([x]) == array([x])
assert array(x) == array(Symbol("x"))
assert array(one+x) == array(1+x)
X = array([one, zero, zero])
assert (X == array([one, zero, zero])).all()
assert (X == array([one, 0, 0])).all()
def test_arrays():
one = Rational(1)
zero = Rational(0)
X = array([one, zero, zero])
Y = one*X
X = array([Symbol("a")+Rational(1,2)])
Y = X+X
assert Y == array([1+2*Symbol("a")])
Y = Y + 1
assert Y == array([2+2*Symbol("a")])
Y = X-X
assert Y == array([0])
def test_conversion1():
x = Symbol("x")
a = list2numpy([x**2, x])
#looks like an array?
assert isinstance(a, ndarray)
assert a[0] == x**2
assert a[1] == x
assert len(a) == 2
#yes, it's the array
def test_conversion2():
x = Symbol("x")
a = 2*list2numpy([x**2, x])
b = list2numpy([2*x**2, 2*x])
assert (a == b).all()
one = Rational(1)
zero = Rational(0)
X = list2numpy([one, zero, zero])
Y = one*X
X = list2numpy([Symbol("a")+Rational(1,2)])
Y = X+X
assert Y == array([1+2*Symbol("a")])
Y = Y + 1
assert Y == array([2+2*Symbol("a")])
Y = X-X
assert Y == array([0])
def test_list2numpy():
x = Symbol("x")
assert (array([x**2, x]) == list2numpy([x**2, x])).all()
def test_Matrix1():
x = Symbol("x")
m = Matrix([[x, x**2], [5, 2/x]])
assert (array(m.subs(x, 2)) == array([[2, 4],[5, 1]])).all()
m = Matrix([[sin(x), x**2], [5, 2/x]])
assert (array(m.subs(x, 2)) == array([[sin(2), 4],[5, 1]])).all()
def test_Matrix2():
x = Symbol("x")
m = Matrix([[x, x**2], [5, 2/x]])
assert (matrix(m.subs(x, 2)) == matrix([[2, 4],[5, 1]])).all()
m = Matrix([[sin(x), x**2], [5, 2/x]])
assert (matrix(m.subs(x, 2)) == matrix([[sin(2), 4],[5, 1]])).all()
def test_Matrix3():
x = Symbol("x")
a = array([[2, 4],[5, 1]])
assert Matrix(a) == Matrix([[2, 4], [5, 1]])
assert Matrix(a) != Matrix([[2, 4], [5, 2]])
a = array([[sin(2), 4], [5, 1]])
assert Matrix(a) == Matrix([[sin(2), 4],[5, 1]])
assert Matrix(a) != Matrix([[sin(0), 4],[5, 1]])
def test_Matrix4():
x = Symbol("x")
a = matrix([[2, 4],[5, 1]])
assert Matrix(a) == Matrix([[2, 4], [5, 1]])
a | ssert Matrix(a) != Matrix([[2, 4], [5, 2]])
a = matrix([[sin(2), 4], [5, 1]])
assert Matrix(a) == Matrix([[sin(2), 4],[5, 1]])
assert Matrix(a) != Matrix([[sin(0), 4],[5, 1]])
def test_Matrix_sum():
x, y, z = Symbol('x'), Symbol('y'), Symbo | l('z')
M = Matrix([[1,2,3],[x,y,x],[2*y,-50,z*x]])
m = matrix([[2,3,4],[x,5,6],[x,y,z**2]])
assert M+m == Matrix([[3,5,7],[2*x,y+5,x+6],[2*y+x,y-50,z*x+z**2]])
assert m+M == Matrix([[3,5,7],[2*x,y+5,x+6],[2*y+x,y-50,z*x+z**2]])
assert M+m == M.add(m)
def test_Matrix_mul():
x, y, z = Symbol('x'), Symbol('y'), Symbol('z')
M = Matrix([[1,2,3],[x,y,x]])
m = matrix([[2,4],[x,6],[x,z**2]])
assert M*m == Matrix([
[ 2 + 5*x, 16 + 3*z**2],
[2*x + x*y + x**2, 4*x + 6*y + x*z**2],
])
assert m*M == Matrix([
[ 2 + 4*x, 4 + 4*y, 6 + 4*x],
[ 7*x, 2*x + 6*y, 9*x],
[x + x*z**2, 2*x + y*z**2, 3*x + x*z**2],
])
def test_Matrix_array():
class matarray(object):
def __array__(self):
from numpy import array
return array([[1,2,3],[4,5,6],[7,8,9]])
matarr = matarray()
assert Matrix(matarr) == Matrix([[1,2,3],[4,5,6],[7,8,9]])
def test_issue629():
x = Symbol("x")
assert (Rational(1,2)*array([2*x, 0]) == array([x, 0])).all()
assert (Rational(1,2)+array([2*x, 0]) == array([2*x+Rational(1,2), Rational(1,2)])).all()
assert (Real("0.5")*array([2*x, 0]) == array([Real("1.0")*x, 0])).all()
assert (Real("0.5")+array([2*x, 0]) == array([2*x+Real("0.5"), Real("0.5")])).all()
def test_lambdify():
x = Symbol("x")
f = lambdify(x, sin(x), "numpy")
prec = 1e-15
assert -prec < f(0.2) - sin02 < prec
try:
f(x) # if this succeeds, it can't be a numpy function
raise Exception
except AttributeError:
pass
def test_lambdify_matrix():
x = Symbol("x")
f = lambdify(x, Matrix([[x, 2*x],[1, 2]]), "numpy")
assert (f(1) == matrix([[1,2],[1,2]])).all()
def test_lambdify_matrix_multi_input():
x,y,z=sympy.symbols('x,y,z')
M=sympy.Matrix([[x**2, x*y, x*z],
[y*x, y**2, y*z],
[z*x, z*y, z**2]])
f = lambdify((x,y,z), M, "numpy")
xh,yh,zh = 1.0, 2.0, 3.0
expected = matrix([[xh**2, xh*yh, xh*zh],
[yh*xh, yh**2, yh*zh],
[zh*xh, zh*yh, zh**2]])
actual = f(xh,yh,zh)
assert numpy.allclose(actual,expected)
def test_lambdify_matrix_vec_input():
X=sympy.DeferredVector('X')
M=Matrix([[X[0]**2, X[0]*X[1], X[0]*X[2]],
[X[1]*X[0], X[1]**2, X[1]*X[2]],
[X[2]*X[0], X[2]*X[1], X[2]**2]])
f = lambdify(X, M, "numpy")
Xh = array([1.0, 2.0, 3.0])
expected = matrix([[Xh[0]**2, Xh[0]*Xh[1], Xh[0]*Xh[2]],
[Xh[1]*Xh[0], Xh[1]**2, Xh[1]*Xh[2]],
[Xh[2]*Xh[0], Xh[2]*Xh[1], Xh[2]**2]])
actual = f(Xh)
assert numpy.allclose(actual,expected)
def test_lambdify_transl():
from sympy.utilities.lambdify import NUMPY_TRANSLATIONS
for sym, mat in NUMPY_TRANSLATIONS.iteritems():
assert sym in sympy.functions.__dict__ or sym in ("Matrix", )
assert mat in numpy.__dict__
|
fedora-copr/dnf-plugins-core | plugins/repoclosure.py | Python | gpl-2.0 | 5,281 | 0.000568 | # repoclosure.py
# DNF plugin adding a command to display a list of unresolved dependencies
# for repositories.
#
# Copyright (C) 2015 Igor Gnatenko
#
# This copyrighted material is made available to anyone wishing to use,
# modify, copy, or redistribute it subject to the terms and conditions of
# the GNU General Public License v.2, or (at your option) any later version.
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY expressed or implied, including the implied warranties of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
# Public License for more details. You should have received a copy of the
# GNU General Public License along with this program; if not, write to the
# Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA. Any Red Hat trademarks that are incorporated in the
# source code or documentation are not subject to the GNU General Public
# License and may only be used or replicated with the express permission of
# Red Hat, Inc.
#
from __future__ import absolute_import
from __future__ import unicode_literals
from dnfpluginscore import _
import dnf.cli
class RepoClosure(dnf.Plugin):
name = "repoclosure"
def __init__(self, base, cli):
super(RepoClosure, self).__init__(base, cli)
if cli is None:
return
cli.register_command(RepoClosureCommand)
class RepoClosureCommand(dnf.cli.Command):
aliases = ("repoclosure",)
summary = _("Display a list of unresolved dependencies for repositories")
def configure(self):
demands = self.cli.demands
demands.sack_activation = True
demands.available_repos = True
if self.opts.repo:
for repo in self.base.repos.all():
if repo.id not in self.opts.repo and repo.id not in self.opts.check:
repo.disable()
else:
repo.enable()
def run(self):
if self.opts.arches:
unresolved = self._get_unresolved(self.opts.arches)
else:
unresolved = self._get_unresolved()
for pkg in sorted(unresolved.keys()):
print("package: {} from {}".format(str(pkg), pkg.reponame))
print(" unresolved deps:")
for dep in unresolved[pkg]:
print(" {}".format(dep))
if len(unresolved) > 0:
msg = _("Repoclosure ended with unresolved dependencies.")
raise dnf.exceptions.Error(msg)
def _get | _unresolved(self, arch=None):
unresolved = {}
deps = set()
available = self.base.sack.query().available()
if self.base.conf.best and not self.opts.check:
available = available.latest()
| elif self.opts.newest or self.base.conf.best:
available = available.filter(latest=True)
if arch is not None:
available = available.filter(arch=arch)
pkgs = set()
if self.opts.pkglist:
available.apply()
for pkg in self.opts.pkglist:
for pkgs_filtered in available.filter(name=pkg):
pkgs.add(pkgs_filtered)
else:
for pkgs_filtered in available:
pkgs.add(pkgs_filtered)
if self.opts.check:
checkpkgs = set()
available.apply()
for repo in self.opts.check:
for pkgs_filtered in available.filter(reponame=repo):
checkpkgs.add(pkgs_filtered)
pkgs.intersection_update(checkpkgs)
# --best not applied earlier due to --check, so do it now
if self.base.conf.best:
available = available.latest()
for pkg in pkgs:
unresolved[pkg] = set()
for req in pkg.requires:
reqname = str(req)
# XXX: https://bugzilla.redhat.com/show_bug.cgi?id=1186721
if reqname.startswith("solvable:") or \
reqname.startswith("rpmlib("):
continue
deps.add(req)
unresolved[pkg].add(req)
available.apply()
unresolved_deps = set(x for x in deps if not available.filter(provides=x))
unresolved_transition = {k: set(x for x in v if x in unresolved_deps)
for k, v in unresolved.items()}
return {k: v for k, v in unresolved_transition.items() if v}
@staticmethod
def set_argparser(parser):
parser.add_argument("--arch", default=[], action="append", dest='arches',
help=_("check packages of the given archs, can be "
"specified multiple times"))
parser.add_argument("--check", default=[], action="append",
help=_("Specify repositories to check"))
parser.add_argument("-n", "--newest", action="store_true",
help=_("Check only the newest packages in the "
"repos"))
parser.add_argument("--pkg", default=[], action="append",
help=_("Check closure for this package only"),
dest="pkglist")
|
LangmuirSim/langmuir | LangmuirPython/ga2d/generators/iso.py | Python | gpl-2.0 | 2,422 | 0.00289 | # -*- coding: utf-8 -*-
"""
iso.py
======
.. argparse::
:module: iso
:func: create_parser
:prog: iso.py
.. moduleauthor:: Geoff Hutchison <geoffh@pitt.edu>
"""
import argparse
import os
from scipy import misc, ndimage
import numpy as np
desc = """
Generate isotropic two-phase 2D systems using Gaussian noise
"""
def create_parser():
parser = argparse. | ArgumentParser()
parser.description = desc
parser.add_argument(dest='width', default=256, type=int, metavar='dim.x',
nargs='?', help='dim.x')
parser.add_argument(dest='height', default=256, type=int, metavar='dim.y',
nargs='?', help='dim.y')
parser.add_argument(dest='radius', default=3, type=int, metavar='sigma',
| nargs='?', help='radius of Gaussian blur')
parser.add_argument('--seed', default=None, type=int, metavar='int',
help='random number seed')
parser.add_argument(dest='ofile', default=None, type=str, nargs='?',
metavar='output', help='output file')
return parser
def get_arguments(args=None):
parser = create_parser()
opts = parser.parse_args(args)
return opts
def makeIsotropic(width, height, radius=4):
"""
Generate a 2D isotropic two-phase system using Gaussian random noise.
Before thresholding, the random noise is convoluted using a Gaussian blur
kernel of sigma "radius". The resulting data will have a 50:50 mixture as
a binary array (0, 1)
:param width: width of the resulting data
:param height: height of the resulting data
:param radius: size of the Gaussian blur kernel
:type width: int
:type height: int
:type radius: float
:return: morphology data
:rtype: :py:class:`numpy.ndarray`
"""
# floating point between 0.0 - 1.0
noise = np.random.random( (width, height) )
# blur (vectorized code from ndimage)
scaled = ndimage.gaussian_filter(noise, sigma=radius)
# threshold, since the resulting blurred data may not have 50:50 mix
return scaled > scaled.mean()
if __name__ == '__main__':
work = os.getcwd()
opts = get_arguments()
if opts.seed:
np.random.seed(seed=opts.seed)
if opts.ofile is None:
opts.ofile = "iso-s%d-%d.png" % (opts.seed, opts.radius)
output = makeIsotropic(opts.width, opts.height, opts.radius)
misc.imsave(opts.ofile, output)
|
llvtt/mongo-python-driver | tools/fail_if_no_c.py | Python | apache-2.0 | 842 | 0.002375 | # Copyright 2009-2014 MongoDB, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of th | e License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Fail if the C extension module doesn't exist.
Only really intended to be used by internal | build scripts.
"""
import sys
sys.path[0:0] = [""]
import bson
import pymongo
if not pymongo.has_c() or not bson.has_c():
sys.exit("could not load C extensions")
|
phantomas1234/fbaproject | ifba/GlpkWrap/knockouts.py | Python | mit | 1,226 | 0.00571 | #!/usr/bin/env python
# encoding: utf-8
"""
knockouts.py
Created by Nikolaus Sonnenschein on 2008-02-25.
Copyright (c) 2008 Jacobs University of Bremen. All rights reserved.
"""
from metabolism import Metabolism
from util import ImportCplex
from ifba.glpki.glpki import *
class KnockOut(object):
"""A class putting the necessary functionality for GeneKnockOuts to
the Metabolism class."""
def __init__(self, lp):
# super(KnockOut, self).__init__(lp)
self.lp = lp
self.lp.smcp.presolve = GLP_OFF
def knockOut(self, gene):
"""Knocks out a gene."""
self.lp.modifyColumnBounds({gene: (0., 0.)})
| def knockOuts(self, listOfGenes):
"""Knocks out a list of genes."""
for gene in listOfGenes:
self.knockOut(gene)
if __name__ == '__main__':
def init(path):
struct = ImportCplex(path)
return Metabolism(struct)
def main():
import util
ecoli = init('test_data/model.lp')
ecoli.simplex()
print ecoli.getObjVal()
KnockOut(ecoli).knockOuts(['R("R_PGK")', 'R("R_PGK_Rev")'])
ecoli.simpl | ex()
print ecoli.getObjVal()
main() |
jedman/dedalus-leakylid | dedalus_boussinesq.py | Python | gpl-2.0 | 12,959 | 0.027317 | import numpy as np
import matplotlib.pyplot as plt
import h5py
from dedalus import public as de
from dedalus.extras import flow_tools
import time
import argparse
plt.rcParams['image.cmap'] = 'RdBu_r'
parser = argparse.ArgumentParser(description='simulate a Boussinesq pulse')
parser.add_argument('k', metavar = 'k', type = int, help='forcing wavenumber in the horizontal')
parser.add_argument('m', metavar = 'm', type = int, help='forcing wavenumber in the vertical')
parser.add_argument('eps', metavar = 'eps', type = float, help='epsilon, the ratio of buoyancy frequency in troposphere and stratosphere')
parser.add_argument('-nh','--non-hstat', dest='hstat', action='store_false')
parser.add_argument('-p','--pulse', dest='pulse', action='store_true')
parser.add_argument('-pl', '--pulse-len', dest = 'pulse_len' , type = float)
parser.set_defaults(pulse_len=100)
parser.set_defaults(hstat=True)
parser.set_defaults(pulse=False)
args = parser.parse_args()
PULSE = args.pulse
HYDROSTATIC = args.hstat
print('pulse_len is ', args.pulse_len)
if HYDROSTATIC == True:
print('using hydrostatic boussinesq solver')
else:
print('using non-hydrostatic boussinesq solver')
if PULSE == True:
print('solving for gaussian forcing')
else:
print('solving for cosine forcing (single k)')
import logging
root = logging.root
for h in root.handlers:
h.setLevel("INFO")
logger = logging.getLogger(__name__)
Lx, Lz = (2000000, 10000) # domain size in meters
nx, nz = (144, 256) # number of points in each direction
#Lx, Lz = (4000000, 10000) # domain size in meters
#nx, nz = (4*64, 144) # number of points in each direction
# parameters (some of these should be set via command line args)
stop_time = 20000. # simulation stop time (seconds)
pulse_len = args.pulse_len # seconds of forcing
N1 = 0.01 # buoyancy frequency in the troposphere (1/s)
eps = args.eps # ratio of N1/N2
N2 = N1/eps # buoyancy frequency in the stratosphere
m = args.m # vertical mode number
k = args.k # horizontal mode number
model_top = 8. * Lz # lid height
if eps < 0.4:
model_top = 4. * Lz # increases resolution near the jump
sim_name = 'k'+ str(k) +'m' + str(m)
print('simulation name is', sim_name)
print('effective forcing horizontal wavelength is' , 2.*Lx/k/1000., 'kilometers')
print('effective forcing vertical wavelength is' , 2.*Lz/m/1000., 'kilometers')
print('stratification ratio N1/N2 is' , N1/N2 )
lambda_x = 2.*Lx/k # for defining width of pulse
# Create bases and domain
x_basis = de.Fourier('x', nx, interval=(-Lx/2., Lx/2.), dealias = 3/2)
# compound z basis -- better to resolve jump condition?
#zb1 = de.Chebyshev('z1',int(nz/4), interval=(0, Lz+1000), dealias=3/2)
#zb2 = de.Chebyshev('z2', nz, interval=(Lz+1000,model_top), dealias = 3/2)
#z_basis = de.Compound('z',(zb1,zb2), dealias = 3/2)
#
z_basis = de.Chebyshev('z', nz, interval= (0, model_top), dealias = 3/2)
domain = de | .Domai | n([x_basis, z_basis], grid_dtype=np.float64)
x, z = domain.grids(scales=1)
xd, zd = domain.grids(scales=domain.dealias)
# set up problem
problem = de.IVP(domain, variables=['p','u','B','w'])
problem.parameters['rho'] = 1. #kg/m^3
#problem.parameters['Nsq'] = 0.0001 #1/s; constant Nsq
# non-constant coefficient N^2
ncc = domain.new_field(name='Nsq')
ncc['g'] = N1**2
strat = np.where( z > Lz)
ncc['g'][:,strat] = N2**2
ncc.meta['x']['constant'] = True
problem.parameters['Nsq'] = ncc
# mask (for analysis)
mask = domain.new_field(name = 'mask')
mask['g'] = 1
mask['g'][:,strat] = 0
mask.meta['x']['constant'] = True
problem.parameters['mask'] = mask
# experimental source term
# following Daniel's 12/24/14 explanation in the forums
# https://groups.google.com/forum/#!topic/dedalus-users/BqTjYZzqHHw
if PULSE == True:
def forcing(solver):
# if using dealiasing, it's important to apply the forcing on the dealiased doman (xd,zd)
if solver.sim_time < pulse_len:
# f = 0.0001*np.sin(m * np.pi*zd/Lz)*np.exp(-16.*(xd*xd)/((lambda_x)**2)) #pulse with "effective wavelength" lambda_x
f = 0.0001*np.sin(m * np.pi*zd/Lz)*np.exp(-16.*(xd*xd)/((lambda_x)**2)) + 0.0001*np.sin(2.*m * np.pi*zd/Lz)*np.exp(-16.*(xd*xd)/((lambda_x)**2))
strat = np.where(zd>Lz)
f[:,strat] = 0.
# subtract the horizontal mean at each level so there's no k=0
fprof = np.mean(f, axis = 0 )
ftmp = np.repeat(fprof, xd.shape[0])
fmask = ftmp.reshape(zd.shape[1],xd.shape[0])
f = f - fmask.T
else:
f = 0.
return f
else:
def forcing(solver):
# if using dealiasing, it's important to apply the forcing on the dealiased doman (xd,zd)
if solver.sim_time < pulse_len:
#f = 0.001*np.sin(np.pi*zd/Lz)*np.exp(-16.*(xd*xd)/((lambda_x)**2)) #pulse with "effective wavelength" lambda_x
f = 0.001*np.sin(m * np.pi*zd/Lz)*np.cos(k* np.pi* xd /Lx) # cosine wave
strat = np.where(zd>Lz)
f[:,strat] = 0.
else:
f = 0.
return f
forcing_func = de.operators.GeneralFunction(domain,'g',forcing, args=[])
forcing_func.build_metadata()
#forcing_func.meta = ncc.meta # just tricking it for now, this metadata is wrong
# let's make a general parameter and use that metadata instead
dummy = domain.new_field(name='dum')
dummy['g'] = 1.
forcing_func.meta = dummy.meta
problem.parameters['forcing_func'] = forcing_func
# need to add 'meta' attribute for General Function class
# otherwise system fails consistency check
# system to solve (2D, linearized, hydrostatic boussinesq)
problem.add_equation("dt(u) + 1/rho*dx(p) = 0")
problem.add_equation("dt(B) + Nsq*w = forcing_func")
problem.add_equation("dx(u) + dz(w) = 0")
if HYDROSTATIC == True:
problem.add_equation("B - 1/rho*dz(p) = 0")
else:
problem.add_equation("B - 1/rho*dz(p) - dt(w) = 0")
# fourier direction has periodic bc, chebyshev has a lid
problem.add_bc("left(w) = 0") # refers to the first end point in chebyshev direction
problem.add_bc("right(w) = 0", condition="(nx != 0)") # rigid lid, condition note for k = 0 mode
problem.add_bc("integ(p,'z') = 0", condition="(nx == 0)") # pressure gauge condition for k = 0
# build solver
ts = de.timesteppers.RK443 # arbitrary choice of time stepper
solver = problem.build_solver(ts)
# tell the forcing function what its arg is (clunky)
forcing_func.args = [solver]
forcing_func.original_args = [solver]
# initial conditions
x, z = domain.grids(scales=1)
u = solver.state['u']
w = solver.state['w']
p = solver.state['p']
B = solver.state['B'] # zero for everything
solver.stop_sim_time = stop_time
solver.stop_wall_time = np.inf
solver.stop_iteration = np.inf
# CFL conditions
initial_dt = 0.8*Lz/nz
cfl = flow_tools.CFL(solver,initial_dt,safety=0.8, max_change=1.5, min_change=0.5, max_dt=400)
# too large of a timestep makes things rather diffusive
cfl.add_velocities(('u','w'))
# analysis
# fields to record
analysis = solver.evaluator.add_file_handler(sim_name, sim_dt=50, max_writes=300)
analysis.add_task('B', name = 'buoyancy' )
analysis.add_task('w', name = 'vertical velocity')
analysis.add_task('u', name = 'horizontal velocity')
analysis.add_task('p', name = 'pressure')
analysis.add_task('Nsq')
analysis.add_task('0.5*(w*w + u*u * B*B/Nsq)', name = 'total e snap')
analysis.add_task('0.5*mask*(w*w + u*u * B*B/Nsq)', layout = 'c', name = 'total e coeffs')
analysis.add_task('-dx(u)', name = 'convergence')
# profiles
analysis.add_task("integ(0.5 * B*B/Nsq, 'x')", name='pe profile')
analysis.add_task("integ(0.5 * (u*u + w*w) , 'x')", name='ke profile')
analysis.add_task("integ(0.5 * (u*u + w*w + B*B/Nsq ),'x')", name='total e profile')
analysis.add_task("integ(B,'x')", name = 'b profile')
analysis.add_task("integ(B * mask,'x')", name = 'mask test')
# 1d fields
analysis.add_task('mask')
analysis.add_task("integ(B * mask)", name = 'tropo b')
analysis.add_task("integ(0.5 * mask *(u*u + w*w + B*B/Nsq ))", name = 'tropo energy') # use mask to integrate over troposphere only
analysis.add_task("integ(0.5 * (u*u + w*w + B*B/Nsq ))", name='total e')
try:
logger.info( |
mdl29/tidutyzef | serveur/adminws.py | Python | lgpl-3.0 | 2,061 | 0.001941 | """Contain the socket handler for players"""
from game import Game
from websocket import WebSocketHandler
from zone import Zone
import errcode
class AdminWs(WebSocketHandler):
    """WebSocket handler for the (single) game admin client.

    Incoming JSON messages are dispatched by name through the
    ``callable_from_json`` table (dispatch itself is performed by the
    ``WebSocketHandler`` base class).
    """

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Message-name -> bound-method dispatch table used by the base class.
        self.callable_from_json = {"setParams": self.set_params,
                                   "login": self.login,
                                   "logout": self.logout,
                                   "getParams": self.get_params,
                                   "startGame": self.start_game}

    def open(self, *args, **kwargs):
        super().open(*args, **kwargs)
        # An admin socket is logged in as soon as it connects.
        self.login()

    @staticmethod
    def start_game():
        """Start the game."""
        Game().start_game()

    @staticmethod
    def set_params(**kwargs):
        """Set the game parameters: map center, zones and game timeout.

        Expected keys (all optional): ``map`` ({'lat', 'lng'}), ``zones``
        (list of {'team', 'pos', 'radius', 'id'}) and ``time``.
        """
        params = Game().params
        map_ = kwargs.get('map', None)
        if map_:
            params.map_center = (map_['lat'], map_['lng'])
        for zone in kwargs.get('zones', []):
            Game().create_zone(zone['team'], tuple(zone['pos']),
                               zone['radius'], zone['id'], Zone)
        # NOTE(review): a missing 'time' key resets game_timeout to None --
        # confirm this is intended rather than "leave unchanged".
        params.game_timeout = kwargs.get('time')

    def get_params(self):
        """Send all game parameters to the admin (not implemented yet)."""
        pass

    def login(self):
        """Register this socket as the game admin, refusing duplicates."""
        if Game().admin:
            self.send(errcode.USERNAME_ALREADY_SET)
            self.close()
        else:
            Game().admin = self
            self.logged = True

    def logout(self):
        """Log the admin out by closing the socket."""
        self.close()

    def on_close(self):
        """Unregister the admin when the socket closes."""
        print("Admin is exiting...")
        self.logged = False
        Game().admin = None

    def send(self, msg):
        """Send *msg* to the admin, logging it to stdout."""
        super().send(msg)
        print('Send to Admin : {}'.format(msg))

    def on_message(self, msg):
        """Log and dispatch an incoming admin message."""
        print('Send by Admin : {}'.format(msg))
        super().on_message(msg)
|
wkschwartz/django | tests/gis_tests/geos_tests/test_mutable_list.py | Python | bsd-3-clause | 14,848 | 0.000673 | # Copyright (c) 2008-2009 Aryeh Leib Taurog, http://www.aryehleib.com
# All rights reserved.
#
# Modified from original contribution by Aryeh Leib Taurog, which was
# released under the New BSD license.
import unittest
from django.contrib.gis.geos.mutable_list import ListMixin
class UserListA(ListMixin):
    """Minimal ListMixin implementation backed by an immutable tuple."""

    _mytype = tuple

    def __init__(self, i_list, *args, **kwargs):
        self._list = self._mytype(i_list)
        super().__init__(*args, **kwargs)

    def __len__(self):
        return len(self._list)

    def __str__(self):
        return str(self._list)

    def __repr__(self):
        return repr(self._list)

    def _set_list(self, length, items):
        # this would work:
        #   self._list = self._mytype(items)
        # but then we wouldn't be testing the length parameter
        itemList = ['x'] * length
        for i, v in enumerate(items):
            itemList[i] = v
        self._list = self._mytype(itemList)

    def _get_single_external(self, index):
        return self._list[index]
class UserListB(UserListA):
    """Variant of UserListA backed by a mutable list, so single-item
    assignment can be delegated directly to the underlying storage."""

    _mytype = list

    def _set_single(self, index, value):
        self._list[index] = value
def nextRange(length):
    """Return a fresh range of *length* ints, advancing a shared
    100-wide offset on every call so successive ranges never overlap."""
    start = nextRange.start + 100
    nextRange.start = start
    return range(start, start + length)

# The counter state lives on the function object itself.
nextRange.start = 0
class ListMixinTest(unittest.TestCase):
"""
Tests base class ListMixin by comparing a list clone which is
a ListMixin subclass with a real Python list.
"""
limit = 3
listType = UserListA
def lists_of_len(self, length=None):
if length is None:
length = self.limit
pl = list(range(length))
return pl, self.listType(pl)
def limits_plus(self, b):
return range(-self.limit - b, self.limit + b)
def step_range(self):
return [*range(-1 - self.limit, 0), *range(1, 1 + self.limit)]
def test01_getslice(self):
'Slice retrieval'
pl, ul = self.lists_of_len()
for i in self.limits_plus(1):
self.assertEqual(pl[i:], ul[i:], 'slice [%d:]' % (i))
self.assertEqual(pl[:i], ul[:i], 'slice [:%d]' % (i))
for j in self.limits_plus(1):
self.assertEqual(pl[i:j], ul[i:j], 'slice [%d:%d]' % (i, j))
for k in self.step_range():
self.assertEqual(pl[i:j:k], ul[i:j:k], 'slice [%d:%d:%d]' % (i, j, k))
for k in self.step_range():
self.assertEqual(pl[i::k], ul[i::k], 'slice [%d::%d]' % (i, k))
self.assertEqual(pl[:i:k], ul[:i:k], 'slice [:%d:%d]' % (i, k))
for k in self.step_range():
self.assertEqual(pl[::k], ul[::k], 'slice [::%d]' % (k))
def test02_setslice(self):
'Slice assignment'
def setfcn(x, i, j, k, L):
x[i:j:k] = range(L)
pl, ul = self.lists_of_len()
for slen in range(self.limit + 1):
ssl = nextRange(slen)
ul[:] = ssl
pl[:] = ssl
self.assertEqual(pl, ul[:], 'set slice [:]')
for i in self.limits_plus(1):
ssl = nextRange(slen)
ul[i:] = ssl
pl[i:] = ssl
self.assertEqual(pl, ul[:], 'set slice [%d:]' % (i))
ssl = nextRange(slen)
ul[:i] = ssl
pl[:i] = ssl
self.assertEqual(pl, ul[:], 'set slice [:%d]' % (i))
for j in self.limits_plus(1):
ssl = nextRange(slen)
ul[i:j] = ssl
pl[i:j] = ssl
self.assertEqual(pl, ul[:], 'set slice [%d:%d]' % (i, j))
for k in self.step_range():
ssl = nextRange(len(ul[i:j:k]))
ul[i:j:k] = ssl
pl[i:j:k] = ssl
self.assertEqual(pl, ul[:], 'set slice [%d:%d:%d]' % (i, j, k))
sliceLen = len(ul[i:j:k])
with self.assertRaises(ValueError):
setfcn(ul, i, j, k, sliceLen + 1)
if sliceLen > 2:
with self.assertRaises(ValueError):
setfcn(ul, i, j, k, sliceLen - 1)
for k in self.step_range():
ssl = nextRange(len(ul[i::k]))
ul[i::k] = ssl
pl[i::k] = ssl
self.assertEqual(pl, ul[:], 'set slice [%d::%d]' % (i, k))
ssl = nextRange(len(ul[:i:k]))
ul[:i:k] = ssl
pl[:i:k] = ssl
self.assertEqual(pl, ul[:], 'set slice [:%d:%d]' % (i, k))
for k in self.step_range():
ssl = nextRange(len(ul[::k]))
ul[::k] = ssl
pl[::k] = ssl
self.assertEqual(pl, ul[:], 'set slice [::%d]' % (k))
def test03_delslice(self):
'Delete slice'
for Len in range(self.limit):
pl, ul = self.lists_of_len(Len)
del pl[:]
del ul[:]
self.assertEqual(pl[:], ul[:], 'del slice [:]')
for i in range(-Len - 1, Len + 1):
pl, ul = self.lists_of_len(Len)
del pl[i:]
del ul[i:]
self.assertEqual(pl[:], ul[:], 'del slice [%d:]' % (i))
pl, ul = self.lists_of_len(Len)
del pl[:i]
del ul[:i]
self.assertEqual(pl[:], ul[:], 'del slice [:%d]' % (i))
for j in range(-Len - 1, Len + 1):
pl, ul = self.lists_of_len(Len)
del pl[i:j]
del ul[i:j]
self.assertEqual(pl[:], ul[:], 'del slice [%d:%d]' % (i, j))
for k in [*range(-Len - 1, 0), *range(1, Len)]:
pl, ul = self.lists_of_len(Len)
del pl[i:j:k]
del ul[i:j:k]
self.assertEqual(pl[:], ul[:], 'del slice [%d:%d:%d]' % (i, j, k))
for k in [*range(-Len - 1, 0), *range(1, Len)]:
pl, ul = self.lists_of_len(Len)
del pl[:i:k]
del ul[:i:k]
self.assertEqual(pl[:], ul[:], 'del slice [:%d:%d]' % (i, k))
pl, ul = self.lists_of_len(Len)
del pl[i::k]
del ul[i::k]
self.assertEqual(pl[:], ul[:], 'del slice [%d::%d]' % (i, k))
for k in [*range(-Len - 1, 0), *range(1, Len)]:
pl, ul = self.lists_of_len(Len)
del pl[::k]
del ul[::k]
self.assertEqual(pl[:], ul[:], 'del slice [::%d]' % (k))
def test04_get_set_del_single(self):
'Get/set/delete single item'
pl, ul = self.lists_of_len()
for i in self.limits_plus(0):
self.assertEqual(pl[i], ul[i], 'get single item [%d]' % i)
for i in self.limits_plus(0):
pl, ul = self.lists_of_len()
pl[i] = 100
ul[i] = 100
self.assertEqual(pl[:], ul[:], 'set single item [%d]' % i)
for i in self.limits_plus(0):
pl, ul = self.lists_of_len()
del pl[i]
del ul[i]
self.assertEqual(pl[:], ul[:], 'del single item [%d]' % i)
def test05_out_of_range_exceptions(self):
'Out of range exceptions'
def setfcn(x, i):
x[i] = 20
def getfcn(x, i):
return x[i]
def delfcn(x, i):
del x[i]
pl, ul = self.lists_of_len()
for i in (-1 - self.limit, self.limit):
with self.assertRaises(IndexError): # 'set index %d' % i)
setfcn(ul, i)
with self.assertRaises(IndexError): # 'get index %d' % i)
getfcn(ul, i)
with self.assertRaises(IndexError): # 'del index %d' % i)
delfcn(ul, i)
def test06_list_methods(self):
'List methods'
pl, ul = self.lists_of_len()
pl.append(40)
ul.append(40)
self.ass |
rlutz/xorn | tests/cpython/storage/ob_equality.py | Python | gpl-2.0 | 1,119 | 0 | # Copyright (C) 2013-2021 Roland Lutz
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
import xorn.storage

# Two handles to the same stored object must compare (and hash) equal,
# while handles to distinct objects must compare unequal.
rev = xorn.storage.Revision()
ob0 = rev.add_object(xorn.storage.Line())
ob1, = rev.get_objects()
ob2 = rev.add_object(xorn.storage.Line())

assert ob0 is not ob1
assert ob0 == ob1
assert hash(ob0) == hash(ob1)

assert ob0 is not ob2
assert ob0 != ob2
assert hash(ob0) != hash(ob2)

assert ob1 is not ob2
assert ob1 != ob2
assert hash(ob1) != hash(ob2)
|
diego-d5000/MisValesMd | env/lib/python2.7/site-packages/django/contrib/gis/geos/prototypes/coordseq.py | Python | mit | 3,266 | 0.001225 | from ctypes import POINTER, c_double, c_int, c_uint
from django.contrib.gis.geos.libgeos import CS_PTR, GEOM_PTR
from django.contrib.gis.geos.prototypes.errcheck import (
GEOSException, last_arg_byref,
)
from django.contrib.gis.geos.prototypes.threadsafe import GEOSFunc
# ## Error-checking routines specific to coordinate sequences. ##
def check_cs_ptr(result, func, cargs):
    "Error checking on routines that return Geometries."
    # A NULL (falsy) pointer from GEOS signals failure.
    if result:
        return result
    raise GEOSException(
        'Error encountered checking Coordinate Sequence returned from GEOS '
        'C function "%s".' % func.__name__
    )
def check_cs_op(result, func, cargs):
    "Checks the status code of a coordinate sequence operation."
    # GEOS returns 0 on failure, non-zero on success.
    if result != 0:
        return result
    raise GEOSException('Could not set value on coordinate sequence')
def check_cs_get(result, func, cargs):
    "Checking the coordinate sequence retrieval."
    # Raises GEOSException (via check_cs_op) when the status code is 0.
    check_cs_op(result, func, cargs)
    # Object in by reference, return its value.
    return last_arg_byref(cargs)
# ## Coordinate sequence prototype generation functions. ##
def cs_int(func):
    "For coordinate sequence routines that return an integer."
    # The integer comes back through a by-reference uint out-parameter,
    # so route result checking through check_cs_get to unpack it.
    func.argtypes = [CS_PTR, POINTER(c_uint)]
    func.restype = c_int
    func.errcheck = check_cs_get
    return func
def cs_operation(func, ordinate=False, get=False):
    "For coordinate sequence operations."
    if get:
        # Get routines receive the double by reference and the errcheck
        # hook unpacks it into the return value.
        func.errcheck = check_cs_get
        dbl_param = POINTER(c_double)
    else:
        func.errcheck = check_cs_op
        dbl_param = c_double
    argtypes = [CS_PTR, c_uint, dbl_param]
    if ordinate:
        # Get/Set ordinate routines take an extra uint (the dimension index).
        argtypes.insert(2, c_uint)
    func.argtypes = argtypes
    func.restype = c_int
    return func
def cs_output(func, argtypes):
    "For routines that return a coordinate sequence."
    func.argtypes = argtypes
    func.restype = CS_PTR
    # Wrap the returned pointer so NULL results raise GEOSException.
    func.errcheck = check_cs_ptr
    return func
# ## Coordinate Sequence ctypes prototypes ##
# Coordinate Sequence constructors & cloning.
cs_clone = cs_output(GEOSFunc('GEOSCoordSeq_clone'), [CS_PTR])
create_cs = cs_output(GEOSFunc('GEOSCoordSeq_create'), [c_uint, c_uint])
get_cs = cs_output(GEOSFunc('GEOSGeom_getCoordSeq'), [GEOM_PTR])
# Getting, setting ordinate
cs_getordinate = cs_operation(GEOSFunc('GEOSCoordSeq_getOrdinate'), ordinate=True, get=True)
cs_setordinate = cs_operation(GEOSFunc('GEOSCoordSeq_setOrdinate'), ordinate=True)
# For getting, x, y, z
cs_getx = cs_operation(GEOSFunc('GEOSCoordSeq_getX'), get=True)
cs_gety = cs_operation(GEOSFunc('GEOSCoordSeq_getY'), get=True)
cs_getz = cs_operation(GEOSFunc('GEOSCoordSeq_getZ'), get=True)
# For setting, x, y, z
cs_setx = cs_operation(GEOSFunc('GEOSCoordSeq_setX'))
cs_sety = cs_operation(GEOSFunc('GEOSCoordSeq_setY'))
cs_setz = cs_operation(GEOSFunc('GEOSCoordSeq_setZ'))
# These routines return size & dimensions.
cs_getsize = cs_int(GEOSFunc('GEOSCoordSeq_getSize'))
cs_getdims = cs_int(GEOSFunc('GEOSCoordSeq_getDimensions'))
|
strubell/Parser | lib/rnn_cells/gru_cell.py | Python | apache-2.0 | 2,289 | 0.009174 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright 2016 Timothy Dozat
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow as tf
from lib.rnn_cells.base_cell import BaseCell
from lib import linalg
#***************************************************************
class GRUCell(BaseCell):
  """Gated Recurrent Unit recurrent cell.

  NOTE(review): unlike a textbook GRU, this variant packs its recurrent
  state as [cell, hidden] along axis 1 and returns [cell_t, cell_t] as the
  next state, so the hidden half fed to the next step equals the cell
  value -- confirm this asymmetry is intentional.
  """
  #=============================================================
  def __call__(self, inputs, state, scope=None):
    """Run the cell for one timestep.

    Args:
      inputs: input tensor for this timestep.
      state: packed recurrent state, the axis-1 concatenation [cell, hidden].
      scope: optional variable scope name (defaults to the class name).

    Returns:
      A (output, next_state) pair where next_state is [cell_t, cell_t].
    """
    with tf.variable_scope(scope or type(self).__name__):
      # Split the packed state into its cell and hidden halves.
      cell_tm1, hidden_tm1 = tf.split(axis=1, num_or_size_splits=2, value=state)
      with tf.variable_scope('Gates'):
        # One linear map produces both gate pre-activations (n_splits=2).
        linear = linalg.linear([inputs, hidden_tm1],
                               self.output_size,
                               add_bias=True,
                               n_splits=2,
                               moving_params=self.moving_params)
        update_act, reset_act = linear
        # forget_bias shifts the update gate towards keeping the old cell.
        update_gate = linalg.sigmoid(update_act-self.forget_bias)
        reset_gate = linalg.sigmoid(reset_act)
        reset_state = reset_gate * hidden_tm1
      with tf.variable_scope('Candidate'):
        # Candidate activation computed from the reset-gated state.
        hidden_act = linalg.linear([inputs, reset_state],
                                   self.output_size,
                                   add_bias=True,
                                   moving_params=self.moving_params)
        hidden_tilde = self.recur_func(hidden_act)
      # Convex combination of the previous cell and the candidate.
      cell_t = update_gate * cell_tm1 + (1-update_gate) * hidden_tilde
      return cell_t, tf.concat(axis=1, values=[cell_t, cell_t])
  #=============================================================
  @property
  def state_size(self):
    # Twice the output size: the state packs [cell, hidden] side by side.
    return self.output_size * 2
|
tompko/Patrician | parse_log.py | Python | mit | 5,821 | 0.030579 | import sys
import json
import webbrowser
# List similar commands so they end up in the same accordion
list_groups = [["usermove", "time", "otim"]]
# Inverted index: command name -> the group (list) it belongs to.
groups = {}
for lg in list_groups:
    for g in lg:
        groups[g] = lg
# (display name, JSON key in the board dump, FEN symbol) per piece type.
white_piece_bitboards = [("White Pawn", "wp", "P"),
                         ("White Knight","wn", "N"),
                         ("White Bishop","wb", "B"),
                         ("White Rook","wr", "R"),
                         ("White Queen","wq", "Q"),
                         ("White King","wk", "K")]
black_piece_bitboards = [("Black Pawn","bp", "p"),
                         ("Black Knight","bn", "n"),
                         ("Black Bishop","bb", "b"),
                         ("Black Rook","br", "r"),
                         ("Black Queen","bq", "q"),
                         ("Black King","bk", "k")]
# Piece symbols indexed by the engine's internal piece codes.
piece_list = ["p","P","n","N","b","B","r","R","q","Q","k","K"]
def bitboard_to_fen(bitboard, symbol):
    """Generate a FEN from a given bitboard, using <symbol> to represent occupied squares"""
    occupancy = int(bitboard, 16)
    ranks = []
    # FEN lists ranks from 8 down to 1, files a to h.
    for rank in range(7, -1, -1):
        row = "".join(
            symbol if occupancy & (1 << (rank * 8 + file_)) else "."
            for file_ in range(8)
        )
        ranks.append(row)
    fen = "/".join(ranks)
    # Collapse runs of empty squares into digits, longest runs first.
    for run in range(8, 0, -1):
        fen = fen.replace("." * run, str(run))
    return fen
def move_to_fen(move):
    """Render a move as a FEN-like diagram: the moving piece appears on its
    origin square and the destination is marked as (captured piece) for a
    capture or (.) for a quiet move."""
    is_capture = "capture" in move["flags"]
    rows = []
    for rank in range(7, -1, -1):
        cells = ""
        for file_ in range(8):
            square = rank * 8 + file_
            if square == move["to"]:
                if is_capture:
                    cells += "({0})".format(piece_list[move["capturedPiece"]])
                else:
                    cells += "(.)"
            elif square == move["from"]:
                cells += "{0}".format(piece_list[move["piece"]])
            else:
                cells += "."
        rows.append(cells)
    fen = "/".join(rows) + "/"
    # Collapse dot runs into digits, longest first (note: this also turns
    # the single dot inside "(.)" into "(1)", matching existing output).
    for run in range(8, 0, -1):
        fen = fen.replace("." * run, str(run))
    return fen[:-1]
def generate_log(path):
    """Generate an html log (logs/log.html) from the debug .dat log file
    written out by Patrician.

    The .dat file contains one JSON object per line, each with a
    "log_type" key and a "data" payload; each entry is rendered as an
    accordion section between logs/header.html and logs/footer.html.
    Raises ValueError on an unrecognised log_type.
    """
    with open("logs/log.html", "w") as log_file:
        with open("logs/header.html") as header:
            log_file.write(header.read())
        with open(path) as dat_file:
            line = dat_file.readline()
            while line:
                log_entry = json.loads(line.strip())
                log_type = log_entry["log_type"]
                title = log_entry["log_type"].title().replace("_", " ")
                if log_type == "input" or log_type == "xboard_input":
                    # Write out this log entry and any following that share the same command
                    # or command group
                    entry_token = log_entry["data"].split()[0]
                    title += " - {0}".format(entry_token)
                    log_file.write('\t<h3><a href="#">{0}</a></h3>\n'.format(title))
                    log_file.write('\t<div>\n')
                    log_file.write('\t\t<p>{0}</p>\n'.format(log_entry["data"]))
                    line = dat_file.readline()
                    # NOTE(review): on EOF here the closing </div> is never
                    # written -- confirm whether that is acceptable.
                    if not line: continue
                    log_entry = json.loads(line.strip())
                    while log_type == log_entry["log_type"] and \
                        (log_entry["data"].split()[0] == entry_token or\
                        log_entry["data"].split()[0] in groups.get(entry_token, [])):
                        log_file.write('\t\t<p>{0}</p>\n'.format(log_entry["data"]))
                        line = dat_file.readline()
                        log_entry = json.loads(line.strip())
                    log_file.write('\t</div>\n')
                elif log_type == "xboard_unrecognised":
                    # Write out the log entry as an error
                    log_file.write('\t<h3 class="error"><a href="#">{0}</a></h3>\n'.format(title))
                    log_file.write('\t<div>\n')
                    log_file.write('\t\t{0}\n'.format(log_entry["data"]))
                    log_file.write('\t</div>\n')
                    line = dat_file.readline()
                elif log_type == "board":
                    # Render one diagram per piece bitboard plus the derived
                    # occupancy / empty / en-passant boards.
                    board = json.loads(log_entry["data"])
                    log_file.write('\t<h3><a href="#">Board</a></h3>\n')
                    log_file.write('\t<div>\n')
                    log_file.write('\t\t<div>')
                    for pb in white_piece_bitboards:
                        log_file.write('\t\t<div style="float: left">\n')
                        log_file.write('\t\t\t{0}<br />\n'.format(pb[0]))
                        fen = bitboard_to_fen(board[pb[1]], pb[2])
                        log_file.write('\t\t\t<c:chess>{0}</c:chess>\n'.format(fen))
                        log_file.write('\t\t</div>\n')
                    log_file.write("\t\t</div>")
                    log_file.write('\t\t<div>')
                    for pb in black_piece_bitboards:
                        log_file.write('\t\t<div style="float: left">\n')
                        log_file.write('\t\t\t{0}<br />\n'.format(pb[0]))
                        fen = bitboard_to_fen(board[pb[1]], pb[2])
                        log_file.write('\t\t\t<c:chess>{0}</c:chess>\n'.format(fen))
                        log_file.write('\t\t</div>\n')
                    log_file.write("\t\t</div>")
                    occupied = bitboard_to_fen(board["oo"], "+")
                    log_file.write('\t\t<div style="float: left">\n')
                    log_file.write('\t\t\tOccupied<br />\n')
                    log_file.write('\t\t\t<c:chess>{0}</c:chess>\n'.format(occupied))
                    log_file.write('\t\t</div>\n')
                    empty = bitboard_to_fen(board["ee"], "+")
                    log_file.write('\t\t<div style="float: left">\n')
                    log_file.write('\t\t\tEmpty<br />\n')
                    log_file.write('\t\t\t<c:chess>{0}</c:chess>\n'.format(empty))
                    log_file.write('\t\t</div>\n')
                    enpassant = bitboard_to_fen(board["ep"], "+")
                    log_file.write('\t\t<div style="float: left">\n')
                    log_file.write('\t\t\tEn Passant<br />\n')
                    log_file.write('\t\t\t<c:chess>{0}</c:chess>\n'.format(enpassant))
                    log_file.write('\t\t</div>\n')
                    log_file.write('\t\t<div>\n')
                    log_file.write('\t\tSide to move: {0}\n'.format(board["si"]))
                    log_file.write('\t\t</div>')
                    log_file.write('\t</div>\n')
                    line = dat_file.readline()
                elif log_type == "move":
                    move = json.loads(log_entry["data"])
                    log_file.write('\t<h3><a href="#">{0}</a></h3>\n'.format(title))
                    log_file.write('\t<div>\n')
                    move_board = move_to_fen(move)
                    log_file.write('\t\t<c:chess>{0}</c:chess>\n'.format(move_board))
                    log_file.write('\t\t{0}\n'.format(log_entry["data"]))
                    log_file.write('\t</div>\n')
                    line = dat_file.readline()
                else:
                    raise ValueError("Unrecognised log entry: {0}".format(log_entry["log_type"]))
        with open("logs/footer.html") as footer:
            log_file.write(footer.read())
if __name__ == "__main__":
    # CLI entry point: argv[1] is the path to the .dat debug log file.
    # (Python 2 syntax -- this script uses print statements.)
    print "Generating log from:", sys.argv[1]
    generate_log(sys.argv[1])
    webbrowser.open("logs/log.html")
|
efforia/eos-dashboard | pandora-hub/pandora/wsgi.py | Python | lgpl-3.0 | 391 | 0 | """
WSGI config for efforia project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more | information on this file, | see
https://docs.djangoproject.com/en/3.0/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
# Default to the project settings module unless the environment overrides it.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'efforia.settings')
# The WSGI callable picked up by application servers (gunicorn, mod_wsgi, ...).
application = get_wsgi_application()
|
adsabs/mission-control | mc/config.py | Python | mit | 1,990 | 0.001508 | GITHUB_SIGNATURE_HEADER = 'X-Hub-Signature'
GITHUB_SECRET = 'redacted'
GITHUB_COMMIT_API = 'https://api.github.com/repos/adsabs/{repo}/git/commits/{hash}'
GITHUB_TAG_FIND_API = 'https://api.github.com/repos/adsabs/{repo}/git/refs/tags/{tag}'
GITHUB_TAG_GET_API = 'https://api.github.com/repos/adsabs/{repo}/git/tags/{hash}'
AWS_REGION = 'us-east-1'
AWS_ACCESS_KEY = 'redacted'
AWS_SECRET_KEY = 'redac | ted'
WATCHED_REPOS = [
'adsws',
'solr-se | rvice',
'export_service',
'graphics_service',
'recommender_service',
'citation_helper_service',
'metrics_service',
'vis-services',
'biblib-service',
'orcid-service',
'myads',
'object_service',
'harbour-service'
]
# Local dependencies for the testing environment
DOCKER_BRIDGE = '172.17.42.1'
DEPENDENCIES = {
'POSTGRES': {
'USERNAME': 'postgres',
'PORT': 5432,
'HOST': 'localhost',
'IMAGE': 'postgres:9.3'
},
'CONSUL': {
'PORT': 8500,
'IMAGE': 'adsabs/consul:v1.0.0'
},
'REDIS': {
'PORT': 6379,
'IMAGE': 'redis:2.8.21'
},
'GUNICORN': {
'PORT': 80
},
'REGISTRATOR': {
'IMAGE': 'gliderlabs/registrator:latest'
},
'SOLR': {
'PORT': 8983,
'IMAGE': 'adsabs/montysolr:v48.1.0.3'
}
}
MC_LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'formatters': {
'default': {
'format': '%(levelname)s\t%(process)d '
'[%(asctime)s]:\t%(message)s',
'datefmt': '%m/%d/%Y %H:%M:%S',
}
},
'handlers': {
'console': {
'formatter': 'default',
'level': 'DEBUG',
'class': 'logging.StreamHandler'
},
},
'loggers': {
'': {
'handlers': ['console'],
'level': 'DEBUG',
'propagate': True,
},
},
}
SQLALCHEMY_DATABASE_URI = 'sqlite://'
SQLALCHEMY_TRACK_MODIFICATIONS = False
|
Azure/azure-sdk-for-python | sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2021_01_01_preview/models/_event_hub_management_client_enums.py | Python | mit | 4,954 | 0.005248 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from enum import Enum, EnumMeta
from six import with_metaclass
class _CaseInsensitiveEnumMeta(EnumMeta):
    """Enum metaclass that makes member lookup by name case-insensitive."""

    def __getitem__(self, name):
        # Normalise to the canonical upper-case member name before lookup.
        return super().__getitem__(name.upper())

    def __getattr__(cls, name):
        """Return the enum member matching `name`.

        We use __getattr__ instead of descriptors or inserting into the enum
        class' __dict__ in order to support `name` and `value` being both
        properties for enum members (which live in the class' __dict__) and
        enum members themselves.
        """
        member = cls._member_map_.get(name.upper())
        if member is None:
            raise AttributeError(name)
        return member
class AccessRights(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Access rights for an authorization rule (Manage, Send, Listen).
    """
    MANAGE = "Manage"
    SEND = "Send"
    LISTEN = "Listen"
class CreatedByType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """The type of identity that created the resource.
    """
    USER = "User"
    APPLICATION = "Application"
    MANAGED_IDENTITY = "ManagedIdentity"
    KEY = "Key"
class DefaultAction(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Default Action for Network Rule Set
    """
    ALLOW = "Allow"
    DENY = "Deny"
class EncodingCaptureDescription(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Enumerates the possible values for the encoding format of capture description. Note:
    'AvroDeflate' will be deprecated in New API Version
    """
    AVRO = "Avro"
    AVRO_DEFLATE = "AvroDeflate"
class EndPointProvisioningState(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Provisioning state of the Private Endpoint Connection.
    """
    CREATING = "Creating"
    UPDATING = "Updating"
    DELETING = "Deleting"
    SUCCEEDED = "Succeeded"
    CANCELED = "Canceled"
    FAILED = "Failed"
class EntityStatus(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Enumerates the possible values for the status of the Event Hub.
    """
    ACTIVE = "Active"
    DISABLED = "Disabled"
    RESTORING = "Restoring"
    SEND_DISABLED = "SendDisabled"
    RECEIVE_DISABLED = "ReceiveDisabled"
    CREATING = "Creating"
    DELETING = "Deleting"
    RENAMING = "Renaming"
    UNKNOWN = "Unknown"
class KeyType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """The access key to regenerate.
    """
    PRIMARY_KEY = "PrimaryKey"
    SECONDARY_KEY = "SecondaryKey"
class ManagedServiceIdentityType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Type of managed service identity.
    """
    SYSTEM_ASSIGNED = "SystemAssigned"
    USER_ASSIGNED = "UserAssigned"
    SYSTEM_ASSIGNED_USER_ASSIGNED = "SystemAssigned, UserAssigned"
    NONE = "None"
class NetworkRuleIPAction(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """The IP Filter Action
    """
    ALLOW = "Allow"
class PrivateLinkConnectionStatus(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Status of the connection.
    """
    PENDING = "Pending"
    APPROVED = "Approved"
    REJECTED = "Rejected"
    DISCONNECTED = "Disconnected"
class ProvisioningStateDR(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Provisioning state of the Alias(Disaster Recovery configuration) - possible values 'Accepted'
    or 'Succeeded' or 'Failed'
    """
    ACCEPTED = "Accepted"
    SUCCEEDED = "Succeeded"
    FAILED = "Failed"
class RoleDisasterRecovery(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Role of namespace in GEO DR - possible values 'Primary' or 'PrimaryNotReplicating' or
    'Secondary'
    """
    PRIMARY = "Primary"
    PRIMARY_NOT_REPLICATING = "PrimaryNotReplicating"
    SECONDARY = "Secondary"
class SkuName(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Name of this SKU.
    """
    BASIC = "Basic"
    STANDARD = "Standard"
    PREMIUM = "Premium"
class SkuTier(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """The billing tier of this particular SKU.
    """
    BASIC = "Basic"
    STANDARD = "Standard"
    PREMIUM = "Premium"
class UnavailableReason(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Specifies the reason for the unavailability of the service.
    """
    NONE = "None"
    INVALID_NAME = "InvalidName"
    SUBSCRIPTION_IS_DISABLED = "SubscriptionIsDisabled"
    NAME_IN_USE = "NameInUse"
    NAME_IN_LOCKDOWN = "NameInLockdown"
    TOO_MANY_NAMESPACE_IN_CURRENT_SUBSCRIPTION = "TooManyNamespaceInCurrentSubscription"
|
yellottyellott/chat-parser | chat_parser/serializers.py | Python | mit | 117 | 0 | import | json
class JSONSerializer(object):
    """Serializer that renders Python data structures as pretty-printed JSON."""

    def serialize(self, data):
        """Return *data* encoded as a JSON string with 2-space indentation."""
        encoded = json.dumps(data, indent=2)
        return encoded
|
juandc/platzi-courses | Python-Django-2016/Python/Reto/Otros-Competidores/Juan-David-Castro/pptls.py | Python | mit | 4,317 | 0.00417 | #!/usr/bin/env python
# -*-coding:utf-8-*-
import time
from time import sleep
import random
depo, sus, tab, user_puntos, pc_puntos = ["piedra", "papel", "tijera", "lagarto", "spock"], "-" * 35, " " * 4, 0, 0
print """Hola! Bienvenido al juego Piedra Papel Tijera Lagarto Spock!\nEstas son las reglas:\n Las tijeras cortan el papel\n El papel cubre a la piedra\n La piedra aplasta al lagarto\n El lagarto envenena a Spock\n Spock destroza las tijeras\n Las tijeras decapitan al lagarto\n El lagarto se come el papel\n El papel refuta a Spock\n Spock vaporiza la piedra\n Y como es habitual... la piedra aplasta las tijeras.\nRecuerda que si escribes algun valor incorrecto pierdes un punto!\nEl primero en llegar a 10 puntos gana!
"""
sleep(2)
print "\nTus puntos son:{}\nY los puntos de la pc son:{}\n".format(user_puntos, pc_puntos)
sleep(1)
while (pc_puntos < 10 and user_puntos < 10):
tu = raw_input("Que eliges? Piedra, papel, tijera, lagarto o Spock:\n('marcador' para ver los puntos)(Control + C para salir)\n\n(Escribe en minusculas)" + tab)
pc = random.choice(depo)
sleep(0.5)
if tu in depo:
print (("\nElegiste {}\nComputadora eligio {}\nAsi que:").format(tu, pc))
elif tu not in depo and tu != "marcador":
print "\nEscribe un valor correcto!\nPierdes un punto"
if tu == pc:
print '\n Es un Empate...\n'
elif tu == 'piedra' and pc == 'tijera':
user_puntos = user_puntos + 1
print "\n Ganaste! Como es habitual... la piedra aplasta las tijeras.\nGanas un punto!!!\nTus puntos son:{}\nY los puntos de la pc son:{}\n".format(user_puntos, pc_puntos)
elif tu == 'papel' and pc == 'piedra':
user_puntos = user_puntos + 1
print "\n Ganaste! Papel cubre a la piedra\nGanas un punto!!!\nTus puntos son:{}\nY los puntos de la pc son:{}\n".format(user_puntos, pc_puntos)
elif tu == 'tijera' and pc == 'papel':
user_puntos = user_puntos + 1
print "\n Ganaste! Tijeras cortan el papel\nGanas un punto!!!\nTus puntos son:{}\nY los puntos de la pc son:{}\n".format(user_puntos, pc_puntos)
elif tu == 'piedra' and pc == 'lagarto':
user_puntos = user_puntos + 1
print "\n Ganaste! La piedra aplasta al lagarto\nGanas un punto!!!\nTus puntos son:{}\nY los puntos de la pc son:{}\n".format(user_puntos, pc_puntos)
elif tu == 'lagarto' and pc == 'spock':
user_puntos = user_puntos + 1
print "\n Ganaste | ! Lagarto envenena Spock\nGanas un punto!!!\nTus puntos son:{}\nY los puntos de la pc son:{}\n".format(user_puntos, pc_puntos)
elif tu == 'spock' and pc == 'tijera':
user_puntos = user_puntos + 1
print "\n Ganaste! Spock destroza las tijeras\nGanas un punto!!!\nTus puntos son:{}\nY los puntos de la pc son:{}\n".format(user_puntos, pc_puntos)
elif tu == 'tijera' and pc == 'lagarto': |
user_puntos = user_puntos + 1
print "\n Ganaste! Las tijeras decapitan al lagarto\nGanas un punto!!!\nTus puntos son:{}\nY los puntos de la pc son:{}\n".format(user_puntos, pc_puntos)
elif tu == 'lagarto' and pc == 'papel':
user_puntos = user_puntos + 1
print "\n Ganaste! El lagarto se come el papel\nGanas un punto!!!\nTus puntos son:{}\nY los puntos de la pc son:{}\n".format(user_puntos, pc_puntos)
elif tu == 'papel' and pc == 'spock':
user_puntos = user_puntos + 1
print "\n Ganaste! El papel refuta a Spock\nGanas un punto!!!\nTus puntos son:{}\nY los puntos de la pc son:{}\n".format(user_puntos, pc_puntos)
elif tu == 'spock' and pc == 'piedra':
user_puntos = user_puntos + 1
print "\n Ganaste! Spock vaporiza la piedra\nGanas un punto!!!\nTus puntos son:{}\nY los puntos de la pc son:{}\n".format(user_puntos, pc_puntos)
elif tu == "marcador" and pc == pc:
print "\nTus puntos son:{}\nY los puntos de la pc son:{}\n".format(user_puntos, pc_puntos)
sleep(0.5)
else:
pc_puntos = pc_puntos + 1
print "\n Lo siento, perdiste: {} le gana a {} \n{}\nPierdes un punto...\nTus puntos son:{}\nY los puntos de la pc son:{}\n".format(pc, tu, sus, user_puntos, pc_puntos)
print "Acabo el juego...\nEl ganador es...\n "
sleep(2)
if pc_puntos == 10:
print "La computadora!\nGracias por jugar!"
else:
print "Tu!\nGracias por jugar!\nVuelve Pronto!"
|
loulich/Couchpotato | libs/guessit/transfo/guess_video_rexps.py | Python | gpl-3.0 | 1,837 | 0.000544 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# GuessIt - A library for guessing information from filenames
# Copyright (c) 2012 Nicolas Wack <wackou@gmail.com>
#
# GuessIt is free software; you can redistribute it and/or modify it under
# the terms of the Lesser GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# GuessIt is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# Lesser GNU General Public License for more details.
#
# You should have received a copy of the Lesser GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import unicode_literals
from guessit import Guess
from guessit.transfo import SingleNodeGuesser
from guessit.patterns import video_rexps, sep
import re
import logging
log = logging.getLogger(__name__)
def guess_video_rexps(string):
    """Match the known video regexps (``video_rexps``) against *string*.

    Returns a ``(guess, span)`` pair: a Guess built from the named groups of
    the first matching pattern plus the (start, end) span of the match in the
    original string, or ``(None, None)`` when nothing matches.
    """
    # Pad with separators so patterns anchored on `sep` can also match at
    # the very beginning / end of the string.
    string = '-' + string + '-'
    for rexp, confidence, span_adjust in video_rexps:
        match = re.search(sep + rexp + sep, string, re.IGNORECASE)
        if match:
            metadata = match.groupdict()
            # is this the better place to put it? (maybe, as it is at least
            # the soonest that we can catch it)
            if metadata.get('cdNumberTotal', -1) is None:
                del metadata['cdNumberTotal']
            # Shift the span back into the coordinates of the unpadded
            # string (hence the -2 for the two added '-' characters).
            span = (match.start() + span_adjust[0],
                    match.end() + span_adjust[1] - 2)
            return (Guess(metadata, confidence=confidence, raw=string[span[0]:span[1]]),
                    span)
    return None, None
def process(mtree):
    """Run the video-rexps guesser over every node of the match tree *mtree*."""
    SingleNodeGuesser(guess_video_rexps, None, log).process(mtree)
|
sirex/internet-voting-registration | ivreg/views.py | Python | mit | 2,644 | 0.000758 | import json
from django.http import JsonResponse, Http404
from django.shortcuts import render, redirect
from django.views.decorators.csrf import csrf_exempt
from ivreg.models import Voter
from ivreg.forms import RegistrationForm, ValidationForm, VerifyForm
from ivreg.services import generate_candidate_codes, generate_ballot_id, generate_request_id, verify_vote
CANDIDATES = [
'Darth Vader',
'Yoda',
'Luke Skywalker',
]
def index(request):
    """Render the landing page of the voting-registration demo."""
    return render(request, 'index.html')
@csrf_exempt
def registration(request):
    """Register a voter and hand out a ballot.

    Accepts either a JSON body (``application/json``) or a regular form
    POST.  On success a ``Voter`` row is created with freshly generated
    request/ballot ids and per-candidate codes; the client is then sent to
    the ballot page - as an HTTP redirect for form posts, or as a
    ``{"redirect": url}`` JSON payload for JSON clients.  GET (and an
    invalid POST) renders the registration form.

    NOTE(review): ``csrf_exempt`` is presumably here for the JSON API
    clients, but it also disables CSRF protection for the HTML form path -
    confirm this is intended.
    """
    if request.method == "POST":
        if request.content_type == 'application/json':
            data = json.loads(request.body.decode('utf-8'))
        else:
            data = request.POST
        form = RegistrationForm(data)
        if form.is_valid():
            voter = Voter.objects.create(
                request_id=generate_request_id(),
                voter_id=form.cleaned_data['voter_id'],
                ballot_id=generate_ballot_id(),
                candidates=json.dumps(generate_candidate_codes(CANDIDATES))
            )
            if request.content_type == 'application/json':
                return JsonResponse({'redirect': request.build_absolute_uri(voter.get_absolute_url())})
            else:
                return redirect(voter)
    else:
        form = RegistrationForm()
    return render(request, 'registration.html', {
        'form': form,
    })
@csrf_exempt
def validate(request):
    """Show the validation page for a voter chosen on the previous screen.

    POST only: a valid ValidationForm renders the confirmation view, an
    invalid one re-renders the template with the bound form (and its
    errors).  Any other HTTP method is answered with 404.
    """
    if request.method == "POST":
        form = ValidationForm(request.POST)
        if form.is_valid():
            return render(request, 'validation.html', {
                'back': form.cleaned_data['back'],
                'voter': form.cleaned_data['voter'],
            })
        else:
            return render(request, 'validation.html', {
                'form': form,
            })
    else:
        raise Http404
def ballot(request, request_id):
    """Render the ballot page for the voter identified by *request_id*.

    The id is matched case-insensitively (ids are stored upper-case).  An
    unknown id now results in a 404 instead of an unhandled
    ``Voter.DoesNotExist`` (HTTP 500).
    """
    try:
        ballot = Voter.objects.get(request_id=request_id.upper())
    except Voter.DoesNotExist:
        raise Http404
    candidates = json.loads(ballot.candidates)
    return render(request, 'ballot.html', {
        'ballot': ballot,
        # Keep the fixed CANDIDATES ordering; pair each name with its code.
        'candidates': [(name, candidates[name]) for name in CANDIDATES],
    })
def verify(request):
    """Let a voter check that their vote was recorded.

    GET shows an empty VerifyForm; POST runs ``verify_vote`` on the cleaned
    data and reports the outcome (result messages are in Lithuanian).
    """
    result = None
    if request.method == 'POST':
        form = VerifyForm(request.POST)
        if form.is_valid():
            if verify_vote(form.cleaned_data):
                # NOTE(review): 'tinkmai' looks like a typo for 'tinkamai' -
                # confirm before changing the user-facing string.
                result = 'Jūsų balsas įskaitytas tinkmai.'
            else:
                result = 'Nepavyko rasti jūsų balso.'
    else:
        form = VerifyForm()
    return render(request, 'verify.html', {
        'form': form,
        'result': result,
    })
|
juruen/cavalieri | ci/ws-test/test-ws.py | Python | mit | 671 | 0.005961 | #!/usr/bin/env python
import multiprocessing
import subprocess
import sys
import time
client_processes = 5
ws_processes = 20;
total_processes = client_processes + ws_processes
events = "5000"
ws_client = "./ws-read-events.py"
riemann_client = "./gener | ate-events.py"
def work(i):
if i < client_processes:
return subprocess.call([riemann_client], shell=False)
else:
time.sleep(10)
return subprocess.call([ws_client, "localhost:5556", events],
shell=False)
pool = multiprocessing.Pool(processes=total_processes)
result = pool.map(work, rang | e(total_processes))
print result
sys.exit(len([i for i in result if i > 0]))
|
itsmeolivia/code_eval | practice/lc235.py | Python | mit | 645 | 0 | # Definition for a binary tree node.
# class TreeNode:
#     def __init__(self, x):
#         self.val = x
#         self.left = None
#         self.right = None


class Solution:
    # @param {TreeNode} root
    # @param {TreeNode} p
    # @param {TreeNode} q
    # @return {TreeNode}
    def lowestCommonAncestor(self, root, p, q):
        """Return the lowest common ancestor of *p* and *q* in the BST *root*.

        Walks down iteratively using the BST ordering: descend left while
        both values are smaller than the current node, right while both are
        larger; the first node lying between them is the LCA.
        """
        if not root or not p or not q:
            return None
        node = root
        while node:
            if max(p.val, q.val) < node.val:
                node = node.left
            elif min(p.val, q.val) > node.val:
                node = node.right
            else:
                return node
|
ericdill/bluesky | bluesky/simulators.py | Python | bsd-3-clause | 2,750 | 0 | from warnings import warn
from bluesky.preprocessors import print_summary_wrapper
def plot_raster_path(plan, x_motor, y_motor, ax=None, probe_size=None, lw=2):
    """Plot the raster path for this plan

    Parameters
    ----------
    plan : iterable
        Must yield `Msg` objects and not be a co-routine
    x_motor, y_motor : str
        Names of the x and y motors
    ax : matplotlib.axes.Axes
        The axes to plot to, if none, make new figure + axes
    probe_size : float, optional
        If not None, use as radius of probe (in same units as motor positions)
    lw : float, optional
        Width of lines drawn between points
    """
    import matplotlib.pyplot as plt
    from matplotlib import collections as mcollections
    from matplotlib import patches as mpatches

    if ax is None:
        ax = plt.subplots()[1]
    ax.set_aspect('equal')

    # Replay the plan: track the latest commanded x/y position and record
    # a trajectory point at every 'save' (event-closing) message.
    cur_x = cur_y = None
    traj = []
    for msg in plan:
        cmd = msg.command
        if cmd == 'set':
            if msg.obj.name == x_motor:
                cur_x = msg.args[0]
            if msg.obj.name == y_motor:
                cur_y = msg.args[0]
        elif cmd == 'save':
            traj.append((cur_x, cur_y))

    x, y = zip(*traj)
    path, = ax.plot(x, y, marker='', linestyle='-', lw=lw)
    ax.set_xlabel(x_motor)
    ax.set_ylabel(y_motor)

    if probe_size is None:
        read_points = ax.scatter(x, y, marker='o', lw=lw)
    else:
        # Draw a circle of the probe's radius at each measurement point.
        circles = [mpatches.Circle((_x, _y), probe_size,
                                   facecolor='black', alpha=0.5)
                   for _x, _y in traj]
        read_points = mcollections.PatchCollection(circles,
                                                   match_original=True)
        ax.add_collection(read_points)

    return {'path': path, 'events': read_points}
def summarize_plan(plan):
    """Print summary of plan

    Prints a minimal version of the plan, showing only moves and
    where events are created.

    Parameters
    ----------
    plan : iterable
        Must yield `Msg` objects
    """
    # print_summary_wrapper prints each message as a side effect while
    # re-yielding it; exhausting the generator is all that is needed here.
    for msg in print_summary_wrapper(plan):
        ...

print_summary = summarize_plan  # back-compat
def check_limits(plan):
    """
    Check that a plan will not move devices outside of their limits.

    Parameters
    ----------
    plan : iterable
        Must yield `Msg` objects
    """
    # Each device is validated only on its first 'set' message; later
    # moves of the same device are skipped.
    seen = []
    for msg in plan:
        if msg.command != 'set' or msg.obj in seen:
            continue
        device = msg.obj
        target = msg.args[0]
        if hasattr(device, "check_value"):
            device.check_value(target)
        else:
            warn(f"{device.name} has no check_value() method"
                 f" to check if {target} is within its limits.")
        seen.append(device)
google/grumpy | third_party/stdlib/test/test_list.py | Python | apache-2.0 | 2,707 | 0.000739 | import sys
import unittest
from test import test_support, list_tests
class ListTest(list_tests.CommonTest):
    """Concrete list tests on top of the shared CommonTest sequence suite."""
    type2test = list

    def test_basic(self):
        # Construction from various iterables; list() must copy, not alias.
        self.assertEqual(list([]), [])
        l0_3 = [0, 1, 2, 3]
        l0_3_bis = list(l0_3)
        self.assertEqual(l0_3, l0_3_bis)
        self.assertTrue(l0_3 is not l0_3_bis)
        self.assertEqual(list(()), [])
        self.assertEqual(list((0, 1, 2, 3)), [0, 1, 2, 3])
        self.assertEqual(list(''), [])
        self.assertEqual(list('spam'), ['s', 'p', 'a', 'm'])
        if sys.maxsize == 0x7fffffff:
            # This test can currently only work on 32-bit machines.
            # XXX If/when PySequence_Length() returns a ssize_t, it should be
            # XXX re-enabled.
            # Verify clearing of bug #556025.
            # This assumes that the max data size (sys.maxint) == max
            # address size this also assumes that the address size is at
            # least 4 bytes with 8 byte addresses, the bug is not well
            # tested
            #
            # Note: This test is expected to SEGV under Cygwin 1.3.12 or
            # earlier due to a newlib bug.  See the following mailing list
            # thread for the details:
            #     http://sources.redhat.com/ml/newlib/2002/msg00369.html
            self.assertRaises(MemoryError, list, xrange(sys.maxint // 2))
        # This code used to segfault in Py2.4a3
        x = []
        x.extend(-y for y in x)
        self.assertEqual(x, [])

    def test_truth(self):
        super(ListTest, self).test_truth()
        self.assertTrue(not [])
        self.assertTrue([42])

    def test_identity(self):
        self.assertTrue([] is not [])

    def test_len(self):
        super(ListTest, self).test_len()
        self.assertEqual(len([]), 0)
        self.assertEqual(len([0]), 1)
        self.assertEqual(len([0, 1, 2]), 3)

    @unittest.expectedFailure
    def test_overflow(self):
        # Multiplying a list to (just over) the address-space limit must
        # raise rather than crash.
        lst = [4, 5, 6, 7]
        n = int((sys.maxsize*2+2) // len(lst))
        def mul(a, b): return a * b
        def imul(a, b): a *= b
        self.assertRaises((MemoryError, OverflowError), mul, lst, n)
        self.assertRaises((MemoryError, OverflowError), imul, lst, n)
def test_main(verbose=None):
    """Drive the ListTest suite via test_support.

    The commented block below re-runs the suite several times and samples
    sys.gettotalrefcount() to hunt reference leaks; it only works on debug
    builds and is kept disabled.
    """
    test_support.run_unittest(ListTest)
    # verify reference counting
    # import sys
    # if verbose and hasattr(sys, "gettotalrefcount"):
    #     import gc
    #     counts = [None] * 5
    #     for i in xrange(len(counts)):
    #         test_support.run_unittest(ListTest)
    #         gc.collect()
    #         counts[i] = sys.gettotalrefcount()
    #     print counts

if __name__ == "__main__":
    test_main(verbose=True)
|
macborowy/dajsiepoznac-feed | DajSiePoznacFeed-Server/crawler/src/scrapper/feed.py | Python | mit | 4,500 | 0.003343 | # -*- coding: utf-8 -*-
import logging
import types
import dateutil.parser
import feedparser
import pytz
import http
from html_sanitizer import HTMLSanitizer
def getFeed(url):
    """Download *url* and return its entries parsed into our feed dicts.

    ``http.get`` is a project-local helper; presumably it returns the raw
    response body or None on failure (confirm against its module).  Even
    for None content, feedparser yields an object with an empty ``entries``
    list, so this degrades to returning [].
    """
    current_feed = []
    content = http.get(url)
    feed = feedparser.parse(content)  # even if content is None feedparser returns object with empty entries list
    for item in feed.entries:
        parsed = FeedParser.parse(item)
        current_feed.append(parsed)
    logging.info("Downloaded %d posts." % len(current_feed))
    return current_feed
def filterExistingFeeds(feeds, latest_feed):
    """Return only the entries of *feeds* published after *latest_feed*.

    With no *latest_feed* (first run) every downloaded entry is kept;
    entries without a publication date are dropped when filtering.
    """
    if not feeds:
        newer = []
    elif latest_feed is None:
        newer = feeds
    else:
        newer = []
        for entry in feeds:
            logging.info("Comparing downloaded and latest feed date - (%s, %s)" % (entry["published"], latest_feed))
            if entry["published"] is not None and entry["published"] > latest_feed:
                newer.append(entry)
    logging.info("After filtering there is %d posts to store." % len(newer))
    return newer
class FeedParser():
    """Normalizes a raw feedparser entry into our internal feed dict."""

    @staticmethod
    def parse(item):
        """Build a feed dict (link/published/title/summary/categories) from *item*."""
        link = FeedParser._getFirstOf(item, ["link", "id"])
        title = FeedParser._getFirstOf(item, ["title"])
        summary = FeedParser._getFirstOf(item, ["summary"])
        published = FeedParser._getFirstOf(item, ["published", "updated"])
        categories = FeedParser._getFirstOf(item, ["tags"])
        # for everyone using BlogEngine.NET (this item contains last betag:tag item for single feed item)
        betag = FeedParser._getFirstOf(item, ["betag"])

        categories_names = FeedParser._getNames(categories)
        # Only BlogEngine.NET feeds carry 'betag'; previously a bare None
        # was appended to the category list for every other feed.
        if betag is not None:
            categories_names.append(FeedParser._encode(betag))

        datetime_published = DateParser.parse(published)
        sanitized_summary = HTMLSanitizer.sanitize_and_parse(summary)
        return {
            "link": link,
            "published": datetime_published,
            "title": FeedParser._encode(title),
            "summary": sanitized_summary,
            "categories": categories_names
        }

    @staticmethod
    def _getFirstOf(feed_entry, attributes):
        """Return the first of *attributes* present on *feed_entry*, else None."""
        if attributes is not None:
            for attr in attributes:
                if hasattr(feed_entry, attr):
                    return feed_entry[attr]

    @staticmethod
    def _encode(value_to_encode):
        """UTF-8-encode unicode values (Python 2); pass everything else through."""
        if type(value_to_encode) is types.UnicodeType:
            return value_to_encode.encode("UTF-8")
        return value_to_encode

    @staticmethod
    def _getNames(categories):
        """Extract the encoded 'term' of every category dict in *categories*."""
        result = []
        if categories is not None:
            for category in categories:
                if "term" in category.keys():
                    result.append(FeedParser._encode(category["term"]))
        return result
class Da | teParser():
@staticmethod
def parse(date):
try:
result = dateutil.parser.parse(date).astimezone(tz=pytz.UTC).replace(tzinfo=None)
except ValueError:
try:
result = dateutil.parser.parse(date, parserinfo=DateParser.PolishParserInfo()) \
.astimezone(tz=pytz.UTC) \
.replace(tzinfo=None)
except ValueError as e:
logging.error("Unknown date string format. Provided date: %s" % date.encode("utf-8"))
raise
return result
class PolishParserInfo(dateutil.parser.parserinfo):
MONTHS = [(u'Sty', u'Styczeń'), (u'Lut', u'Luty'), (u'Mar', u'Marzec'), (u'Kwi', u'Kwiecień'), (u'Maj', u'Maj'),
(u'Cze', u'Czerwiec'), (u'Lip', u'Lipiec'), (u'Sie', u'Sierpień'), (u'Wrz', u'Wrzesień'),
(u'Paź', u'Październik'), (u'Lis', u'Listopad'), (u'Gru', u'Grudzień')]
WEEKDAYS = [(u'Pn', u'Pon', u'Poniedziałek'), (u'Wt', u'Wto', u'Wtorek'), (u'Śr', u'Śro', u'Środa'),
(u'Cz', u'Czw', u'Czwartek'), (u'Pt', u'Pią', u'Piątek'), (u'So', u'Sob', u'Sobota'),
(u'N', u'Nd', u'Nie', u'Niedziela')]
# By default this method checks if name has length greater or equal 3
# and I need to override this method because weekday abbreviations in Poland might have one letter like 'N' (Sunday)
def weekday(self, name):
if len(name) >= 1:
try:
return self._weekdays[name.lower()]
except KeyError:
pass
return None |
Leo-G/T | app/users/test_users.py | Python | mit | 4,864 | 0.003906 | import unittest
import os
import sys
# Add app path to module path
sys.path.append(os.path.dirname(os.path.realpath(__file__).rsplit('/', 2)[0]))
from app import create_app
from app.users.models import Users
app = create_app('config')
class TestUsers(unittest.TestCase):
def setUp(self):
self.app = app.test_client()
def test_read(self):
self.app = app.test_client()
rv = self.app.get('/users/')
assert "Users" in rv.data.decode('utf-8')
def test_01_add(self):
rv = self.app.post('/users/add', data=dict(
email='testing@flask.pocoo.com',
password='test string',
name='test string',
address="""How to build CRUD app with Python, Flask, SQLAlchemy and MySQL
In this post I will briefly describe,
how you can you build a database driven CRUD (Create, Read, Update, Delete) app on Linux with Python,
Flask, SQLAlchemy and MySQL. I used this process to create a blog and hence the examples below will
describe how to store and modify posts in a MySQL database. You can also download the complete source
code from https://github.com/Leo-g/Flask-Skeleton/
Software Versions
Python 2.7
Flask 0.11
Flask-SQLAlchemy 2.0
Flask-Migrate 1.3
MySQL-python 1.2
Foundation 5
Mariadb 10
Before you continue if you have not built an application on Linux with Flask or Python then
I recommend you read Creating your first Linux App with Python and Flask.
Read more at http://techarena51.com/index.php/flask-sqlalchemy-tutorial/""",
is_active='False',
creation_time='2015-12-22T03:12:58.019077+00:00',
modification_time='2015-12-22T03:12:58.019077+00:00',
role='35678',), follow_redirects=True)
assert 'Add was successful' in rv.data.decode('utf-8')
def test_02_Update(self):
with app.app_context():
id = Users.query.first().id
rv = self.app.post(
'/users/update/{}'.format(id), data=dict(
email='testing@flask.pocoo.com',
password='test string',
name='test string',
address="""How to build CRUD app with Python, Flask, SQLAlchemy and MySQL
In this post I will briefly describe,
how you can you build a database driven CRUD (Create, Read, Update, Delete) app on Linux with Python,
Flask, SQLAlchemy and MySQL. I used this process to create a blog and hence the examples below will
describe how to store and modify posts in a MySQL database. You can also download the complete source
code from https://github.com/Leo-g/Flask-Skeleton/
Software Versions
Python 2.7
Flask 0.11
Flask-SQLAlchemy 2.0
Flask-Migrate 1.3
MySQL-python 1.2
Foundation 5
Mariadb 10
Before you continue if you have not built an application on Linux with Flask or Python then
I recommend you read Creating your first Linux App with Python and Flask.
Read more at http://techarena51.com/index.php/flask-sqlalchemy-tutorial/""",
is_active='False',
creation_time='2015-12-22T03:12:58.019077+00:00',
modification_time='2015-12-22T03:12:58.019077+00:00',
role='35678',), follow_redirects=True)
assert 'Update was successful' in rv.data.decode('utf-8')
def test_03_delete(self):
with app.app_context():
id = Users.query.first().id
| rv = self.app.post | (
'users/delete/{}'.format(id), follow_redirects=True)
assert 'Delete was successful' in rv.data.decode('utf-8')
if __name__ == '__main__':
unittest.main()
|
BehavioralInsightsTeam/edx-platform | lms/djangoapps/edxnotes/tests.py | Python | agpl-3.0 | 55,193 | 0.002555 | """
Tests for the EdxNotes app.
"""
import json
import urlparse
from contextlib import contextmanager
from datetime import datetime
from unittest import skipUnless
import mock
import ddt
import jwt
from django.conf import settings
from django.contrib.auth.models import AnonymousUser
from django.core.exceptions import ImproperlyConfigured
from django.urls import reverse
from django.test.client import RequestFactory
from django.test.utils import override_settings
from edx_oauth2_provider.tests.factories import ClientFactory
from mock import MagicMock, patch
from nose.plugins.attrib import attr
from provider.oauth2.models import Client
from courseware.model_data import FieldDataCache
from courseware.module_render import get_module_for_descriptor
from courseware.tabs import get_course_tab_list
from edxmako.shortcuts import render_to_string
from edxnotes import helpers
from edxnotes.decorators import edxnotes
from edxnotes.exceptions import EdxNotesParseError, EdxNotesServiceUnavailable
from edxnotes.plugins import EdxNotesTab
from openedx.core.djangoapps.user_api.models import RetirementState, UserRetirementStatus
from openedx.core.lib.token_utils import JwtBuilder
from student.tests.factories import CourseEnrollmentFactory, SuperuserFactory, UserFactory
from xmodule.modulestore import ModuleStoreEnum
from xmodule.modulestore.django import modulestore
from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase
from xmodule.modulestore.tests.factories import CourseFactory, ItemFactory
from xmodule.tabs import CourseTab
FEATURES = settings.FEATURES.copy()
NOTES_API_EMPTY_RESPONSE = {
"total": 0,
"rows": [],
"current_page": 1,
"start": 0,
"next": None,
"previous": None,
"num_pages": 0,
}
NOTES_VIEW_EMPTY_RESPONSE = {
"count": 0,
"results": [],
"current_page": 1,
"start": 0,
"next": None,
"previous": None,
"num_pages": 0,
}
def enable_edxnotes_for_the_course(course, user_id):
"""
Enable EdxNotes for the course.
"""
course.tabs.append(CourseTab.load("edxnotes"))
modulestore().update_item(course, user_id)
@edxnotes
class TestProblem(object):
"""
Test class (fake problem) decorated by edxnotes decorator.
The purpose of this class is to imitate any problem.
"""
def __init__(self, course, user=None):
self.system = MagicMock(is_author_mode=False)
self.scope_ids = MagicMock(usage_id="test_usage_id")
user = user or UserFactory()
self.runtime = MagicMock(course_id=course.id, get_real_user=lambda __: user)
self.descriptor = MagicMock()
self.descriptor.runtime.modulestore.get_course.return_value = course
def get_html(self):
"""
Imitate get_html in module.
"""
return "original_get_html"
@attr(shard=3)
@skipUnless(settings.FEATURES["ENABLE_EDXNOTES"], "EdxNotes feature needs to be enabled.")
class EdxNotesDecoratorTest(ModuleStoreTestCa | se):
"""
Tests for edxnotes decorator.
"""
def setUp(self):
super(EdxNotesDecoratorTest, self).setUp()
ClientFactory(name="edx-notes")
# Using old mongo because of locator comparison issues (see longer
# note below in EdxNotesH | elpersTest setUp.
self.course = CourseFactory(edxnotes=True, default_store=ModuleStoreEnum.Type.mongo)
self.user = UserFactory()
self.client.login(username=self.user.username, password=UserFactory._DEFAULT_PASSWORD)
self.problem = TestProblem(self.course, self.user)
@patch.dict("django.conf.settings.FEATURES", {'ENABLE_EDXNOTES': True})
@patch("edxnotes.helpers.get_public_endpoint", autospec=True)
@patch("edxnotes.helpers.get_token_url", autospec=True)
@patch("edxnotes.helpers.get_edxnotes_id_token", autospec=True)
@patch("edxnotes.helpers.generate_uid", autospec=True)
def test_edxnotes_enabled(self, mock_generate_uid, mock_get_id_token, mock_get_token_url, mock_get_endpoint):
"""
Tests if get_html is wrapped when feature flag is on and edxnotes are
enabled for the course.
"""
course = CourseFactory(edxnotes=True)
enrollment = CourseEnrollmentFactory(course_id=course.id)
user = enrollment.user
problem = TestProblem(course, user)
mock_generate_uid.return_value = "uid"
mock_get_id_token.return_value = "token"
mock_get_token_url.return_value = "/tokenUrl"
mock_get_endpoint.return_value = "/endpoint"
enable_edxnotes_for_the_course(course, user.id)
expected_context = {
"content": "original_get_html",
"uid": "uid",
"edxnotes_visibility": "true",
"params": {
"usageId": "test_usage_id",
"courseId": course.id,
"token": "token",
"tokenUrl": "/tokenUrl",
"endpoint": "/endpoint",
"debug": settings.DEBUG,
"eventStringLimit": settings.TRACK_MAX_EVENT / 6,
},
}
self.assertEqual(
problem.get_html(),
render_to_string("edxnotes_wrapper.html", expected_context),
)
@patch.dict("django.conf.settings.FEATURES", {"ENABLE_EDXNOTES": True})
def test_edxnotes_disabled_if_edxnotes_flag_is_false(self):
"""
Tests that get_html is wrapped when feature flag is on, but edxnotes are
disabled for the course.
"""
self.course.edxnotes = False
self.assertEqual("original_get_html", self.problem.get_html())
@patch.dict("django.conf.settings.FEATURES", {"ENABLE_EDXNOTES": False})
def test_edxnotes_disabled(self):
"""
Tests that get_html is not wrapped when feature flag is off.
"""
self.assertEqual("original_get_html", self.problem.get_html())
def test_edxnotes_studio(self):
"""
Tests that get_html is not wrapped when problem is rendered in Studio.
"""
self.problem.system.is_author_mode = True
self.assertEqual("original_get_html", self.problem.get_html())
def test_edxnotes_harvard_notes_enabled(self):
"""
Tests that get_html is not wrapped when Harvard Annotation Tool is enabled.
"""
self.course.advanced_modules = ["videoannotation", "imageannotation", "textannotation"]
enable_edxnotes_for_the_course(self.course, self.user.id)
self.assertEqual("original_get_html", self.problem.get_html())
@patch.dict("django.conf.settings.FEATURES", {"ENABLE_EDXNOTES": True})
def test_anonymous_user(self):
user = AnonymousUser()
problem = TestProblem(self.course, user)
enable_edxnotes_for_the_course(self.course, None)
assert problem.get_html() == "original_get_html"
@attr(shard=3)
@skipUnless(settings.FEATURES["ENABLE_EDXNOTES"], "EdxNotes feature needs to be enabled.")
@ddt.ddt
class EdxNotesHelpersTest(ModuleStoreTestCase):
"""
Tests for EdxNotes helpers.
"""
def setUp(self):
"""
Setup a dummy course content.
"""
super(EdxNotesHelpersTest, self).setUp()
# There are many tests that are comparing locators as returned from helper methods. When using
# the split modulestore, some of those locators have version and branch information, but the
# comparison values do not. This needs further investigation in order to enable these tests
# with the split modulestore.
with self.store.default_store(ModuleStoreEnum.Type.mongo):
ClientFactory(name="edx-notes")
self.course = CourseFactory.create()
self.chapter = ItemFactory.create(category="chapter", parent_location=self.course.location)
self.chapter_2 = ItemFactory.create(category="chapter", parent_location=self.course.location)
self.sequential = ItemFactory.create(category="sequential", parent_location=self.chapter.location)
self.vertical = ItemFactory.create(category="vertical", parent_location=self.sequential.location)
self.html_module_1 = ItemFactory.create(category="htm |
bswartz/manila | manila/share/drivers/nexenta/ns4/nexenta_nas.py | Python | apache-2.0 | 5,591 | 0 | # Copyright 2016 Nexenta Systems, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_log import log
from manila import exception
from manila.i18n import _
from manila.share import driver
from manila.share.drivers.nexenta.ns4 import nexenta_nfs_helper
from manila.share.drivers.nexenta import options
VERSION = '1.0'
LOG = log.getLogger(__name__)
class NexentaNasDriver(driver.ShareDriver):
"""Nexenta Share Driver.
Executes commands relating to Shares.
API version history:
1.0 - Initial version.
"""
def __init__(self, *args, **kwargs):
"""Do initialization."""
LOG.debug('Initializing Nexenta driver.')
super(NexentaNasDriver, self).__init__(False, *args, **kwargs)
self.configuration = kwargs.get('configuration')
if self.configuration:
self.configuration.append_config_values(
options.nexenta_connection_opts)
self.configuration.append_config_values(
options.nexenta_nfs_opts)
self.configuration.append_config_values(
options.nexenta_dataset_opts)
self.helper = nexenta_nfs_helper.NFSHelper(self.configuration)
else:
raise exception.BadConfigurationException(
reason=_('Nexenta configuration missing.'))
@property
def share_backend_name(self):
if n | ot hasattr(self, '_share_ | backend_name'):
self._share_backend_name = None
if self.configuration:
self._share_backend_name = self.configuration.safe_get(
'share_backend_name')
if not self._share_backend_name:
self._share_backend_name = 'NexentaStor4'
return self._share_backend_name
def do_setup(self, context):
"""Any initialization the Nexenta NAS driver does while starting."""
LOG.debug('Setting up the NexentaStor4 plugin.')
return self.helper.do_setup()
def check_for_setup_error(self):
"""Returns an error if prerequisites aren't met."""
self.helper.check_for_setup_error()
def create_share(self, context, share, share_server=None):
"""Create a share."""
LOG.debug('Creating share %s.', share['name'])
return self.helper.create_filesystem(share)
def create_share_from_snapshot(self, context, share, snapshot,
share_server=None):
"""Is called to create share from snapshot."""
LOG.debug('Creating share from snapshot %s.', snapshot['name'])
return self.helper.create_share_from_snapshot(share, snapshot)
def delete_share(self, context, share, share_server=None):
"""Delete a share."""
LOG.debug('Deleting share %s.', share['name'])
self.helper.delete_share(share['name'])
def extend_share(self, share, new_size, share_server=None):
"""Extends a share."""
LOG.debug('Extending share %(name)s to %(size)sG.', {
'name': share['name'], 'size': new_size})
self.helper.set_quota(share['name'], new_size)
def create_snapshot(self, context, snapshot, share_server=None):
"""Create a snapshot."""
LOG.debug('Creating a snapshot of share %s.', snapshot['share_name'])
snap_id = self.helper.create_snapshot(
snapshot['share_name'], snapshot['name'])
LOG.info('Created snapshot %s.', snap_id)
def delete_snapshot(self, context, snapshot, share_server=None):
"""Delete a snapshot."""
LOG.debug('Deleting snapshot %(shr_name)s@%(snap_name)s.', {
'shr_name': snapshot['share_name'],
'snap_name': snapshot['name']})
self.helper.delete_snapshot(snapshot['share_name'], snapshot['name'])
def update_access(self, context, share, access_rules, add_rules,
                  delete_rules, share_server=None):
    """Update access rules for given share.

    :param context: The `context.RequestContext` object for the request
    :param share: Share that will have its access rules updated.
    :param access_rules: All access rules for given share. This list
        is enough to update the access rules for given share.
    :param add_rules: Empty List or List of access rules which should be
        added. access_rules already contains these rules. Not used by this
        driver.
    :param delete_rules: Empty List or List of access rules which should be
        removed. access_rules doesn't contain these rules. Not used by
        this driver.
    :param share_server: Data structure with share server information.
        Not used by this driver.
    """
    # Full-ruleset sync: the helper is handed the complete rule list,
    # so the add/delete deltas can safely be ignored.
    self.helper.update_access(share['name'], access_rules)
def _update_share_stats(self, data=None):
    """Refresh ``self._stats`` with capacity/usage data from the helper.

    NOTE(review): the ``data`` argument is accepted for signature
    compatibility with the base class but is immediately rebound to the
    helper's stats, so any value passed in is ignored.
    """
    super(NexentaNasDriver, self)._update_share_stats()
    data = self.helper.update_share_stats()
    data['driver_version'] = VERSION
    data['share_backend_name'] = self.share_backend_name
    self._stats.update(data)
|
IgowWang/ML_python3 | tensorgo/mnist/mnist.py | Python | gpl-3.0 | 3,726 | 0.000281 | __author__ = 'igor'
"""
构建 mnist network
构建 Graph
1.inference() - Builds the model as far as is required for running the network
forward to make predictions.
2.loss() -Adds to the inference model the layers required to generate loss
3.training() - Adds to the loss model the Ops required to generate and
apply gradients.
"""
import os.path
import math
import tensorflow.python.platform
import tensorflow as tf
# The MNIST dataset has 10 classes, one per digit 0-9.
NUM_CLASSES = 10
# MNIST images are 28x28 pixels.
IMAGE_SIZE = 28
# Flattened feature dimension: one value per pixel.
IMAGE_PIXELS = IMAGE_SIZE * IMAGE_SIZE
def inference(images, hidden1_units, hidden2_units):
    '''
    Build the MNIST model graph (forward pass only).

    :param images: Images placeholder, the network input.
    :param hidden1_units: Size of the first hidden layer.
    :param hidden2_units: Size of the second hidden layer.
    :return:
        softmax_linear: Output tensor with the computed logits.
    '''
    # Hidden 1
    with tf.name_scope("hidden1"):
        weights = tf.Variable(  # weights from the input layer to hidden layer 1
            tf.truncated_normal([IMAGE_PIXELS, hidden1_units],
                                # 1/sqrt(fan_in) keeps initial activations bounded
                                stddev=1.0 / math.sqrt(float(IMAGE_PIXELS))),
            name="weights")
        biases = tf.Variable(
            tf.zeros([hidden1_units]),
            name='biases'
        )
        hidden1 = tf.nn.relu(tf.matmul(images, weights) + biases)  # rectifier activation
    # Hidden 2
    with tf.name_scope('hidden2'):
        weights = tf.Variable(
            tf.truncated_normal([hidden1_units, hidden2_units],
                                stddev=1.0 / math.sqrt(float(hidden1_units))),
            name='weights')
        biases = tf.Variable(tf.zeros([hidden2_units]),
                             name='biases')
        hidden2 = tf.nn.relu(tf.matmul(hidden1, weights) + biases)
    # Linear
    with tf.name_scope('soft_max_linear'):
        weights = tf.Variable(
            tf.truncated_normal([hidden2_units, NUM_CLASSES],
                                stddev=1.0 / math.sqrt(float(hidden2_units))),
            name='weights')
        biases = tf.Variable(tf.zeros([NUM_CLASSES]),
                             name='biases')
        logits = tf.matmul(hidden2, weights) + biases  # identity activation; raw logits
    return logits
def loss(logits, labels):
    '''
    Compute the cross-entropy loss from logits and labels.

    :param logits: Logits tensor, float - [batch_size, NUM_CLASSES]
    :param labels: Labels tensor, int32 - [batch_size]
    :return: Loss tensor (scalar mean cross-entropy).
    '''
    # One-hot encode the integer labels by building a sparse
    # (row, label) index list and scattering 1.0 into a dense matrix.
    batch_size = tf.size(labels)
    labels = tf.expand_dims(labels, 1)
    indices = tf.expand_dims(tf.range(0, batch_size, 1), 1)
    concated = tf.concat(1, [indices, labels])
    one_hot_labels = tf.sparse_to_dense(
        concated, tf.pack([batch_size, NUM_CLASSES]), 1.0, 0.0)
    cross_entropy = tf.nn.softmax_cross_entropy_with_logits(logits,
                                                            one_hot_labels,
                                                            name='xentropy')
    loss = tf.reduce_mean(cross_entropy, name='xentropy_mean')
    return loss
def training(loss, learning_rate):
    '''
    Set up the training Ops.

    :param loss: Loss tensor, from ``loss()``.
    :param learning_rate: Gradient-descent step size.
    :return: The Op to run one training step.
    '''
    # Emit the loss as a summary so it shows up in TensorBoard.
    tf.scalar_summary(loss.op.name, loss)
    # Plain (non-momentum) gradient descent.
    optimizer = tf.train.GradientDescentOptimizer(learning_rate)
    # minimize() increments global_step once per applied gradient update.
    global_step = tf.Variable(0, name='global_step', trainable=False)
    train_op = optimizer.minimize(loss, global_step=global_step)
    return train_op
def evalution(logits, labels):
    """Return the number of examples whose true label is the top prediction."""
    hits = tf.nn.in_top_k(logits, labels, 1)
    return tf.reduce_sum(tf.cast(hits, tf.int32))
if __name__ == '__main__':
pass
|
nirizr/rematch | server/rematch/wsgi.py | Python | gpl-3.0 | 318 | 0 | """
WSGI config for rematch-server project.
It exposes the WSGI callable as a module-le | vel variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.9/howto/dep | loyment/wsgi/
"""
from django.core.wsgi import get_wsgi_application
# Module-level WSGI callable; servers (gunicorn, uWSGI, mod_wsgi) import
# this name to serve the project.
application = get_wsgi_application()
|
Trinak/SuperPong | superPong/actors/components/ballAIComponent.py | Python | gpl-3.0 | 1,814 | 0.006615 | '''
Created on Oct 30, 2014
@author: Arrington
'''
from pyHopeEngine import engineCommon as ECOM
from pyHopeEngine.actors.components.aiComponent import AIComponent
from superPong.actors.ballAI.ballProcesses.ballChooseStateProcess import BallChooseStateProcess
from superPong.actors.ballAI.pongBallBrain import MainBallBrain, BasicBallBrain
class BallAIComponent(AIComponent):
    """AI component for the pong ball.

    Owns a "brain" that decides which behaviour state the ball should be
    in, the currently active state, and a scheduled process that
    periodically asks the brain to re-evaluate.
    """

    def __init__(self):
        super().__init__()
        self.currentState = None
        self.brain = None
        self.chooseStateProcess = None

    def init(self, element):
        """Configure from XML: pick the brain and schedule state selection."""
        brain_node = element.find("Brain")
        self.setBrain(brain_node.text)
        self.chooseStateProcess = BallChooseStateProcess(self)
        ECOM.engine.baseLogic.processManager.addProcess(self.chooseStateProcess)

    def postInit(self):
        """Enter the brain's initial state once the actor is fully built."""
        self.currentState = self.brain.init(self.owner)
        self.currentState.init()

    def setBrain(self, name):
        """Instantiate the brain matching *name*; unknown names are ignored."""
        brain_classes = {
            "MainBallBrain": MainBallBrain,
            "BasicBallBrain": BasicBallBrain,
        }
        brain_cls = brain_classes.get(name)
        if brain_cls is not None:
            self.brain = brain_cls()

    def setState(self, state):
        """Tear down the current state and switch to the *state* class."""
        self.currentState.cleanUp()
        next_state = state(self.owner)
        self.currentState = next_state
        next_state.init()

    def chooseState(self):
        """Ask the brain for a new state and switch if it proposes one."""
        if self.brain is None:
            return
        proposed = self.brain.think()
        if proposed is not None:
            self.setState(proposed)

    def update(self):
        """Tick the active state, if any."""
        state = self.currentState
        if state is not None:
            state.update()

    def cleanUp(self):
        """Release brain, state and the scheduled process."""
        super().cleanUp()
        self.brain.cleanUp()
        self.brain = None
        self.currentState.cleanUp()
        self.currentState = None
        self.chooseStateProcess.succeed()
        self.chooseStateProcess = None
nagyistoce/devide | modules/vtk_basic/vtkMergeFilter.py | Python | bsd-3-clause | 550 | 0.003636 | # class generated by DeVIDE::createDeVIDEModuleFromVTKObject
from module_kits.vtk_kit.mixins import SimpleVTKClassModuleBase
import vtk
class vtkMergeFilter(Simp | leVTKClassModuleBase):
def __init__(self, module_manager):
SimpleVTKClassMo | duleBase.__init__(
self, module_manager,
vtk.vtkMergeFilter(), 'Processing.',
('vtkDataSet', 'vtkDataSet', 'vtkDataSet', 'vtkDataSet', 'vtkDataSet', 'vtkDataSet'), ('vtkDataSet',),
replaceDoc=True,
inputFunctions=None, outputFunctions=None)
|
Eszti/4lang | scripts/get_defs.py | Python | mit | 241 | 0 | import json
import sys |
# NOTE(review): Python 2 code (dict.itervalues, print statement, u"" literal).
data = json.load(sys.stdin)
# For every dictionary entry, print "<headword>\t<definition>" for the
# first sense, skipping entries whose first sense has no definition.
for e in data.itervalues():
    if e['senses'] and e['senses'][0]['definition']:
        print u"{0}\t{1}".format(
            e['hw'], e['senses'][0]['definition']['sen']).encode('utf-8')
|
hibou107/algocpp | telephone.py | Python | mit | 1,349 | 0.002965 | import sys, math
# Auto-generated code below aims at helping you parse
# the standard input according to the problem statement.
class Tree(object):
    """Prefix tree (trie) over single characters.

    Each node stores one character in ``val`` (``None`` for the root) and
    its child subtrees in ``childs``.
    """

    def __repr__(self):
        # str() so the root node (val is None) is representable too; the
        # previous version returned self.val directly, which raised
        # TypeError for the root because __repr__ must return a str.
        return str(self.val)

    def __init__(self, val=None):
        self.val = val
        self.childs = []

    def add_number(self, number):
        """Insert *number* (a list of characters) below this node.

        NOTE: consumes the caller's list in place (``del number[0]``),
        matching the historical behaviour of this class.
        """
        if not number:
            return
        # Descend into an existing child sharing the next character...
        for child in self.childs:
            if number[0] == child.val:
                del number[0]
                child.add_number(number)
                return
        # ...or branch off a new child for it.
        new_child = Tree(number[0])
        self.childs.append(new_child)
        del number[0]
        new_child.add_number(number)

    def calculate(self):
        """Return the number of non-root nodes in the trie."""
        own = 1 if self.val else 0
        return own + sum(child.calculate() for child in self.childs)

    def show(self, order=''):
        """Print the trie, one node per line, indented by depth."""
        # print() call (was a Python 2 print statement); output unchanged.
        print(order + str(self.val))
        order += ' '
        for child in self.childs:
            child.show(order)
# N = int(raw_input())
# for i in xrange(N):
# telephone = raw_input()
# # Write an action using print
# # To debug: print >> sys.stderr, "Debug messages..."
# print "number" # | The number of elements (referencing a number) stored in the structure.
if __name__ == '__main__':
    # Smoke test: a 10-digit number yields one trie node per digit.
    t = Tree()
    t.add_number(list('0123456789'))
    # print() calls (were Python 2 print statements); output unchanged.
    print(t.calculate())
    t.show()
|
wazo-pbx/xivo-auth | wazo_auth/plugins/external_auth/google/schemas.py | Python | gpl-3.0 | 422 | 0 | # Copyright 2019 The Wazo Authors (see the AUTHORS file)
# SPDX-License-Identifier: GPL-3.0-or-late | r
from wazo_auth | import schemas
from xivo.mallow import fields
from xivo.mallow.validate import Length
class GoogleSchema(schemas.BaseSchema):
    """Marshmallow schema for the Google external-auth payload."""

    # Scopes requested for the Google auth; each scope must be a
    # non-empty string of at most 512 characters.
    scope = fields.List(fields.String(validate=Length(min=1, max=512)))
    # Serialized to clients only (dump_only); never accepted on input.
    access_token = fields.String(dump_only=True)
    token_expiration = fields.Integer(dump_only=True)
|
gkc1000/pyscf | examples/ao2mo/20-eri_grad_hess.py | Python | apache-2.0 | 2,223 | 0.013045 | #!/usr/bin/env python
#
# Author: Qiming Sun <osirpt.sun@gmail.com>
#
import tempfi | le
import numpy
import h5py
from pyscf import gto, scf, ao2mo
'''
Integral transform | ation for irregular operators
'''
# Water molecule, cc-pVDZ basis.
mol = gto.M(
    verbose = 0,
    atom = [
        [8 , (0. , 0. , 0.)],
        [1 , (0. , -0.757 , 0.587)],
        [1 , (0. , 0.757 , 0.587)] ],
    basis = 'ccpvdz',
)
# Restricted Hartree-Fock reference for the MO coefficients.
mf = scf.RHF(mol)
e = mf.scf()
print('E = %.15g, ref -76.0267656731' % e)

#
# Given four MOs, compute the MO-integral gradients
#
gradtmp = tempfile.NamedTemporaryFile()
nocc = mol.nelectron // 2
nvir = len(mf.mo_energy) - nocc
co = mf.mo_coeff[:,:nocc]   # occupied orbitals
cv = mf.mo_coeff[:,nocc:]   # virtual orbitals
# Note the AO integrals cint2e_ip1_sph have 3 components (x,y,z) and only have
# permutation symmetry k>=l.
ao2mo.kernel(mol, (co,cv,co,cv), gradtmp.name, intor='cint2e_ip1_sph',
             aosym='s2kl')#, verbose=5)
feri = h5py.File(gradtmp.name, 'r')
grad = feri['eri_mo']
print('gradient integrals (d/dR i j|kl) have shape %s == (3,%dx%d,%dx%d)'
      % (str(grad.shape), nocc,nvir,nocc,nvir))
# Close the gradient-integral file before reusing the name below
# (previously this handle was leaked when feri was rebound).
feri.close()

#
# Hessian integrals have 9 components
#  1  d/dX d/dX
#  2  d/dX d/dY
#  3  d/dX d/dZ
#  4  d/dY d/dX
#  5  d/dY d/dY
#  6  d/dY d/dZ
#  7  d/dZ d/dX
#  8  d/dZ d/dY
#  9  d/dZ d/dZ
#
orb = mf.mo_coeff
hesstmp = tempfile.NamedTemporaryFile()
# (d/dR i d/dR j | k l): derivatives on both bra indices, 4-fold symmetry.
ao2mo.kernel(mol, orb, hesstmp.name, intor='cint2e_ipvip1_sph',
             dataname='hessints1', aosym='s4')
with ao2mo.load(hesstmp, 'hessints1') as eri:
    print('(d/dR i d/dR j| kl) have shape %s due to the 4-fold permutation '
          'symmetry i >= j, k >= l' % str(eri.shape))

# (d/dR d/dR i j | k l): both derivatives on the first index, 2-fold symmetry.
ao2mo.kernel(mol, orb, hesstmp.name, intor='cint2e_ipip1_sph',
             dataname='hessints2', aosym='s2kl')
feri = h5py.File(hesstmp.name, 'r')
print('(d/dR d/dR i j| kl) have shape %s due to the 2-fold permutation '
      'symmetry k >= l' % str(feri['hessints2'].shape))
feri.close()

# (d/dR i j | d/dR k l): one derivative on each electron, no symmetry.
with ao2mo.load(ao2mo.kernel(mol, orb, hesstmp.name, intor='cint2e_ip1ip2_sph',
                             aosym='s1')) as eri:
    print('(d/dR i j|d/dR k l) have shape %s because there is no permutation '
          'symmetry' % str(eri.shape))
|
genonfire/portality | accounts/migrations/0001_initial.py | Python | mit | 730 | 0.00137 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django | .db import migrations, models
from django.conf import settings
class Migration(migrations.Migration):
    """Initial migration: one-to-one ``Profile`` extension of the user model."""

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        migrations.CreateModel(
            name='Profile',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                # Per-user point counter, starts at 1.
                ('point', models.IntegerField(default=1)),
                # Set once at row creation (auto_now_add).
                ('lastcall', models.DateTimeField(auto_now_add=True)),
                ('user', models.OneToOneField(to=settings.AUTH_USER_MODEL)),
            ],
        ),
    ]
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.