| repo_name (stringlengths 5-100) | path (stringlengths 4-231) | language (stringclasses, 1 value) | license (stringclasses, 15 values) | size (int64, 6-947k) | score (float64, 0-0.34) | prefix (stringlengths 0-8.16k) | middle (stringlengths 3-512) | suffix (stringlengths 0-8.17k) |
|---|---|---|---|---|---|---|---|---|
EUDAT-B2SHARE/invenio-old | modules/miscutil/lib/mailutils.py | Python | gpl-2.0 | 17,746 | 0.00231
# -*- coding: utf-8 -*-
##
## This file is part of Invenio.
## Copyright (C) 2007, 2008, 2009, 2010, 2011, 2012 CERN.
##
## Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""
Invenio mail sending utilities. send_email() is the main API function
people should be using; just check out its docstring.
"""
__revision__ = "$Id$"
from time import sleep
import re
import os
import sys
from email.MIMEMultipart import MIMEMultipart
from email.MIMEBase import MIMEBase
from email import Encoders
from email.MIMEImage import MIMEImage
from email.Utils import formatdate
from cStringIO import StringIO
from flask import g
from formatter import DumbWriter, AbstractFormatter
from flask.ext.email.message import EmailMultiAlternatives, EmailMessage
from invenio.config import \
CFG_EMAIL_BACKEND, \
CFG_SITE_SUPPORT_EMAIL, \
CFG_SITE_URL, \
CFG_SITE_LANG, \
CFG_SITE_ADMIN_EMAIL, \
CFG_MISCUTIL_SMTP_HOST, \
CFG_MISCUTIL_SMTP_PORT, \
CFG_VERSION, \
CFG_DEVEL_SITE, \
CFG_LOGDIR
from invenio.config import CFG_MISCUTIL_SMTP_HOST, CFG_MISCUTIL_SMTP_PORT
try:
from invenio.config import \
CFG_MISCUTIL_SMTP_USER,\
CFG_MISCUTIL_SMTP_PASS,\
CFG_MISCUTIL_SMTP_TLS
except ImportError:
CFG_MISCUTIL_SMTP_USER = ''
CFG_MISCUTIL_SMTP_PASS = ''
CFG_MISCUTIL_SMTP_TLS = False
from invenio.errorlib import register_exception
from invenio.miscutil_config import InvenioMiscUtilError
from invenio.jinja2utils import render_template_to_string
from invenio.webinterface_handler_flask_utils import unicodifier
def initialize_email_backend(app):
"""
Prepare application config from Invenio configuration.
@see: https://flask-email.readthedocs.org/en/latest/#configuration
"""
app.config['DEFAULT_FROM_EMAIL'] = CFG_SITE_SUPPORT_EMAIL
app.config['SERVER_EMAIL'] = CFG_SITE_ADMIN_EMAIL
app.config['ADMINS'] = (CFG_SITE_ADMIN_EMAIL, )
app.config['MANAGERS'] = (CFG_SITE_SUPPORT_EMAIL, )
if app.config.get('EMAIL_BACKEND') is None:
if app.config.get('CFG_EMAIL_BACKEND') or CFG_EMAIL_BACKEND:
app.config['EMAIL_BACKEND'] = app.config.get('CFG_EMAIL_BACKEND',
CFG_EMAIL_BACKEND)
elif CFG_MISCUTIL_SMTP_HOST and CFG_MISCUTIL_SMTP_PORT:
app.config['EMAIL_BACKEND'] = 'flask.ext.email.backends.smtp.Mail'
# Defaults to 'flask.ext.email.backends.locmem.Mail'
app.config['EMAIL_HOST'] = CFG_MISCUTIL_SMTP_HOST
app.config['EMAIL_PORT'] = CFG_MISCUTIL_SMTP_PORT
app.config['EMAIL_HOST_USER'] = CFG_MISCUTIL_SMTP_USER
app.config['EMAIL_HOST_PASSWORD'] = CFG_MISCUTIL_SMTP_PASS
app.config['EMAIL_USE_TLS'] = CFG_MISCUTIL_SMTP_TLS
# app.config['EMAIL_USE_SSL']: defaults to False
app.config['EMAIL_FILE_PATH'] = CFG_LOGDIR
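# Hedged usage sketch (not in the original file): this helper is meant to be called
# once on the Flask application object during app creation, e.g.:
#
#     from flask import Flask
#     app = Flask(__name__)          # hypothetical application
#     initialize_email_backend(app)  # mail settings now derive from Invenio config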
def scheduled_send_email(fromaddr,
toaddr,
subject="",
content="",
header=None,
footer=None,
copy_to_admin=0,
attempt_times=1,
attempt_sleeptime=10,
user=None,
other_bibtasklet_arguments=None,
replytoaddr=""):
"""
Like send_email, but send an email via the bibsched
infrastructure.
@param fromaddr: sender
@type fromaddr: string
@param toaddr: list of receivers
@type toaddr: string (comma separated) or list of strings
@param subject: the subject
@param content: the body of the message
@param header: optional header, otherwise default is used
@param footer: optional footer, otherwise default is used
@param copy_to_admin: set to 1 in order to send email to the admins
@param attempt_times: try at least n times before giving up sending
@param attempt_sleeptime: number of seconds to sleep between two attempts
@param user: the user name to use when scheduling the bibtasklet. If
None, the sender will be used
@param other_bibtasklet_arguments: other arguments to append to the list
of arguments to the call of task_low_level_submission
@param replytoaddr: [string or list-of-strings] to be used for the
reply-to header of the email (if string, then
receivers are separated by ',')
@return: the scheduled bibtasklet
"""
from invenio.bibtask import task_low_level_submission
if not isinstance(toaddr, (unicode, str)):
toaddr = ','.join(toaddr)
if not isinstance(replytoaddr, (unicode, str)):
replytoaddr = ','.join(replytoaddr)
toaddr = remove_temporary_emails(toaddr)
if user is None:
user = fromaddr
if other_bibtasklet_arguments is None:
other_bibtasklet_arguments = []
else:
other_bibtasklet_arguments = list(other_bibtasklet_arguments)
if header is not None:
other_bibtasklet_arguments.extend(("-a", "header=%s" % header))
if footer is not None:
other_bibtasklet_arguments.extend(("-a", "footer=%s" % footer))
return task_low_level_submission(
"bibtasklet", user, "-T", "bst_send_email",
"-a", "fromaddr=%s" % fromaddr,
"-a", "toaddr=%s" % toaddr,
"-a", "replytoaddr=%s" % replytoaddr,
"-a", "subject=%s" % subject,
"-a", "content=%s" % content,
"-a", "copy_to_admin=%s" % copy_to_admin,
"-a", "attempt_times=%s" % attempt_times,
"-a", "attempt_sleeptime=%s" % attempt_sleeptime,
*other_bibtasklet_arguments)
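# Hedged usage sketch (not in the original file); the addresses are illustrative:
#
#     task_id = scheduled_send_email('noreply@example.org', 'user@example.org',
#                                    subject='Hello',
#                                    content='Sent via the bibsched queue.')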
def send_email(fromaddr,
toaddr,
subject="",
content="",
html_content='',
html_images=None,
header=None,
footer=None,
html_header=None,
html_footer=None,
copy_to_admin=0,
attempt_times=1,
attempt_sleeptime=10,
debug_level=0,
ln=CFG_SITE_LANG,
charset=None,
replytoaddr="",
attachments=None
):
"""Send a forged email to TOADDR from FROMADDR with message created from subjet, content and possibly
header and footer.
@param fromaddr: [string] sender
@param toaddr: [string or list-of-strings] list of receivers (if string, then
receivers are separated by ',')
@param subject: [string] subject of the email
@param content: [string] content of the email
@param html_content: [string] html version of the email
@param html_images: [dict] dictionary of image id, image path
@param header: [string] header to add, None for the Default
@param footer: [string] footer to add, None for the Default
@param html_header: [string] header to add to the html part, None for the Default
@param html_footer: [string] footer to add to the html part, None for the Default
@param copy_to_admin: [int] if 1 add CFG_SITE_ADMIN_EMAIL in receivers
@param attempt_times: [int] number of tries
@param attempt_sleeptime: [int] seconds in between tries
@param debug_level: [int] debug level
@param ln: [string] invenio language
@param charset: [string] the content charset. By default is None which means
to try to encode the email as ascii, then latin1 then utf-8.
@param replytoaddr: [string or list-of-strings] to be used for the
dita-programming/dita-access | view/ui_sign_in.py | Python | gpl-2.0 | 6,113 | 0.014068
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'sign_in.ui'
#
# Created: Mon Jun 22 00:34:42 2015
# by: PyQt5 UI code generator 5.2.1
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_sign_inDialog(object):
def setupUi(self, sign_inDialog):
sign_inDialog.setObjectName("sign_inDialog")
sign_inDialog.setWindowModality(QtCore.Qt.ApplicationModal)
sign_inDialog.resize(400, 300)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Fixed, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(sign_inDialog.sizePolicy().hasHeightForWidth())
sign_inDialog.setSizePolicy(sizePolicy)
sign_inDialog.setStyleSheet("QDialog {\n"
" background: white;\n"
" border: 1px solid #29668f;\n"
"}\n"
"QPushButton {\n"
" background:-webkit-gradient(linear, left top, left bottom, color-stop(0.05, #599bb3), color-stop(1, #408c99));\n"
" background:-moz-linear-gradient(top, #599bb3 5%, #408c99 100%);\n"
" background:-webkit-linear-gradient(top, #599bb3 5%, #408c99 100%);\n"
" background:-o-linear-gradient(top, #599bb3 5%, #408c99 100%);\n"
" background:-ms-linear-gradient(top, #599bb3 5%, #408c99 100%);\n"
" background:linear-gradient(to bottom, #599bb3 5%, #408c99 100%);\n"
" background-color:#599bb3;\n"
" border-radius:6px;\n"
" border:1px solid #29668f;\n"
" color:#ffffff;\n"
" font-family:arial;\n"
" font-size:15px;\n"
" font-weight:bold;\n"
" padding:6px 24px;\n"
" text-decoration:none;\n"
"}\n"
"QPushButton:hover {\n"
" background:-webkit-gradient(linear, left top, left bottom, color-stop(0.05, #408c99), color-stop(1, #599bb3));\n"
" background:-moz-linear-gradient(top, #408c99 5%, #599bb3 100%);\n"
" background:-webkit-linear-gradient(top, #408c99 5%, #599bb3 100%);\n"
" background:-o-linear-gradient(top, #408c99 5%, #599bb3 100%);\n"
" background:-ms-linear-gradient(top, #408c99 5%, #599bb3 100%);\n"
" background:linear-gradient(to bottom, #408c99 5%, #599bb3 100%);\n"
" background-color:#408c99;\n"
"}\n"
"QPushButton:active {\n"
" position:relative;\n"
" top:1px;\n"
"}\n"
"QLineEdit {\n"
" border-radius:6px;\n"
" border:1px solid #29668f;\n"
" font-weight:bold;\n"
" padding:6px 24px;\n"
"}\n"
"QLineEdit:disabled {\n"
" background: #8C8C8C;\n"
" border-radius:6px;\n"
" border:1px solid #29668f;\n"
" font-weight:bold;\n"
" padding:6px 24px;\n"
"}\n"
"QLabel{\n"
" font-family:arial;\n"
" font-size:15px;\n"
" font-weight:bold;\n"
" color:#599bb3;\n"
" padding:6px 6px;\n"
" text-decoration:none;\n"
"}\n"
"QLabel:disabled{\n"
" font-family:arial;\n"
" font-size:15px;\n"
" font-weight:bold;\n"
" color:grey;\n"
" padding:6px 6px;\n"
" text-decoration:none;\n"
"}\n"
"\n"
"QCheckBox{\n"
" font-family:arial;\n"
" font-size:15px;\n"
" font-weight:bold;\n"
" color:#599bb3;\n"
" padding:6px 6px;\n"
" text-decoration:none;\n"
"}\n"
"\n"
"\n"
"")
self.gridLayout_2 = QtWidgets.QGridLayout(sign_inDialog)
self.gridLayout_2.setObjectName("gridLayout_2")
spacerItem = QtWidgets.QSpacerItem(20, 40, QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Expanding)
self.gridLayout_2.addItem(spacerItem, 3, 1, 1, 1)
spacerItem1 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.gridLayout_2.addItem(spacerItem1, 1, 2, 1, 1)
spacerItem2 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.gridLayout_2.addItem(spacerItem2, 1, 0, 1, 1)
self.gridLayout = QtWidgets.QGridLayout()
self.gridLayout.setObjectName("gridLayout")
self.txt_serial = QtWidgets.QLineEdit(sign_inDialog)
self.txt_serial.setEnabled(True)
self.txt_serial.setObjectName("txt_serial")
self.gridLayout.addWidget(self.txt_serial, 0, 1, 1, 1)
self.txt_id = QtWidgets.QLineEdit(sign_inDialog)
self.txt_id.setObjectName("txt_id")
self.gridLayout.addWidget(self.txt_id, 1, 1, 1, 1)
self.lb_serial = QtWidgets.QLabel(sign_inDialog)
self.lb_serial.setEnabled(True)
self.lb_serial.setObjectName("lb_serial")
self.gridLayout.addWidget(self.lb_serial, 0, 0, 1, 1)
self.lb_id = QtWidgets.QLabel(sign_inDialog)
self.lb_id.setObjectName("lb_id")
self.gridLayout.addWidget(self.lb_id, 1, 0, 1, 1)
self.gridLayout_2.addLayout(self.gridLayout, 1, 1, 1, 1)
self.buttonBox = QtWidgets.QDialogButtonBox(sign_inDialog)
self.buttonBox.setOrientation(QtCore.Qt.Horizontal)
self.buttonBox.setStandardButtons(QtWidgets.QDialogButtonBox.Cancel|QtWidgets.QDialogButtonBox.Ok)
self.buttonBox.setObjectName("buttonBox")
self.gridLayout_2.addWidget(self.buttonBox, 4, 1, 1, 1)
spacerItem3 = QtWidgets.QSpacerItem(20, 40, QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Expanding)
self.gridLayout_2.addItem(spacerItem3, 0, 1, 1, 1)
self.cbx_laptop = QtWidgets.QCheckBox(sign_inDialog)
self.cbx_laptop.setLayoutDirection(QtCore.Qt.RightToLeft)
self.cbx_laptop.setObjectName("cbx_laptop")
self.gridLayout_2.addWidget(self.cbx_laptop, 2, 1, 1, 1)
self.retranslateUi(sign_inDialog)
self.buttonBox.accepted.connect(sign_inDialog.accept)
self.buttonBox.rejected.connect(sign_inDialog.reject)
QtCore.QMetaObject.connectSlotsByName(sign_inDialog)
def retranslateUi(self, sign_inDialog):
_translate = QtCore.QCoreApplication.translate
sign_inDialog.setWindowTitle(_translate("sign_inDialog", "Sign In Dialog"))
self.lb_serial.setText(_translate("sign_inDialog", "Serial No."))
self.lb_id.setText(_translate("sign_inDialog", "ID No."))
self.cbx_laptop.setText(_translate("sign_inDialog", "No laptop?"))
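# Hedged usage sketch (not part of the generated file): classes produced by pyuic5
# are normally driven like this:
#
#     import sys
#     from PyQt5 import QtWidgets
#     app = QtWidgets.QApplication(sys.argv)
#     dialog = QtWidgets.QDialog()
#     ui = Ui_sign_inDialog()
#     ui.setupUi(dialog)
#     dialog.show()
#     sys.exit(app.exec_())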
bitmazk/django-libs | django_libs/tests/widget_tests.py | Python | mit | 488 | 0
"""Tests for the widgets of the ``django_libs`` app."""
from django.test import TestCase
from ..widgets import ColorPickerWidget
class ColorPickerWidgetTestCase(TestCase):
"""Tests for the ``ColorPickerWidget`` widget."""
longMessage = True
def setUp(self):
self.widget = ColorPickerWidget()
def test_render_tag(self):
self.assertIn('value="ffffff"', self.widget.render('field', 'ffffff'),
msg=('Should render the input form.'))
vabs22/zulip | zilencer/views.py | Python | apache-2.0 | 4,479 | 0.003572
from __future__ import absolute_import
from django.utils.translation import ugettext as _
from django.utils import timezone
from django.http import HttpResponse, HttpRequest
from zilencer.models import Deployment, RemotePushDeviceToken, RemoteZulipServer
from zerver.decorator import has_request_variables, REQ
from zerver.lib.error_notify import do_report_error
from zerver.lib.push_notifications import send_android_push_notification, \
send_apple_push_notification
from zerver.lib.request import JsonableError
from zerver.lib.response import json_error, json_success
from zerver.lib.validator import check_dict, check_int
from zerver.models import UserProfile, PushDeviceToken, Realm
from zerver.views.push_notifications import validate_token
from typing import Any, Dict, Optional, Union, Text, cast
def validate_entity(entity):
# type: (Union[UserProfile, RemoteZulipServer]) -> None
if not isinstance(entity, RemoteZulipServer):
raise JsonableError(_("Must validate with valid Zulip server API key"))
def validate_bouncer_token_request(entity, token, kind):
# type: (Union[UserProfile, RemoteZulipServer], bytes, int) -> None
if kind not in [RemotePushDeviceToken.APNS, RemotePushDeviceToken.GCM]:
raise JsonableError(_("Invalid token type"))
validate_entity(entity)
validate_token(token, kind)
@has_request_variables
def report_error(request, deployment, type=REQ(), report=REQ(validator=check_dict([]))):
# type: (HttpRequest, Deployment, Text, Dict[str, Any]) -> HttpResponse
return do_report_error(deployment.name, type, report)
@has_request_variables
def remote_server_register_push(request, entity, user_id=REQ(),
token=REQ(), token_kind=REQ(validator=check_int), ios_app_id=None):
# type: (HttpRequest, Union[UserProfile, RemoteZulipServer], int, bytes, int, Optional[Text]) -> HttpResponse
validate_bouncer_token_request(entity, token, token_kind)
server = cast(RemoteZulipServer, entity)
# If a user logged out on a device and failed to unregister,
# we should delete any other user associations for this token
# & RemoteServer pair
RemotePushDeviceToken.objects.filter(
token=token, kind=token_kind, server=server).exclude(user_id=user_id).delete()
# Save or update
remote_token, created = RemotePushDeviceToken.objects.update_or_create(
user_id=user_id,
server=server,
kind=token_kind,
token=token,
defaults=dict(
ios_app_id=ios_app_id,
last_updated=timezone.now()))
return json_success()
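# Note (added): Django's update_or_create() looks up a row matching the keyword
# arguments outside `defaults` and either updates it with `defaults` or inserts a
# new one; it returns an (object, created) tuple, so `created` above is True only
# on the first registration of this (user, server, kind, token) combination.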
@has_request_variables
def remote_server_unregister_push(request, entity, token=REQ(),
token_kind=REQ(validator=check_int), ios_app_id=None):
# type: (HttpRequest, Union[UserProfile, RemoteZulipServer], bytes, int, Optional[Text]) -> HttpResponse
validate_bouncer_token_request(entity, token, token_kind)
server = cast(RemoteZulipServer, entity)
deleted = RemotePushDeviceToken.objects.filter(token=token,
kind=token_kind,
server=server).delete()
if deleted[0] == 0:
return json_error(_("Token does not exist"))
return json_success()
@has_request_variables
def remote_server_notify_push(request, # type: HttpRequest
entity, # type: Union[UserProfile, RemoteZulipServer]
payload=REQ(argument_type='body') # type: Dict[str, Any]
):
# type: (...) -> HttpResponse
validate_entity(entity)
server = cast(RemoteZulipServer, entity)
user_id = payload['user_id']
gcm_payload = payload['gcm_payload']
apns_payload = payload['apns_payload']
android_devices = list(RemotePushDeviceToken.objects.filter(
user_id=user_id,
kind=RemotePushDeviceToken.GCM,
server=server
))
apple_devices = list(RemotePushDeviceToken.objects.filter(
user_id=user_id,
kind=RemotePushDeviceToken.APNS,
server=server
))
if android_devices:
send_android_push_notification(android_devices, gcm_payload, remote=True)
# TODO: set badge count in a better way
if apple_devices:
send_apple_push_notification(user_id, apple_devices,
badge=1, zulip=apns_payload)
return json_success()
matthieu-meaux/DLLM | modules/DLLM/DLLMKernel/DLLMMesh.py | Python | gpl-2.0 | 3,858 | 0.023587
# -*-mode: python; py-indent-offset: 4; tab-width: 8; coding: iso-8859-1 -*-
# DLLM (non-linear Differentiated Lifting Line Model, open source software)
#
# Copyright (C) 2013-2015 Airbus Group SAS
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# https://github.com/matthieu-meaux/DLLM.git
#
# @author : Francois Gallard
# @author : Matthieu MEAUX
#
import numpy
from numpy import array, transpose, outer, ones, zeros, copy, divide, diag, dot
from numpy.linalg import norm, solve
class DLLMMesh:
"""
Class that deals with geometry for the lifting line wing solver
"""
def __init__(self, LLW,verbose = 0):
self.__LLW = LLW
self.__verbose = verbose
self.__ndv = self.get_geom().get_ndv()
self.__N = None
self.__K = None
self.__dK_dchi = None
self.recompute()
#-- Accessors
def get_airfoils(self):
return self.__LLW.get_airfoils()
def get_tag(self):
return self.__LLW.get_tag()
def get_geom(self):
return self.__LLW.get_geom()
def get_OC(self):
return self.__LLW.get_OC()
def get_grad_active(self):
return self.__LLW.get_grad_active()
def get_K(self):
return self.__K
def get_dK_dchi(self):
return self.__dK_dchi
#-- Methods
def recompute(self):
self.__N = self.get_geom().get_n_sect()
# Set computational geometry
self.__K = None
self.__dK_dchi = None
self.__setGeom()
def __setGeom(self):
'''
Sets the geometry of the wing, builds the lifting line metric matrix
'''
#V = self.get_OC().get_V()
eta = self.get_geom().get_eta()[1,:]
y = self.get_geom().get_XYZ()[1,:]
YminEta=transpose(outer(ones([self.__N+1]),y))-outer(ones([self.__N]),eta)
Kmetric=divide(ones([self.__N,self.__N+1]),YminEta)
Kmetric/=4.*numpy.pi
DdGammaDy_DGamma = zeros([self.__N+1,self.__N])
DdGammaDy_DGamma[0:self.__N,:] = diag(ones([self.__N]))
DdGammaDy_DGamma[self.__N,:] = 0.0
DdGammaDy_DGamma[1:self.__N+1,:]-= diag(ones([self.__N]))
self.__K = - dot(Kmetric,DdGammaDy_DGamma)
if self.get_grad_active():
eta_grad = self.get_geom().get_eta_grad()[1,:,:]
y_grad = self.get_geom().get_XYZ_grad()[1,:,:]
YminEta_grad=zeros((self.__N,self.__N+1,self.__ndv))
for n in xrange(self.__ndv):
YminEta_grad[:,:,n] = transpose(outer(ones([self.__N+1]),y_grad[:,n]))-outer(ones([self.__N]),eta_grad[:,n])
dKmetric_dchi=zeros((self.__N,self.__N+1,self.__ndv))
for n in xrange(self.__ndv):
dKmetric_dchi[:,:,n]=-YminEta_grad[:,:,n]/YminEta[:,:]**2
dKmetric_dchi/=4.*numpy.pi
self.__dK_dchi = zeros((self.__N,self.__N,self.__ndv))
for n in xrange(self.__ndv):
self.__dK_dchi[:,:,n]=-dot(dKmetric_dchi[:,:,n],DdGammaDy_DGamma)
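# Hedged illustration (not in the original file) of the discrete operator built in
# __setGeom(): for N = 3 sections, DdGammaDy_DGamma maps the circulations
# [G0, G1, G2] to their jumps across the N+1 = 4 vortex boundaries:
#
#     import numpy as np
#     N = 3
#     D = np.zeros((N + 1, N))
#     D[0:N, :] = np.diag(np.ones(N))
#     D[1:N + 1, :] -= np.diag(np.ones(N))
#     # D.dot([G0, G1, G2]) == [G0, G1 - G0, G2 - G1, -G2]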
Azure/azure-sdk-for-python | sdk/eventhub/azure-eventhub/tests/livetest/synctests/test_reconnect.py | Python | mit | 4,935 | 0.003647
#-------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#--------------------------------------------------------------------------
import time
import pytest
import uamqp
from uamqp import authentication, errors, c_uamqp, compat
from azure.eventhub import (
EventData,
EventHubSharedKeyCredential,
EventHubProducerClient,
EventHubConsumerClient
)
from azure.eventhub.exceptions import OperationTimeoutError
@pytest.mark.liveTest
def test_send_with_long_interval_sync(live_eventhub, sleep):
test_partition = "0"
sender = EventHubProducerClient(live_eventhub['hostname'], live_eventhub['event_hub'],
EventHubSharedKeyCredential(live_eventhub['key_name'], live_eventhub['access_key']))
with sender:
batch = sender.create_batch(partition_id=test_partition)
batch.add(EventData(b"A single event"))
sender.send_batch(batch)
if sleep:
time.sleep(250)
else:
sender._producers[test_partition]._handler._connection._conn.destroy()
batch = sender.create_batch(partition_id=test_partition)
batch.add(EventData(b"A single event"))
sender.send_batch(batch)
received = []
uri = "sb://{}/{}".format(live_eventhub['hostname'], live_eventhub['event_hub'])
sas_auth = authentication.SASTokenAuth.from_shared_access_key(
uri, live_eventhub['key_name'], live_eventhub['access_key'])
source = "amqps://{}/{}/ConsumerGroups/{}/Partitions/{}".format(
live_eventhub['hostname'],
live_eventhub['event_hub'],
live_eventhub['consumer_group'],
test_partition)
receiver = uamqp.ReceiveClient(source, auth=sas_auth, debug=False, timeout=5000, prefetch=500)
try:
receiver.open()
# receive_message_batch() returns as soon as any messages arrive, before max_batch_size
# or the timeout is reached. It may return 1, 2, or any number up to max_batch_size,
# so call it twice to ensure both events are received.
received.extend([EventData._from_message(x) for x in receiver.receive_message_batch(max_batch_size=1, timeout=5000)])
received.extend([EventData._from_message(x) for x in receiver.receive_message_batch(max_batch_size=1, timeout=5000)])
finally:
receiver.close()
assert len(received) == 2
assert list(received[0].body)[0] == b"A single event"
@pytest.mark.liveTest
def test_send_connection_idle_timeout_and_reconnect_sync(connstr_receivers):
connection_str, receivers = connstr_receivers
client = EventHubProducerClient.from_connection_string(conn_str=connection_str, idle_timeout=10)
with client:
ed = EventData('data')
sender = client._create_producer(partition_id='0')
with sender:
sender._open_with_retry()
time.sleep(11)
sender._unsent_events = [ed.message]
ed.message.on_send_complete = sender._on_outcome
with pytest.raises((uamqp.errors.ConnectionClose,
uamqp.errors.MessageHandlerError, OperationTimeoutError)):
# Mac may raise OperationTimeoutError or MessageHandlerError
sender._send_event_data()
sender._send_event_data_with_retry()
retry = 0
while retry < 3:
try:
messages = receivers[0].receive_message_batch(max_batch_size=10, timeout=10000)
if messages:
received_ed1 = EventData._from_message(messages[0])
assert received_ed1.body_as_str() == 'data'
break
except compat.TimeoutException:
retry += 1
@pytest.mark.liveTest
def test_receive_connection_idle_timeout_and_reconnect_sync(connstr_senders):
connection_str, senders = connstr_senders
client = EventHubConsumerClient.from_connection_string(
conn_str=connection_str,
consumer_group='$default',
idle_timeout=10
)
def on_event_received(event):
on_event_received.event = event
with client:
consumer = client._create_consumer("$de
|
fault", "0", "-1", on_event_received)
with consumer:
consumer._open()
time.sleep(11)
ed = EventData("Event")
senders[0].send(ed)
consumer._handler.do_work()
assert consumer._handler._connection._state == c_uamqp.ConnectionState.DISCARDING
duration = 10
now_time = time.time()
end_time = now_time + duration
while now_time < end_time:
consumer.receive()
time.sleep(0.01)
now_time = time.time()
assert on_event_received.event.body_as_str() == "Event"
summer-liu/events_cache_scripts | report/cache10.py | Python | mit | 9,568 | 0.000941
from pymongo import MongoClient
import multiprocessing
import threading
import datetime
import time
cache = MongoClient(host='10.8.8.111', port=27017, connect=False)['cache25']
db30 = MongoClient(host='10.8.8.111', port=27017, connect=False)['onionsBackupOnline']
events = db30['events']
userAttr = cache['userAttr']
deviceAttr = cache['deviceAttr']
eventFlow = cache['eventFlow']
tu = cache['tu']
td = cache['td']
NUM_OF_PROCESS = 6
NUM_OF_WORKERS = 10
START_DATE_BEIJING = datetime.datetime(2016, 1, 10, 0)
END_DATE_BEIJING = datetime.datetime(2016, 1, 17, 0)
START_DATE = START_DATE_BEIJING - datetime.timedelta(hours=8)
END_DATE = END_DATE_BEIJING - datetime.timedelta(hours=8)
n = 0
def assign():
global n
if n < num_of_hours:
n += 1
return n
else:
return 'done'
def process(start, end):
global mobile
global num_of_hours
global beginning
mobile = ['android', 'iOS']
num_of_hours = int((end-start).total_seconds() / 3600)
print multiprocessing.current_process().name + ' total hours: %d' % num_of_hours
beginning = start
threads = []
for i in range(NUM_OF_WORKERS):
t = threading.Thread(target=worker)
threads.append(t)
t.start()
for t in threads:
t.join()
print multiprocessing.current_process().name + ' finished...'
def worker():
finish = False
while not finish:
res = assign()
if res == 'done':
finish = True
else:
start_time = beginning + datetime.timedelta(hours=res-1)
end_time = beginning + datetime.timedelta(hours=res)
# eventFlow: device, user, startTime, endTime, eventFlow
pipeline_event = [
{
"$match": {
"serverTime": {"$gte": start_time, "$lt": end_time},
"platform": {"$in": ["web", "app"]},
"device": {"$exists": True, "$nin": ["", None]}
}
},
{
"$sort": {
"serverTime": 1
}
},
{
"$group": {
"_id": {"device": "$device", "user": "$user"},
"startTime": {"$first": "$serverTime"},
"endTime": {"$last": "$serverTime"},
"eventFlow": {"$push": "$eventKey"},
"platform": {"$first": "$platform2"}
}
},
{
"$project": {
"_id": 0,
"device": "$_id.device",
"user": "$_id.user",
"startTime": 1,
"endTime": 1,
"eventFlow": 1,
"platform": {"$toLower": "$platform"}
}
}
]
event_flow = list(events.aggregate(pipeline_event, allowDiskUse=True))
if len(event_flow):
eventFlow.insert_many(event_flow)
# deviceAttr: device, activateDate, recentSession, platform, users
pipeline_device = [
{
"$match":
|
{
# "platform2": {"$in": mobile},
"platform": {"$in": ["web", "app"]},
"device": {"$exists": True, "$nin": ["", None]},
"serverTime": {"$gte": start_time, "$lt": end_time}
}
},
{
"$group": {
"_id": "$device",
"activateDate": {"$min": "$serverTime"},
"recentSession": {"$max": "$serverTime"},
"users": {"$addToSet": "$user"},
"platform": {"$first": "$platform2"}
}
},
{
"$project": {
"_id": 0,
"device": "$_id",
"activateDate": 1,
"recentSession": 1,
"platform": {"$toLower": "$platform"},
"users": 1
}
}
]
device = list(events.aggregate(pipeline_device, allowDiskUse=True))
if device:
deviceAttr.insert_many(device)
# userAttr: user, activateDate, recentPCSession , recentMobileSession
pipeline_pc = [
{
"$match": {
"serverTime": {"$gte": start_time, "$lt": end_time},
"platform": {"$in": ["web", "app"]},
"platform2": 'PC',
"user": {"$exists": True}
}
},
{
"$group": {
"_id": "$user",
"activateDate": {"$min": "$serverTime"},
"recentPCSession": {"$max": "$serverTime"}
}
},
{
"$project": {
"_id": 0,
"user": "$_id",
"activateDate": 1,
"recentPCSession": 1,
}
}
]
pipeline_mobile = [
{
"$match": {
"serverTime": {"$gte": start_time, "$lt": end_time},
"platform": {"$in": ["web", "app"]},
"platform2": {"$in": mobile},
"user": {"$exists": True}
}
},
{
"$group": {
"_id": "$user",
"activateDate": {"$min": "$serverTime"},
"recentMobileSession": {"$max": "$serverTime"}
}
},
{
"$project": {
"_id": 0,
"user": "$_id",
"activateDate": 1,
"recentMobileSession": 1,
}
}
]
users_pc = list(events.aggregate(pipeline_pc, allowDiskUse=True))
users_mobile = list(events.aggregate(pipeline_mobile, allowDiskUse=True))
if users_pc:
userAttr.insert_many(users_pc)
if users_mobile:
userAttr.insert_many(users_mobile)
# print 'Finished processing data from ', start_time, ' to ', end_time
def merge_device():
print 'Start merge device......'
pipeline = [
{
"$match": {
"device": {"$exists": True, "$nin": ["", None]}
}
},
{
"$unwind": {
"path": "$users",
"preserveNullAndEmptyArrays": True
}
},
{
"$group": {
"_id": '$device',
"activateDate": {"$min": "$activateDate"},
"recentSession": {"$max": "$recentSession"},
"users": {"$addToSet": "$users"},
"platform": {"$first": "$platform"}
}
},
{
"$project": {
"_id": 0,
"device": "$_id",
"activateDate": 1,
"recentSession": 1,
"users": 1,
"platform": 1
}
}
]
devices = list(deviceAttr.aggregate(pipeline, allowDiskUse=True))
td.insert_many(devices)
deviceAttr.drop()
deviceAttr.insert_many(devices)
print 'Finished merge device....'
print '----------------------------------'
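# Hedged usage sketch (not in the original file; the script's tail is truncated
# here): a single-process run over the configured window would look roughly like:
#
#     process(START_DATE, END_DATE)
#     merge_device()
#     merge_user()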
def merge_user():
print 'Start merge user......'
pipeline_update = [
{
"$group": {
"_id": "$user",
"activateDate": {"$min": "$activateDate"},
"recentPCSes
dnxbjyj/python-basic | concurrence/multi_threading.py | Python | mit | 4,446 | 0.007172
# coding:utf-8
# Test multithreading
import threading
import time
from utils import fn_timer
from multiprocessing.dummy import Pool
import requests
from utils import urls
# Time-consuming task: listen to music
def music(name):
print 'I am listening to music {0}'.format(name)
time.sleep(1)
# Time-consuming task: watch a movie
def movie(name):
print 'I am watching movie {0}'.format(name)
time.sleep(5)
# Single-threaded: listen to 10 songs and watch 2 movies sequentially
@fn_timer
def single_thread():
for i in range(10):
music(i)
for i in range(2):
movie(i)
# Multithreaded: listen to 10 songs and watch 2 movies
@fn_timer
def multi_thread():
# List of threads
threads = []
for i in range(10):
# Create a thread; target is the task function, args is the tuple of arguments it needs
threads.append(threading.Thread(target = music,args = (i,)))
for i in range(2):
threads.append(threading.Thread(target = movie,args = (i,)))
for t in threads:
# Make it a daemon thread
t.setDaemon(True)
# Start the thread
t.start()
for t in threads:
t.join()
# With a thread pool: listen to 10 songs and watch 2 movies
@fn_timer
def use_pool():
# Set the pool size to 20; if omitted, it defaults to the number of CPU cores
pool = Pool(20)
pool.map(movie,range(2))
pool.map(music,range(10))
pool.close()
pool.join()
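# Note (added): despite its import path, multiprocessing.dummy.Pool is a *thread*
# pool (it wraps the threading module behind the multiprocessing.Pool API), which
# is why it suits the I/O-bound tasks in this module.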
# Application: download multiple web pages using a single thread
@fn_timer
def download_using_single_thread(urls):
resps = []
for url in urls:
resp = requests.get(url)
resps.append(resp)
return resps
# Application: download multiple web pages using multiple threads
@fn_timer
def download_using_multi_thread(urls):
threads = []
for url in urls:
threads.append(threading.Thread(target = requests.get,args = (url,)))
for t in threads:
t.setDaemon(True)
t.start()
for t in threads:
t.join()
# Application: download multiple web pages using a thread pool
@fn_timer
def download_using_pool(urls):
pool = Pool(20)
# The first argument is the function, the second an iterable providing its arguments
resps = pool.map(requests.get,urls)
pool.close()
pool.join()
return resps
def main():
# Test the single-threaded version
# single_thread()
# Output:
'''
I am listening to music 0
I am listening to music 1
I am listening to music 2
I am listening to music 3
I am listening to music 4
I am listening to music 5
I am listening to music 6
I am listening to music 7
I am listening to music 8
I am listening to music 9
I am watching movie 0
I am watching movie 1
[finished function:single_thread in 20.14s]
'''
# Test the multithreaded version
# multi_thread()
# Output:
'''
I am listening to music 0
I am listening to music 1
I am listening to music 2
I am listening to music 3
I am listening to music 4
I am listening to music 5
I am listening to music 6
I am listening to music 7
I am listening to music 8
I am listening to music 9
I am watching movie 0
I am watching movie 1
[finished function:multi_thread in 5.02s]
'''
# Test the thread pool version
# use_pool()
# Output:
'''
I am listening to music 0
I am listening to music 1
I am listening to music 2
I am listening to music 3
I am listening to music 4
I am listening to music 5
I am listening to music 6
I am listening to music 7
I am listening to music 8
I am listening to music 9
I am watching movie 0
I am watching movie 1
[finished function:use_pool in 6.12s]
'''
# 1. Using a single thread
# resps = download_using_single_thread(urls)
# print len(resps)
# Output:
'''
[finished function:download_using_single_thread in 6.18s]
20
'''
# 2. Using multiple threads
# download_using_multi_thread(urls)
# Output:
'''
[finished function:download_using_multi_thread in 0.73s]
'''
# 3. Using a thread pool
resps = download_using_pool(urls)
print len(resps)
# Output:
'''
[finished function:download_using_pool in 0.84s]
20
'''
if __name__ == '__main__':
main()
bianchimro/django-search-views | sample_app/manage.py | Python | mit | 808 | 0
#!/usr/bin/env python
import os
import sys
if __name__ == "_
|
_main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "sample_app.settings")
try:
from django.core.management import execute_from_command_line
except ImportError:
# The above import may fail for some other reason. Ensure that the
# issue is really that Django is missing to avoid masking other
# exceptions on Python 2.
try:
import django
except ImportError:
raise ImportError(
"Couldn't import Django. Are you sure it's installed and "
"available on your PYTHONPATH environment variable? Did you "
"forget to activate a virtual environment?"
)
raise
execute_from_command_line(sys.argv)
jamalex/kolibri | kolibri/tasks/management/commands/base.py | Python | mit | 2,905 | 0.000688
from collections import namedtuple
from django.core.management.base import BaseCommand
from tqdm import tqdm
Progress = namedtuple(
'Progress',
[
'progress_fraction',
'message',
'extra_data',
'level',
]
)
class ProgressTracker():
def __init__(self, total=100, level=0, update_callback=None):
# set default values
self.progress = 0
self.message = ""
self.extra_data = None
# store provided arguments
self.total = total
self.level = level
self.update_callback = update_callback
# initialize the tqdm progress bar
self.progressbar = tqdm(total=total)
def update_progress(self, increment=1, message="", extra_data=None):
self.progressbar.update(increment)
self.progress += increment
self.message = message
self.extra_data = extra_data
if callable(self.update_callback):
p = self.get_progress()
self.update_callback(p.progress_fraction, p)
def get_progress(self):
return Progress(
progress_fraction=self.progress / float(self.total),
message=self.message,
extra_data=self.extra_data,
level=self.level,
)
def __enter__(self):
return self.update_progress
def __exit__(self, *exc_details):
if self.progressbar:
self.progressbar.close()
class AsyncCommand(BaseCommand):
"""A management command with added convenience functions for displaying
progress to the user.
Rather than implementing handle() (as is done for BaseCommand), subclasses must
implement handle_async(), which accepts the same arguments as handle().
If run from the command line, AsyncCommand displays a progress bar to the
user. If run asynchronously through kolibri.tasks.schedule_command(),
AsyncCommand sends results through the Progress class to the main Django
process. Anyone who knows the task id for the command instance can check
the intermediate progress by looking at the task's AsyncResult.result
variable.
"""
def __init__(self, *args, **kwargs):
self.progresstrackers = []
def _update_all_progress(self, progress_fraction, progress):
if callable(self.update_progress):
progress_list = [p.get_progress() for p in self.progresstrackers]
self.update_progress(progress_list[0].progress_fraction, progress_list)
def handle(self, *args, **options):
self.update_progress = options.pop("update_state", None)
return self.handle_async(*args, **options)
def start_progress(self, total=100):
level = len(self.progresstrackers)
tracker = ProgressTracker(total=total, level=level, update_callback=self._update_all_progress)
self.progresstrackers.append(tracker)
return tracker
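# Hedged sketch (hypothetical command, not in the original file) of how a subclass
# is meant to use AsyncCommand: implement handle_async() and report progress through
# the tracker from start_progress(), whose __enter__ yields update_progress:
#
#     class ExampleCommand(AsyncCommand):
#         def handle_async(self, *args, **options):
#             with self.start_progress(total=10) as update_progress:
#                 for _ in range(10):
#                     update_progress(increment=1, message="working")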
benhoff/ava | src/manage.py | Python | gpl-3.0 | 246 | 0
#!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "ava.
|
settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
ovcrash/geoip-attack-map | DataServer/const.py | Python | gpl-3.0 | 1,888 | 0.039725
META = [{
'lookup': 'city',
'tag': 'city',
'path': ['names','en'],
},{
'lookup': 'continent',
'tag': 'continent',
'path': ['names','en'],
},{
'lookup': 'continent_code',
'tag': 'continent',
'path': ['code'],
},{
'lookup': 'country',
'tag': 'country',
'path': ['names','en'],
},{
'lookup': 'iso_code',
'tag': 'country',
'path': ['iso_code'],
},{
'lookup': 'latitude',
'tag': 'location',
'path': ['latitude'],
},{
'lookup': 'longitude',
'tag': 'location',
'path': ['longitude'],
},{
'lookup': 'metro_code',
'tag': 'location',
'path': ['metro_code'],
},{
'lookup': 'postal_code',
'tag': 'postal',
'path': ['code'],
}]
PORTMAP = {
0:"DoS", # Denial of Service
1:"ICMP", # ICMP
20:"FTP", # FTP Data
21:"FTP", # FTP Control
22:"SSH", # SSH
23:"TELNET", # Telnet
25:"EMAIL", # SMTP
43:"WHOIS", # Whois
53:"DNS", # DNS
80:"HTTP", # HTTP
88:"AUTH", # Kerberos
109:"EMAIL", # POP v2
110:"EMAIL", # POP v3
115:"FTP", # SFTP
118:"SQL", # SQL
143:"EMAIL", # IMAP
156:"SQL", # SQL
161:"SNMP", # SNMP
220:"EMAIL", # IMAP v3
389:"AUTH", # LDAP
443:"HTTPS", # HTTPS
445:"SMB", # SMB
636:"AUTH", # LDAP of SSL/TLS
1433:"SQL", # MySQL Server
1434:"SQL", # MySQL Monitor
3306:"SQL", # MySQL
3389:"RDP", # RDP
5900:"RDP", # VNC:0
5901:"RDP", # VNC:1
5902:"RDP", # VNC:2
5903:"RDP", # VNC:3
8080:"HTTP", # HTTP Alternative
}
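# Hedged sketch (not in the original file) of how the META table can be applied to
# a GeoIP2 record dict: each entry names the record section ('tag') and the key
# path into it ('path'), keyed by the output field name ('lookup'):
#
#     def flatten_geoip(record):
#         out = {}
#         for meta in META:
#             value = record.get(meta['tag'], {})
#             for key in meta['path']:
#                 value = value.get(key) if isinstance(value, dict) else None
#             out[meta['lookup']] = value
#         return out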
wettenhj/mytardis-swift-uploader | openrc.py | Python | bsd-3-clause | 994 | 0.001006
#!/usr/bin/python
import os
# With the addition of Keystone, to use an openstack cloud you should
# authenticate against keystone, which returns a **Token** and **Service
# Catalog**. The catalog contains the endpoint for all services the
# user/tenant has access to - including nova, glance, keystone, swift.
#
# *NOTE*: Using the 2.0 *auth api* does not mean that compute api is 2.0. We
# will use the 1.1 *compute api*
os.environ['OS_AUTH_URL'] = "https://keystone.rc.nectar.org.au:5000/v2.0/"
# With the addition of Keystone we have standardized on the term **tenant**
# as the entity that owns the resources.
os.environ['OS_TENANT_ID'] = "123456789012345678901234567890"
os.environ['OS_TENANT_NAME'] = "tenant_name"
# In addition to the owning entity (tenant), openstack stores the entity
# performing the action as the **user**.
os.environ['OS_USERNAME'] = "joe.bloggs@uni.edu.au"
# With Keystone you pass the keystone password.
os.environ['OS_PASSWORD'] = "????????????????????
|
"
zsjohny/jumpserver | apps/assets/api/node.py | Python | gpl-2.0 | 8,992 | 0.00045
# ~*~ coding: utf-8 ~*~
from collections import namedtuple
from rest_framework import status
from rest_framework.serializers import ValidationError
from rest_framework.response import Response
from django.utils.translation import ugettext_lazy as _
from django.shortcuts import get_object_or_404, Http404
from common.utils import get_logger, get_object_or_none
from common.tree import TreeNodeSerializer
from orgs.mixins.api import OrgModelViewSet
from orgs.mixins import generics
from ..hands import IsOrgAdmin
from ..models import Node
from ..tasks import (
update_node_assets_hardware_info_manual,
test_node_assets_connectivity_manual,
)
from .. import serializers
logger = get_logger(__file__)
__all__ = [
'NodeViewSet', 'NodeChildrenApi', 'NodeAssetsApi',
'NodeAddAssetsApi', 'NodeRemoveAssetsApi', 'NodeReplaceAssetsApi',
'NodeAddChildrenApi', 'NodeListAsTreeApi',
'NodeChildrenAsTreeApi',
'NodeTaskCreateApi',
]
class NodeViewSet(OrgModelViewSet):
model = Node
filter_fields = ('value', 'key', 'id')
search_fields = ('value', )
permission_classes = (IsOrgAdmin,)
serializer_class = serializers.NodeSerializer
# Only root-level nodes can be created directly; child nodes must be created via the children API
def perform_create(self, serializer):
child_key = Node.org_root().get_next_child_key()
serializer.validated_data["key"] = child_key
serializer.save()
def perform_update(self, serializer):
node = self.get_object()
if node.is_org_root() and node.value != serializer.validated_data['value']:
msg = _("You can't update the root node name")
raise ValidationError({"error": msg})
return super().perform_update(serializer)
def destroy(self, request, *args, **kwargs):
node = self.get_object()
if node.has_children_or_has_assets():
error = _("Deletion failed and the node contains children or assets")
return Response(data={'error': error}, status=status.HTTP_403_FORBIDDEN)
return super().destroy(request, *args, **kwargs)
class NodeListAsTreeApi(generics.ListAPIView):
"""
Get the node list as a tree
[
{
"id": "",
"name": "",
"pId": "",
"meta": ""
}
]
"""
model = Node
permission_classes = (IsOrgAdmin,)
serializer_class = TreeNodeSerializer
@staticmethod
def to_tree_queryset(queryset):
queryset = [node.as_tree_node() for node in queryset]
return queryset
def filter_queryset(self, queryset):
queryset = super().filter_queryset(queryset)
queryset = self.to_tree_queryset(queryset)
return queryset
class NodeChildrenApi(generics.ListCreateAPIView):
permission_classes = (IsOrgAdmin,)
serializer_class = serializers.NodeSerializer
instance = None
is_initial = False
def initial(self, request, *args, **kwargs):
self.instance = self.get_object()
return super().initial(request, *args, **kwargs)
def perform_create(self, serializer):
data = serializer.validated_data
_id = data.get("id")
value = data.get("value")
if not value:
value = self.instance.get_next_child_preset_name()
node = self.instance.create_child(value=value, _id=_id)
# Avoid querying the full value
node._full_value = node.value
serializer.instance = node
def get_object(self):
pk = self.kwargs.get('pk') or self.request.query_params.get('id')
key = self.request.query_params.get("key")
if not pk and not key:
node = Node.org_root()
self.is_initial = True
return node
if pk:
node = get_object_or_404(Node, pk=pk)
else:
node = get_object_or_404(Node, key=key)
return node
def get_queryset(self):
query_all = self.request.query_params.get("all", "0") == "all"
if not self.instance:
return Node.objects.none()
if self.is_initial:
with_self = True
else:
with_self = False
if query_all:
queryset = self.instance.get_all_children(with_self=with_self)
else:
queryset = self.instance.get_children(with_self=with_self)
return queryset
class NodeChildrenAsTreeApi(NodeChildrenApi):
"""
Return the node's children as a tree,
[
{
"id": "",
"name": "",
"pId": "",
"meta": ""
}
]
"""
model = Node
serializer_class = TreeNodeSerializer
http_method_names = ['get']
def get_queryset(self):
queryset = super().get_queryset()
queryset = [node.as_tree_node() for node in queryset]
queryset = self.add_assets_if_need(queryset)
queryset = sorted(queryset)
return queryset
def add_assets_if_need(self, queryset):
include_assets = self.request.query_params.get('assets', '0') == '1'
if not include_assets:
return queryset
assets = self.instance.get_assets().only(
"id", "hostname", "ip", "os",
"org_id", "protocols",
)
for asset in assets:
queryset.append(asset.as_tree_node(self.instance))
return queryset
def check_need_refresh_nodes(self):
if self.request.query_params.get('refresh', '0') == '1':
Node.refresh_nodes()
class NodeAssetsApi(generics.ListAPIView):
permission_classes = (IsOrgAdmin,)
serializer_class = serializers.AssetSerializer
def get_queryset(self):
node_id = self.kwargs.get('pk')
query_all = self.request.query_params.get('all')
instance = get_object_or_404(Node, pk=node_id)
if query_all:
return instance.get_all_assets()
else:
return instance.get_assets()
class NodeAddChildrenApi(generics.UpdateAPIView):
model = Node
permission_classes = (IsOrgAdmin,)
serializer_class = serializers.NodeAddChildrenSerializer
instance = None
def put(self, request, *args, **kwargs):
instance = self.get_object()
nodes_id = request.data.get("nodes")
children = [get_object_or_none(Node, id=pk) for pk in nodes_id]
for node in children:
if not node:
continue
node.parent = instance
return Response("OK")
class NodeAddAssetsApi(generics.UpdateAPIView):
model = Node
serializer_class = serializers.NodeAssetsSerializer
permission_classes = (IsOrgAdmin,)
instance = None
def perform_update(self, serializer):
assets = serializer.validated_data.get('assets')
instance = self.get_object()
instance.assets.add(*tuple(assets))
class NodeRemoveAssetsApi(generics.UpdateAPIView):
model = Node
serializer_class = serializers.NodeAssetsSerializer
permission_classes = (IsOrgAdmin,)
instance = None
def perform_update(self, serializer):
assets = serializer.validated_data.get('assets')
instance = self.get_object()
if instance != Node.org_root():
instance.assets.remove(*tuple(assets))
else:
assets = [asset for asset in assets if asset.nodes.count() > 1]
instance.assets.remove(*tuple(assets))
class NodeReplaceAssetsApi(generics.UpdateAPIView):
model = Node
serializer_class = serializers.NodeAssetsSerializer
permission_classes = (IsOrgAdmin,)
instance = None
def perform_update(self, serializer):
assets = serializer.validated_data.get('assets')
instance = self.get_object()
for asset in assets:
asset.nodes.set([instance])
class NodeTaskCreateApi(generics.CreateAPIView):
model = Node
serializer_class = serializers.NodeTaskSerializer
permission_classes = (IsOrgAdmin,)
def get_object(self):
node_id = self.kwargs.get('pk')
node = get_object_or_none(self.model, id=node_id)
return node
@staticmethod
def set_serializer_data(s, task):
data = getattr(s, '_data', {})
data["task"] = task.id
setattr(s, '_data', data)
@staticmethod
def refresh_nodes_cache():
tickbg/skaer | naming/test_parser.py | Python | gpl-3.0 | 2,913 | 0.00309
import parser
import unittest
import sys
class TestVideoParser(unittest.TestCase):
def test_parse_video(self):
if sys.platform.startswith('win'):
path = '\\server\\Movies\\Brave (2007)\\Brave (2006).mkv'
else:
path = '/server/Movies/Brave (2007)/Brave (2006).mkv'
video_info = parser.parse_video(path)
self.assertEqual(video_info['name'], 'Brave')
self.assertEqual(video_info['container'], 'mkv')
self.assertEqual(video_info['year'], 2006)
class TestVideoStackParser(unittest.TestCase):
def test_parse_simple_stack(self):
files = (
'Bad Boys (2006) part1.mkv',
'Bad Boys (2006) part2.mkv',
'Bad Boys (2006) part3.mkv',
'Bad Boys (2006) part4.mkv',
'Bad Boys (2006)-trailer.mkv',
)
stack = parser.parse_video_stack(files)
print(stack)
self.assertEqual(len(stack), 1)
def test_parse_dual_stacks(self):
files = (
'Bad Boys (2006) part1.mkv',
'Bad Boys (2006) part2.mkv',
'Bad Boys (2006) part3.mkv',
'Bad Boys (2006) part4.mkv',
'Bad Boys (2006)-trailer.mkv',
'300 (2006) part1.mkv',
'300 (2006) part2.mkv',
'300 (2006) part3.mkv',
'300 (2006)-trailer.mkv'
)
stacks = parser.parse_video_stack(files)
for s in stacks:
print(s)
self.assertEqual(len(stacks), 2)
def test_dirty_names(self):
files = (
"Bad Boys (2006).part1.stv.unrated.multi.1080p.bluray.x264-rough.mkv",
"Bad Boys (2006).part2.stv.unrated.multi.1080p.bluray.x264-rough.mkv",
"Bad Boys (2006).part3.stv.unrated.multi.1080p.blu
|
ray.x264-rough.mkv",
"Bad Boys (2006).part4.stv.unrated.multi.1080p.bluray.x264-rough.mkv",
"Bad Boys (2006)-trailer.mkv"
)
stack = parser.parse_video_stack(files)
print(stack)
self.assertEqual(len(stack), 1)
#TestStackInfo(result.Stacks[0], "Bad Boys (2006).stv.unrated.multi.1080p.bluray.x264-rough", 4);
def test_parse_mixed_expressions(self):
files = (
'Bad Boys (2006) part1.mkv',
'Bad Boys (2006) part2.mkv',
'Bad Boys (2006) part3.mkv',
'Bad Boys (2006) part4.mkv',
'Bad Boys (2006)-trailer.mkv',
'300 (2006) parta.mkv',
'300 (2006) partb.mkv',
'300 (2006) partc.mkv',
'300 (2006) partd.mkv',
'300 (2006)-trailer.mkv',
'300a.mkv',
'300b.mkv',
'300c.mkv',
'300-trailer.mkv'
)
stacks = parser.parse_video_stack(files)
for s in stacks:
print(s)
self.assertEqual(len(stacks), 3)
if __name__ == '__main__':
unittest.main()
incuna/feincms-extensions | feincms_extensions/render_json.py | Python | bsd-2-clause | 1,559 | 0.000641
from django.core import checks
from feincms import extensions
class Extension(extensions.Extension):
def handle_model(self):
cls = self.model
def render_json(self, request):
"""Render the feincms regions into a dictionary."""
def region_data(region):
content_list = getattr(self.content, region.key)
return [content.json(request=request) for content in content_list]
regions = self.template.regions
return {region.key: region_data(region) for region in regions}
cls.add_to_class('render_json', render_json)
@classmethod
def check(cls, **kwargs):
errors = super(self.model, cls).check(**kwargs)
errors.extend(cls._check_json_method())
return errors
@classmethod
def _check_json_method(cls, **kwargs):
"""Check all registered content types h
|
ave a `.json` method."""
message = (
'Feincms content has no `json` method, but the ' +
'`render_json` extension is active for model `{}`.'
).format(cls)
for content_type in cls._feincms_content_types:
if not hasattr(content_type, 'json'):
yield checks.Error(
message,
obj=content_type,
id='feincms_extensions.E001',
)
cls.add_to_class('check', check)
cls.add_to_class('_check_json_method', _check_json_method)
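# Hedged illustration (hypothetical regions and content types): for a page whose
# template defines 'main' and 'sidebar' regions, render_json() returns a dict like
#
#     page.render_json(request)
#     # -> {'main': [<content.json() dict>, ...], 'sidebar': [...]}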
leppa/home-assistant | homeassistant/components/pi_hole/sensor.py | Python | apache-2.0 | 2,321 | 0.000431
"""Support for getting statistical data from a Pi-hole system."""
import logging
from homeassistant.helpers.entity import Entity
from .const import (
ATTR_BLOCKED_DOMAINS,
DOMAIN as PIHOLE_DOMAIN,
SENSOR_DICT,
SENSOR_LIST,
)
LOGGER = logging.getLogger(__name__)
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Set up t
|
he pi-hole sensor."""
if discovery_info is None:
return
sensors = []
for pi_hole in hass.data[PIHOLE_DOMAIN].values():
for sensor in [
PiHoleSensor(pi_hole, sensor_name) for sensor_name in SENSOR_LIST
]:
sensors.append(sensor)
async_add_entities(sensors, True)
class PiHoleSensor(Entity):
"""Representation of a Pi-hole sensor."""
def __init__(self, pi_hole, sensor_name):
"""Initialize a Pi-hole sensor."""
self.pi_hole = pi_hole
self._name = pi_hole.name
self._condition = sensor_name
variable_info = SENSOR_DICT[sensor_name]
self._condition_name = variable_info[0]
self._unit_of_measurement = variable_info[1]
self._icon = variable_info[2]
self.data = {}
@property
def name(self):
"""Return the name of the sensor."""
return f"{self._name} {self._condition_name}"
@property
def icon(self):
"""Icon to use in the frontend, if any."""
return self._icon
@property
def unit_of_measurement(self):
"""Return the unit the value is expressed in."""
return self._unit_of_measurement
@property
def state(self):
"""Return the state of the device."""
try:
return round(self.data[self._condition], 2)
except TypeError:
return self.data[self._condition]
@property
def device_state_attributes(self):
"""Return the state attributes of the Pi-Hole."""
return {ATTR_BLOCKED_DOMAINS: self.data["domains_being_blocked"]}
@property
def available(self):
"""Could the device be accessed during the last update call."""
return self.pi_hole.available
async def async_update(self):
"""Get the latest data from the Pi-hole API."""
await self.pi_hole.async_update()
self.data = self.pi_hole.api.data
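# Hedged note (the real values live in this component's const.py; the entry shown
# is illustrative): SENSOR_DICT maps a sensor key to (friendly name, unit, icon),
# which __init__ above unpacks:
#
#     SENSOR_DICT = {
#         'ads_blocked_today': ['Ads Blocked Today', 'ads', 'mdi:close-octagon-outline'],
#         ...
#     }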
appsembler/edx-platform | openedx/core/djangoapps/user_api/accounts/tests/test_views.py | Python | agpl-3.0 | 46,567 | 0.002966
# -*- coding: utf-8 -*-
"""
Test cases to cover Accounts-related behaviors of the User API application
"""
import datetime
import hashlib
import json
from copy import deepcopy
import unittest
import ddt
import mock
import pytz
import six
from django.conf import settings
from django.test.testcases import TransactionTestCase
from django.test.utils import override_settings
from django.urls import reverse
from rest_framework.test import APIClient, APITestCase
from six.moves import range
from openedx.core.djangoapps.oauth_dispatch.jwt import create_jwt_for_user
from openedx.core.djangoapps.user_api.accounts import ACCOUNT_VISIBILITY_PREF_KEY
from openedx.core.djangoapps.user_api.models import UserPreference
from openedx.core.djangoapps.user_api.preferences.api import set_user_preference
from openedx.core.djangolib.testing.utils import CacheIsolationTestCase, skip_unless_lms
from student.models import PendingEmailChange, UserProfile
from student.tests.factories import TEST_PASSWORD, UserFactory
from .. import ALL_USERS_VISIBILITY, CUSTOM_VISIBILITY, PRIVATE_VISIBILITY
TEST_PROFILE_IMAGE_UPLOADED_AT = datetime.datetime(2002, 1, 9, 15, 43, 1, tzinfo=pytz.UTC)
# this is used in one test to check the behavior of profile image url
# generation with a relative url in the config.
TEST_PROFILE_IMAGE_BACKEND = deepcopy(settings.PROFILE_IMAGE_BACKEND)
TEST_PROFILE_IMAGE_BACKEND['options']['base_url'] = '/profile-images/'
TEST_BIO_VALUE = u"Tired mother of twins"
TEST_LANGUAGE_PROFICIENCY_CODE = u"hi"
class UserAPITestCase(APITestCase):
"""
The base class for all tests of the User API
"""
def setUp(self):
super(UserAPITestCase, self).setUp()
self.anonymous_client = APIClient()
self.different_user = UserFactory.create(password=TEST_PASSWORD)
self.different_client = APIClient()
self.staff_user = UserFactory(is_staff=True, password=TEST_PASSWORD)
self.staff_client = APIClient()
self.user = UserFactory.create(password=TEST_PASSWORD) # will be assigned to self.client by default
def login_client(self, api_client, user):
"""Helper method for getting the client and user and logging in. Returns client. """
client = getattr(self, api_client)
user = getattr(self, user)
client.login(username=user.username, password=TEST_PASSWORD)
return client
def send_patch(self, client, json_data, content_type="application/merge-patch+json", expected_status=200):
"""
Helper method for sending a patch to the server, defaulting to application/merge-patch+json content_type.
Verifies the expected status and returns the response.
"""
# pylint: disable=no-member
response = client.patch(self.url, data=json.dumps(json_data), content_type=content_type)
self.assertEqual(expected_status, response.status_code)
return response
def send_get(self, client, query_parameters=None, expected_status=200):
"""
Helper method for sending a GET to the server. Verifies the expected status and returns the response.
"""
url = self.url + '?' + query_parameters if query_parameters else self.url # pylint: disable=no-member
response = client.get(url)
self.assertEqual(expected_status, response.status_code)
return response
# pylint: disable=no-member
def send_put(self, client, json_data, content_type="application/json", expected_status=204):
"""
Helper method for sending a PUT to the server. Verifies the expected status and returns the response.
"""
response = client.put(self.url, data=json.dumps(json_data), content_type=content_type)
self.assertEqual(expected_status, response.status_code)
return response
# pylint: disable=no-member
def send_delete(self, client, expected_status=204):
"""
Helper method for sending a DELETE to the server. Verifies the expected status and returns the response.
"""
response = client.delete(self.url)
self.assertEqual(expected_status, response.status_code)
return response
def create_mock_profile(self, user):
"""
Helper method that creates a mock profile for the specified user
:return:
"""
legacy_profile = UserProfile.objects.get(id=user.id)
legacy_profile.country = "US"
legacy_profile.state = "MA"
legacy_profile.level_of_education = "m"
legacy_profile.year_of_birth = 2000
legacy_profile.goals = "world peace"
legacy_profile.mailing_address = "Park Ave"
legacy_profile.gender = "f"
legacy_profile.bio = TEST_BIO_VALUE
legacy_profile.profile_image_uploaded_at = TEST_PROFILE_IMAGE_UPLOADED_AT
legacy_profile.language_proficiencies.create(code=TEST_LANGUAGE_PROFICIENCY_CODE)
legacy_profile.phone_number = "+18005555555"
legacy_profile.save()
def _verify_profile_image_data(self, data, has_profile_image):
"""
Verify the profile image data in a GET response for self.user
corresponds to whether the user has or hasn't set a profile
image.
"""
template = '{root}/{filename}_{{size}}.{extension}'
if has_profile_image:
url_root = 'http://example-storage.com/profile-images'
filename = hashlib.md5(('secret' + self.user.username).encode('utf-8')).hexdigest()
file_extension = 'jpg'
template += '?v={}'.format(TEST_PROFILE_IMAGE_UPLOADED_AT.strftime("%s"))
else:
url_root = 'http://testserver/static'
filename = 'default'
file_extension = 'png'
template = template.format(root=url_root, filename=filename, extension=file_extension)
self.assertEqual(
data['profile_image'],
{
'has_image': has_profile_image,
'image_url_full': template.format(size=50),
'image_url_small': template.format(size=10),
}
)
@ddt.ddt
@skip_unless_lms
class TestOwnUsernameAPI(CacheIsolationTestCase, UserAPITestCase):
"""
Unit tests for the Accounts API.
"""
ENABLED_CACHES = ['default']
def setUp(self):
super(TestOwnUsernameAPI, self).setUp()
self.url = reverse("own_username_api")
def _verify_get_own_username(self, queries, expected_status=200):
"""
Internal helper to perform the actual assertion
"""
if settings.TAHOE_ALWAYS_SKIP_TEST: # Skip query checks
response = self.send_get(self.client, expected_status=expected_status)
else:
with self.assertNumQueries(queries):
response = self.send_get(self.client, expected_status=expected_status)
if expected_status == 200:
data = response.data
self.assertEqual(1, len(data))
self.assertEqual(self.user.username, data["username"])
def test_get_username(self):
"""
Test that a client (logged in) can get her own username.
"""
self.client.login(username=self.user.username, password=TEST_PASSWORD)
self._verify_get_own_username(16)
def test_get_username_inactive(self):
"""
Test that a logged-in client can get their
username, even if inactive.
"""
self.client.login(username=self.user.username, password=TEST_PASSWORD)
self.user.is_active = False
self.user.save()
self._verify_get_own_username(16)
def test_get_username_not_logged_in(self):
"""
Test that a client (not logged in) gets a 401
when trying to retrieve their username.
"""
# verify that the endpoint is inaccessible when not logged in
self._verify_get_own_username(13, expected_status=401)
@ddt.ddt
@skip_unless_lms
@mock.patch('openedx.core.djangoapps.user_api.accounts.image_helpers._PROFILE_IMAGE_SIZES', [50, 10])
@mock.patch.dict(
'django.conf.settings.PROFILE_IMAGE_SIZES_MAP',
{'full': 50, 'small': 10},
clear=True
)
class TestAccount
|
andyneff/voxel-globe
|
voxel_globe/ingest/views.py
|
Python
|
mit
| 4,787
| 0.022143
|
import distutils.dir_util
import os
from django.shortcuts import render
from django.http import HttpResponse
from django.shortcuts import render_to_response
from django.template import RequestContext
### Rest API setup
import rest_framework.routers
import rest_framework.viewsets
import rest_framework.filters
import voxel_globe.ingest.serializers
from .tools import METADATA_TYPES, PAYLOAD_TYPES
from voxel_globe.ingest import models
router = rest_framework.routers.DefaultRouter()
class IngestViewSet(rest_framework.viewsets.ModelViewSet):
  filter_backends = (rest_framework.filters.DjangoFilterBackend,)
  filter_fields = ['id', 'name']  #, 'directory', 'file'
  def perform_create(self, serializer):
    serializer.save(owner=self.request.user)
    super(IngestViewSet, self).perform_create(serializer)
  def get_queryset(self):
    return super(IngestViewSet, self).get_queryset().filter(owner=self.request.user)
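# ViewSetFactory builds a ModelViewSet subclass per model at runtime via
# type(); each generated class inherits IngestViewSet's owner-based filtering.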
def ViewSetFactory(model, serializer):
return type('ViewSet_%s' % model._meta.model_name, (IngestViewSet,), {'queryset':model.objects.all(), 'serializer_class':serializer})
router.register(models.File._meta.model_name, ViewSetFactory(models.File, voxel_globe.ingest.serializers.FileSerializer))
#router.register(models.Directory._meta.model_name, ViewSetFactory(models.Directory, voxel_globe.ingest.serializers.DirectorySerializer))
#router.register(models.Directory._meta.model_name+'_nest', ViewSetFactory(models.Directory, voxel_globe.ingest.serializers.NestFactory(voxel_globe.ingest.serializers.DirectorySerializer)))
router.register(models.UploadSession._meta.model_name, ViewSetFactory(models.UploadSession, voxel_globe.ingest.serializers.UploadSessionSerializer))
#router.register(models.UploadSession._meta.model_name+'_nest', ViewSetFactory(models.UploadSession, voxel_globe.ingest.serializers.NestFactory(voxel_globe.ingest.serializers.UploadSessionSerializer)));
#TODO: Pass upload types, then all the upload type types
#Need a new "New session" panel to handle adding all sorts of upload types
def chooseSession(request):
return render_to_response('ingest/html/chooseSession.html',
{'payload_types': PAYLOAD_TYPES,
'metadata_types': METADATA_TYPES},
context_instance=RequestContext(request))
def addFiles(request):
upload_session_id = int(request.GET['upload'])
uploadSession = models.UploadSession.objects.get(id=upload_session_id)
  testFile = models.File(name='Newfile', session=uploadSession, owner=request.user)
  testFile.save()
return render_to_response('ingest/html/addFiles.html',
{'uploadSession':uploadSession,
'testFile':testFile},
context_instance=RequestContext(request))
def upload(request):
try:
uploadSession_id = request.POST['uploadSession']
except:
    uploadSession = models.UploadSession(name='failsafe', owner=request.user)
    uploadSession.save()
    uploadSession.name = str(uploadSession.id)
    uploadSession.save()
uploadSession_id = uploadSession.id
try:
testFile_id = request.POST['testFile']
except:
testFile_id = 'failsafe'
s = 'ok<br>'
saveDir = os.path.join(os.environ['VIP_TEMP_DIR'], 'ingest', str(uploadSession_id))
distutils.dir_util.mkpath(saveDir)
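  # stream each uploaded file to disk chunk by chunk so large uploads are
  # never held fully in memory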
for f in request.FILES:
s += request.FILES[f].name
with open(os.path.join(saveDir, request.FILES[f].name), 'wb') as fid:
for c in request.FILES[f].chunks():
fid.write(c)
  return HttpResponse(s)
def ingestFolder(request):
from celery.canvas import chain
from vsi.tools.dir_util import mkdtemp
import voxel_globe.ingest.tasks
uploadSession_id = request.POST['uploadSession']
#directories = models.Directory.objects.filter(uploadSession_id = uploadSession_id)
#Code not quite done, using failsafe for now.
  uploadSession = models.UploadSession.objects.get(id=uploadSession_id)
sessionDir = os.path.join(os.environ['VIP_TEMP_DIR'], 'ingest', str(uploadSession.id))
#imageDir = os.path.join(os.environ['VIP_IMAGE_SERVER_ROOT'], str(uploadSession.id))
#if os.path.exists(imageDir):
  imageDir = mkdtemp(dir=os.environ['VIP_IMAGE_SERVER_ROOT'], prefix='img')
task0 = voxel_globe.ingest.tasks.move_data.si(sessionDir, imageDir)
task1 = PAYLOAD_TYPES[uploadSession.payload_type].ingest.si(uploadSession_id, imageDir)
  task2 = METADATA_TYPES[uploadSession.metadata_type].ingest.s(uploadSession_id, imageDir)
task3 = voxel_globe.ingest.tasks.cleanup.si(uploadSession_id)
tasks = task0 | task1 | task2 | task3 #create chain
result = tasks.apply_async()
return render(request, 'ingest/html/ingest_started.html',
                {'task_id':result.task_id})
|
lukeroge/CloudbotX
|
plugins/help.py
|
Python
|
gpl-3.0
| 2,905
| 0.002754
|
import asyncio
import re
from operator import attrgetter
from stratus.loader import hook
plugin_info = {
"plugin_category": "core",
"command_category_name": "Informational"
}
@asyncio.coroutine
@hook.command("help", autohelp=False)
def help_command(text, conn, bot, notice, has_permission):
"""[command] - gives help for [command], or lists all available commands if no command is specified
:type text: str
:type conn: stratus.connection.Connection
:type bot: stratus.engine.Stratus
"""
if text:
searching_for = text.lower().strip()
if not re.match(r'^\w+$', searching_for):
notice("Invalid command name '{}'".format(text))
return
else:
searching_for = None
if searching_for:
if searching_for in bot.loader.commands:
doc = bot.loader.commands[searching_for].doc
if doc:
message = "{}{} {}".format(conn.config["command_prefix"], searching_for, doc)
notice(message)
else:
notice("Command {} has no additional documentation.".format(searching_for))
else:
notice("Unknown command '{}'".format(searching_for))
else:
# list of lines to send to the user
lines = []
# current line, containing words to join with " "
current_line = []
# current line length, to count how long the current line will be when joined with " "
current_line_length = 0
for plugin in sorted(set(bot.loader.commands.values()), key=attrgetter("name")):
# use set to remove duplicate commands (from multiple aliases), and sorted to sort by name
            if plugin.permissions:
                # check permissions
                allowed = False
for perm in plugin.permissions:
if has_permission(perm, notice=False):
allowed = True
break
if not allowed:
# skip adding this command
continue
# add the command to lines sent
command = plugin.name
added_length = len(command) + 2 # + 2 to account for space and comma
if current_line_length + added_length > 450:
# if line limit is reached, add line to lines, and reset
lines.append(", ".join(current_line) + ",")
current_line = []
current_line_length = 0
current_line.append(command)
current_line_length += added_length
if current_line:
# make sure to include the last line
lines.append(", ".join(current_line))
notice("Available commands:")
for line in lines:
notice(line)
notice("For detailed help, use {}help <command>".format(conn.config["command_prefix"]))
|
jhamman/pyresample
|
pyresample/test/test_plot.py
|
Python
|
lgpl-3.0
| 3,440
| 0.002907
|
import unittest
import os
import numpy as np
from pyresample import plot, geometry, utils, kd_tree
try:
import matplotlib
matplotlib.use('Agg')
except ImportError:
pass # Postpone fail to individual tests
def tmp(f):
f.tmp = True
return f
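# tmp() just flags a function with `f.tmp = True`; presumably used elsewhere to
# select temporary/experimental tests.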
class Test(unittest.TestCase):
filename = os.path.abspath(os.path.join(os.path.dirname(__file__),
'test_files', 'ssmis_swath.npz'))
data = np.load(filename)['data']
lons = data[:, 0].astype(np.float64)
lats = data[:, 1].astype(np.float64)
tb37v = data[:, 2].astype(np.float64)
# screen out the fill values
fvalue = -10000000000.0
valid_fov = (lons != fvalue) * (lats != fvalue) * (tb37v != fvalue)
lons = lons[valid_fov]
lats = lats[valid_fov]
tb37v = tb37v[valid_fov]
def test_ellps2axis(self):
a, b = plot.ellps2axis('WGS84')
self.assertAlmostEqual(a, 6378137.0,
msg='Failed to get semi-major axis of ellipsis')
self.assertAlmostEqual(b, 6356752.3142451793,
msg='Failed to get semi-minor axis of ellipsis')
def test_area_def2basemap(self):
area_def = utils.parse_area_file(os.path.join(os.path.dirname(__file__),
'test_files', 'areas.cfg'), 'ease_sh')[0]
bmap = plot.area_def2basemap(area_def)
self.assertTrue(bmap.rmajor == bmap.rminor and
bmap.rmajor == 6371228.0,
'Failed to create Basemap object')
def test_plate_carreeplot(self):
area_def = utils.parse_area_file(os.path.join(os.path.dirname(__file__),
'test_files', 'areas.cfg'), 'pc_world')[0]
swath_def = geometry.SwathDefinition(self.lons, self.lats)
result = kd_tree.resample_nearest(swath_def, self.tb37v, area_def,
radius_of_influence=20000,
fill_value=None)
plt = plot._get_quicklook(area_def, result, num_meridians=0,
num_parallels=0)
def test_easeplot(self):
        area_def = utils.parse_area_file(os.path.join(os.path.dirname(__file__),
                                                      'test_files', 'areas.cfg'), 'ease_sh')[0]
'test_files', 'areas.cfg'), 'ease_sh')[0]
swath_def = geometry.SwathDefinition(self.lons, self.lats)
result = kd_tree.resample_nearest(swath_def, self.tb37v, area_def,
radius_of_influence=20000,
fill_value=None)
plt = plot._get_quicklook(area_def, result)
    def test_orthoplot(self):
area_def = utils.parse_area_file(os.path.join(os.path.dirname(__file__),
'test_files', 'areas.cfg'), 'ortho')[0]
swath_def = geometry.SwathDefinition(self.lons, self.lats)
result = kd_tree.resample_nearest(swath_def, self.tb37v, area_def,
radius_of_influence=20000,
fill_value=None)
plt = plot._get_quicklook(area_def, result)
def suite():
"""The test suite.
"""
loader = unittest.TestLoader()
mysuite = unittest.TestSuite()
mysuite.addTest(loader.loadTestsFromTestCase(Test))
return mysuite
|
mancoast/CPythonPyc_test
|
crash/265_test_re.py
|
Python
|
gpl-3.0
| 37,806
| 0.003095
|
import sys
sys.path = ['.'] + sys.path
from test.test_support import verbose, run_unittest
import re
from re import Scanner
import sys, os, traceback
from weakref import proxy
# Misc tests from Tim Peters' re.doc
# WARNING: Don't change details in these tests if you don't know
# what you're doing. Some of these tests were carefully modeled to
# cover most of the code.
import unittest
class ReTests(unittest.TestCase):
def test_weakref(self):
s = 'QabbbcR'
x = re.compile('ab+c')
y = proxy(x)
self.assertEqual(x.findall('QabbbcR'), y.findall('QabbbcR'))
def test_search_star_plus(self):
self.assertEqual(re.search('x*', 'axx').span(0), (0, 0))
self.assertEqual(re.search('x*', 'axx').span(), (0, 0))
self.assertEqual(re.search('x+', 'axx').span(0), (1, 3))
self.assertEqual(re.search('x+', 'axx').span(), (1, 3))
self.assertEqual(re.search('x', 'aaa'), None)
self.assertEqual(re.match('a*', 'xxx').span(0), (0, 0))
self.assertEqual(re.match('a*', 'xxx').span(), (0, 0))
self.assertEqual(re.match('x*', 'xxxa').span(0), (0, 3))
self.assertEqual(re.match('x*', 'xxxa').span(), (0, 3))
self.assertEqual(re.match('a+', 'xxx'), None)
def bump_num(self, matchobj):
int_value = int(matchobj.group(0))
return str(int_value + 1)
def test_basic_re_sub(self):
self.assertEqual(re.sub("(?i)b+", "x", "bbbb BBBB"), 'x x')
self.assertEqual(re.sub(r'\d+', self.bump_num, '08.2 -2 23x99y'),
'9.3 -3 24x100y')
self.assertEqual(re.sub(r'\d+', self.bump_num, '08.2 -2 23x99y', 3),
'9.3 -3 23x99y')
self.assertEqual(re.sub('.', lambda m: r"\n", 'x'), '\\n')
self.assertEqual(re.sub('.', r"\n", 'x'), '\n')
s = r"\1\1"
self.assertEqual(re.sub('(.)', s, 'x'), 'xx')
        self.assertEqual(re.sub('(.)', re.escape(s), 'x'), s)
self.assertEqual(re.sub('(.)', lambda m: s, 'x'), s)
self.assertEqual(re.sub('(?P<a>x)', '\g<a>\g<a>', 'xx'), 'xxxx')
self.assertEqual(re.sub('(?P<a>x)', '\g<a>\g<1>', 'xx'), 'xxxx')
self.assertEqual(re.sub('(?P<unk>x)', '\g<unk>\g<unk>', 'xx'), 'xxxx')
self.assertEqual(re.sub('(?P<unk>x)', '\g<1>\g<1>', 'xx'), 'xxxx')
self.assertEqual(re.sub('a',r'\t\n\v\r\f\a\b\B\Z\a\A\w\W\s\S\d\D','a'),
'\t\n\v\r\f\a\b\\B\\Z\a\\A\\w\\W\\s\\S\\d\\D')
self.assertEqual(re.sub('a', '\t\n\v\r\f\a', 'a'), '\t\n\v\r\f\a')
self.assertEqual(re.sub('a', '\t\n\v\r\f\a', 'a'),
(chr(9)+chr(10)+chr(11)+chr(13)+chr(12)+chr(7)))
self.assertEqual(re.sub('^\s*', 'X', 'test'), 'Xtest')
def test_bug_449964(self):
# fails for group followed by other escape
self.assertEqual(re.sub(r'(?P<unk>x)', '\g<1>\g<1>\\b', 'xx'),
'xx\bxx\b')
def test_bug_449000(self):
# Test for sub() on escaped characters
self.assertEqual(re.sub(r'\r\n', r'\n', 'abc\r\ndef\r\n'),
'abc\ndef\n')
self.assertEqual(re.sub('\r\n', r'\n', 'abc\r\ndef\r\n'),
'abc\ndef\n')
self.assertEqual(re.sub(r'\r\n', '\n', 'abc\r\ndef\r\n'),
'abc\ndef\n')
self.assertEqual(re.sub('\r\n', '\n', 'abc\r\ndef\r\n'),
'abc\ndef\n')
def test_bug_1140(self):
# re.sub(x, y, u'') should return u'', not '', and
# re.sub(x, y, '') should return '', not u''.
# Also:
# re.sub(x, y, unicode(x)) should return unicode(y), and
# re.sub(x, y, str(x)) should return
# str(y) if isinstance(y, str) else unicode(y).
for x in 'x', u'x':
for y in 'y', u'y':
z = re.sub(x, y, u'')
self.assertEqual(z, u'')
self.assertEqual(type(z), unicode)
#
z = re.sub(x, y, '')
self.assertEqual(z, '')
self.assertEqual(type(z), str)
#
z = re.sub(x, y, unicode(x))
self.assertEqual(z, y)
self.assertEqual(type(z), unicode)
#
z = re.sub(x, y, str(x))
self.assertEqual(z, y)
self.assertEqual(type(z), type(y))
def test_bug_1661(self):
# Verify that flags do not get silently ignored with compiled patterns
pattern = re.compile('.')
self.assertRaises(ValueError, re.match, pattern, 'A', re.I)
self.assertRaises(ValueError, re.search, pattern, 'A', re.I)
self.assertRaises(ValueError, re.findall, pattern, 'A', re.I)
self.assertRaises(ValueError, re.compile, pattern, re.I)
def test_bug_3629(self):
# A regex that triggered a bug in the sre-code validator
re.compile("(?P<quote>)(?(quote))")
def test_sub_template_numeric_escape(self):
# bug 776311 and friends
self.assertEqual(re.sub('x', r'\0', 'x'), '\0')
self.assertEqual(re.sub('x', r'\000', 'x'), '\000')
self.assertEqual(re.sub('x', r'\001', 'x'), '\001')
self.assertEqual(re.sub('x', r'\008', 'x'), '\0' + '8')
self.assertEqual(re.sub('x', r'\009', 'x'), '\0' + '9')
self.assertEqual(re.sub('x', r'\111', 'x'), '\111')
self.assertEqual(re.sub('x', r'\117', 'x'), '\117')
self.assertEqual(re.sub('x', r'\1111', 'x'), '\1111')
self.assertEqual(re.sub('x', r'\1111', 'x'), '\111' + '1')
self.assertEqual(re.sub('x', r'\00', 'x'), '\x00')
self.assertEqual(re.sub('x', r'\07', 'x'), '\x07')
self.assertEqual(re.sub('x', r'\08', 'x'), '\0' + '8')
self.assertEqual(re.sub('x', r'\09', 'x'), '\0' + '9')
self.assertEqual(re.sub('x', r'\0a', 'x'), '\0' + 'a')
self.assertEqual(re.sub('x', r'\400', 'x'), '\0')
self.assertEqual(re.sub('x', r'\777', 'x'), '\377')
self.assertRaises(re.error, re.sub, 'x', r'\1', 'x')
self.assertRaises(re.error, re.sub, 'x', r'\8', 'x')
self.assertRaises(re.error, re.sub, 'x', r'\9', 'x')
self.assertRaises(re.error, re.sub, 'x', r'\11', 'x')
self.assertRaises(re.error, re.sub, 'x', r'\18', 'x')
self.assertRaises(re.error, re.sub, 'x', r'\1a', 'x')
self.assertRaises(re.error, re.sub, 'x', r'\90', 'x')
self.assertRaises(re.error, re.sub, 'x', r'\99', 'x')
self.assertRaises(re.error, re.sub, 'x', r'\118', 'x') # r'\11' + '8'
self.assertRaises(re.error, re.sub, 'x', r'\11a', 'x')
self.assertRaises(re.error, re.sub, 'x', r'\181', 'x') # r'\18' + '1'
self.assertRaises(re.error, re.sub, 'x', r'\800', 'x') # r'\80' + '0'
# in python2.3 (etc), these loop endlessly in sre_parser.py
self.assertEqual(re.sub('(((((((((((x)))))))))))', r'\11', 'x'), 'x')
self.assertEqual(re.sub('((((((((((y))))))))))(.)', r'\118', 'xyz'),
'xz8')
self.assertEqual(re.sub('((((((((((y))))))))))(.)', r'\11a', 'xyz'),
'xza')
def test_qualified_re_sub(self):
self.assertEqual(re.sub('a', 'b', 'aaaaa'), 'bbbbb')
self.assertEqual(re.sub('a', 'b', 'aaaaa', 1), 'baaaa')
def test_bug_114660(self):
self.assertEqual(re.sub(r'(\S)\s+(\S)', r'\1 \2', 'hello there'),
'hello there')
def test_bug_462270(self):
# Test for empty sub() behaviour, see SF bug #462270
self.assertEqual(re.sub('x*', '-', 'abxd'), '-a-b-d-')
self.assertEqual(re.sub('x+', '-', 'abxd'), 'ab-d')
def test_symbolic_refs(self):
self.assertRaises(re.error, re.sub, '(?P<a>x)', '\g<a', 'xx')
self.assertRaises(re.error, re.sub, '(?P<a>x)', '\g<', 'xx')
self.assertRaises(re.error, re.sub, '(?P<a>x)', '\g', 'xx')
self.assertRaises(re.error, re.sub, '(?P<a>x)', '\g<a a>', 'xx')
self.assertRaises(re.error, re.sub, '(?P<a>x)', '\g<1a1>', 'xx')
self.assertRaises(IndexError, re.sub, '(?P<a>x)', '\g<ab>', 'xx')
sel
|
hotpxl/mxnet
|
tests/python/train/test_autograd.py
|
Python
|
apache-2.0
| 2,949
| 0.00373
|
# pylint: skip-file
from __future__ import print_function
import mxnet as mx
from mxnet import gluon
from mxnet.gluon import nn
import numpy as np
import logging
from common import get_data
from mxnet import autograd
logging.basicConfig(level=logging.DEBUG)
# define network
def get_net():
net = nn.Sequential()
net.add(nn.Dense(128, activation='relu', prefix='fc1_'))
net.add(nn.Dense(64, activation='relu', prefix='fc2_'))
net.add(nn.Dense(10, prefix='fc3_'))
return net
get_data.GetMNIST_ubyte()
batch_size = 100
train_data = mx.io.MNISTIter(
image="data/train-images-idx3-ubyte",
label="data/train-labels-idx1-ubyte",
data_shape=(784,),
label_name='sm_label',
batch_size=batch_size, shuffle=True, flat=True, silent=False, seed=10)
val_data = mx.io.MNISTIter(
image="data/t10k-images-idx3-ubyte",
label="data/t10k-labels-idx1-ubyte",
data_shape=(784,),
label_name='sm_label',
batch_size=batch_size, shuffle=True, flat=True, silent=False)
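# Evaluate classification accuracy over the validation iterator, splitting each
# batch across the given contexts (simple data parallelism).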
def score(net, ctx_list):
metric = mx.metric.Accuracy()
val_data.reset()
for batch in val_data:
datas = gluon.utils.split_and_load(batch.data[0], ctx_list, batch_axis=0)
labels = gluon.utils.split_and_load(batch.label[0], ctx_list, batch_axis=0)
outputs = []
for x in datas:
outputs.append(net(x))
metric.update(labels, outputs)
return metric.get()[1]
def train(net, epoch, ctx_list):
net.collect_params().initialize(mx.init.Xavier(magnitude=2.24), ctx=ctx_list)
trainer = gluon.Trainer(net.collect_params(), 'sgd', {'learning_rate': 0.5})
metric = mx.metric.Accuracy()
loss = gluon.loss.SoftmaxCrossEntropyLoss()
for i in range(epoch):
train_data.reset()
for batch in train_data:
datas = gluon.utils.split_and_load(batch.data[0], ctx_list, batch_axis=0)
labels = gluon.utils.split_and_load(batch.label[0], ctx_list, batch_axis=0)
outputs = []
with autograd.record():
for x, y in zip(datas, labels):
z = net(x)
L = loss(z, y)
L.backward()
outputs.append(z)
trainer.step(batch.data[0].shape[0])
metric.update(labels, outputs)
name, acc = metric.get()
metric.reset()
print('training acc at epoch %d: %s=%f'%(i, name, acc))
def test_autograd():
net1 = get_net()
train(net1, 5, [mx.cpu(0), mx.cpu(1)])
acc1 = score(net1, [mx.cpu(0)])
acc2 = score(net1, [mx.cpu(0), mx.cpu(1)])
assert acc1 > 0.95
assert abs(acc1 - acc2) < 0.01
net1.collect_params().save('mnist.params')
net2 = get_net()
net2.collect_params().load('mnist.params', ctx=[mx.cpu(0)])
acc3 = score(net2, [mx.cpu(0)])
assert abs(acc3 - acc1) < 0.0001
if __name__ == '__main__':
test_autograd()
|
Yelp/kafka-utils
|
tests/util/zookeeper_test.py
|
Python
|
apache-2.0
| 15,746
| 0.000889
|
# -*- coding: utf-8 -*-
# Copyright 2016 Yelp Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
from collections import namedtuple
import mock
import pytest
from kazoo.exceptions import NoNodeError
from kafka_utils.util.config import ClusterConfig
from kafka_utils.util.serialization import dump_json
from kafka_utils.util.zookeeper import ZK
MockGetTopics = namedtuple('MockGetTopics', ['ctime'])
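# Lightweight stand-in for topic metadata in these tests; only a ctime field is
# modeled.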
@mock.patch(
'kafka_utils.util.zookeeper.KazooClient',
autospec=True
)
class TestZK(object):
cluster_config = ClusterConfig(
type='mytype',
name='some_cluster',
broker_list='some_list',
zookeeper='some_ip'
)
def test_create(self, mock_client):
with ZK(self.cluster_config) as zk:
zk.create(
'/kafka/consumers/some_group/offsets'
)
zk.create(
'/kafka/consumers/some_group/offsets',
value='some_val',
acl=None,
ephemeral=True,
sequence=True,
makepath=True
)
mock_obj = mock.Mock()
zk.create(
'/kafka/consumers/some_group/offsets',
value='some_val',
acl=mock_obj,
)
call_list = [
mock.call(
'/kafka/consumers/some_group/offsets',
'', None, False, False, False
),
mock.call(
'/kafka/consumers/some_group/offsets',
'some_val', None, True, True, True
),
mock.call(
'/kafka/consumers/some_group/offsets',
'some_val', mock_obj, False, False, False
),
]
assert mock_client.return_value.create.call_args_list == call_list
def test_set(self, mock_client):
with ZK(self.cluster_config) as zk:
zk.set(
'config/topics/some_topic',
'some_val'
)
zk.set(
'brokers/topics/some_topic',
'{"name": "some_topic", "more": "properties"}'
)
call_list = [
mock.call(
'config/topics/some_topic',
'some_val'
),
mock.call(
'brokers/topics/some_topic',
'{"name": "some_topic", "more": "properties"}'
)
]
assert mock_client.return_value.set.call_args_list == call_list
def test_delete(self, mock_client):
with ZK(self.cluster_config) as zk:
zk.delete(
'/kafka/consumers/some_group/offsets',
)
zk.delete(
'/kafka/consumers/some_group/offsets',
recursive=True
)
call_list = [
mock.call(
'/kafka/consumers/some_group/offsets',
recursive=False
),
mock.call(
'/kafka/consumers/some_group/offsets',
recursive=True
),
]
assert mock_client.return_value.delete.call_args_list == call_list
def test_delete_topic(self, _):
with mock.patch.object(
ZK,
'delete',
autospec=True
) as mock_delete:
with ZK(self.cluster_config) as zk:
zk.delete_topic(
'some_group',
'some_topic',
)
mock_delete.assert_called_once_with(
zk,
                '/consumers/some_group/offsets/some_topic',
True,
)
def test_get_my_subscribed_partitions(self, _):
with mock.patch.object(
ZK,
'get_children',
autospec=True,
) as mock_children:
with ZK(self.cluster_config) as zk:
zk.get_my_subscribed_partitions(
'some_group',
'some_topic',
)
mock_children.assert_called_once_with(
zk,
'/consumers/some_group/offsets/some_topic',
)
def test_get_topic_config(self, mock_client):
with ZK(self.cluster_config) as zk:
zk.zk.get = mock.Mock(
return_value=(
b'{"version": 1, "config": {"cleanup.policy": "compact"}}',
"Random node info that doesn't matter"
)
)
actual = zk.get_topic_config("some_topic")
expected = {"version": 1, "config": {"cleanup.policy": "compact"}}
assert actual == expected
def test_get_topic_config_8(self, mock_client):
"""
        Test getting configuration for topics created in Kafka prior to 0.9.0.
"""
with ZK(self.cluster_config) as zk:
zk.zk.get = mock.Mock(side_effect=NoNodeError())
zk.get_topics = mock.Mock(return_value={"some_topic": {}})
actual = zk.get_topic_config("some_topic")
expected = {"config": {}}
assert actual == expected
def test_get_nonexistent_topic_config(self, mock_client):
"""
Test getting configuration for topics that don't exist.
"""
with ZK(self.cluster_config) as zk:
zk.zk.get = mock.Mock(side_effect=NoNodeError())
zk.get_topics = mock.Mock(return_value={})
with pytest.raises(NoNodeError):
zk.get_topic_config("some_topic")
def test_set_topic_config_kafka_10(self, mock_client):
with mock.patch.object(
ZK,
'set',
autospec=True
) as mock_set:
with ZK(self.cluster_config) as zk:
config = {"version": 1, "config": {"cleanup.policy": "compact"}}
config_change = {"entity_path": "topics/some_topic", "version": 2}
zk.set_topic_config(
"some_topic",
config,
)
serialized_config = dump_json(config)
serialized_config_change = dump_json(config_change)
mock_set.assert_called_once_with(
zk,
'/config/topics/some_topic',
serialized_config,
)
expected_create_call = mock.call(
'/config/changes/config_change_',
serialized_config_change,
None,
False,
True,
False
)
assert mock_client.return_value.create.call_args_list == [expected_create_call]
def test_set_topic_config_kafka_9(self, mock_client):
with mock.patch.object(
ZK,
'set',
autospec=True
) as mock_set:
with ZK(self.cluster_config) as zk:
config = {"version": 1, "config": {"cleanup.policy": "compact"}}
config_change = {"version": 1, "entity_type": "topics", "entity_name": "some_topic"}
zk.set_topic_config(
"some_topic",
config,
(0, 9, 2)
)
serialized_config = dump_json(config)
serialized_config_change = dump_json(config_change)
mock_set.assert_called_once_with(
|
ronekko/chainer
|
tests/chainer_tests/test_link.py
|
Python
|
mit
| 71,956
| 0.00025
|
import copy
import unittest
import warnings
import mock
import numpy
import chainer
from chainer.backends import cuda
from chainer.backends import intel64
from chainer import initializers
from chainer import testing
from chainer.testing import attr
class TestLink(unittest.TestCase):
def setUp(self):
x_shape_0 = 2
x_shape_1 = numpy.int64(3)
with testing.assert_warns(DeprecationWarning):
self.link = chainer.Link(x=((x_shape_0, x_shape_1), 'd'),
u=(None, 'd'))
with self.link.init_scope():
self.link.y = chainer.Parameter(shape=(2,))
self.link.v = chainer.Parameter()
self.p = numpy.array([1, 2, 3], dtype='f')
self.link.add_persistent('p', self.p)
self.link.name = 'a'
self.link.x.update_rule = chainer.UpdateRule()
self.link.x.update_rule.enabled = False
self.link.u.update_rule = chainer.UpdateRule()
if cuda.available:
self.current_device_id = cuda.cupy.cuda.get_device_id()
def tearDown(self):
if cuda.available \
and cuda.cupy.cuda.get_device_id() != self.current_device_id:
cuda.Device(self.current_device_id).use()
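    # Assert that `name` is an initialized Parameter with the expected shape,
    # dtype and fill value; its gradient is expected to start out as NaN.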
def check_param_init(self, name, shape, dtype, data_value=numpy.nan):
self.assertTrue(hasattr(self.link, name))
var = getattr(self.link, name)
self.assertEqual(var.name, name)
self.assertIsInstance(var, chainer.Parameter)
self.assertEqual(var.data.shape, shape)
self.assertEqual(var.data.dtype, dtype)
numpy.testing.assert_array_equal(var.data, data_value)
self.assertEqual(var.grad.shape, shape)
self.assertEqual(var.grad.dtype, dtype)
numpy.testing.assert_array_equal(var.grad, numpy.nan)
def check_param_uninit(self, name, initializer=None):
self.assertTrue(hasattr(self.link, name))
var = getattr(self.link, name)
        self.assertIsInstance(var, chainer.Parameter)
self.assertEqual(var.name, name)
self.assertIsNone(var.data)
if initializer is not None:
self.assertIs(var.initializer, initializer)
def test_init(self):
self.check_param_init('x', (2, 3), 'd')
self.check_param_init('y', (2,), 'f')
self.check_param_uninit('u')
self.link.u.initialize((2, 3))
        self.check_param_init('u', (2, 3), 'd')
self.check_param_uninit('v')
self.link.v.initialize((2, 3))
self.check_param_init('v', (2, 3), 'f')
def test_assign_param_outside_of_init_scope(self):
p = chainer.Parameter()
self.link.p = p
self.assertTrue(all(p is not param for param in self.link.params()))
def test_assign_var_in_init_scope(self):
p = chainer.Variable()
with self.link.init_scope():
self.link.p = p
self.assertTrue(all(p is not param for param in self.link.params()))
def test_add_param(self):
with testing.assert_warns(DeprecationWarning):
self.link.add_param('z', (2, 3))
self.check_param_init('z', (2, 3), 'f')
with testing.assert_warns(DeprecationWarning):
self.link.add_param('w', (2, 3), dtype='d')
self.check_param_init('w', (2, 3), 'd')
with testing.assert_warns(DeprecationWarning):
self.link.add_param('r')
self.check_param_uninit('r')
self.link.r.initialize((2, 3))
self.check_param_init('r', (2, 3), 'f')
with testing.assert_warns(DeprecationWarning):
self.link.add_param('s', dtype='d')
self.check_param_uninit('s')
self.link.s.initialize((2, 3))
self.check_param_init('s', (2, 3), 'd')
initializer = initializers.Zero('d')
with testing.assert_warns(DeprecationWarning):
self.link.add_param('t', initializer=initializer)
self.check_param_uninit('t', initializer)
self.link.t.initialize((2, 3))
self.check_param_init('t', (2, 3), 'd', 0)
def test_add_param_direct_initialization(self):
z = numpy.random.rand(2, 3).astype('f')
with testing.assert_warns(DeprecationWarning):
self.link.add_param('z', initializer=z)
self.assertIsInstance(self.link.z.data, numpy.ndarray)
numpy.testing.assert_array_equal(self.link.z.data, z)
def test_add_param_duplicated_with_persistent(self):
self.link.add_persistent('z', 'abc')
with self.assertRaises(AttributeError):
with testing.assert_warns(DeprecationWarning):
self.link.add_param('z', (2, 3))
def test_add_persistent(self):
self.assertTrue(hasattr(self.link, 'p'))
self.assertIs(self.link.p, self.p)
self.link.add_persistent('q', 'abc')
self.assertTrue(hasattr(self.link, 'q'))
self.assertEqual(self.link.q, 'abc')
def test_delete(self):
del self.link.x
self.assertFalse(hasattr(self.link, 'x'))
self.assertNotIn('x', self.link._params)
self.assertNotIn('x', self.link._persistent)
del self.link.p
self.assertFalse(hasattr(self.link, 'p'))
self.assertNotIn('p', self.link._params)
self.assertNotIn('p', self.link._persistent)
def test_copy_with_share_mode(self):
link = self.link.copy(mode='share')
self.assertIsInstance(link._params, set)
self.assertIsInstance(link._persistent, set)
self.assertTrue(hasattr(link, 'x'))
self.assertTrue(hasattr(link, 'y'))
self.assertTrue(hasattr(link, 'u'))
self.assertTrue(hasattr(link, 'p'))
self.assertIsNot(link.x, self.link.x)
self.assertIs(link.x.array, self.link.x.array)
self.assertIsNot(link.y, self.link.y)
self.assertIs(link.y.array, self.link.y.array)
self.assertIsNone(link.u.array)
self.assertIs(link.p, self.link.p)
self.assertIs(link.name, None)
def test_copy_with_copy_mode(self):
link = self.link.copy(mode='copy')
self.assertIsInstance(link._params, set)
self.assertIsInstance(link._persistent, set)
self.assertTrue(hasattr(link, 'x'))
self.assertTrue(hasattr(link, 'y'))
self.assertTrue(hasattr(link, 'u'))
self.assertTrue(hasattr(link, 'p'))
self.assertIsNot(link.x, self.link.x)
self.assertIsNot(link.x.array, self.link.x.array)
self.assertIsNot(link.y, self.link.y)
self.assertIsNot(link.y.array, self.link.y.array)
self.assertIsNone(link.u.array)
self.assertIsNot(link.p, self.link.p)
self.assertIsNot(link.name, None)
def test_copy_with_init_mode(self):
self.link.u.initializer = initializers.Normal(
dtype=self.link.u.initializer.dtype)
self.link.u.initialize((2, 3))
link = self.link.copy(mode='init')
self.assertFalse(numpy.array_equal(self.link.u.array, link.u.array))
self.assertIsInstance(link._params, set)
self.assertIsInstance(link._persistent, set)
self.assertTrue(hasattr(link, 'x'))
self.assertTrue(hasattr(link, 'y'))
self.assertTrue(hasattr(link, 'u'))
self.assertTrue(hasattr(link, 'p'))
self.assertIsNot(link.x, self.link.x)
self.assertIsNot(link.x.array, self.link.x.array)
self.assertIsNot(link.y, self.link.y)
self.assertIsNot(link.y.array, self.link.y.array)
self.assertIsNot(link.p, self.link.p)
self.assertIsNot(link.name, None)
@attr.gpu
def test_copy_and_to_gpu_init(self):
cupy = cuda.cupy
l0 = self.link
l1 = l0.copy()
self.assertIs(l0.x.data, l1.x.data)
l1.to_gpu()
self.assertIsNot(l0.x.data, l1.x.data)
self.assertIsInstance(l0.x.data, numpy.ndarray)
self.assertIsInstance(l1.x.data, cupy.ndarray)
@attr.gpu
def test_copy_and_to_gpu_uninit(self):
cupy = cuda.cupy
l0 = self.link
l1 = l0.copy()
self.assertIsNone(l0.u.data)
self.assertIsNone(l1.u.data)
l1.to_gpu()
l1.u.initialize((2, 3))
self.a
|
fmuzf/python_hk_glazer
|
setup.py
|
Python
|
mit
| 566
| 0.008834
|
from setuptools import setup, find_packages
setup(
name='hk_glazer',
version='0.0.8',
description='Convert compatible JSON configs to DeBAM/DeTIM config.dat files',
url='https://github.com/fmuzf/python_hk_glazer',
    author='Lyman Gillispie',
author_email='lyman.gillispie@gmail.com',
packages=find_packages(),
scripts=['bin/hk_glazer'],
license='MIT',
long_description=open('README.md').read(),
install_requires = ['argparse'],
test_suite='nose.collector',
tests_require=['nose'],
    include_package_data = True
)
|
TheTimmy/spack
|
var/spack/repos/builtin/packages/perl-font-ttf/package.py
|
Python
|
lgpl-2.1
| 1,567
| 0.001914
|
##############################################################################
# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/llnl/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class PerlFontTtf(PerlPackage):
"""Perl module for TrueType Font hacking"""
    homepage = "http://search.cpan.org/~bhallissy/Font-TTF-1.06/lib/Font/TTF.pm"
url = "http://search.cpan.org/CPAN/authors/id/B/BH/BHALLISSY/Font-TTF-1.06.tar.gz"
version('1.06', '241b59310ad4450e6e050d5e790f1b21')
|
agx/git-buildpackage
|
gbp/format.py
|
Python
|
gpl-2.0
| 2,429
| 0.000824
|
# vim: set fileencoding=utf-8 :
#
# (C) 2014 Guido Günther <agx@sigxcpu.org>
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, please see
# <http://www.gnu.org/licenses/>
"""Format a message"""
from gbp.errors import GbpError
def format_str(msg, args):
"""
Format a string with the given dict. Be a bit more verbose than
default python about the error cause.
>>> format_str("%(foo)", {})
Traceback (most recent call last):
...
gbp.errors.GbpError: Failed to format %(foo): Missing value 'foo' in {}
>>> format_str("%(foo)", {'foo': 'bar'})
Traceback (most recent call last):
...
gbp.errors.GbpError: Failed to format %(foo) with {'foo': 'bar'}: incomplete format
>>> format_str("A %(foo)s is a %(bar)s", {'foo': 'dog', 'bar': 'mamal'})
'A dog is a mamal'
"""
try:
return msg % args
except ValueError as e:
raise GbpError("Failed to format %s with %s: %s" % (msg, args, e))
except KeyError as e:
raise GbpError("Failed to format %s: Missing value %s in %s" %
(msg, e, args))
def format_b(fmtstr, *args):
"""String-like interpolation for bytes objects.
NOTE: This is a compatibility wrapper for older versions (<3.5) of Python 3
which do not support the percent operator ('%') for bytes objects. This
function should be removed (and replaced by simple '%') when Python 3.5
has gained wide enough adoption.
>>> format_b(b'%s %d', b'foo', 123)
b'foo 123'
>>> format_b(b'foo 123')
b'foo 123'
>>> format_b('%s %d', b'foo', 123)
Traceback (most recent call last):
...
AttributeError: 'str' object has no attribute 'decode'
"""
fmtstr = fmtstr.decode()
strargs = tuple([(a.decode() if isinstance(a, bytes) else a) for a in args])
return (fmtstr % strargs).encode()
|
AnshulYADAV007/Lean
|
Algorithm.Python/OptionDataNullReferenceRegressionAlgorithm.py
|
Python
|
apache-2.0
| 1,485
| 0.007417
|
# QUANTCONNECT.COM - Democratizing Finance, Empowering Individuals.
# Lean Algorithmic Trading Engine v2.0. Copyright 2014 QuantConnect Corporation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from clr import AddReference
AddReference("System")
AddReference("QuantConnect.Algorithm")
AddReference("QuantConnect.Common")
from System import *
from QuantConnect import *
from QuantConnect.Algorithm import *
from datetime import timedelta
### <summary>
### This algorithm is a regression test for issue #2018 and PR #2038.
### </summary>
class OptionDataNullReferenceRegressionAlgorithm(QCAlgorithm):
def Initialize(self):
self.SetStartDate(2016, 12, 1)
self.SetEndDate(2017, 1, 1)
self.SetCash(500000)
        self.AddEquity("DUST")
option = self.AddOption("DUST")
option.SetFilter(self.UniverseFunc)
def UniverseFunc(self, universe):
return universe.IncludeWeeklys().Strikes(-1, +1).Expiration(timedelta(25), timedelta(100))
|
kalaspa/mc-eliece
|
src/arith.py
|
Python
|
gpl-3.0
| 891
| 0.042745
|
#!/usr/bin/python3.2
# -*- coding: utf-8 -*-
"""Module containing the generic class for algebraic objects supporting + and *, and optionally /"""
class arith(object):
"""Classe generique contenant les methodes redondantes"""
def __ne__(self,autre):
"""Definition de !="""
return not(self == autre)
def __radd__(self,autre):
"""Addition dans l'autre sens"""
return self + autre
def __iadd__(self,autre):
"""Methode de +="""
return self + autre
def __rmul__(self,autre):
"""Multiplication dans l'autre sens"""
return self * autre
    def __imul__(self,autre):
        """Method for *="""
        return self * autre
    def __sub__(self,autre):
        """Subtraction method"""
        return self + (-1 * autre)
    def __rsub__(self,autre):
        """Reflected subtraction method"""
        return autre + (-1 * self)
    def __neg__(self):
        """Negation (additive inverse)"""
        return -1 * self
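# Minimal usage sketch (illustrative, not part of the original module): a
# subclass only needs to define __eq__, __add__ and __mul__, and arith then
# supplies !=, +=, *=, binary/unary - and the reflected variants.
#
#     class Scalar(arith):
#         def __init__(self, v):
#             self.v = v
#         def __eq__(self, other):
#             return self.v == getattr(other, 'v', other)
#         def __add__(self, other):
#             return Scalar(self.v + getattr(other, 'v', other))
#         def __mul__(self, other):
#             return Scalar(self.v * getattr(other, 'v', other))
#
#     assert (Scalar(2) - Scalar(3)) == -1  # __sub__ and __rmul__ come from arith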
|
tonybreak/Registered
|
plugins/csdn.py
|
Python
|
gpl-3.0
| 675
| 0.001481
|
# coding: utf-8
from common import base
class Plugin(base.BASE):
__name__ = 'csdn'
__title__ = 'CSDN'
__url__ = 'http://www.csdn.net/'
    def register(self, target):
self.information = {
'email': {
'url': 'http://passport.csdn.net/account/register',
'method': 'get',
'settings': {
'params': {
'action': 'validateEmail',
'email': target
}
},
'result': {
'type': 'str',
'value': 'false'
}
}
}
|
mozilla/mozilla-ignite
|
apps/awards/views.py
|
Python
|
bsd-3-clause
| 2,471
| 0
|
from awards.forms import AwardForm
from awards.models import JudgeAllowance
from awards.models import Award
from challenges.decorators import judge_required
from challenges.models import Submission
from django.contrib import messages
from django.http import Http404, HttpResponseRedirect
from django.views.decorators.http import require_POST
from tower import ugettext as _
@judge_required
@require_POST
def award(request, submission_id, project=None, slug=None):
"""Awards an ammount to a gren-lit ``Submission`` by a Judge"""
try:
submission = (Submission.objects
.select_related('phase')
.get(id=submission_id, phase__challenge__slug=slug,
phase__challenge__project__slug=project,
is_winner=True, is_draft=False))
except Submission.DoesNotExist:
raise Http404
judge_data = {
'judge': request.user.get_profile(),
'award__phase': submission.phase,
'award__status': Award.RELEASED,
}
if submission.phase_round:
judge_data.update({'award__phase_round': submission.phase_round})
try:
judge_allowance = JudgeAllowance.objects.get(**judge_data)
except JudgeAllowance.DoesNotExist:
raise Http404
form = AwardForm(request.POST)
if form.is_valid():
is_allocated = judge_allowance.allocate(form.cleaned_data['amount'],
submission)
if form.cleaned_data['amount'] == 0:
            submission_award = (judge_allowance.submissionaward_set
.filter(submission=submission))
if submission_award:
                submission_award.delete()
            message = _("You have successfully removed the award from this"
                        " submission")
messages.success(request, message)
return HttpResponseRedirect(submission.get_absolute_url())
if is_allocated:
message = _("You have successfuly awarded this Entry")
messages.success(request, message)
return HttpResponseRedirect(submission.get_absolute_url())
if form.errors:
message = _("Please enter a valid amount for the award")
else:
message = _("You don't have enough funding for award this submission")
messages.error(request, message)
return HttpResponseRedirect(submission.get_absolute_url())
|
e-gob/plataforma-kioscos-autoatencion
|
scripts/ansible-play/.venv/lib/python2.7/site-packages/ansible/modules/cloud/amazon/lambda_policy.py
|
Python
|
bsd-3-clause
| 13,776
| 0.00363
|
#!/usr/bin/python
# Copyright (c) 2016, Pierre Jodouin <pjodouin@virtualcomputing.solutions>
# Copyright (c) 2017 Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'metadata_version': '1.1'}
DOCUMENTATION = '''
---
module: lambda_policy
short_description: Creates, updates or deletes AWS Lambda policy statements.
description:
- This module allows the management of AWS Lambda policy statements.
It is idempotent and supports "Check" mode. Use module M(lambda) to manage the lambda
function itself, M(lambda_alias) to manage function aliases, M(lambda_event) to manage event source mappings
such as Kinesis streams, M(lambda_invoke) to execute a lambda function and M(lambda_facts) to gather facts
relating to one or more lambda functions.
version_added: "2.4"
author:
- Pierre Jodouin (@pjodouin)
- Michael De La Rue (@mikedlr)
options:
function_name:
description:
- "Name of the Lambda function whose resource policy you are updating by adding a new permission."
- "You can specify a function name (for example, Thumbnail ) or you can specify Amazon Resource Name (ARN) of the"
- "function (for example, arn:aws:lambda:us-west-2:account-id:function:ThumbNail ). AWS Lambda also allows you to"
- "specify partial ARN (for example, account-id:Thumbnail ). Note that the length constraint applies only to the"
- "ARN. If you specify only the function name, it is limited to 64 character in length."
required: true
    aliases: ['lambda_function_arn', 'function_arn']
state:
description:
- Describes the desired state.
required: true
default: "present"
choices: ["present", "absent"]
alias:
description:
- Name of the function alias. Mutually exclusive with C(version).
version:
description:
- Version of the Lambda function. Mutually exclusive with C(alias).
statement_id:
description:
- A unique statement identifier.
required: true
    aliases: ['sid']
action:
description:
- "The AWS Lambda action you want to allow in this statement. Each Lambda action is a string starting with
lambda: followed by the API name (see Operations ). For example, lambda:CreateFunction . You can use wildcard
(lambda:* ) to grant permission for all AWS Lambda actions."
required: true
principal:
description:
- "The principal who is getting this permission. It can be Amazon S3 service Principal (s3.amazonaws.com ) if
you want Amazon S3 to invoke the function, an AWS account ID if you are granting cross-account permission, or
any valid AWS service principal such as sns.amazonaws.com . For example, you might want to allow a custom
application in another AWS account to push events to AWS Lambda by invoking your function."
required: true
source_arn:
description:
- This is optional; however, when granting Amazon S3 permission to invoke your function, you should specify this
field with the bucket Amazon Resource Name (ARN) as its value. This ensures that only events generated from
the specified bucket can invoke the function.
source_account:
description:
- The AWS account ID (without a hyphen) of the source owner. For example, if the SourceArn identifies a bucket,
then this is the bucket owner's account ID. You can use this additional condition to ensure the bucket you
specify is owned by a specific account (it is possible the bucket owner deleted the bucket and some other AWS
account created the bucket). You can also use this condition to specify all sources (that is, you don't
specify the SourceArn ) owned by a specific account.
event_source_token:
description:
- Token string representing source ARN or account. Mutually exclusive with C(source_arn) or C(source_account).
requirements:
- boto3
extends_documentation_fragment:
- aws
'''
EXAMPLES = '''
---
- hosts: localhost
gather_facts: no
vars:
state: present
tasks:
- name: Lambda S3 event notification
lambda_policy:
state: "{{ state | default('present') }}"
function_name: functionName
alias: Dev
statement_id: lambda-s3-myBucket-create-data-log
action: lambda:InvokeFunction
principal: s3.amazonaws.com
source_arn: arn:aws:s3:eu-central-1:123456789012:bucketName
source_account: 123456789012
- name: show results
debug: var=lambda_policy_action
'''
RETURN = '''
---
lambda_policy_action:
description: describes what action was taken
returned: success
type: string
'''
import json
import re
from ansible.module_utils._text import to_native
from ansible.module_utils.aws.core import AnsibleAWSModule
from ansible.module_utils.ec2 import get_aws_connection_info, boto3_conn
try:
from botocore.exceptions import ClientError
except:
pass # will be protected by AnsibleAWSModule
def pc(key):
"""
Changes python key into Pascal case equivalent. For example, 'this_function_name' becomes 'ThisFunctionName'.
:param key:
:return:
"""
return "".join([token.capitalize() for token in key.split('_')])
def policy_equal(module, current_statement):
for param in ('action', 'principal', 'source_arn', 'source_account', 'event_source_token'):
if module.params.get(param) != current_statement.get(param):
return False
return True
def set_api_params(module, module_params):
"""
Sets module parameters to those expected by the boto3 API.
:param module:
:param module_params:
:return:
"""
api_params = dict()
for param in module_params:
module_param = module.params.get(param)
if module_param is not None:
api_params[pc(param)] = module_param
return api_params
def validate_params(module):
"""
Performs parameter validation beyond the module framework's validation.
:param module:
:return:
"""
function_name = module.params['function_name']
# validate function name
    if not function_name.startswith('arn:'):
if not re.search('^[\w\-]+$', function_name):
module.fail_json(
msg='Function name {0} is invalid. Names must contain only alphanumeric characters and hyphens.'.format(
function_name)
)
if len(function_name) > 64:
module.fail_json(
msg='Function name "{0}" exceeds 64 character limit'.format(function_name))
else:
if not re.search('^[\w\-:]+$', function_name):
module.fail_json(
msg='ARN {0} is invalid. ARNs must contain only alphanumeric characters, hyphens and colons.'.format(function_name)
)
if len(function_name) > 140:
module.fail_json(msg='ARN name "{0}" exceeds 140 character limit'.format(function_name))
def get_qualifier(module):
"""
Returns the function qualifier as a version or alias or None.
:param module:
:return:
"""
if module.params.get('version') is not None:
return to_native(module.params['version'])
elif module.params['alias']:
return to_native(module.params['alias'])
return None
def extract_statement(policy, sid):
"""return flattened single policy statement from a policy
If a policy statement is present in the policy extract it and
return it in a flattened form. Otherwise return an empty
dictionary.
"""
if 'Statement' not in policy:
return {}
policy_statement = {}
# Now that we have the policy, check if required permission statement is present and flatten to
# simple dictionary if found.
for statement in policy['Statement']:
if statement['Sid'] == sid:
policy_statement['action'] = statement['Action']
policy_statement['principal'] = statement['Principal']['Service']
try:
policy_statement['s
|
VagosAplas/GEO1005-Fire
|
SpatialDecision/utility_functions.py
|
Python
|
gpl-2.0
| 32,661
| 0.003827
|
# -*- coding: utf-8 -*-
"""
/***************************************************************************
SpatialDecision
A QGIS plugin
This is a SDSS template for the GEO1005 course
-------------------
begin : 2015-11-02
git sha : $Format:%H$
copyright : (C) 2015 by Jorge Gil, TU Delft
email : j.a.lopesgil@tudelft.nl
***************************************************************************/
/***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************/
"""
from PyQt4 import QtGui, QtCore
from qgis.core import *
from qgis.networkanalysis import *
from pyspatialite import dbapi2 as sqlite
import psycopg2 as pgsql
import numpy as np
import math
import os.path
try:
import networkx as nx
has_networkx = True
except ImportError, e:
has_networkx = False
#
# Layer functions
#
def getLegendLayers(iface, geom='all', provider='all'):
"""
Return list of layer objects in the legend, with specific geometry type and/or data provider
:param iface: QgsInterface
:param geom: string ('point', 'linestring', 'polygon')
:param provider: string
:return: list QgsVectorLayer
"""
layers_list = []
for layer in iface.legendInterface().layers():
add_layer = False
if layer.isValid() and layer.type() == QgsMapLayer.VectorLayer:
            if layer.hasGeometryType() and (geom == 'all' or layer.geometryType() in geom):
                if provider == 'all' or layer.dataProvider().name() in provider:
add_layer = True
if add_layer:
layers_list.append(layer)
return layers_list
def getCanvasLayers(iface, geom='all', provider='all'):
"""Return list of valid QgsVectorLayer in QgsMapCanvas, with specific geometry type and/or data provider"""
layers_list = []
for layer in iface.mapCanvas().layers():
add_layer = False
if layer.isValid() and layer.type() == QgsMapLayer.VectorLayer:
            if layer.hasGeometryType() and (geom == 'all' or layer.geometryType() in geom):
                if provider == 'all' or layer.dataProvider().name() in provider:
add_layer = True
if add_layer:
layers_list.append(layer)
return layers_list
def getRegistryLayers(geom='all', provider='all'):
"""Return list of valid QgsVectorLayer in QgsMapLayerRegistry, with specific geometry type and/or data provider"""
layers_list = []
for layer in QgsMapLayerRegistry.instance().mapLayers().values():
add_layer = False
if layer.isValid() and layer.type() == QgsMapLayer.VectorLayer:
            if layer.hasGeometryType() and (geom == 'all' or layer.geometryType() in geom):
                if provider == 'all' or layer.dataProvider().name() in provider:
add_layer = True
if add_layer:
layers_list.append(layer)
return layers_list
def isLayerProjected(layer):
projected = False
if layer:
projected = not layer.crs().geographicFlag()
return projected
def getLegendLayerByName(iface, name):
layer = None
for i in iface.legendInterface().layers():
if i.name() == name:
layer = i
return layer
def getCanvasLayerByName(iface, name):
layer = None
for i in iface.mapCanvas().layers():
if i.name() == name:
layer = i
return layer
def getLayersListNames(layerslist):
layer_names = [layer.name() for layer in layerslist]
return layer_names
def getLayerPath(layer):
path = ''
provider = layer.dataProvider()
provider_type = provider.name()
if provider_type == 'spatialite':
uri = QgsDataSourceURI(provider.dataSourceUri())
path = uri.database()
elif provider_type == 'ogr':
uri = provider.dataSourceUri()
path = os.path.dirname(uri)
return path
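# reloadLayer recreates a layer from its data source URI and swaps it into the
# map layer registry; returns the new layer, or None for unsupported providers.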
def reloadLayer(layer):
layer_name = layer.name()
layer_provider = layer.dataProvider().name()
new_layer = None
if layer_provider in ('spatialite','postgres'):
uri = QgsDataSourceURI(layer.dataProvider().dataSourceUri())
new_layer = QgsVectorLayer(uri.uri(), layer_name, layer_provider)
elif layer_provider == 'ogr':
uri = layer.dataProvider().dataSourceUri()
new_layer = QgsVectorLayer(uri.split("|")[0], layer_name, layer_provider)
QgsMapLayerRegistry.instance().removeMapLayer(layer.id())
if new_layer:
QgsMapLayerRegistry.instance().addMapLayer(new_layer)
return new_layer
#
# Field functions
#
def fieldExists(layer, name):
fields = getFieldNames(layer)
if name in fields:
return True
else:
return False
def getFieldNames(layer):
field_names = []
if layer and layer.dataProvider():
field_names = [field.name() for field in layer.dataProvider().fields()]
return field_names
def getNumericFields(layer, type='all'):
fields = []
if type == 'all':
types = (QtCore.QVariant.Int, QtCore.QVariant.LongLong, QtCore.QVariant.Double,
QtCore.QVariant.UInt, QtCore.QVariant.ULongLong)
else:
        types = (type,)
if layer and layer.dataProvider():
for field in layer.dataProvider().fields():
if field.type() in types:
fields.append(field)
return fields
def getNumericFieldNames(layer, type='all'):
field_names = []
if type == 'all':
types = (QtCore.QVariant.Int, QtCore.QVariant.LongLong, QtCore.QVariant.Double,
QtCore.QVariant.UInt, QtCore.QVariant.ULongLong)
else:
        types = (type,)
if layer and layer.dataProvider():
for field in layer.dataProvider().fields():
if field.type() in types:
field_names.append(field.name())
return field_names
def getFieldIndex(layer, name):
idx = layer.dataProvider().fields().indexFromName(name)
return idx
def fieldHasValues(layer, name):
if layer and fieldExists(layer, name):
# find fields that only have NULL values
idx = getFieldIndex(layer, name)
maxval = layer.maximumValue(idx)
minval = layer.minimumValue(idx)
if maxval == NULL and minval == NULL:
return False
else:
return True
def fieldHasNullValues(layer, name):
if layer and fieldExists(layer, name):
idx = getFieldIndex(layer, name)
vals = layer.uniqueValues(idx,1)
# depending on the provider list is empty or has NULL value in first position
if not vals or (len(vals) == 1 and vals[0] == NULL):
return True
else:
return False
def getFieldValues(layer, fieldname, null=True, selection=False):
attributes = []
ids = []
if fieldExists(layer, fieldname):
if selection:
features = layer.selectedFeatures()
else:
request = QgsFeatureRequest().setSubsetOfAttributes([getFieldIndex(layer, fieldname)])
features = layer.getFeatures(request)
if null:
for feature in features:
attributes.append(feature.attribute(fieldname))
ids.append(feature.id())
else:
for feature in features:
val = feature.attribute(fieldname)
                if val != NULL:
attributes.append(val)
ids.append(feature.id())
return attributes, ids
def addFields(layer, names, types):
    # types can be QVariant.Int, QVariant.Double, QVariant.String
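    # NOTE: the source row truncates here. A minimal sketch of the usual
    # provider-based implementation is assumed below (QgsField is expected to
    # be imported from qgis.core alongside the other imports in this module).
    provider = layer.dataProvider()
    provider.addAttributes([QgsField(name, ftype)
                            for name, ftype in zip(names, types)])
    layer.updateFields()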
|
tchellomello/home-assistant
|
homeassistant/components/demo/sensor.py
|
Python
|
apache-2.0
| 2,682
| 0.001119
|
"""Demo platform that has a couple of fake sensors."""
from homeassistant.const import (
ATTR_BATTERY_LEVEL,
DEVICE_CLASS_HUMIDITY,
DEVICE_CLASS_TEMPERATURE,
    PERCENTAGE,
TEMP_CELSIUS,
)
from homeassistant.helpers.entity import Entity
from . import DOMAIN
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Set up the Demo sensors."""
async_add_entities(
[
DemoSensor(
"sensor_1",
"Outside Temperature",
15.6,
DEVICE_CLASS_TEMPERATURE,
TEMP_CELSIUS,
12,
),
DemoSensor(
"sensor_2",
"Outside Humidity",
54,
DEVICE_CLASS_HUMIDITY,
PERCENTAGE,
None,
),
]
)
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up the Demo config entry."""
await async_setup_platform(hass, {}, async_add_entities)
class DemoSensor(Entity):
"""Representation of a Demo sensor."""
def __init__(
self, unique_id, name, state, device_class, unit_of_measurement, battery
):
"""Initialize the sensor."""
self._unique_id = unique_id
self._name = name
self._state = state
self._device_class = device_class
self._unit_of_measurement = unit_of_measurement
self._battery = battery
@property
def device_info(self):
"""Return device info."""
return {
"identifiers": {
# Serial numbers are unique identifiers within a specific domain
(DOMAIN, self.unique_id)
},
"name": self.name,
}
@property
def unique_id(self):
"""Return the unique id."""
return self._unique_id
@property
def should_poll(self):
"""No polling needed for a demo sensor."""
return False
@property
def device_class(self):
"""Return the device class of the sensor."""
return self._device_class
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def state(self):
"""Return the state of the sensor."""
return self._state
@property
def unit_of_measurement(self):
"""Return the unit this state is expressed in."""
return self._unit_of_measurement
@property
def device_state_attributes(self):
"""Return the state attributes."""
if self._battery:
return {ATTR_BATTERY_LEVEL: self._battery}
|
project-chip/connectedhomeip
|
scripts/tools/memory/memdf/util/pretty.py
|
Python
|
apache-2.0
| 957
| 0
|
#
# Copyright (c) 2021 Project CHIP Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Pretty print logging."""
import logging
import pprint
from typing import Any
def log(level: int, x: Any) -> None:
    if logging.getLogger(None).isEnabledFor(level):
for line in pprint.pformat(x).split('\n'):
logging.log(level, line)
def info(x: Any) -> None:
log(logging.INFO, x)
def debug(x: Any) -> None:
log(logging.DEBUG, x)
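# Example (assumed usage): pretty-print a structure at INFO level; each line
# of pprint's output becomes its own log record.
#   info({'sections': ['.text', '.data'], 'total': 1234})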
|
TeddyDesTodes/pyflipdot
|
pyflipdot/web/admin/__init__.py
|
Python
|
bsd-3-clause
| 559
| 0.003578
|
from flask import render_template
from pyflipdot.plugins import get_pluginmanager
from pyflipdot.web.view import MenuFlaskView
__author__ = 'teddydestodes'
class AdminView(MenuFlaskView):
route_base = "admin"
menu_name = "Admin"
def index(self):
return render_template('base.html')
class PluginView(MenuFlaskView):
route_base = "plugins"
menu_name = "Plugins"
def index(self):
pm = get_pluginmanager()
        return render_template('plugins.html', plugins=pm.get_plugin_index())
AdminView.plugins = PluginView
|
u-engine/UIforETW
|
bin/ETWPackSymbols.py
|
Python
|
apache-2.0
| 4,627
| 0.016425
|
# Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import os
import re
import shutil
import sys
def main():
if len(sys.argv) < 3:
print("Syntax: PackETWSymbols ETWFilename.etl destdirname [-verbose]")
print("This script looks for symbols needed to decode the specified trace, and")
print("copies them to the specified directory. This allows moving traces to")
print("other machines for analysis and sharing.")
sys.exit(0)
ETLName = sys.argv[1]
DestDirName = sys.argv[2]
if not os.path.exists(DestDirName):
os.mkdir(DestDirName)
verbose = False
if len(sys.argv) > 3 and sys.argv[3].lower() == "-verbose":
verbose = True
print("Extracting symbols from ETL file '%s'." % ETLName)
# This command is slow but thorough -- it tries to build the symbol cache.
#command = "xperf.exe -i \"%s\" -tle -symbols -a symcache -quiet -build -imageid -dbgid" % ETLName
# This command is faster. It relies on symbols being loaded already for the modules of interest.
command = "xperf.exe -i \"%s\" -tle -a symcache -quiet -imageid -dbgid" % ETLName
print("Executing command '%s'" % command)
lines = os.popen(command).readlines()
if len(lines) < 30:
print("Error:")
for line in lines:
print(line, end='')
sys.exit(0)
# Typical output lines (including one heading) look like this:
#TimeDateStamp, ImageSize, OrigFileName, CodeView Record
  # 0x4da89d03, 0x00bcb000, "client.dll", "[RSDS] PdbSig: {7b2a9028-87cd-448d-8500-1a18cdcf6166}; Age: 753; Pdb: u:\buildbot\dota_staging_win32\build\src\game\client\Release_dota\client.pdb"
scan = re.compile(r' 0x(.*), 0x(.*), "(.*)", "\[RSDS\].*; Pdb: (.*)"')
matchCount = 0
matchExists = 0
ourModuleCount = 0
# Get the users build directory
vgame = os.getenv("vgame")
  if vgame is None:
print("Environment variable 'vgame' not found!")
sys.exit(-1)
vgame = vgame[:-5].lower()
prefixes = ["u:\\", "e:\\build_slave", vgame]
print("Looking for symbols built to:")
for prefix in prefixes:
print(" %s" % prefix)
# Default to looking for the SymCache on the C drive
prefix = "c"
# Look for a drive letter in the ETL Name and use that if present
if len(ETLName) > 1 and ETLName[1] == ':':
prefix = ETLName[0]
else:
# If there's no drive letter in the ETL name then look for one
# in the current working directory.
curwd = os.getcwd()
if len(curwd) > 1 and curwd[1] == ':':
prefix = curwd[0]
  symCachePathBase = os.getenv("_NT_SYMCACHE_PATH")
  if symCachePathBase is None or len(symCachePathBase) == 0:
symCachePathBase = "%s:\\symcache\\" % prefix
elif symCachePathBase[-1] != '\\':
symCachePathBase += '\\'
for line in lines:
result = scan.match(line)
if result is not None:
#print result.groups()
matchCount += 1
TimeDateStamp = result.groups()[0]
ImageSize = result.groups()[1]
OrigFileName = result.groups()[2]
PDBPath = result.groups()[3].lower()
# Find out which PDBs are 'interesting'. There is no obvious heuristic
# for this, but having a list of prefixes seems like a good start.
ours = False
for prefix in prefixes:
if PDBPath.startswith(prefix):
ours = True
if ours:
ourModuleCount += 1
ours = True
symFilePath = OrigFileName + "-" + TimeDateStamp + ImageSize + "v1.symcache"
symCachePath = symCachePathBase + symFilePath
if os.path.isfile(symCachePath):
matchExists += 1
print("Copying %s" % symCachePath)
shutil.copyfile(symCachePath, DestDirName + "\\" + symFilePath)
else:
print("Symbols for '%s' are not in %s" % (OrigFileName, symCachePathBase))
else:
#This is normally too verbose
if verbose:
print("Skipping %s" % PDBPath)
print("%d symbol files found in the trace, %d appear to be ours, and %d of those exist in symcache." % (matchCount, ourModuleCount, matchExists))
if __name__ == "__main__":
main()
|
uezo/minette-python
|
minette/serializer.py
|
Python
|
apache-2.0
| 6,581
| 0
|
import json
from datetime import datetime
import re
from .utils import date_to_str, str_to_date
def _is_datestring(s):
return isinstance(s, str) and \
re.match(r"(\d{4})-(\d{2})-(\d{2})T(\d{2})\:(\d{2})\:(\d{2})", s)
def _encode_datetime(obj):
if isinstance(obj, datetime):
return date_to_str(obj, obj.tzinfo is not None)
def _decode_datetime(d):
for k in d:
if _is_datestring(d[k]):
d[k] = str_to_date(d[k])
if isinstance(d[k], list):
for i, v in enumerate(d[k]):
if _is_datestring(v):
d[k][i] = str_to_date(v)
return d
def dumpd(obj):
"""
Convert object to dict
Parameters
----------
obj : object
Object to convert
Returns
-------
d : dict
Object as dict
"""
# return input directly if it is already dict
if isinstance(obj, dict):
return obj
# return list of dict
elif isinstance(obj, (list, tuple, set)):
return [dumpd(o) for o in obj]
# convert to dict
data = {}
for key in obj.__dict__.keys():
if not key.startswith("_"):
# convert each items in list-like object
if isinstance(getattr(obj, key, None), (list, tuple, set)):
data[key] = []
for v in getattr(obj, key, None):
if hasattr(v, "to_dict"):
data[key].append(v.to_dict())
elif hasattr(v, "__dict__"):
data[key].append(dumpd(v))
else:
data[key].append(v)
# convert each items in dict
elif isinstance(getattr(obj, key, None), dict):
data[key] = {}
for k, v in getattr(obj, key, None).items():
if hasattr(v, "to_dict"):
data[key][k] = v.to_dict()
elif hasattr(v, "__dict__"):
data[key][k] = dumpd(v)
else:
data[key][k] = v
# convert object with `to_dict`
elif hasattr(getattr(obj, key, None), "to_dict"):
data[key] = getattr(obj, key).to_dict()
# convert plain object
elif hasattr(getattr(obj, key, None), "__dict__"):
data[key] = dumpd(getattr(obj, key))
else:
data[key] = getattr(obj, key, None)
return data
def loadd(d, obj_cls):
"""
Convert dict to object
Parameters
----------
d : dict
Dictionary to convert
obj_cls : type
Class of object to convert
Returns
-------
obj : object
Instance of obj_cls
"""
# return None when input is None
if d is None:
return None
# return the list of objects when input is list
if isinstance(d, list):
return [loadd(di, obj_cls) for di in d]
# use `create_object` instead of its constructor
if hasattr(obj_cls, "create_object"):
obj = obj_cls.create_object(d)
else:
obj = obj_cls()
# get member's type info
types = obj_cls._types() if getattr(obj_cls, "_types", None) else {}
# set values to object
for k, v in d.items():
if k in types:
if hasattr(types[k], "from_dict"):
setattr(obj, k, types[k].from_dict(v))
else:
setattr(obj, k, loadd(v, types[k]))
else:
setattr(obj, k, v)
return obj
def dumps(obj, **kwargs):
"""
Encode object/dict to JSON
Parameters
----------
obj : object
Object to encode
Returns
-------
s : str
JSON string
"""
if obj is None:
return ""
d = dumpd(obj)
return json.dumps(d, default=_encode_datetime, **kwargs)
def loads(s, obj_cls=None, **kwargs):
"""
Decode JSON to dict/object
Parameters
----------
s : str
JSON string to decode
obj_cls : type, default None
Class of object to convert. If None, convert to dict
Returns
-------
obj : object
Instance of obj_cls
"""
if s is None or s == "":
return None
d = json.loads(s, object_hook=_decode_datetime, **kwargs)
if obj_cls is None:
return d
else:
return loadd(d, obj_cls)
class Serializable:
"""
Base class for serializable object
"""
@classmethod
def _types(cls):
"""
Override this method to create instance of specific class for members.
Configure like below then instance of `Foo` will be set to `self.foo`
and `Bar` to `self.bar`
```
return {
"foo": Foo,
"bar": Bar
}
```
"""
return {}
def __repr__(self):
return "<{} at {}>\n{}".format(
self.__class__.__name__,
hex(id(self)),
            self.to_json(indent=2, ensure_ascii=False))
@classmethod
def create_object(obj_cls, d):
return obj_cls()
def to_dict(self):
"""
Convert this object to dict
Returns
-------
d : dict
Object as dict
"""
return dumpd(self)
def to_json(self, **kwargs):
"""
Convert this object to JSON
Returns
-------
s : str
Object as JSON string
"""
return dumps(self, **kwargs)
@classmethod
def from_dict(cls, d):
"""
Create object from dict
Parameters
----------
d : dict
Dictionary of this object
Returns
-------
obj : Serializable
Instance of this class
"""
return loadd(d, cls)
@classmethod
def from_dict_dict(cls, dict_dict):
"""
Create dictionary of this objects from dictionaries of dictionaries
Parameters
----------
dict_dict : dict
Dictionary of dictionaries
Returns
-------
dict_of_this_obj : dict
Dictionary of this objects
"""
return {k: cls.from_dict(v) for k, v in dict_dict.items()}
@classmethod
def from_json(cls, s, **kwargs):
"""
Create this object from JSON string
Parameters
----------
s : str
JSON string of this object
Returns
-------
obj : Serializable
Instance of this class
"""
return loads(s, cls, **kwargs)
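# Example (hypothetical subclass `Foo`; not part of this module):
#   class Foo(Serializable):
#       def __init__(self, name=None):
#           self.name = name
#   foo = Foo.from_json('{"name": "minette"}')
#   assert foo.to_dict() == {"name": "minette"}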
|
ee-book/api
|
api/v1/users.py
|
Python
|
apache-2.0
| 416
| 0.004808
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
from ..lib.decorators import json
from . import api
@api.route("/test", methods=["GET"])
@json
def test():
return {}
@api.route("/auth/register", methods=["GET"])
def auth_register():
return {}
@api.route("/auth/exist", methods=["get"])
|
chanceraine/nupic.research
|
tests/classification/test_sensor_data_classification.py
|
Python
|
agpl-3.0
| 7,592
| 0.004478
|
#!/usr/bin/env python
# ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2015, Numenta, Inc. Unless you have an agreement
# with Numenta, Inc., for a separate license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU Affero Public License for more details.
#
# You should have received a copy of the GNU Affero Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
import shutil
import unittest
try:
import simplejson as json
except ImportError:
import json
from nupic.data.file_record_stream import FileRecordStream
from htmresearch.frameworks.classification.classification_network import (
configureNetwork,
runNetwork)
from htmresearch.frameworks.classification.utils.sensor_data import (
generateSensorData)
from htmresearch.frameworks.classification.utils.network_config import (
generateSampleNetworkConfig,
generateNetworkPartitions)
# Parameters to generate the artificial sensor data
OUTFILE_NAME = "white_noise"
SEQUENCE_LENGTH = 200
NUM_CATEGORIES = 3
NUM_RECORDS = 2400
WHITE_NOISE_AMPLITUDES = [0.0, 1.0]
SIGNAL_AMPLITUDES = [1.0]
SIGNAL_MEANS = [1.0]
SIGNAL_PERIODS = [20.0]
# Additional parameters to run the classification experiments
RESULTS_DIR = "results"
MODEL_PARAMS_DIR = 'model_params'
DATA_DIR = "data"
# Classifier types
CLA_CLASSIFIER_TYPE = "py.CLAClassifierRegion"
KNN_CLASSIFIER_TYPE = "py.KNNClassifierRegion"
class TestSensorDataClassification(unittest.TestCase):
"""Test classification results for sensor data."""
def setUp(self):
with open("sensor_data_network_config.json", "rb") as jsonFile:
self.templateNetworkConfig = json.load(jsonFile)
def testClassificationAccuracy(self):
"""Test classification accuracy for sensor data."""
networkConfigurations = generateSampleNetworkConfig(
self.templateNetworkConfig, NUM_CATEGORIES)
for networkConfig in networkConfigurations:
for noiseAmplitude in WHITE_NOISE_AMPLITUDES:
for signalMean in SIGNAL_MEANS:
for signalAmplitude in SIGNAL_AMPLITUDES:
for signalPeriod in SIGNAL_PERIODS:
sensorType = networkConfig["sensorRegionConfig"].get(
"regionType")
spEnabled = networkConfig["sensorRegionConfig"].get(
"regionEnabled")
tmEnabled = networkConfig["tmRegionConfig"].get(
"regionEnabled")
upEnabled = networkConfig["upRegionConfig"].get(
"regionEnabled")
classifierType = networkConfig["classifierRegionConfig"].get(
"regionType")
expParams = ("RUNNING EXPERIMENT WITH PARAMS:\n"
" * numRecords=%s\n"
" * signalAmplitude=%s\n"
" * signalMean=%s\n"
" * signalPeriod=%s\n"
" * noiseAmplitude=%s\n"
" * sensorType=%s\n"
" * spEnabled=%s\n"
" * tmEnabled=%s\n"
" * upEnabled=%s\n"
" * classifierType=%s\n"
) % (NUM_RECORDS,
signalAmplitude,
signalMean,
signalPeriod,
noiseAmplitude,
sensorType.split(".")[1],
spEnabled,
tmEnabled,
upEnabled,
classifierType.split(".")[1])
print expParams
inputFile = generateSensorData(DATA_DIR,
OUTFILE_NAME,
signalMean,
signalPeriod,
SEQUENCE_LENGTH,
NUM_RECORDS,
signalAmplitude,
NUM_CATEGORIES,
noiseAmplitude)
dataSource = FileRecordStream(streamID=inputFile)
network = configureNetwork(dataSource,
networkConfig)
partitions = generateNetworkPartitions(networkConfig,
NUM_RECORDS)
(numCorrect,
numTestRecords,
predictionAccuracy) = runNetwork(network, networkConfig,
partitions, NUM_RECORDS)
if (noiseAmplitude == 0
and signalMean == 1.0
and signalAmplitude == 1.0
and signalPeriod == 20.0
and classifierType == KNN_CLASSIFIER_TYPE
and spEnabled
and tmEnabled
and not upEnabled):
self.assertEqual(predictionAccuracy, 100.00)
elif (noiseAmplitude == 0
and signalMean == 1.0
and signalAmplitude == 1.0
and signalPeriod == 20.0
and classifierType == CLA_CLASSIFIER_TYPE
and spEnabled
and tmEnabled
and not upEnabled):
self.assertEqual(predictionAccuracy, 100.00)
elif (noiseAmplitude == 0
and signalMean == 1.0
and signalAmplitude == 1.0
and signalPeriod == 20.0
and classifierType == CLA_CLASSIFIER_TYPE
and spEnabled
and not tmEnabled
                    and not upEnabled):
self.assertEqual(predictionAccuracy, 100.00)
elif (noiseAmplitude == 1.0
and signalMean == 1.0
and signalAmplitude == 1.0
and signalPeriod == 20.0
and classifierType == CLA_CLASSIFIER_TYPE
and spEnabled
and tmEnabled
and not upEnabled):
# using AlmostEqual until the random bug issue is fixed
self.assertAlmostEqual(predictionAccuracy, 80, delta=1)
elif (noiseAmplitude == 1.0
and signalMean == 1.0
and signalAmplitude == 1.0
and signalPeriod == 20.0
and classifierType == CLA_CLASSIFIER_TYPE
and spEnabled
and not tmEnabled
and not upEnabled):
# using AlmostEqual until the random bug issue is fixed
self.assertAlmostEqual(predictionAccuracy, 81, delta=1)
def tearDown(self):
shutil.rmtree(DATA_DIR)
if __name__ == "__main__":
unittest.main()
|
duaraghav8/Corque
|
demo.py
|
Python
|
mit
| 2,535
| 0.013807
|
import os
# We'll render HTML templates and access data sent by POST
# using the request object from flask. Redirect and url_for
# will be used to redirect the user once the upload is done
# and send_from_directory will help us to send/show on the
# browser the file that the user just uploaded
from flask import Flask, render_template, request, redirect, url_for, send_from_directory, jsonify
from werkzeug import secure_filename
import detect, face_recognizer
# Initialize the Flask application
app = Flask(__name__)
# This is the path to the upload directory
app.config['UPLOAD_FOLDER'] = 'uploads/'
# These are the extensions that we accept for upload
app.config['ALLOWED_EXTENSIONS'] = set(['png', 'jpg', 'jpeg'])
# For a given file, return whether it's an allowed type or not
def allowed_file(filename):
return '.' in filename and \
filename.rsplit('.', 1)[1] in app.config['ALLOWED_EXTENSIONS']
# This route will show a form to perform an AJAX request
# jQuery is loaded to execute the request and update the
# value of the operation
@app.route('/')
def index():
return render_template('index.html')
file = None
# Route that will process the file upload
@app.route('/upload', methods=['GET','POST'])
def upload():
# Get the name of the uploaded file
file = request.files['file']
# Check if the file is one of the allowed types/extensions
if file and allowed_file(file.filename):
# Make the filename safe, remove unsupported chars
filename = secure_filename(file.filename)
# Move the file form the temporal folder to
# the upload folder we setup
file.save(os.path.join(app.config['UPLOAD_FOLDER'], filename))
detect.doDetect(file)
present_student_list = face_recognizer.doFaceRecognition()
return jsonify (
students_present=present_student_list)
return "something went wrong"
# Redirect the user to the uploaded_file route, which
# will basically show on the browser the uploaded file
# return redirect(url_for('uploaded_file',
# filename=filename))
@app.route('/uploads/<filename>')
def uploaded_file(filename):
#return send_from_directory(app.config['UPLOAD_FOLDER'], filename)
#detect.doDetect(file)
if file is None:
return "File is none"
# if file is not None:
# # present_student = face_recognizer.doFaceRecognition()
# return "file is not none"
return "file is not none"
if __name__ == '__main__':
app.run()
|
comic/comic-django
|
app/grandchallenge/evaluation/serializers.py
|
Python
|
apache-2.0
| 1,856
| 0
|
from django.contrib.auth import get_user_model
from rest_framework.fields import CharField
from rest_framework.serializers import ModelSerializer
from grandchallenge.challenges.models import Challenge
from grandchallenge.components.serializers import (
ComponentInterfaceValueSerializer,
)
from grandchallenge.evaluation.models import (
Evaluation,
Phase,
Submission,
)
class UserSerializer(ModelSerializer):
class Meta:
model = get_user_model()
fields = ("username",)
class ChallengeSerializer(ModelSerializer):
class Meta:
model = Challenge
fields = (
"title",
"short_name",
)
class PhaseSerializer(ModelSerializer):
challenge = ChallengeSerializer()
class Meta:
model = Phase
fields = (
"challenge",
"title",
"slug",
)
class SubmissionSerializer(ModelSerializer):
phase = PhaseSerializer()
creator = UserSerializer()
class Meta:
model = Submission
fields = (
"pk",
"phase",
"created",
"creator",
"comment",
"predictions_file",
"supplementary_file",
"supplementary_url",
)
class EvaluationSerializer(ModelSerializer):
submission = SubmissionSerializer()
outputs = ComponentInterfaceValueSerializer(many=True)
    status = CharField(source="get_status_display", read_only=True)
title = CharField(read_only=True)
class Meta:
model = Evaluation
fields = (
"pk",
"method",
"submission",
"created",
"published",
"outputs",
"rank",
"rank_score",
"rank_per_metric",
"status",
"title",
)
|
vileopratama/vitech
|
src/addons/delivery/__openerp__.py
|
Python
|
mit
| 886
| 0
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
{
'name': 'Delivery Costs',
    'version': '1.0',
'category': 'Stock',
'description': """
Allows you to add delivery methods in sale orders and picking.
==============================================================
You can define your own carrier for prices. When creating
invoices from picking, the system is able to add and compute the shipping line.
""",
'depends': ['sale_stock'],
'data': [
'security/ir.model.access.csv',
'views/delivery_view.xml',
'views/partner_view.xml',
'data/delivery_data.xml',
'views/report_shipping.xml',
'views/report_deliveryslip.xml'
],
'demo': ['data/delivery_demo.xml'],
'test': [
'../account/test/account_minimal_test.xml',
],
'installable': True,
}
|
Unofficial-Extend-Project-Mirror/openfoam-extend-Breeder-other-scripting-PyFoam
|
unittests/Basics/test_TemplateFile.py
|
Python
|
gpl-2.0
| 7,289
| 0.019207
|
import unittest
from PyFoam.Basics.TemplateFile import TemplateFile,TemplateFileOldFormat,PyratempPreprocessor
from PyFoam.Error import FatalErrorPyFoamException
from tempfile import mktemp
from PyFoam.ThirdParty.six import PY3
import sys
theSuite=unittest.TestSuite()
template1="""$$ y = 3+x
This should be $x+y$"""
template2="""
$$ xxx=13
$$ xx=34+xxx
$2*x+xx-xxx$
"""
templateFor="""$$ y = 2*x
<!--(for i in range(y))--> @!i!@ <!--(end)-->#!
"""
templateMath="sqrt(x) = $sqrt(x)$"
templateList="""<!--(for e in theList)-->#!
<!--(if e.lower()=="joe")-->#!
Big @!e!@
<!--(else)-->#!
Little @!e!@
<!--(end)-->#!
<!--(end)-->#!
"""
templateMacro="""<!--(macro tabsquare)-->
@!x!@ \t = @!x*x!@
<!--(end)-->
<!--(for i in vals)-->@!tabsquare(x=i)!@<!--(end)-->#!
"""
templateBuiltIn="""
<!--(if True)-->TRUE<!--(end)-->
<!--(if not False)-->FALSE<!--(end)-->
@!min(2,3)!@ @!max(2,3)!@
@!chr(42)!@ @!ord(' ')!@
"""
templateVariablesIn3="""
$$ duesenAus=[0,2,3]
$$ duesenNamen=["B30"]+["B%d_%d" % (29-i,j) for i in range(7) for j in [2,1]]
$$ removeDuesen=[duesenNamen[i] for i in duesenAus]
<!--(for d in removeDuesen)-->
|-d-|
<!--(end)-->
"""
class TemplateFileTest(unittest.TestCase):
def testTemplateFileString(self):
t=TemplateFile(content=template1,expressionDelimiter="$")
self.assertEqual(t.getString({"x":-1}),"This should be 1")
fName=mktemp()
t.writeToFile(fName,{"x":1+2.})
result=open(fName).read()
self.assertEqual(result,"This should be 9.0")
def testTemplateFileFile(self):
fName=mktemp()
open(fName,"w").write(template1)
t=TemplateFile(name=fName,expressionDelimiter="$")
self.assertEqual(t.getString({"x":-1}),"This should be 1")
def testTemplateFileLongVars(self):
t=TemplateFile(content=template2,expressionDelimiter="$")
self.assertEqual(int(t.getString({"x":1})),36)
def testTemplateFileForLoop(self):
t=TemplateFile(content=templateFor)
self.assertEqual(t.getString({"x":2})," 0 1 2 3 ")
def testTemplateFileMacro(self):
t=TemplateFile(content=templateMacro)
if PY3 and sys.version_info.minor>1:
self.assertEqual(t.getString({"vals":[2,3.3,-1]}),"2 \t = 4\n3.3 \t = 10.889999999999999\n-1 \t = 1\n")
else:
self.assertEqual(t.getString({"vals":[2,3.3,-1]}),"2 \t = 4\n3.3 \t = 10.89\n-1 \t = 1\n")
def testTemplateFileListLoop(self):
t=TemplateFile(content=templateList)
self.assertEqual(t.getString({"theList":["Henry","Joe","joe","Tom"]}),"Little Henry\nBig Joe\nBig joe\nLittle Tom\n")
def testTemplateFileLongMath(self):
t=TemplateFile(content=templateMath,expressionDelimiter="$")
self.assertEqual(t.getString({"x":4}),"sqrt(x) = 2.0")
def testTemplateFileMathRealDelim(self):
t=TemplateFile(content=templateMath.replace("$","|"))
self.assertEqual(t.getString({"x":4}),"sqrt(x) = 2.0")
def testTemplateFilePercentDelimiter(self):
t=TemplateFile(content="x=$!x!$")
self.assertEqual(t.getString({"x":4}),"x=4")
def testTemplateFileBuiltinStuff(self):
t=TemplateFile(content=templateBuiltIn)
self.assertEqual(t.getString({}),"\nTRUE\nFALSE\n2 3\n* 32\n")
theSuite.addTest(unittest.makeSuite(TemplateFileTest,"test"))
class TemplateFileAllowExecutionTest(unittest.TestCase):
def testAssignmentNotWorkingInPython3(self):
t=TemplateFile(content=templateVariablesIn3,
expressionDelimiter="|-",
allowExec=True)
self.assertEqual(t.getString({}),"\nB30\nB29_1\nB28_2\n")
class TemplateFileOldFormatTest(unittest.TestCase):
def testTemplateFileString(self):
t=TemplateFileOldFormat(content=template1)
self.assertEqual(t.getString({"x":-1}),"This should be 1\n")
fName=mktemp()
t.writeToFile(fName,{"x":"1+sqrt(4)"})
result=open(fName).read()
self.assertEqual(result,"This should be 9.0\n")
def testTemplateFileFile(self):
fName=mktemp()
open(fName,"w").write(template1)
t=TemplateFileOldFormat(name=fName)
self.assertEqual(t.getString({"x":-1}),"This should be 1\n")
def testTemplateFileLongVars(self):
t=TemplateFileOldFormat(content=template2)
self.assertEqual(int(t.getString({"x":1})),36)
def testTemplateFileLongMath(self):
t=TemplateFileOldFormat(content=templateMath)
self.assertEqual(t.getString({"x":4}),"sqrt(x) = 2.0\n")
theSuite.addTest(unittest.makeSuite(TemplateFileOldFormatTest,"test"))
class PyratempPreprocessorTest(unittest.TestCase):
def testFullPreprocessing(self):
p=PyratempPreprocessor()
self.assertEqual(p("nix\nda"),"nix\nda")
self.assertEqual(p("nix\nda\n"),"nix\nda\n")
self.assertEqual(p(""),"")
self.assertEqual(p("\n"),"\n")
self.assertEqual(p("$$ a=2 "),'$!setvar("a", "2")!$#!')
self.assertEqual(p(" $$ a=2 ")," $$ a=2 ")
self.assertRaises(FatalErrorPyFoamException,p,"$$ a ")
# Does not work with old nose
# with self.assertRaises(FatalErrorPyFoamException):
# p("$$ a ")
self.assertEqual(p("$$ a=2\n"),'$!setvar("a", "2")!$#!\n')
self.assertEqual(p("$$ a=2\n$$ b=3"),'$!setvar("a", "2")!$#!\n$!setvar("b", "3")!$#!')
self.assertEqual(p(" $foo$ $bar$ ")," $!foo!$ $!bar!$ ")
self.assertEqual(p("$foo$ $bar$"),"$!foo!$ $!bar!$")
self.assertEqual(p("$foo$ $bar$\n"),"$!foo!$ $!bar!$\n")
def testNoVarLinePreprocessing(self):
p=PyratempPreprocessor(dovarline=False)
self.assertEqual(p("nix\nda"),"nix\nda")
self.assertEqual(p("nix\nda\n"),"nix\nda\n")
self.assertEqual(p(""),"")
self.assertEqual(p("\n"),"\n")
self.assertEqual(p("$$ a=2 "),'$$ a=2 ')
self.assertEqual(p(" $$ a=2 ")," $$ a=2 ")
self.assertEqual(p("$$ a "),"$$ a ")
self.assertEqual(p("$$ a=2\n"),'$$ a=2\n')
self.assertEqual(p("$$ a=2\n$$ b=3"),'$$ a=2\n$$ b=3')
self.assertEqual(p(" $foo$ $bar$ ")," $!foo!$ $!bar!$ ")
self.assertEqual(p("$foo$ $bar$"),"$!foo!$ $!bar!$")
self.assertEqual(p("$foo$ $bar$\n"),"$!foo!$ $!bar!$\n")
def testNoExprPreprocessing(self):
p=PyratempPreprocessor(doexpr=False)
self.assertEqual(p("nix\nda"),"nix\nda")
self.assertEqual(p("nix\nda\n"),"nix\nda\n")
self.assertEqual(p(""),"")
self.assertEqual(p("\n"),"\n")
self.assertEqual(p("$$ a=2 "),'$!setvar("a", "2")!$#!')
self.assertEqual(p(" $$ a=2 ")," $$ a=2 ")
self.assertRaises(FatalErrorPyFoamException,p,"$$ a ")
# Does not work with old nose
# with self.assertRaises(FatalErrorPyFoamException):
# p("$$ a ")
self.assertEqual(p("$$ a=2\n"),'$!setvar("a", "2")!$#!\n')
self.assertEqual(p("$$ a=2\n$$
|
b=3"),'$!setvar("a", "2")!$#!\n$!setvar("b", "3")!$#!')
self.assertEqual(p(" $foo$ $bar$ ")," $foo$ $bar$ ")
self.assertEqual(p("$foo$ $bar$"),"$foo$ $bar$")
self.assertEqual(p("$foo$ $bar$\n"),"$foo$ $bar$\n")
theSuite.addTest(unittest.makeSuite(PyratempPreprocessorTest,"test"))
|
earies/jvpn
|
jvpn/netstats.py
|
Python
|
apache-2.0
| 1,247
| 0.020048
|
"""JVPN netstats libraries
"""
__author__ = 'e@dscp.org (Ebben Aries)'
import socket
import struct
def GetNetstats(device):
device = device + ':'
for line in open('/proc/net/dev', 'r'):
data = filter(None, line.split(' '))
if data[0] == device:
return (data[1], data[2], data[9], data[10])
def GetRoutes(device):
routes = []
for line in open('/proc/net/route', 'r'):
if line.startswith(device):
prefix = socket.inet_ntoa(struct.pack('<L', int(line.split()[1], 16)))
metric = int(line.split()[6])
      netmask = socket.inet_ntoa(struct.pack('<L', int(line.split()[7], 16)))
route_detail = '%s/%s:%d' % (prefix, netmask, metric)
routes.append(route_detail)
return routes
def GetIp(device):
ip = ''
for line in open('/proc/net/route', 'r'):
if line.startswith(device):
      ip = socket.inet_ntoa(struct.pack('<L', int(line.split()[2], 16)))
break
return ip
def GetDefInterface(interface='eth0', gateway='0.0.0.0'):
for line in open('/proc/net/route', 'r'):
if line.split()[1] == '00000000' and line.split()[7] == '00000000':
interface = line.split()[0]
gateway = socket.inet_ntoa(struct.pack('<L', int(line.split()[2], 16)))
return gateway, interface
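# Example (assumed device name; values are read from /proc/net/*):
#   rx_bytes, rx_packets, tx_bytes, tx_packets = GetNetstats('tun0')
#   gateway, interface = GetDefInterface()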
|
danisuke0781/instant-press
|
languages/nl.py
|
Python
|
gpl-2.0
| 14,840
| 0.021972
|
# coding: utf8
{
'"update" is an optional expression like "field1=\'newvalue\'". You cannot update or delete the results of a JOIN': '"update" is een SQL-expressie zoals "field1=\'newvalue\'". U kunt de resultaten van een JOIN niet updaten of wissen',
'%Y-%m-%d': '%Y-%m-%d',
'%Y-%m-%d %H:%M:%S': '%Y-%m-%d %H:%M:%S',
'%s rows deleted': '%s kaarten gewist',
'%s rows updated': '*** %s kaarten veranderd',
'About': 'Info',
'Act': 'Act',
'Add': 'Voegtoe',
'Admin Panel': 'Admin Paneel',
'Are you sure to delete this category?': 'Weet u zeker dat u deze categorie wilt wissen?',
'Are you sure you want to delete this category?': 'Weet u zeker dat u deze categorie wilt wissen?',
'Article added': 'Artikel toegevoegd',
'Articles in Archive ': 'Artikel in Archief ',
'Articles with ': 'Artikel met ',
'Articles with category': 'Artikel met categorie',
'Articles with tag': 'Artikelen met kenmerk',
'Available databases and tables': 'Beschikbare gegevensbestanden en tabellen',
'Avatar uploaded': 'Avatar opgestuurd',
'Avatars are disable.': 'Avatars zijn uitgeschakeld.',
'Avatars are disabled.': 'Avatars zijn uitgeschakeld.',
'Back to the index page': 'Terug naar de startpagina',
'Cannot be empty': 'Mag niet leeg zijn',
'Cats': 'Catn',
'Change Avatar': 'Verander Avatar',
'Change about': 'Verander info',
'Change author': 'Verander auteur',
'Change content': 'Verander inhoud',
'Change css': 'Verander css',
'Change description': 'Verander omschrijving',
'Change email': 'Verander e-mail',
'Change extract': 'Verander samenvatting',
'Change first name': 'Verander voornaam',
'Change footer': 'Verander voettekst',
'Change front page': 'Verander voorpagina',
'Change keywords (sep. by ,)': 'Verander trefwoorden (gesch. dr. ,)',
'Change last name': 'Verander achternaam',
'Change logo url': 'Verander logo url',
'Change name': 'Verander naam',
'Change password': 'Verander wachtwoord',
'Change site information': 'Verander site informatie',
'Change subtitle': 'Verander de subtitel',
'Change title': 'Verander de titel',
'Change url': 'Verander de url',
'Check to delete': 'Vink aan om te wissen',
'Check to delete:': 'Vink aan om te wissen:',
'Click if you want to make this article a link to a site, to list in panels need to be a page also': 'Klik als u van dit artikel een link naar een site wilt maken. (Om getoond te worden in de panelen moet het ook een Pagina zijn)',
'Click if you want to make this article a page': 'Klik als u van dit artikel een Pagina (met menuknop) wilt maken',
'Click to change about content': 'Klik om Info aan te passen',
'Click to change categories of this article': 'Klik om categorieën van artikel aan te passen',
'Click to change footer content': 'Klik om voettekst aan te passen',
'Click to change keywords of the site': 'Klik om trefwoorden aan te passen',
'Click to change keywords of this article': 'Klik om trefwoorden aan te passen',
'Click to change name of this article': 'Klik om aan te passen name of this article',
'Click to change the content of the article, content is all the body of the article': 'Klik om inhoud van artikel aan te passen, inhoud de z.g. body van het artikel',
'Click to change the description of the site': 'Klik om omschrijving van de site aan te passen',
'Click to change the extract of the article, extract is a slice of the content you want to show in search': 'Klik om aan te passen the extract of the article, extract is a slice of the content you want to show in search',
'Click to change the frontpage of the site': 'Klik om voorpagina aan te passen',
'Click to change the logo': 'Klik om het logo aan te passen',
'Click to change the subtitle of the site': 'Klik om subtitel van de site aan te passen',
'Click to change the title of the article': 'Klik om titel van artikel aan te passen',
'Click to change the title of the site': 'Klik om titel van de site aan te passen',
'Click to delete this article': 'Klik om dit artikel te wissen',
'Click to preview the article (publish or not)': 'Klik om dit artikel te bekijken (publiek of niet)',
'Click to publish this article': 'Klik omdit artikel te publiceren',
'Client IP': 'Client IP',
'Close this window': 'Sluit dit venster',
'Comment edit': 'Comment aanpassing',
'Content': 'Inhoud',
'Controller': 'Controller',
'Copyright': 'Copyright',
'Create new article': 'Maak nieuw artikel',
'Current request': 'Huidige request',
'Current response': 'Huidige response',
'Current session': 'Huidige session',
'DB Model': 'DB Model',
'Database': 'Database',
'Delete:': 'Wis:',
'Description': 'Omschrijving',
'E-mail': 'E-mail',
'Edit': 'Verander',
'Edit This App': 'Pas deze App aan',
'Edit current record': 'Pas huidige kaart aan',
'Error 400!': 'Fout 400!',
'Error 404!': 'Fout 404!',
'Extract': 'Extract',
'First name': 'Voornaam',
'Footer': 'Voettekst',
'Front Page': 'Voorpagina',
'Go back to main page': 'Ga terug naar start pagina',
'Group %(group_id)s created': 'Groep %(group_id)s aangemaakt',
'Group ID': 'Groep ID',
'Group uniquely assigned to user %(id)s': 'Groep exclusief toegekend aan gebruiker %(id)s',
'Hello World': 'Hallo Wereld',
'Home': 'Start',
'Image': 'Image',
'Import/Export': 'Import/Export',
'Index': 'Index',
'Internal State': 'Internal State',
'Invalid Query': 'Ongeldige query',
'Invalid email': 'Ongeldig e-mailadres',
'Invalid login': 'Ongeldige login',
'Invalid password': 'Ongeldig wachtwoord',
'Keywords': 'Keywords',
'Language': 'Language',
'Last name': 'Last name',
'Layout': 'Layout',
'Leave a Reply': 'Leave a Reply',
'List articles': 'List articles',
'Logged in': 'Logged in',
'Logged out': 'Logged out',
'Login': 'Login',
'Logo': 'Logo',
'Logout': 'Logout',
'Lost password': 'Lost password',
'Main Menu': 'Main Menu',
'Make sure all words are spelled correctly': 'Make sure all words are spelled correctly',
'Menu Model': 'Menu Model',
'My Profile': 'Mijn profiel',
'NO': 'NO',
'Name': 'Naam',
'New Record': 'Nieuw Record',
'New password': 'Nieuw password',
'No Title': 'Geen Titel',
'No articles': 'Geen artikelen',
'No comments loaded yet!. If persist enable javascript or update your browser.': 'Commentaren nog niet geladen! Als dit zo blijft zet javascript-ondersteuning aan of ververs browser.',
'No databases in this application': 'Geen databases in deze applicatie',
'No description': 'Geen omschrijving',
'No message receive from server': 'Geen mededeling ontvangen van server',
'Old password': 'Oude wachtwoord',
'Origin': 'Afkomstig',
'Page': 'Pagina',
'PageUrl': 'PaginaUrl',
'Pages': 'Pagina\'s',
'Password': 'Wachtwoord',
'Password changed': 'Wachtwoord aangepast',
"Password fields don't match": "Wachtwoordvelden komen niet overeen",
'Powered by': 'Aangedreven door',
'Powered by Instant Press': 'Aangedreven door Instant Press',
'Powered by Web2py Enterprise Framework': 'Aangedreven door Web2py Enterprise Framework',
'Powered by python': 'Aangedreven door python',
'Problem with avatars': 'Probleem met avatars',
'Problem with categorie id value!': 'Probleem met categorie id waarde!',
'Problem with id value': 'Probleem met id waarde',
'Problem with some submitted values': 'Probleem met enkele opgestuurde waardes',
'Problem with the values submitted': 'Probleem met opgestuurde waardes',
'Profile': 'Profiel',
'Public': 'Publiek',
'Query:': 'Query:',
'Record %(id)s updated': 'Kaart %(id)s aangepast',
'Record ID': 'Kaart ID',
'Record Updated': 'Kaart Aangepast',
'Refresh': 'Ververs',
'Register': 'Registeer',
'Registration key': 'Registratie sleutel',
'Registration successful': 'Registratie successful',
'Reload the list': 'Ververs de lijst',
'Remember me (for 30 days)': 'Onthoud me (30 dagen)',
'Request reset password': 'Verzoek om wachtwoord terug te zetten',
'Reset Password key': 'Zet Wachtwoord terug',
'Role': 'Rol',
'Rows in table': 'Rijen in tabel',
'Rows selected': 'Rijen geselecteerd',
'Rss': 'Rss',
'Rss last comments': 'Rss laatste commentaren',
'Rss last posts': 'Rss laatste plaatsingen',
'Save the content': 'Sla de inhoud op',
'Search': 'Zoek',
'Search in title': 'Zoek in de titel',
'Show articles': 'Toon artikelen',
|
FedoraScientific/salome-smesh
|
doc/salome/examples/filters_ex01.py
|
Python
|
lgpl-2.1
| 1,332
| 0.016517
|
# Aspect ratio
# create mesh
from SMESH_mechanic import *
# get faces with aspect ratio > 1.5
filter = smesh.GetFilter(SMESH.FACE, SMESH.FT_AspectRatio, SMESH.FT_MoreThan, 1.5)
ids = mesh.GetIdsFromFilter(filter)
print "Number of faces with aspect ratio > 1.5:", len(ids)
# copy the faces with aspect ratio > 1.5 to another mesh;
# this demonstrates that a filter can be used where usually a group or submesh is acceptable
filter.SetMesh( mesh.GetMesh() )
mesh2 = smesh.CopyMesh( filter, "AR > 1.5" )
print "Number of copied faces with aspect ratio > 1.5:", mesh2.NbFaces()
# create a Group of faces with Aspect Ratio < 1.5
group = mesh.MakeGroup("AR < 1.5", SMESH.FACE, SMESH.FT_AspectRatio, '<', 1.5)
print "Number of faces with aspect ratio < 1.5:", group.Size()
# combine several criteria to create a Group of only Triangular faces with Aspect Ratio < 1.5;
# note that contents of a GroupOnFilter is dynamically updated as the mesh changes
crit = [ smesh.GetCriterion( SMESH.FACE, SMESH.FT_AspectRatio, '<', 1.5, BinaryOp=SMESH.FT_LogicalAND ),
smesh.GetCriterion( SMESH.FACE, SMESH.FT_ElemGeomType,'=', SMESH.Geom_TRIANGLE ) ]
filter = smesh.GetFilterFromCriteria( crit )
triaGroup = mesh.GroupOnFilter( SMESH.FACE, "Tria AR < 1.5", filter )
print "Number of triangles with aspect ratio < 1.5:", triaGroup.Size()
|
kagel/foobnix
|
foobnix/gui/about/about.py
|
Python
|
gpl-3.0
| 1,313
| 0.008397
|
# -*- coding: utf-8 -*-
'''
Created on Oct 2, 2010
@author: dimitry (zavlab1)
'''
from gi.repository import Gtk
from gi.repository import Gdk
from foobnix.gui.service.path_service import get_foobnix_resourse_path_by_name
from foobnix.util.const import ICON_FOOBNIX
from foobnix.version import FOOBNIX_VERSION
class AboutWindow(Gtk.AboutDialog):
def __init__(self):
Gtk.AboutDialog.__init__(self)
self.set_program_name("Foobnix")
self.set_version(FOOBNIX_VERSION)
        self.set_copyright("(c) Ivan Ivanenko <ivan.ivanenko@gmail.com>")
self.set_comments(_("Simple and Powerful player"))
self.set_website("http://www.foobnix.com")
self.set_authors(["Dmitry Kozhura (zavlab1) <zavlab1@gmail.com>", "Pietro Campagnano <fain182@gmailcom>", "Viktor Suprun <popsul1993@gmail.com>"])
self.set_translator_credits("""Bernardo Miguel Savone
Sérgio Marques
XsLiDian
KamilSPL
north
Alex Serada
Ivan Ivanenko
Dmitry-Kogura
Fitoschido
zeugma
Schaffino
Oleg «Eleidan» Kulik
Sergey Zigachev
Martino Barbon
Florian Heissenberger
Aldo Mann""")
self.set_logo(Gdk.pixbuf_new_from_file(get_foobnix_resourse_path_by_name(ICON_FOOBNIX))) #@UndefinedVariable
def show(self):
self.run()
self.destroy()
|
simark/simulavr
|
regress/test_opcodes/test_SUB.py
|
Python
|
gpl-2.0
| 3,851
| 0.027266
|
#! /usr/bin/env python
###############################################################################
#
# simulavr - A simulator for the Atmel AVR family of microcontrollers.
# Copyright (C) 2001, 2002 Theodore A. Roth
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
###############################################################################
#
# $Id: test_SUB.py,v 1.1 2004/07/31 00:59:11 rivetwa Exp $
#
"""Test the SUB opcode.
"""
import base_test
from registers import Reg, SREG
class SUB_TestFail(base_test.TestFail): pass
class base_SUB(base_test.opcode_test):
"""Generic test case for testing SUB opcode.
  SUB - Subtract without Carry. [Rd <- Rd - Rr]
opcode is '0001 10rd dddd rrrr' where r and d are registers (d is destination).
Only registers PC, Rd and SREG should be changed.
"""
def setup(self):
# Set SREG to zero
self.setup_regs[Reg.SREG] = 0
# Set the register values
self.setup_regs[self.Rd] = self.Vd
self.setup_regs[self.Rr] = self.Vr
# Return the raw opcode
return 0x1800 | (self.Rd << 4) | ((self.Rr & 0x10) << 5) | (self.Rr & 0xf)
def analyze_results(self):
self.reg_changed.extend( [self.Rd, Reg.SREG] )
# check that result is correct
res = (self.Vd - self.Vr)
expect = res & 0xff
got = self.anal_regs[self.Rd]
if expect != got:
self.fail('SUB r%02d, r%02d: 0x%02x - 0x%02x = (expect=%02x, got=%02x)' % (
self.Rd, self.Rr, self.Vd, self.Vr, expect, got))
expect_sreg = 0
# calculate what we expect sreg to be (I and T should be zero)
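    # Borrow generation per the AVR data sheet: a borrow out of bit n occurs
    # when ~Rd&Rr | Rr&R | R&~Rd is set at that bit; bit 3 yields the H flag,
    # bit 7 the C flag.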
carry = ((~self.Vd & self.Vr) | (self.Vr & res) | (res & ~self.Vd))
H = (carry >> 3) & 1
C = (carry >> 7) & 1
V = (((self.Vd & ~self.Vr & ~res) | (~self.Vd & self.Vr & res)) >> 7) & 1
N = ((expect & 0x80) != 0)
expect_sreg += H << SREG.H
expect_sreg += V << SREG.V
expect_sreg += N << SREG.N
expect_sreg += (N ^ V) << SREG.S
expect_sreg += C << SREG.C
expect_sreg += (expect == 0) << SREG.Z
got_sreg = self.anal_regs[Reg.SREG]
if expect_sreg != got_sreg:
self.fail('SUB r%02d, r%02d: 0x%02x - 0x%02x -> SREG (expect=%02x, got=%02x)' % (
self.Rd, self.Rr, self.Vd, self.Vr, expect_sreg, got_sreg))
#
# Template code for test case.
# The fail method will raise a test specific exception.
#
template = """
class SUB_rd%02d_vd%02x_rr%02d_vr%02x_TestFail(SUB_TestFail): pass
class test_SUB_rd%02d_vd%02x_rr%02d_vr%02x(base_SUB):
Rd = %d
Vd = 0x%x
Rr = %d
Vr = 0x%x
def fail(self,s):
raise SUB_rd%02d_vd%02x_rr%02d_vr%02x_TestFail, s
"""
#
# Define a list of test values such that we all the cases of SREG bits being set.
#
vals = (
( 0x00, 0x00 ),
( 0xff, 0x00 ),
( 0xfe, 0x01 ),
( 0x0f, 0x00 ),
( 0x0f, 0xf0 ),
( 0x01, 0x02 ),
( 0x80, 0x01 )
)
#
# automagically generate the test_SUB_rdNN_vdXX_rrNN_vrXX class definitions.
# For these, we don't want Rd=Rr as that is a special case handled below.
#
code = ''
for d in range(0,32,4):
for r in range(1,32,4):
for vd,vr in vals:
args = (d,vd,r,vr)*4
code += template % args
# make sure things work if Rd == Rr
for d in range(2,32,4):
for vd,vr in vals:
args = (d,vd,d,vd)*4
code += template % args
exec code
|
winksaville/craftr
|
craftr/defaults.py
|
Python
|
gpl-3.0
| 10,759
| 0.008458
|
# The Craftr build system
# Copyright (C) 2016 Niklas Rosenstein
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
:mod:`craftr.defaults`
======================
This module provides the default global namespace for Craftr modules. Names
starting with an underscore will be ignored.
"""
from craftr.core import build as _build
from craftr.core.logging import logger
from craftr.core.manifest import Namespace
from craftr.core.session import session, ModuleNotFound
from craftr.utils import path, shell
from craftr.targetbuilder import gtn, TargetBuilder, Framework
from craftr import platform
import builtins as _builtins
import itertools as _itertools
import os as _os
import require
import sys as _sys
require = require.Require(write_bytecode=False)
class ToolDetectionError(Exception):
pass
class ModuleError(Exception):
pass
def include_defs(filename, globals=None):
"""
Uses :mod:`require` to load a Python file and then copies all symbols
that do not start with an underscore into the *globals* dictionary. If
*globals* is not specified, it will fall back to the globals of the frame
that calls the function.
"""
module = require(filename, _stackdepth=1)
if globals is None:
globals = _sys._getframe(1).f_globals
for key, value in vars(module).items():
if not key.startswith('_'):
globals[key] = value
def glob(patterns, parent=None, exclude=(), include_dotfiles=False):
"""
Wrapper for :func:`path.glob` that automatically uses the current modules
project directory for the *parent* argument if it has not been specifically
set.
"""
if parent is None and session and session.module:
parent = session.module.project_dir
return path.glob(patterns, parent, exclude, include_dotfiles)
def local(rel_path):
"""
  Given a relative path, returns the absolute path relative to the current
module's project directory.
"""
parent = session.module.project_dir
return path.norm(rel_path, parent)
def buildlocal(rel_path):
"""
Given a relative path, returns the path (still relative) to the build
directory for the current module. This is basically a shorthand for
prepending the module name and version to *path*.
"""
if path.isabs(rel_path):
return rel_path
return path.canonical(path.join(session.module.ident, rel_path))
def relocate_files(files, outdir, suffix, replace_suffix=True, parent=None):
"""
Converts a list of filenames, relocating them to *outdir* and replacing
their existing suffix. If *suffix* is a callable, it will be passed the
new filename and expected to return the same filename, eventually with
a different suffix.
"""
if parent is None:
parent = session.module.project_dir
result = []
for filename in files:
filename = path.join(outdir, path.rel(filename, parent))
filename = path.addsuffix(filename, suffix, replace=replace_suffix)
result.append(filename)
return result
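# Example (hypothetical paths): with the default replace_suffix=True,
#   relocate_files(['src/a.c', 'src/b.c'], 'build', '.o')
# yields roughly ['build/src/a.o', 'build/src/b.o'], depending on *parent*.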
def filter(predicate, iterable):
"""
Alternative for the built-in ``filter()`` function that returns a list
instead of an iterable (which is the behaviour since Python 3).
"""
result = []
for item in iterable:
if predicate(item):
result.append(item)
return result
def map(procedure, iterable):
"""
Alternative for the built-in ``map()`` function that returns a list instead
of an iterable (which is the behaviour since Python 3).
"""
result = []
for item in iterable:
result.append(procedure(item))
return result
def zip(*iterables, fill=NotImplemented):
"""
Alternative to the Python built-in ``zip()`` function. This function returns
a list rather than an iterable and also supports swapping to the
:func:`itertools.izip_longest` version if the *fill* parameter is specified.
"""
if fill is NotImplemented:
return list(_builtins.zip(*iterables))
else:
return list(_itertools.zip_longest(*iterables, fillvalue=fill))
def load_module(name, into=None, get_namespace=True, _stackframe=1):
"""
Load a Craftr module by name and return it. If *into* is specified, it must
be a dictionary that will be filled with all the members of the module. Note
that this function returns the namespace object of the module rather than
the actual :class:`craftr.core.session.Module` object that wraps the module
information unless *get_namespace* is False.
The version criteria is read from the current module's manifest.
:param name: The name of the module to load. If this name is suffixed
with the two characters ``.*`` and the *into* parameter is :const:`None`,
the contents of the module will be exported into the globals of the
calling frame.
:param into: If specified, must be a dictionary.
:param get_namespace:
:return: The module namespace object (of type :class:`types.ModuleType`)
or the actual :class:`craftr.core.session.Module` if *get_namespace*
is False.
:raise ModuleNotFound: If the module could not be found.
:raise RuntimeError: If the module that is attempted to be loaded is not
declared in the current module's manifest.
Examples:
.. code:: python
cxx = load_module('lang.cxx')
load_module('lang.cxx.*')
assert cxx.c_compile is c_compile
"""
if name.endswith('.*') and into is None:
name = name[:-2]
into = _sys._getframe(_stackframe).f_globals
if not session:
raise RuntimeError('no session context')
module = session.module
if not module:
raise RuntimeError('no current module')
if name not in module.manifest.dependencies:
raise RuntimeError('"{}" can not load "{}", make sure that it is listed '
'in the dependencies'.format(module.ident, name))
loaded_module = session.find_module(name, module.manifest.dependencies[name])
if not loaded_module.executed:
loaded_module.run()
if into is not None:
module_builtins = frozenset('loader project_dir options'.split())
all_vars = getattr(loaded_module.namespace, '__all__', None)
for key, value in vars(loaded_module.namespace).items():
if all_vars is not None:
if key in all_vars:
into[key] = value
else:
if not key.startswith('_') and key not in module_builtins and key not in globals():
into[key] = value
if get_namespace:
return loaded_module.namespace
return loaded_module
def load_file(filename):
"""
Loads a Python file into a new module-like object and returns it. The
*filename* is assumed relative to the currently executed module's
directory (NOT the project directory which can be different).
"""
if not path.isabs(filename):
filename = path.join(session.module.directory, filename)
with open(filename, 'r') as fp:
code = compile(fp.read(), filename, 'exec')
scope = Namespace()
vars(scope).update(globals())
exec(code, vars(scope))
return scope
def gentool(commands, preamble=None, environ=None, name=None):
"""
Create a :class:`~_build.Tool` object. The name of the tool will be derived
from the variable name it is assigned to unless *name* is specified.
"""
tool = _build.Tool(gtn(name), commands, preamble, environ)
session.graph.add_tool(tool)
return tool
def gentarget(commands, inputs=(), outputs=(), *args, **kwargs):
"""
Create a :class:`~_build.Target` object. The name of the target will be
derived from the variable name it is assigned to unless *name* is specified.
"""
target = _build.Target(gtn(kwargs.pop('name', None)), commands, inputs,
outputs, *args, **kwargs)
session.graph.add_target(target)
return target
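# Example (hypothetical command): register a single compile step with the
# build graph.
#   compile_c = gentarget([['gcc', '-c', 'main.c', '-o', 'main.o']],
#                         inputs=['main.c'], outputs=['main.o'], name='compile_c')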
def runtarget(t
|
poppogbr/genropy
|
packages/test15/webpages/tools/css3make.py
|
Python
|
lgpl-2.1
| 5,203
| 0.032866
|
# -*- coding: UTF-8 -*-
#
"""css3make tester"""
class GnrCustomWebPage(object):
py_requires = "gnrcomponents/testhandler:TestHandlerBase"
dojo_theme = 'tundra'
def test_1_rounded(self, pane):
sl = pane.slotBar('k,*,test,*')
sl.k.verticalSlider(value='^.k',minimum=0,maximum='30',intermediateChanges=True,height='100px')
test = sl.test.div(width='400px')
test.div(margin='5px', display='inline-block', border='1px solid gray', width='100px', height='80px',
rounded='15')
test.div(margin='5px', display='inline-block', border='1px solid gray', width='100px', height='80px',
rounded='12',rounded_left_top=0,rounded_bottom_right=0)
test.div(margin='5px', display='inline-block', border='1px solid gray', width='100px', height='80px',
rounded_left_top='12',rounded_bottom_right='^.k')
def test_2_shadow(self, pane):
        sl = pane.slotBar('x,y,blur,inset,*,test1,test2,*',lbl_font_size='8px',
lbl_position='L',lbl_transform_rotate='-90',cell_border='1px dotted gray',
lbl_width='10px'
)
sl.x.verticalSlider(value='^.x',minimum=-30,maximum=30,intermediateChanges=True,height='100px',lbl='X')
        sl.y.verticalSlider(value='^.y',minimum=-30,maximum=30,intermediateChanges=True,height='100px',lbl='Y')
sl.blur.verticalSlider(value='^.blur',minimum=-30,maximum=30,intermediateChanges=True,height='100px',lbl='Blurrone')
sl.inset.checkbox(value='^.inset',label='Inset')
sl.test1.div(margin='5px', display='inline-block', border='1px solid gray', width='100px', height='80px',
shadow='3px 3px 5px gray inset')
sl.test2.div(margin='5px', display='inline-block', border='1px solid gray', width='100px', height='80px',
shadow='3px 3px 5px gray inset',
shadow_x='^.x',shadow_y='^.y',
shadow_blur='^.blur',shadow_inset='^.inset')
def test_3_gradient_fixed(self, pane):
sl = pane.slotBar('deg,fld,tan,*,test,*,test1,*',lbl_position='B',lbl_font_size='8px')
sl.deg.verticalSlider(value='^.deg',minimum=0,maximum=360,intermediateChanges=True,height='100px',lbl='Deg')
fb = sl.fld.formbuilder(cols=6, border_spacing='2px')
fb.numbertextbox(value='^.deg',lbl='deg')
sl.test.div(margin='5px', display='inline-block',
border='1px solid gray', width='100px', height='80px',
gradient_from='white',gradient_to='navy',gradient_deg='^.deg')
sl.test1.div(margin='5px', display='inline-block',
border='1px solid gray', width='100px', height='80px',
gradient_color_0='pink,15',gradient_color_1='yellow,50' ,gradient_color_2='red,100',gradient_deg='^.deg')
def test_4_transform(self, pane):
sl = pane.slotBar('rotate,translatex,translatey,scalex,scaley,skewx,skewy,*,test')
sl.rotate.verticalSlider(value='^.rotate',minimum=0,maximum=360,intermediateChanges=True,height='100px',default_value=0)
sl.translatex.verticalSlider(value='^.translate_x',minimum=-100,maximum=100,intermediateChanges=True,height='100px',default_value=0)
sl.translatey.verticalSlider(value='^.translate_y',minimum=-100,maximum=100,intermediateChanges=True,height='100px',default_value=0)
sl.scalex.verticalSlider(value='^.scale_x',minimum=0,maximum=1,intermediateChanges=True,height='100px',default_value=1)
sl.scaley.verticalSlider(value='^.scale_y',minimum=0,maximum=1,intermediateChanges=True,height='100px',default_value=1)
sl.skewx.verticalSlider(value='^.skew_x',minimum=0,maximum=360,intermediateChanges=True,height='100px',default_value=0)
sl.skewy.verticalSlider(value='^.skew_y',minimum=0,maximum=360,intermediateChanges=True,height='100px',default_value=0)
sl.test.div(margin='5px', display='inline-block', border='1px solid gray', width='50px', height='70px'
,transform_rotate='^.rotate'
,transform_translate_x='^.translate_x',transform_translate_y='^.translate_y'
,transform_scale_x='^.scale_x',transform_scale_y='^.scale_y'
,transform_skew_x='^.skew_x',transform_skew_y='^.skew_y'
)
def test_5_transition(self, pane):
sl = pane.slotBar('w,color,mode,duration,*,test',lbl_position='T')
sl.w.textbox(value='^.w',lbl='width',default_value='3px')
sl.color.textbox(value='^.color',lbl='color',default_value='red')
sl.mode.comboBox(value='^.function',default_value='linear',values='linear,ease,ease-in,ease-out,ease-in-out')
sl.duration.verticalSlider(value='^.duration',minimum=0,maximum=10,intermediateChanges=True,height='100px',default_value=1)
        sl.test.div(width='^.w',background='^.color',height='50px',transition='all 3s',border='1px solid gray',transition_function='^.function',
                    transition_duration='^.duration')
|
AustereCuriosity/astropy
|
astropy/utils/tests/test_data_info.py
|
Python
|
bsd-3-clause
| 1,575
| 0.00127
|
# -*- coding: utf-8 -*-
# TEST_UNICODE_LITERALS
# Licensed under a 3-clause BSD style license - see LICENSE.rst
from __future__ import absolute_import, division, print_function
import pytest
import numpy as np
from ...extern import six
from ..data_info import dtype_info_name
STRING_TYPE_NAMES = {(False, 'S'): 'str', # PY2
(False, 'U'): 'unicode',
(True, 'S'): 'bytes', # not PY2
(True, 'U'): 'str'}
DTYPE_TESTS = ((np.array(b'abcd').dtype, STRING_TYPE_NAMES[(not six.PY2, 'S')] + '4'),
(np.array(u'abcd').dtype, STRING_TYPE_NAMES[(not six.PY2, 'U')] + '4'),
('S4', STRING_TYPE_NAMES[(not six.PY2, 'S')] + '4'),
('U4', STRING_TYPE_NAMES[(not six.PY2, 'U')] + '4'),
(np.void, 'void'),
(np.int32, 'int32'),
(np.bool, 'bool'),
(bool, 'bool'),
(float, 'float
|
64'),
('<f4', 'float32'),
('u8', 'uint64'),
('c16', 'complex128'),
('object', 'object'))
@pytest.mark.parametrize('input,output', DTYPE_TESTS)
def test_dtype_info_name(input, output):
"""
    Test that dtype_info_name gives the expected output.
    Here are the available types::
'b' boolean
'i' (signed) integer
'u' unsigned integer
'f' floating-point
'c' complex-floating point
'O' (Python) objects
'S', 'a' (byte-)string
'U' Unicode
'V' raw data (void)
"""
assert dtype_info_name(input) == output
|
morepj/numerical-mooc
|
working/HelloWorld.py
|
Python
|
mit
| 44
| 0
|
print("Greetings Earth! We come in peace.")
|
adsabs/adsabs-pyingest
|
pyingest/parsers/gcncirc.py
|
Python
|
mit
| 4,888
| 0.000818
|
from __future__ import print_function
from __future__ import absolute_import
import re
from pyingest.config.utils import u2asc
from .default import DefaultParser
from .author_names import AuthorNames
from .entity_convert import EntityConverter
head_dict = {'TITLE:': 'journal', 'NUMBER:': 'volume', 'SUBJECT:': 'title',
'DATE:': 'pubdate', 'FROM:': 'email'
}
class GCNCParser(DefaultParser):
def __init__(self, data):
# econv = EntityConverter()
# econv.input_text = data
# econv.convert()
# self.raw = econv.output_text
self.raw = data
self.data_dict = dict()
def make_pubdate(self):
input_date = self.data_dict['pubdate']
yymmdd = input_date.split('/')
if int(yymmdd[0]) > 50:
year = '19' + yymmdd[0]
else:
year = '20' + yymmdd[0]
pubdate = year + '/' + yymmdd[1]
self.data_dict['pubdate'] = pubdate
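        # Worked example of the pivot above (illustrative values): a header
        # date of '19/06/20' gives yymmdd = ['19', '06', '20']; 19 <= 50, so
        # year becomes '2019' and pubdate '2019/06', while '98/11/05' yields
        # '1998/11'. Two-digit years above 50 are taken to be 19xx.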
def make_bibcode(self):
year = self.data_dict['pubdate'][0:4]
bibcode = 'GCN.'
self.data_dict['volume'] = self.data_dict['volume'].ljust(5, '.')
volume = self.data_dict['volume'].ljust(9, '.') + '1'
try:
init = u2asc(self.data_dict['authors'][0][0])
except Exception as err:
print ("Problem generating author initial")
init = '.'
self.data_dict['bibcode'] = year + bibcode + volume + init
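        # Worked example (illustrative values): volume '25000' in 2019 with a
        # first author whose name starts with 'S' pads to '25000....1' and
        # gives the 19-character bibcode '2019GCN.25000....1S'.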
def make_publication(self):
base_string = 'GRB Coordinates Network, Circular Service, No. '
self.data_dict['publication'] = base_string + self.data_dict['volume']
self.data_dict['page'] = '1'
def split_authors_abstract(self):
# This could be used to extract affils and apply them to authors,
        # but the process of doing so is unwieldy. My initial attempt is
        # left below, commented out.
body = self.data_dict['abstract']
while body[0] == '':
body.pop(0)
auths = []
affils = []
while body[0] != '' and ':' not in body[0]:
auths.append(body.pop(0).strip())
auths.append(body.pop(0).strip())
auth_delimiter = u'| '
auth_string = ' '.join(auths)
auth_string = re.sub(r'\s+\((.*?)\)', ',', auth_string)
        auth_string = re.sub(r'[ ,]and\s', ',', auth_string)
auth_string = re.sub(r'on behalf of', ',', auth_string)
auth_string = re.sub(r'reports?', ',', auth_string)
auth_string = re.sub(r'\s?:', '', auth_string)
auth_string = re.sub(r',?\s+,', ',', auth_string)
        auth_array = [s.strip() for s in auth_string.split(',')]
auth_array = list([a for a in auth_array if len(a) > 3])
# auth_string = u'; '.join(auth_array)
auth_string = auth_delimiter.join(auth_array)
auth_mod = AuthorNames()
# self.data_dict['authors'] = auth_mod.parse(auth_string)
self.data_dict['authors'] = auth_mod.parse(auth_string, delimiter=auth_delimiter)
self.data_dict['authors'] = re.sub(r'\| ', u';', self.data_dict['authors'])
def parse(self):
self.data_dict = {}
# Start by looking at the Circular line by line...
try:
gdata = self.raw.split('\n')
# Header is fixed format and five lines long...
head = gdata[0:5]
for l in head:
lfix = l.replace(' ', '\t', 1)
lparts = lfix.split('\t')
self.data_dict[head_dict[lparts[0]]] = lparts[1].strip()
# Now you need to split the authors from the abstract.
# This should work in *most* cases, maybe not all,
# especially from older (pre-2016) Circulars
self.data_dict['abstract'] = gdata[5:]
self.split_authors_abstract()
# Authors and abstract content should now be defined
# If you want to try and keep fixed formatting
# (e.g. for tables), use '\n' for the join character
abstract_new = ' '.join(self.data_dict['abstract'])
self.data_dict['abstract'] = abstract_new.strip()
# Extract pubdate from the header date
self.make_pubdate()
# Create the bibcode from circular info
self.make_bibcode()
# Make the publication string
self.make_publication()
# Pass the necessary fields through EntityConverter
ec_fields = ['authors', 'abstract', 'title']
econv = EntityConverter()
for ecf in ec_fields:
econv.input_text = self.data_dict[ecf]
econv.convert()
self.data_dict[ecf] = econv.output_text
except Exception as err:
self.data_dict['raw'] = self.raw
self.data_dict['error'] = err
return self.data_dict
|
rtb1c13/scripts
|
IR_lineshapes/lmcurvefit.py
|
Python
|
gpl-2.0
| 5,749
| 0.003131
|
#!/usr/bin/env python
# Author: Richard Bradshaw, R.T.Bradshaw@soton.ac.uk
# Module to fit various curves to provided x/y data
# Current available curves: Linear, Gaussian, Lorentzian, Voigt
# Requirements: lmfit, numpy, matplotlib (as dependencies of lmfit)
from lmfit.models import LinearModel,GaussianModel,LorentzianModel,VoigtModel
class FitError(Exception):
"""Exception in lmfit wrapper"""
class Fit():
"""Class to contain methods for curve fitting from
lmfit package."""
def __init__(self, data):
"""Usage: Fit(data). Initialises data as xs and ys attributes.
        Data should be a 2d numpy array of x and y values."""
if len(data) != 2:
raise FitError("""Your data is formatted incorrectly -
it should be a 2D array of all x-,
then all y-values""")
self.xs = data[0]
self.ys = data[1]
def __str__(self):
"""Prints lmfit fit report for the current object"""
try:
return self.fit.fit_report()
except AttributeError:
return "No fit yet performed for this object."
def linear(self, **kwargs):
"""Linear fit of data. Usage: self.linear([**kwargs])
kwargs are passed to a lmfit LinearModel."""
self._mod = LinearModel(**kwargs)
pars = self._mod.guess(self.ys, self.xs)
self.fit = self._mod.fit(self.ys, pars, x=self.xs)
def gaussian(self, **kwargs):
"""Gaussian fit of data. Usage: self.gaussian([**kwargs])
kwargs are passed to a lmfit GaussianModel."""
self._mod = GaussianModel(**kwargs)
pars = self._mod.guess(self.ys, self.xs)
self.fit = self._mod.fit(self.ys, pars, x=self.xs)
def lorentzian(self, **kwargs):
"""Lorentzian fit of datia. Usage: self.lorentzian([**kwargs])
kwargs are passed to a lmfit LorentzianModel."""
self._mod = LorentzianModel(**kwargs)
pars = self._mod.guess(self.ys, self.xs)
self.fit = self._mod.fit(self.ys, pars, x=self.xs)
def voigt(self, **kwargs):
"""Voigt fit of data. Usage: self.voigt([**kwargs])
kwargs are passed to a lmfit VoigtModel."""
self._mod = VoigtModel(**kwargs)
pars = self._mod.guess(self.ys, self.xs)
self.fit = self._mod.fit(self.ys, pars, x=self.xs)
def plots(self, **kwargs):
"""Returns matplotlib axes with original data, fitted
function & initial model.
Usage: self.plots([**kwargs])
kwargs are passed to lmfit.ModelResult.plot_fit"""
try:
return self.fit.plot_fit(**kwargs)
except AttributeError:
raise FitError("No fit yet performed to plot!")
class Multifit():
"""Composite model from two or more underlying models,
passed as Fit objects defined in lmcurvefit. Models in
Fit objects should have been defined with unique prefixes
otherwise output in the composite model may be confusing/incorrect."""
def __init__(self, *args):
"""Usage: Multifit(model1, [model2, model3, ...])
Reads in individual models to perform a composite fit.
Models should be read in as Fit objects with their
own defined individual models already assigned"""
self._mods = args
try:
self._pars = self._mods[0]._mod.make_params()
except AttributeError:
raise FitError("""Your starting models should be read in as Fit objects
each with a single model already defined.""")
for fit in self._mods[1:]:
self._pars.update(fit._mod.make_params())
def __str__(self):
"""Prints lmfit fit report for the current object"""
try:
return self.total_fit.fit_report()
except AttributeError:
return "No composite fit yet performed for this object."
def init_params(self, prefix='', center=0, sigma=10, amplitude=10):
"""Usage: self.init_params([prefix='', center=0, sigma=10, amplitude=10])
Sets initial guess parameters for the model defined with 'prefix'."""
self._pars[prefix+'center'].set(center)
self._pars[prefix+'sigma'].set(sigma)
self._pars[prefix+'amplitude'].set(amplitude)
def make_mod(self):
"""Usage: self.make_mod()
Makes composite model from all models read in."""
self._compmod = self._mods[0]._mod
for fit in self._mods[1:]:
self._compmod += fit._mod
def do_multifit(self,data):
"""Usage: self.do_multifit(data)
Performs fitting of data to composite model.
Data should be a 2D numpy array of x and y values"""
if len(data) != 2:
raise FitError("""Your data is formatted incorrectly -
it should be a 2D array of all x-,
then all y-values""")
self.xs = data[0]
self.ys = data[1]
try:
self.total_fit = self._compmod.fit(self.ys, self._pars, x=self.xs)
self.composite_fits = self.total_fit.eval_components(x=self.xs)
except AttributeError:
raise FitError("""You don't seem to have a composite model - run
make_mod() first!""")
def plots(self, **kwargs):
"""Returns matplotlib axes with original data, fitted
function & initial model.
Usage: self.plots([**kwargs])
kwargs are passed to lmfit.ModelResult.plot_fit"""
try:
return self.total_fit.plot_fit(**kwargs)
except AttributeError:
raise FitError("No fit yet performed to plot!")
|
cloudbase/neutron-virtualbox
|
neutron/db/migration/migrate_to_ml2.py
|
Python
|
apache-2.0
| 19,607
| 0.000102
|
# Copyright (c) 2014 Red Hat, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
This script will migrate the database of an openvswitch or linuxbridge
plugin so that it can be used with the ml2 plugin.
Known Limitations:
- THIS SCRIPT IS DESTRUCTIVE! Make sure to backup your
Neutron database before running this script, in case anything goes
wrong.
- It will be necessary to upgrade the database to the target release
via neutron-db-manage before attempting to migrate to ml2.
Initially, only the icehouse release is supported.
- This script does not automate configuration migration.
Example usage:
python -m neutron.db.migration.migrate_to_ml2 openvswitch \
mysql://login:pass@127.0.0.1/neutron
Note that migration of tunneling state will only be attempted if the
--tunnel-type parameter is provided.
To manually test migration from ovs to ml2 with devstack:
- stack with Q_PLUGIN=openvswitch
- boot an instance and validate connectivity
- stop the neutron service and all agents
- run the neutron-migrate-to-ml2 script
- update /etc/neutron/neutron.conf as follows:
core_plugin = neutron.plugins.ml2.plugin.Ml2Plugin
- Create /etc/neutron/plugins/ml2/ml2_conf.ini and ensure that:
- ml2.mechanism_drivers includes 'openvswitch'
- ovs.local_ip is set correctly
- database.connection is set correctly
- Start the neutron service with the ml2 config file created in
the previous step in place of the openvswitch config file
- Start all the agents
- verify that the booted instance still has connectivity
- boot a second instance and validate connectivity
"""
import argparse
from oslo_db.sqlalchemy import session
import sqlalchemy as sa
from neutron.extensions import portbindings
from neutron.openstack.common import uuidutils
from neutron.plugins.common import constants as p_const
from neutron.plugins.ml2.drivers import type_vxlan
# Migration targets
LINUXBRIDGE = 'linuxbridge'
OPENVSWITCH = 'openvswitch'
# Releases
ICEHOUSE = 'icehouse'
JUNO = 'juno'
SUPPORTED_SCHEMA_VERSIONS = [ICEHOUSE, JUNO]
def check_db_schema_version(engine, metadata):
"""Check that current version of the db schema is supported."""
version_table = sa.Table(
'alembic_version', metadata, autoload=True, autoload_with=engine)
versions = [v[0] for v in engine.execute(version_table.select())]
if not versions:
raise ValueError(_("Missing version in alembic_versions table"))
elif len(versions) > 1:
raise ValueError(_("Multiple versions in alembic_versions table: %s")
% versions)
current_version = versions[0]
if current_version not in SUPPORTED_SCHEMA_VERSIONS:
raise SystemError(_("Unsupported database schema %(current)s. "
"Please migrate your database to one of following "
"versions: %(supported)s")
% {'current': current_version,
'supported': ', '.join(SUPPORTED_SCHEMA_VERSIONS)}
)
# Duplicated from neutron.plugins.linuxbridge.common.constants to
# avoid having any dependency on the linuxbridge plugin being
# installed.
def interpret_vlan_id(vlan_id):
"""Return (network_type, segmentation_id) tuple for encoded vlan_id."""
FLAT_VLAN_ID = -1
LOCAL_VLAN_ID = -2
if vlan_id == LOCAL_VLAN_ID:
return (p_const.TYPE_LOCAL, None)
elif vlan_id == FLAT_VLAN_ID:
return (p_const.TYPE_FLAT, None)
else:
return (p_const.TYPE_VLAN, vlan_id)
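# Worked examples of the encoding above (illustrative, not from the source):
#   interpret_vlan_id(-2)   -> (p_const.TYPE_LOCAL, None)
#   interpret_vlan_id(-1)   -> (p_const.TYPE_FLAT, None)
#   interpret_vlan_id(1000) -> (p_const.TYPE_VLAN, 1000)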
class BaseMigrateToMl2(object):
def __init__(self, vif_type, driver_type, segment_table_name,
vlan_allocation_table_name, old_tables):
self.vif_type = vif_type
self.driver_type = driver_type
self.segment_table_name = segment_table_name
self.vlan_allocation_table_name = vlan_allocation_table_name
self.old_tables = old_tables
def __call__(self, connection_url, save_tables=False, tunnel_type=None,
vxlan_udp_port=None):
engine = session.create_engine(connection_url)
metadata = sa.MetaData()
check_db_schema_version(engine, metadata)
if hasattr(self, 'define_ml2_tables'):
self.define_ml2_tables(metadata)
# Autoload the ports table to ensure that foreign keys to it and
# the network table can be created for the new tables.
sa.Table('ports', metadata, autoload=True, autoload_with=engine)
metadata.create_all(engine)
self.migrate_network_segments(engine, metadata)
if tunnel_type:
self.migrate_tunnels(engine, tunnel_type, vxlan_udp_port)
self.migrate_vlan_allocations(engine)
self.migrate_port_bindings(engine, metadata)
if hasattr(self, 'drop_old_tables'):
self.drop_old_tables(engine, save_tables)
def migrate_segment_dict(self, binding):
binding['id'] = uuidutils.generate_uuid()
def migrate_network_segments(self, engine, metadata):
# Migrating network segments requires loading the data to python
# so that a uuid can be generated for each segment.
source_table = sa.Table(self.segment_table_name, metadata,
autoload=True, autoload_with=engine)
source_segments = engine.execute(source_table.select())
ml2_segments = [dict(x) for x in source_segments]
for segment in ml2_segments:
self.migrate_segment_dict(segment)
if ml2_segments:
            ml2_network_segments = metadata.tables['ml2_network_segments']
engine.execute(ml2_network_segments.insert(), ml2_segments)
def migrate_tunnels(self, engine, tunnel_type, vxlan_udp_port=None):
"""Override this method to perform plugin-
|
specific tunnel migration."""
pass
def migrate_vlan_allocations(self, engine):
engine.execute(("""
INSERT INTO ml2_vlan_allocations
SELECT physical_network, vlan_id, allocated
FROM %(source_table)s
WHERE allocated = TRUE
""") % {'source_table': self.vlan_allocation_table_name})
def get_port_segment_map(self, engine):
"""Retrieve a mapping of port id to segment id.
The monolithic plugins only support a single segment per
network, so the segment id can be uniquely identified by
the network associated with a given port.
"""
port_segments = engine.execute("""
SELECT ports_network.port_id, ml2_network_segments.id AS segment_id
FROM ml2_network_segments, (
SELECT portbindingports.port_id, ports.network_id
FROM portbindingports, ports
WHERE portbindingports.port_id = ports.id
) AS ports_network
WHERE ml2_network_segments.network_id = ports_network.network_id
""")
return dict(x for x in port_segments)
def migrate_port_bindings(self, engine, metadata):
port_segment_map = self.get_port_segment_map(engine)
port_binding_ports = sa.Table('portbindingports', metadata,
autoload=True, autoload_with=engine)
source_bindings = engine.execute(port_binding_ports.select())
ml2_bindings = [dict(x) for x in source_bindings]
for binding in ml2_bindings:
binding['vif_type'] = self.vif_type
binding['driver'] = self.driver_type
segment = port_segment_map.get(binding['port_id'])
if segment:
bi
|
904labs/ctTrakr
|
nlp/simple.py
|
Python
|
mit
| 764
| 0.024869
|
import nltk.data
from nltk.tokenize import word_tokenize, sent_tokenize
from util import errors, cleaning
def tokenize(**kwargs):
"""Tokenize text using nltk's tokenizer."""
if 'text' in kwargs.keys():
return word_tokenize(kwargs['text'])
raise errors.CustomAPIError('No text argument found.', status_code=400, payload={'arguments':kwargs.keys()})
def sentence_split(**kwargs):
"""Split sentences using nltk."""
tokenizer = nltk.data.load('tokenizers/punkt/dutch.pickle')
if 'text' in kwargs.keys():
cleaner = cleaning.Clean()
cleaner.feed(kwargs['text'])
cleanedText = cleaner.get_data()
return tokenizer.tokenize(cleanedText)
    raise errors.CustomAPIError('No text argument found.', status_code=400, payload={'arguments':kwargs.keys()})
|
abramhindle/UnnaturalCodeFork
|
python/testdata/launchpad/lib/lp/code/interfaces/tests/test_branch.py
|
Python
|
agpl-3.0
| 2,218
| 0
|
# Copyright 2009-2011 Canonical Ltd. This software is licensed under the
# GNU Affero General Public License version 3 (see the file LICENSE).
"""Tests of the branch interface.""
|
"
__metaclass__ = type
from bzrlib.branch import format_registry as branch_format_registry
from bzrlib.bzrdir import BzrProber
from bzrlib.repository import format_registry as repo_format_registry
from lp.code.bzr import (
BranchFormat,
ControlFormat,
RepositoryFormat,
)
import lp.codehosting # For plugins.
from lp.testing import TestCase
class TestFormatSupport(TestCase):
"""En
|
sure the launchpad format list is up-to-date.
While ideally we would ensure that the lists of markers were the same,
early branch and repo formats did not use markers. (The branch/repo
was implied by the control dir format.)
"""
def test_control_format_complement(self):
self.bzrlib_is_subset(BzrProber.formats.keys(), ControlFormat)
def test_branch_format_complement(self):
self.bzrlib_is_subset(branch_format_registry.keys(), BranchFormat)
def test_repository_format_complement(self):
self.bzrlib_is_subset(repo_format_registry.keys(), RepositoryFormat)
def bzrlib_is_subset(self, bzrlib_formats, launchpad_enum):
"""Ensure the bzr format marker list is a subset of launchpad."""
bzrlib_format_strings = set(bzrlib_formats)
launchpad_format_strings = set(format.title for format
in launchpad_enum.items)
self.assertEqual(
set(), bzrlib_format_strings.difference(launchpad_format_strings))
def test_repositoryDescriptions(self):
self.checkDescriptions(RepositoryFormat)
def test_branchDescriptions(self):
self.checkDescriptions(BranchFormat)
def test_controlDescriptions(self):
self.checkDescriptions(ControlFormat)
def checkDescriptions(self, format_enums):
for item in format_enums.items:
description = item.description
if description.endswith('\n'):
description = description[:-1]
self.assertTrue(len(description.split('\n')) == 1,
item.description)
|
lbybee/Python-for-Econ
|
Chap_4/scraper_example.py
|
Python
|
gpl-2.0
| 1,393
| 0
|
# This script gives an example of how to scrape a webpage
import requests
from BeautifulSoup import BeautifulSoup
url = "http://chicagofoodtruckfinder.com/weekly-schedule"
truck_data_list = []
soup = BeautifulSoup(requests.post(url).text)
table = soup.find("table").findAll("tr")
days = [d.text for d in table[0].findAll("th")[1:]]
print days
# skip the first row because it is a header
date = None  # placeholder: the original script uses 'date' below without defining it
for location in table[1:]:
cols = location.findAll("td")
loc_name = cols[0].text
for i, c in enumerate(cols[1:]):
print i
print c
trucks = c.findAll("img")
if trucks is not None:
for t in trucks:
time_name = t["title"]
am_spt = time_name.split("AM")
pm_spt = time_name.split("PM")
if len(pm_spt) > 1 and len(am_spt) > 1:
name = pm_spt[1]
if len(pm_spt) > 1 and len(am_spt) == 1:
name = pm_spt[2]
                if len(pm_spt) == 1 and len(am_spt) > 1:
name = am_spt[2]
time = time_name.replace(name, "")
truck_data_list.append({"name": name,
"time": time,
"week_day": days[i],
"date": date,
"location": loc_name})
|
sostenibilidad-unam/posgrado
|
posgradmin/posgradmin/migrations/0035_auto_20190620_1343.py
|
Python
|
gpl-3.0
| 1,049
| 0.001907
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.20 on 2019-06-20 18:43
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('posgradmin', '0034_auto_20190620_1333'),
]
operations = [
migrations.RemoveField(
model_name='asignatura',
name='clave',
),
migrations.AddField(
model_name='curso',
name='clave',
            field=models.CharField(blank=True, max_length=20, null=True),
),
migrations.AddField(
model_name='curso',
name='entidad',
field=models.CharField(blank=True, choices=[(3, 3), (700, 700), (800, 800)], max_length=20, null=True),
),
migrations.AlterField(
model_name='curso',
name='sede',
field=models.CharField(blank=True, choices=[(b'CDMX', b'CDMX'), (b'Morelia', b'Morelia'), ('Le\xf3n', 'Le\xf3n')], max_length=80, null=True),
),
]
|
tivaliy/empire-of-code
|
find_sequence.py
|
Python
|
gpl-2.0
| 1,551
| 0.000645
|
__author__ = 'Vitalii K'
from itertools import groupby
SEQ_LENGTH = 4
def is_in_matrix(m):
len_list = [[len(list(group)) for key, group in groupby(j)] for j in m]
if any(map(lambda x: [i for i in x if i >= SEQ_LENGTH], len_list)):
return True
return False
def get_diagonals(m):
d = []
for o in range(-len(m) + SEQ_LENGTH, len(m) - SEQ_LENGTH + 1):
d.append([r[i + o] for i, r in enumerate(m) if 0 <= i + o < len(r)])
return d
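# Illustration (hand-traced, not part of the original script): for a 5x5
# matrix and SEQ_LENGTH = 4 the offsets o run over -1, 0, 1, so only the
# diagonals long enough to hold a run of four are collected, e.g.
#   o = 1 -> [m[0][1], m[1][2], m[2][3], m[3][4]]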
def has_sequence(matrix):
if is_in_matrix(matrix):
return True
if is_in_matrix(map(lambda *row: list(row), *matrix)):
return True
if is_in_matrix(get_diagonals(matrix)):
return True
if is_in_matrix(get_diagonals(list(reversed(matrix)))):
return True
return False
if __name__ == '__main__':
# These "asserts" using only for self-checking and not necessary for auto-testing
assert has_sequence([
[1, 2, 1, 1],
[1, 1, 4, 1],
[1, 3, 1, 6],
[1, 7, 2, 5]
]), "Vertical"
assert not has_sequence([
[7, 1, 4, 1],
[1, 2, 5, 2],
[3, 4, 1, 3],
[1, 1, 8, 1]
]), "Nothing here"
assert has_sequence([
[2, 1, 1, 6, 1],
[1, 3, 2, 1, 1],
[4, 1, 1, 3, 1],
[5, 5, 5, 5, 5],
[1, 1, 3, 1, 1]
]), "Long Horizontal"
assert has_sequence([
[7, 1, 1, 8, 1, 1],
[1, 1, 7, 3, 1, 5],
[2, 3, 1, 2, 5, 1],
[1, 1, 1, 5, 1, 4],
[4, 6, 5, 1, 3, 1],
[1, 1, 9, 1, 2, 1]
]), "Diagonal"
print("All set? Click 'Check' to review your code and earn
|
rewards!")
|
igudym/twango
|
twango/template/default/src/apps/twango_dashboard/admin.py
|
Python
|
bsd-3-clause
| 83
| 0.012048
|
from django.contrib import admin
from models import *
admin.site.register(Section)
|
rgayon/plaso
|
tests/parsers/presets.py
|
Python
|
apache-2.0
| 4,926
| 0.003045
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""Tests for parser and parser plugin presets."""
from __future__ import unicode_literals
import unittest
from plaso.containers import artifacts
from plaso.parsers import presets
from tests import test_lib as shared_test_lib
class ParserPresetTest(shared_test_lib.BaseTestCase):
"""Tests for the parser and parser plugin preset."""
def testInitialize(self):
"""Tests the __init__ function."""
test_definition = presets.ParserPreset('test', ['parser1', 'parser2'])
self.assertIsNotNone(test_definition)
class ParserPresetsManagerTest(shared_test_lib.BaseTestCase):
"""Tests for the parser and parser plugin presets manager."""
_LINUX_PARSERS = [
'bash_history',
'bencode',
'czip/oxml',
'dockerjson',
'dpkg',
'filestat',
'gdrive_synclog',
'olecf',
'pls_recall',
'popularity_contest',
'selinux',
'sqlite/google_drive',
'sqlite/skype',
'sqlite/zeitgeist',
'syslog',
'systemd_journal',
'utmp',
'vsftpd',
'webhist',
'xchatlog',
'xchatscrollback',
'zsh_extended_history']
_MACOS_PARSERS = [
'asl_log',
'bash_history',
'bencode',
'bsm_log',
'cups_ipp',
'czip/oxml',
'filestat',
'fseventsd',
'gdrive_synclog',
'mac_appfirewall_log',
'mac_keychain',
'mac_securityd',
'macwifi',
'olecf',
'plist',
'sqlite/appusage',
'sqlite/google_drive',
'sqlite/imessage',
'sqlite/ls_quarantine',
'sqlite/mac_document_versions',
'sqlite/mackeeper_cache',
'sqlite/skype',
'syslog',
'utmpx',
'webhist',
'zsh_extended_history']
# TODO add tests for _ReadPresetDefinitionValues
# TODO add tests for _ReadPresetsFromFileObject
def testGetNames(self):
"""Tests the GetNames function."""
test_file_path = self._GetTestFilePath(['presets.yaml'])
self._SkipIfPathNotExists(test_file_path)
test_manager = presets.ParserPresetsManager()
test_manager.ReadFromFile(test_file_path)
test_names = list(test_manager.GetNames())
self.assertEqual(len(test_names), 7)
expected_names = sorted([
'android', 'linux', 'macos', 'webhist', 'win7', 'win_gen', 'winxp'])
self.assertEqual(test_names, expected_names)
def testGetParsersByPreset(self):
"""Tests the GetParsersByPreset function."""
test_file_path = self._GetTestFilePath(['presets.yaml'])
self._SkipIfPathNotExists(test_file_path)
test_manager = presets.ParserPresetsManager()
test_manager.ReadFromFile(test_file_path)
parser_names = test_manager.GetParsersByPreset('linux')
self.assertEqual(parser_names, self._LINUX_PARSERS)
with self.assertRaises(KeyError):
test_manager.GetParsersByPreset('bogus')
  def testGetPresetByName(self):
"""Tests the GetPresetByName function."""
test_file_path = self._GetTestFilePath(['presets.yaml'])
self._SkipIfPathNotExists(test_file_path)
test_manager = presets.ParserPresetsManager()
test_manager.ReadFromFile(test_file_path)
test_preset = test_manager.GetPresetByName('linux')
    self.assertIsNotNone(test_preset)
self.assertEqual(test_preset.name, 'linux')
self.assertEqual(test_preset.parsers, self._LINUX_PARSERS)
test_preset = test_manager.GetPresetByName('bogus')
self.assertIsNone(test_preset)
def testGetPresetsByOperatingSystem(self):
"""Tests the GetPresetsByOperatingSystem function."""
test_file_path = self._GetTestFilePath(['presets.yaml'])
self._SkipIfPathNotExists(test_file_path)
test_manager = presets.ParserPresetsManager()
test_manager.ReadFromFile(test_file_path)
operating_system = artifacts.OperatingSystemArtifact(family='MacOS')
test_presets = test_manager.GetPresetsByOperatingSystem(operating_system)
self.assertEqual(len(test_presets), 1)
self.assertEqual(test_presets[0].name, 'macos')
self.assertEqual(test_presets[0].parsers, self._MACOS_PARSERS)
operating_system = artifacts.OperatingSystemArtifact(family='bogus')
test_presets = test_manager.GetPresetsByOperatingSystem(operating_system)
self.assertEqual(len(test_presets), 0)
def testGetPresetsInformation(self):
"""Tests the GetPresetsInformation function."""
test_file_path = self._GetTestFilePath(['presets.yaml'])
self._SkipIfPathNotExists(test_file_path)
test_manager = presets.ParserPresetsManager()
test_manager.ReadFromFile(test_file_path)
parser_presets_information = test_manager.GetPresetsInformation()
self.assertGreaterEqual(len(parser_presets_information), 1)
available_parser_names = [name for name, _ in parser_presets_information]
self.assertIn('linux', available_parser_names)
# TODO add tests for ReadFromFile
if __name__ == '__main__':
unittest.main()
|
rodo/cotetra
|
cotetra/survey/api.py
|
Python
|
agpl-3.0
| 1,782
| 0
|
# -*- coding: utf-8 -*- pylint: disable-msg=R0801
#
# Copyright (c) 2013 Rodolphe Quiédeville <rodolphe@quiedeville.org>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
"""
API definition
"""
from tastypie import fields
from tastypie.resources import ModelResource
from tastypie.throttle import BaseThrottle
from cotetra.survey.models import Journey, Connection
from cotetra.network.api import StationResource
class JourneyResource(ModelResource):
"""
The journeys
"""
station_from = fields.ForeignKey(StationResource, 'station_from')
    station_to = fields.ForeignKey(StationResource, 'station_to')
class Meta:
queryset = Journey.objects.all()
resource_name = 'journey'
throttle = BaseThrottle(throttle_at=100, timeframe=60)
class ConnectionResource(ModelResource):
"""
The connections
"""
station_from = fields.ForeignKey(StationResource, 'station_from')
station_to = fields.ForeignKey(StationResource, 'station_to')
    class Meta:
queryset = Connection.objects.all()
resource_name = 'connection'
throttle = BaseThrottle(throttle_at=100, timeframe=60)
|
ZachOhara/OCSTA-Programming-Contest-2015
|
python/TestPrintLines.py
|
Python
|
gpl-3.0
| 134
| 0.014925
|
lines = int(input("How many lines of text? "))
lineText = input("What is the line of text? ")
for i in range(lines):
    print(lineText)
|
Queens-Applied-Sustainability/PyRTM
|
rtm/test/test_cache.py
|
Python
|
gpl-3.0
| 2,536
| 0.001577
|
"""
Copyright (c) 2012 Philip Schliehauf (uniphil@gmail.com) and the
Queen's University Applied Sustainability Centre
This project is hosted on github; for up-to-date code and contacts:
https://github.com/Queens-Applied-Sustainability/PyRTM
This file is part of PyRTM.
PyRTM is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
PyRTM is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with PyRTM. If not, see <http://www.gnu.org/licenses/>.
"""
# import unittest
# import shutil
# import time
# from datetime import datetime
# from .. import cache
# class TestVarsToFile(unittest.TestCase):
# def assertClean(self, inp, res):
# clean = cache.vars_to_file(inp)
# self.assertEqual(clean, res)
# def testOneChar(self):
# self.assertClean(['a'], 'a')
# def testOneString(self):
# self.assertClean(['hello'], 'hello')
# def testOtherType(self):
# self.assertClean([1], '1')
# def testStringJoin(self):
# self.assertClean(['a', 'b'], 'a-b')
# def testCharReplace(self):
# some_illegals = ' !@#$%^&*()+=<>?;"\'[]{}~`'
# for illegal in some_illegals:
# dirty = illegal.join(['a', 'b'])
# self.assertClean([dirty], 'a.b')
# def testGeneratorIn(self):
# self.assertClean((str(i) for i in xrange(2)), '0-1')
# class TestGet(unittest.TestCase):
# def setUp(self):
#         self.expensive_fn = lambda c: 1
# self.config = {
# 'description': 'test',
# 'longitude': -75.3,
# 'latitude': 44.22,
# 'time': datetime(2012, 1, 1, 0, 0, 0)
# }
# self.cachedconfig = {
# 'description': 'cachedtest',
# 'longitude': -75.3,
# 'latitude': 44.22,
# 'time': datetime(2012, 1, 1, 0, 0, 0)
# }
# cache.get(self.expensive_fn, self.cachedconfig)
# def testFunc(self):
# result = cache.get(self.expensive_fn, self.config)
# self.assertEqual(result, (1, False))
# def testCached(self):
# result = cache.get(self.expensive_fn, self.cachedconfig)
# self.assertEqual(result, (1, True))
# def tearDown(self):
# shutil.rmtree(cache.CACHE_DIR)
# if __name__ == '__main__':
# unittest.main()
|
gouthambs/Flask-Blogging
|
test/utils.py
|
Python
|
mit
| 948
| 0
|
# http://stackoverflow.com/questions/1477294/generate-random-utf-8-string-in-python
import random
def get_random_unicode(length):
try:
get_char = unichr
except NameError:
get_char = chr
# Update this to include code point ranges to be sampled
include_ranges = [
        (0x0021, 0x0021),
(0x0023, 0x0026),
(0x0028, 0x007E),
(0x00A1, 0x00AC),
(0x00AE, 0x00FF),
(0x0100, 0x017F),
(0x0180, 0x024F),
(0x2C60, 0x2C7F),
(0x16A0, 0x16F0),
(0x0370, 0x0377),
(0x037A, 0x037E),
(0x0384, 0x038A),
(0x038C, 0x038C),
]
alphabet = [
get_char(code_point) for current_range in include_ranges
for code_point in range(current_range[0], current_range[1] + 1)
]
return ''.join(random.choice(alphabet) for i in range(length))
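# Minimal usage sketch (illustrative, not part of the original module):
#     title = get_random_unicode(12)  # e.g. a 12-character mixed-script string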
|
donatello/minio-py
|
tests/unit/minio_test.py
|
Python
|
apache-2.0
| 3,851
| 0.000519
|
# -*- coding: utf-8 -*-
# Minio Python Library for Amazon S3 Compatible Cloud Storage,
# (C) 2015, 2016, 2017 Minio, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from unittest import TestCase
from nose.tools import raises, eq_
from minio import Minio
from minio.api import _DEFAULT_USER_AGENT
from minio.error import InvalidEndpointError, InvalidBucketError
from minio.helpers import get_target_url, is_valid_bucket_name
class ValidBucketName(TestCase):
@raises(InvalidBucketError)
def test_bucket_name(self):
is_valid_bucket_name('bucketName')
@raises(InvalidBucketError)
def test_bucket_name_invalid_characters(self):
is_valid_bucket_name('$$$bcuket')
@raises(InvalidBucketError)
def test_bucket_name_length(self):
is_valid_bucket_name('dd')
@raises(InvalidBucketError)
def test_bucket_name_periods(self):
is_valid_bucket_name('dd..mybucket')
@raises(InvalidBucketError)
def test_bucket_name_begins_period(self):
is_valid_bucket_name('.ddmybucket')
class GetURLTests(TestCase):
def test_get_target_url_works(self):
url = 'http://localhost:9000'
eq_(get_target_url(url, 'bucket-name'),
'http://localhost:9000/bucket-name/')
eq_(get_target_url(url, 'bucket-name', 'objectName'),
'http://localhost:9000/bucket-name/objectName')
eq_(get_target_url(url, 'bucket-name', 'objectName', None),
'http://localhost:9000/bucket-name/objectName')
eq_(get_target_url(url, 'bucket-name', 'objectName', 'us-east-1',
{'foo': 'bar'}),
'http://localhost:9000/bucket-name/objectName?foo=bar')
eq_(get_target_url(url, 'bucket-name', 'objectName', 'us-east-1',
{'foo': 'bar',
'b': 'c',
'a': 'b'}),
'http://localhost:9000/bucket-name/objectName?a=b&b=c&foo=bar')
# S3 urls.
s3_url = 'https://s3.amazonaws.com'
eq_(get_target_url(s3_url), 'https://s3.amazonaws.com/')
eq_(get_target_url(s3_url, 'my.bucket.name'),
'https://s3.amazonaws.com/my.bucket.name/')
eq_(get_target_url(s3_url,
'bucket-name',
'objectName',
'us-west-2', None),
'https://bucket-name.s3-us-west-2.amazonaws.com/objectName')
@raises(TypeError)
def test_minio_requires_string(self):
Minio(10)
@raises(InvalidEndpointError)
def test_minio_requires_hostname(self):
        Minio('http://')
class UserAgentTests(TestCase):
def test_default_user_agent(self):
client = Minio('localhost')
eq_(client._user_agent, _DEFAULT_USER_AGENT)
def test_set_app_info(self):
client = Minio('localhost')
expected_user_agent = _DEFAULT_USER_AGENT + ' hello/2.0.6'
client.set_app_info('hello', '2.0.6')
eq_(client._user_agent, expected_user_agent)
@raises(ValueError)
def test_set_app_info_requires_non_empty_name(self):
client = Minio('localhost:9000')
client.set_app_info('', '2.0.6')
@raises(ValueError)
def test_set_app_info_requires_non_empty_version(self):
client = Minio('localhost:9000')
client.set_app_info('hello', '')
|
hal0x2328/neo-python
|
neo/Core/State/AssetState.py
|
Python
|
mit
| 6,477
| 0.000926
|
from .StateBase import StateBase
from neo.Core.Fixed8 import Fixed8
from neo.Core.IO.BinaryReader import BinaryReader
from neo.IO.MemoryStream import StreamManager
from neo.Core.AssetType import AssetType
from neo.Core.UInt160 import UInt160
from neo.Core.Cryptography.Crypto import Crypto
from neo.Core.Cryptography.ECCurve import EllipticCurve, ECDSA
from neo.Core.Size import Size as s
from neo.Core.Size import GetVarSize
class AssetState(StateBase):
def Size(self):
return super(AssetState, self).Size() + s.uint256 + s.uint8 + GetVarSize(
self.Name) + self.Amount.Size() + self.Available.Size() + s.uint8 + s.uint8 + self.Fee.Size() + s.uint160 + self.Owner.Size() + s.uint160 + s.uint160 + s.uint32 + s.uint8
def __init__(self, asset_id=None, asset_type=None, name=None, amount=None, available=None,
precision=0, fee_mode=0, fee=None, fee_addr=None, owner=None,
admin=None, issuer=None, expiration=None, is_frozen=False):
"""
Create an instance.
Args:
asset_id (UInt256):
asset_type (neo.Core.AssetType):
name (str): the asset name.
amount (Fixed8):
available (Fixed8):
precision (int): number of decimals the asset has.
fee_mode (int):
fee (Fixed8):
fee_addr (UInt160): where the fee will be send to.
owner (EllipticCurve.ECPoint):
admin (UInt160): the administrator of the asset.
issuer (UInt160): the issuer of the asset.
expiration (UInt32): the block number on which the asset expires.
is_frozen (bool):
"""
self.AssetId = asset_id
self.AssetType = asset_type
self.Name = name
self.Amount = Fixed8(0) if amount is None else amount
self.Available = Fixed8(0) if available is None else available
self.Precision = precision
self.FeeMode = fee_mode
self.Fee = Fixed8(0) if fee is None else fee
self.FeeAddress = UInt160(data=bytearray(20)) if fee_addr is None else fee_addr
if owner is not None and type(owner) is not EllipticCurve.ECPoint:
raise Exception("Owner must be ECPoint Instance")
self.Owner = owner
self.Admin = admin
self.Issuer = issuer
self.Expiration = expiration
self.IsFrozen = is_frozen
# def Size(self):
# return super(AssetState, self).Size()
@staticmethod
def DeserializeFromDB(buffer):
"""
Deserialize full object.
Args:
buffer (bytes, bytearray, BytesIO): (Optional) data to create the stream from.
Returns:
AssetState:
"""
m = StreamManager.GetStream(buffer)
reader = BinaryReader(m)
account = AssetState()
account.Deserialize(reader)
StreamManager.ReleaseStream(m)
return account
def Deserialize(self, reader):
"""
Deserialize full object.
Args:
reader (neo.Core.IO.BinaryReader):
"""
super(AssetState, self).Deserialize(reader)
self.AssetId = reader.ReadUInt256()
self.AssetType = ord(reader.ReadByte())
self.Name = reader.ReadVarString()
position = reader.stream.tell()
try:
self.Amount = reader.ReadFixed8()
except Exception:
reader.stream.seek(position)
self.Amount = reader.ReadFixed8()
self.Available = reader.ReadFixed8()
self.Precision = ord(reader.ReadByte())
# fee mode
reader.ReadByte()
self.Fee = reader.ReadFixed8()
self.FeeAddress = reader.ReadUInt160()
self.Owner = ECDSA.Deserialize_Secp256r1(reader)
self.Admin = reader.ReadUInt160()
self.Issuer = reader.ReadUInt160()
self.Expiration = reader.ReadUInt32()
self.IsFrozen = reader.ReadBool()
def Serialize(self, writer):
"""
Serialize full object.
Args:
writer (neo.IO.BinaryWriter):
"""
super(AssetState, self).Serialize(writer)
writer.WriteUInt256(self.AssetId)
writer.WriteByte(self.AssetType)
writer.WriteVarString(self.Name)
if self.Amount.value > -1:
writer.WriteFixed8(self.Amount, unsigned=True)
else:
writer.WriteFixed8(self.Amount)
if type(self.Available) is not Fixed8:
raise Exception("AVAILABLE IS NOT FIXED 8!")
writer.WriteFixed8(self.Available, unsigned=True)
writer.WriteByte(self.Precision)
writer.WriteByte(b'\x00')
        writer.WriteFixed8(self.Fee)
writer.WriteUInt160(self.FeeAddress)
self.Owner.Serialize(writer)
writer.WriteUInt160(self.Admin)
writer.WriteUInt160(self.Issuer)
writer.WriteUInt32(self.Expiration)
writer.WriteBool(self.IsFrozen)
def GetName(self):
"""
Get the asset name based on its type.
Returns:
str: 'NEO' or 'NEOGas'
"""
        if self.AssetType == AssetType.GoverningToken:
return "NEO"
elif self.AssetType == AssetType.UtilityToken:
return "NEOGas"
if type(self.Name) is bytes:
return self.Name.decode('utf-8')
return self.Name
def ToJson(self):
"""
Convert object members to a dictionary that can be parsed as JSON.
Returns:
dict:
"""
return {
'assetId': self.AssetId.To0xString(),
'assetType': self.AssetType,
'name': self.GetName(),
'amount': self.Amount.value,
'available': self.Available.value,
'precision': self.Precision,
'fee': self.Fee.value,
'address': self.FeeAddress.ToString(),
'owner': self.Owner.ToString(),
'admin': Crypto.ToAddress(self.Admin),
'issuer': Crypto.ToAddress(self.Issuer),
'expiration': self.Expiration,
'is_frozen': self.IsFrozen
}
def Clone(self):
return AssetState(asset_id=self.AssetId, asset_type=self.AssetType, name=self.Name, amount=self.Amount, available=self.Available, precision=self.Precision, fee=self.Fee, fee_addr=self.FeeAddress, owner=self.Owner, admin=self.Admin, issuer=self.Issuer, expiration=self.Expiration, is_frozen=self.IsFrozen)
|
quantumlib/OpenFermion
|
src/openfermion/measurements/qubit_partitioning.py
|
Python
|
apache-2.0
| 9,586
| 0.000104
|
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
""" Code to generate Pauli strings for measurement of local operators"""
from itertools import chain, zip_longest
import numpy
from openfermion.ops.operators import QubitOperator
def binary_partition_iterator(qubit_list, num_iterations=None):
"""Generator for a list of 2-partitions of N qubits
    such that all pairs of qubits are split in at least one partition.
    This follows a variation on ArXiv:1908.05628 - instead of
explicitly partitioning the list based on the binary indices of
the qubits, we repeatedly divide the list in two and then
zip it back together.
Args:
qubit_list(list): list of qubits to be partitioned
num_iterations(int or None): number of iterations to perform.
If None, will be set to ceil(log2(len(qubit_list)))
Returns:
partition(iterator of tuples of lists): the required partitioning
"""
# Some edge cases
if num_iterations is not None and num_iterations == 0:
return
num_qubits = len(qubit_list)
if num_qubits < 2:
raise ValueError('Need at least 2 qubits to partition')
if num_qubits == 2:
yield ([qubit_list[0]], [qubit_list[1]])
return
if num_iterations is None:
num_iterations = int(numpy.ceil(numpy.log2(num_qubits)))
# Calculate the point where we need to split the list each time.
half_point = int(numpy.ceil(num_qubits / 2))
# Repeat the division and zip steps as many times
# as required.
for _ in range(num_iterations):
# Divide the qubit list in two and return it
partition = (qubit_list[:half_point], qubit_list[half_point:])
yield partition
# Zip the partition together to remake the qubit list.
qubit_list = list(chain(*zip_longest(partition[0], partition[1])))
# If len(qubit_list) is odd, the end of the list will be 'None'
# which we delete.
if qubit_list[-1] is None:
del qubit_list[-1]
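# Hand-traced example (illustrative, not part of the original module; with
# four qubits ceil(log2(4)) = 2 iterations are performed):
#   list(binary_partition_iterator([0, 1, 2, 3]))
#   -> [([0, 1], [2, 3]),    # first split at the half point
#       ([0, 2], [1, 3])]    # after zipping back together to [0, 2, 1, 3]
# Every pair of qubits is separated by at least one of the two partitions.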
def partition_iterator(qubit_list, partition_size, num_iterations=None):
"""Generator for a list of k-partitions of N qubits such that
all sets of k qubits are perfectly split in at least one
partition, following ArXiv:1908.05628
Args:
qubit_list(list): list of qubits to be partitioned
partition_size(int): the number of sets in the partition.
num_iterations(int or None): the number of iterations in the
outer iterator. If None, set to ceil(log2(len(qubit_list)))
Returns:
partition(iterator of tuples of lists): the required partitioning
"""
# Some edge cases
if num_iterations == 0:
return
if partition_size == 1:
yield (qubit_list,)
return
elif partition_size == 2:
for p in binary_partition_iterator(qubit_list, num_iterations):
yield p
return
num_qubits = len(qubit_list)
if partition_size == num_qubits:
yield tuple([q] for q in qubit_list)
return
elif partition_size > num_qubits:
        raise ValueError("I can't k-partition fewer than k qubits")
if num_iterations is None:
num_iterations = int(numpy.ceil(numpy.log2(num_qubits)))
# First iterate over the outer binary partition
outer_iterator = binary_partition_iterator(qubit_list,
num_iterations=num_iterations)
for set1, set2 in outer_iterator:
# Each new partition needs to be subdivided fewer times
# to prevent an additional k! factor in the scaling.
num_iterations -= 1
# Iterate over all possibilities of partitioning the first
# set into l parts and the second set into k - l parts.
for inner_partition_size in range(1, partition_size):
if inner_partition_size > len(set1) or\
partition_size - inner_partition_size > len(set2):
continue
# subdivide the first partition
inner_iterator1 = partition_iterator(set1, inner_partition_size,
num_iterations)
for inner_partition1 in inner_iterator1:
# subdivide the second partition
inner_iterator2 = partition_iterator(
set2, partition_size - inner_partition_size, num_iterations)
for inner_partition2 in inner_iterator2:
yield inner_partition1 + inner_partition2
def pauli_string_iterator(num_qubits, max_word_size=2):
"""Generates a set of Pauli strings such that each word
    of k Pauli operators lies in at least one string.
Args:
num_qubits(int): number of qubits in string
max_word_size(int): maximum required word
Returns:
pauli_string(iterator of strings): iterator
over Pauli strings
"""
    if max_word_size > num_qubits:
raise ValueError('Number of qubits is too few')
if max_word_size <= 0:
raise ValueError('Word size too small')
qubit_list = list(range(num_qubits))
partitions = partition_iterator(qubit_list, max_word_size)
pauli_string = ['I' for temp in range(num_qubits)]
pauli_letters = ['X', 'Y', 'Z']
for partition in partitions:
for lettering in range(3**max_word_size):
for p in partition:
letter = pauli_letters[lettering % 3]
for qubit in p:
pauli_string[qubit] = letter
lettering = lettering // 3
yield tuple(pauli_string)
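# Hand-traced example (illustrative, not part of the original module): for
# num_qubits=3, max_word_size=2 the first partition is ([0, 1], [2]) and the
# 3**2 letterings of its two sets begin
#   ('X', 'X', 'X'), ('Y', 'Y', 'X'), ('Z', 'Z', 'X'), ('X', 'X', 'Y'), ...
# so every Pauli word on any two qubits appears in some returned string.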
def _find_compatible_basis(term, bases):
for basis in bases:
basis_qubits = {op[0] for op in basis}
conflicts = ((i, P)
for (i, P) in term
if i in basis_qubits and (i, P) not in basis)
if any(conflicts):
continue
return basis
return None
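# Illustration (hypothetical term and bases, not from the source): the term
# ((1, 'X'), (2, 'Z')) is compatible with the basis {(0, 'Y'), (1, 'X')}
# (qubit 1 agrees, qubit 2 is unconstrained) but conflicts with {(1, 'Z')},
# so the search would move on to the next candidate basis.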
def group_into_tensor_product_basis_sets(operator, seed=None):
"""
Split an operator (instance of QubitOperator) into `sub-operator`
QubitOperators, where each sub-operator has terms that are diagonal
in the same tensor product basis.
Each `sub-operator` can be measured using the same qubit post-rotations
in expectation estimation. Grouping into these tensor product basis
sets has been found to improve the efficiency of expectation estimation
significantly for some Hamiltonians in the context of
VQE (see section V(A) in the supplementary material of
https://arxiv.org/pdf/1704.05018v2.pdf). The more general problem
    of grouping operators into commutative groups is discussed in
section IV (B2) of https://arxiv.org/pdf/1509.04279v1.pdf. The
original input operator is the union of all output sub-operators,
and all sub-operators are disjoint (do not share any terms).
Args:
operator (QubitOperator): the operator that will be split into
sub-operators (tensor product basis sets).
seed (int): default None. Random seed used to initialize the
numpy.RandomState pseudo-random number generator.
Returns:
sub_operators (dict): a dictionary where each key defines a
tensor product basis, and each corresponding value is a
QubitOperator with terms that are all diagonal in
that basis.
**key** (tuple of tuples): Each key is a term, which defines
a tensor product basis. A term is a product of individual
factors; each factor is represented by a tuple of the form
(`index`, `action`), and these tuples are collected into a
|
otsaloma/poor-maps
|
guides/foursquare.py
|
Python
|
gpl-3.0
| 4,437
| 0.001129
|
# -*- coding: utf-8 -*-
# Copyright (C) 2014 Osmo Salomaa
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
Listing nearby places using Foursquare.
http://developer.foursquare.com/docs/venues/explore
"""
import copy
import html
import itertools
import poor
import urllib.parse
CONF_DEFAULTS = {"sort_by_distance": False}
CLIENT_ID = "BP3KCWJXGQDXWVMYSVLWWRITMVZTG5XANJ43D2ZD0D5JMKCX"
URL = ("https://api.foursquare.com/v2/venues/explore"
"?client_id={CLIENT_ID}"
"&client_secret=JTINTTCK4S5V4RTZ40IJB0GIKDX1XT0LJVNRH2EZXNVLNZ2T"
"&v=20140912"
"&m=foursquare"
"&query={query}"
"&ll={y:.5f},{x:.5f}"
"&limit=50"
"&radius={radius:.0f}"
"&sortByDistance={sort_by_distance}")
cache = {}
def nearby(query, near, radius, params):
"""Return X, Y and a list of dictionaries of places matching `query`."""
query = urllib.parse.quote_plus(query)
sort_by_distance = str(int(poor.conf.guides.foursquare.sort_by_distance))
x, y = prepare_point(near)
url = URL.format(CLIENT_ID=CLIENT_ID, **locals())
with poor.util.silent(KeyError):
return copy.deepcopy(cache[url])
results = poor.http.get_json(url)
results = poor.AttrDict(results)
results = [dict(
title=item.venue.name,
description=parse_description(item),
text=parse_text(item),
link=parse_link(item),
x=float(item.venue.location.lng),
y=float(item.venue.location.lat),
) for item in itertools.chain.from_iterable(
group["items"] for group in
results.response.get("groups", [])
)]
if results and results[0]:
cache[url] = copy.deepcopy((x, y, results))
return x, y, results
def parse_description(item):
"""Parse description
|
from search result `item`."""
description = []
with poor.util.silent(Exception):
rating = float(item.venue.rating)
description.append("{:.1f}/10".format(rating))
with poor.util.silent(Exception):
        description.append(item.venue.categories[0].name)
with poor.util.silent(Exception):
description.append(item.venue.location.address)
description = ", ".join(description)
with poor.util.silent(Exception):
description += "\n“{}”".format(item.tips[0].text)
return description
def parse_link(item):
"""Parse hyperlink from search result `item`."""
return ("http://foursquare.com/v/{}?ref={}"
.format(item.venue.id, CLIENT_ID))
def parse_text(item):
"""Parse blurb text from search result `item`."""
lines = []
with poor.util.silent(Exception):
lines.append('<font color="Theme.highlightColor">'
'<big>{}</big>'
'</font>'
.format(html.escape(item.venue.name)))
subtitle = []
with poor.util.silent(Exception):
subtitle.append('<font color="Theme.highlightColor">'
'<big>{:.1f}</big>'
'</font>'
'<small> / 10</small>'
.format(float(item.venue.rating)))
with poor.util.silent(Exception):
category = html.escape(item.venue.categories[0].name)
subtitle.append("<small>{}</small>".format(category))
lines.append(" ".join(subtitle))
with poor.util.silent(Exception):
quote = html.escape(item.tips[0].text)
lines.append("<small>“{}”</small>".format(quote))
return "<br>".join(lines)
def prepare_point(point):
"""Return geocoded coordinates for `point`."""
# Foursquare does geocoding too, but not that well.
if isinstance(point, (list, tuple)):
return point[0], point[1]
geocoder = poor.Geocoder("default")
results = geocoder.geocode(point, dict(limit=1))
return results[0]["x"], results[0]["y"]
|
narasimhan-v/avocado-misc-tests-1
|
io/net/multicast.py
|
Python
|
gpl-2.0
| 5,757
| 0
|
#!/usr/bin/env python
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
#
# See LICENSE for more details.
#
# Copyright: 2016 IBM
# Author: Prudhvi Miryala<mprudhvi@linux.vnet.ibm.com>
#
# test multicasting
# to test we need to enable multicast option on host
# then ping from peer to multicast group
import netifaces
from avocado import Test
from avocado.utils.software_manager import SoftwareManager
from avocado.utils.ssh import Session
from avocado.utils import process
from avocado.utils import distro
from avocado.utils.network.interfaces import NetworkInterface
from avocado.utils.network.hosts import LocalHost
class ReceiveMulticastTest(Test):
'''
check multicast receive
using ping tool
'''
def setUp(self):
'''
To check and install dependencies for the test
'''
self.peer = self.params.get("peer_ip", default="")
self.user = self.params.get("user_name", default="root")
self.peer_password = self.params.get("peer_password",
'*', default="None")
interfaces = netifaces.interfaces()
self.iface = self.params.get("interface", default="")
if self.iface not in interfaces:
self.cancel("%s interface is not available" % self.iface)
self.ipaddr = self.params.get("host_ip", default="")
self.netmask = self.params.get("netmask", default="")
local = LocalHost()
self.networkinterface = NetworkInterface(self.iface, local)
try:
self.networkinterface.add_ipaddr(self.ipaddr, self.netmask)
self.networkinterface.save(self.ipaddr, self.netmask)
except Exception:
self.networkinterface.save(self.ipaddr, self.netmask)
self.networkinterface.bring_up()
self.session = Session(self.peer, user=self.user,
password=self.peer_password)
if not self.session.connect():
self.cancel("failed connecting to peer")
self.count = self.params.get("count", default="500000")
smm = SoftwareManager()
pkgs = ["net-tools"]
detected_distro = distro.detect()
if detected_distro.name == "Ubuntu":
pkgs.extend(["openssh-client", "iputils-ping"])
elif detected_distro.name == "SuSE":
pkgs.extend(["openssh", "iputils"])
else:
pkgs.extend(["openssh-clients", "iputils"])
for pkg in pkgs:
if not smm.check_installed(pkg) and not smm.install(pkg):
self.cancel("%s package is need to test" % pkg)
if self.peer == "":
self.cancel("peer ip should specify in input")
cmd = "ip addr show | grep %s" % self.peer
output = self.session.cmd(cmd)
result = ""
result = result.join(output.stdout.decode("utf-8"))
self.peerif = result.split()[-1]
if self.peerif == "":
self.cancel("unable to get peer interface")
cmd = "ip -f inet -o addr show %s | awk '{print $4}' | cut -d / -f1"\
% self.iface
self.local_ip = process.system_output(cmd, shell=True).strip()
if self.local_ip == "":
self.cancel("unable to get local ip")
def test_multicast(self):
'''
ping to peer machine
'''
cmd = "echo 0 > /proc/sys/net/ipv4/icmp_echo_ignore_broadcasts"
if process.system(cmd, shell=True, verbose=True,
ignore_status=True) != 0:
self.fail("unable to set value to icmp_echo_ignore_broadcasts")
cmd = "ip link set %s allmulticast on" % self.iface
if process.system(cmd, shell=True, verbose=True,
ignore_status=True) != 0:
self.fail("unable to set all mulicast option to test interface")
cmd = "ip route add 224.0.0.0/4 dev %s" % self.peerif
output = self.session.cmd(cmd)
if not output.exit_status == 0:
self.fail("Unable to add route for Peer interafce")
cmd = "timeout 600 ping -I %s 224.0.0.1 -c %s -f" % (self.peerif,
self.count)
output = self.session.cmd(cmd)
if not output.exit_status == 0:
self.fail("multicast test failed")
def tearDown(self):
'''
delete multicast route and turn off multicast option
'''
cmd = "ip route del 224.0.0.0/4"
output = self.session.cmd(cmd)
        if not output.exit_status == 0:
            self.log.info("Unable to delete multicast route added for peer")
cmd = "echo 1 > /proc/sys/net/ipv4/icmp_echo_ignore_broadcasts"
if process.system(cmd, shell=True, verbose=True,
ignore_status=True) != 0:
self.log.info("unable to unset all mulicast option")
cmd = "ip link set %s allmulticast off" % self.iface
if process.system(cmd, shell=True, verbose=True,
ignore_status=True) != 0:
self.log.info("unable to unset all mulicast option")
self.networkinterface.remove_ipaddr(self.ipaddr, self.netmask)
try:
self.networkinterface.restore_from_backup()
except Exception:
self.log.info("backup file not availbale, could not restore file.")
self.session.quit()
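# Sketch of how this test is typically driven (file name, values, and the exact
# avocado flag are placeholders): parameters come from a variants YAML, e.g. a
# multicast.yaml containing
#     peer_ip: 192.168.100.2
#     user_name: root
#     peer_password: passw0rd
#     interface: eth1
#     host_ip: 192.168.100.1
#     netmask: 255.255.255.0
# and the test is then run with something like:
#     avocado run io/net/multicast.py -m multicast.yaml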
|
druss16/danslist
|
polls/urls.py
|
Python
|
mit
| 400
| 0.02
|
from django.conf.urls import url
from . import views
urlpatterns = [
# ex: /polls/
url(r'^$', views.index, name='index'),
# ex: /polls/5/
url(r'^(?P<question_id>[0-9]+)/$', views.detail, name='detail'),
# ex: /polls/5/results/
    url(r'^(?P<question_id>[0-9]+)/results/$', views.results, name='results'),
    # ex: /polls/5/vote/
    url(r'^(?P<question_id>[0-9]+)/vote/$', views.vote, name='vote'),
]
|
thorwhalen/ut
|
util/context_managers.py
|
Python
|
mit
| 908
| 0.001101
|
"""useful context managers"""
from contextlib import suppress
with suppress(ModuleNotFoundError):
from lag import *
import os
import contextlib
from functools import partial  # needed by cd() below
def clog(*args, condition=True, log_func=print, **kwargs):
if condition:
return log_func(*args, **kwargs)
@contextlib.contextmanager
def cd(newdir, verbose=True):
"""Change your working directory, do stuff, and change back to the original"""
_clog = partial(clog, condition=verbose, log_func=print)
prevdir = os.getcwd()
os.chdir(os.path.expanduser(newdir))
try:
_clog(f'cd {newdir}')
yield
finally:
_clog(f'cd {prevdir}')
os.chdir(prevdir)
# from pathlib import Path
# _clog("Called before cd", Path().absolute())
# with cd(Path.home()):
#     if verbose: print("Called under cd", Path().absolute())
# _clog("Called after cd and same as before", Path().absolute())
|
bjornsturmberg/NumBAT
|
JOSAB_tutorial/simo-josab-IFSBS-1umcylwg-SiO2.py
|
Python
|
gpl-3.0
| 7,020
| 0.010684
|
"""
Script to evaluate intermodal forward Brillouin scattering in a cylindrical SiO2 waveguide
"""
# Import the necessary packages
import time
import datetime
import numpy as np
import sys
import copy
from matplotlib.ticker import AutoMinorLocator
import math
sys.path.append("../backend/")
import materials
import objects
import mode_calcs
import integration
import plotting
from plotting import FieldDecorator
from fortran import NumBAT
# Naming conventions
# AC: acoustic
# EM: electromagnetic
# k_AC: acoustic wavenumber
start = time.time()
# Specify Geometric Parameters - all in [nm].
wl_nm = 1550 # Wavelength of EM wave in vacuum.
# Unit cell dimensions must be sufficiently large to ensure fields are zero at outermost boundary.
unitcell_x = 4.01*wl_nm  # be careful to ensure this is not a whole integer multiple
unitcell_y = unitcell_x
inc_a_x = 1000 # Waveguide width.
inc_a_y = inc_a_x
inc_shape = 'circular' # Shape of the waveguide.
# Specify number of electromagnetic modes, acoustic modes, and which EM indices
# are involved in the calculation for intermodal FSBS
num_modes_EM_pump = 20
num_modes_EM_Stokes = num_modes_EM_pump
num_modes_AC = 100 # Number of acoustic modes to solve for.
# The EM pump mode(s) for which to calculate interaction with AC modes.
# Can specify a mode number (zero has lowest propagation constant) or 'All'.
EM_ival_pump = 1
# The EM Stokes mode(s) for which to calculate interaction with AC modes.
EM_ival_Stokes = 0
# The AC mode(s) for which to calculate interaction with EM modes.
AC_ival = 'All'
# Output files are generated in a folder with the following prefix
prefix_str = 'ifsbs-josab-1umSiO2'
# Use all specified parameters to create a waveguide object
wguide = objects.Struct(unitcell_x,inc_a_x,unitcell_y,inc_a_y,inc_shape,
material_bkg=materials.materials_dict["Vacuum"],
material_a=materials.materials_dict["SiO2_2021_Poulton"],
lc_bkg=0.05, # mesh coarseness in background, larger lc_bkg = coarser along horizontal outer edge
lc_refine_1=20.0, # mesh refinement factor near the interface of waveguide, larger lc2 = finer along horizontal interface
lc_refine_2=30.0, # mesh refinement factor near the origin/centre of waveguide
plt_mesh=False, # creates png file of geometry and mesh in backend/fortran/msh/
check_mesh=False) # note requires x-windows configuration to work
# Initial guess for the EM effective index of the waveguide
n_eff = wguide.material_a.n-0.1
# Calculate Electromagnetic Modes
print("Starting EM pump modes")
sim_EM_pump = wguide.calc_EM_modes(num_modes_EM_pump, wl_nm, n_eff=n_eff, debug=False)
print("Starting EM Stokes modes")
sim_EM_Stokes = mode_calcs.fwd_Stokes_modes(sim_EM_pump)
# Generate images for the EM modes involved in the calculation
print("Starting EM field plotting ")
plotting.plt_mode_fields(sim_EM_pump,
ivals=[EM_ival_pump,EM_ival_Stokes],
EM_AC='EM_E', num_ticks=3,xlim_min=0.2, xlim_max=0.2, ylim_min=0.2, ylim_max=0.2,
prefix_str=prefix_str, pdf_png='png', ticks=True, quiver_steps=10,
comps=['Et','Eabs'], n_points=1000, colorbar=True)
# A computation interruption if needed
# sys.exit("We interrupt your regularly scheduled computation to bring you something completely different... for now")
# Print the wavevectors of EM modes.
print('k_z of EM modes \n', np.round(np.real(sim_EM_pump.Eig_values), 4))
# Calculate the EM effective index of the waveguide.
n_eff_sim = np.real(sim_EM_pump.Eig_values*((wl_nm*1e-9)/(2.*np.pi)))
print("n_eff = ", np.round(n_eff_sim, 4))
# Calculate and print the acoustic wave vector
k_AC = np.real(sim_EM_pump.Eig_values[EM_ival_pump] - sim_EM_Stokes.Eig_values[EM_ival_Stokes])
print('Intermode q_AC (1/m) \n', k_AC)
# Calculate Acoustic Modes
print("Starting acoustic modes")
sim_AC = wguide.calc_AC_modes(num_modes_AC, k_AC, EM_sim=sim_EM_pump, debug=False)
# Print the frequencies of AC modes.
AC_freqs_GHz = np.round(np.real(sim_AC.Eig_values)*1e-9, 4)
print('\n Freq of AC modes (GHz) \n', AC_freqs_GHz)
# Calculate total SBS gain, photoelastic and moving boundary contributions, as
# well as other important quantities
SBS_gain, SBS_gain_PE, SBS_gain_MB, linewidth_Hz, Q_factors, alpha = integration.gain_and_qs(
sim_EM_pump, sim_EM_Stokes, sim_AC, k_AC,
    EM_ival_pump=EM_ival_pump, EM_ival_Stokes=EM_ival_Stokes, AC_ival=AC_ival)
# Mask negligible gain values to improve clarity of print out.
threshold = 1e-3
masked_PE = np.ma.masked_inside(SBS_gain_PE[EM_ival_pump,EM_ival_Stokes,:], 0, threshold)
masked_MB = np.ma.masked_inside(SBS_gain_MB[EM_ival_pump,EM_ival_Stokes,:], 0, threshold)
masked = np.ma.masked_inside(SBS_gain[EM_ival_pump,EM_ival_Stokes,:], 0, threshold)
# Display these in terminal
print("\n Displaying results with negligible components masked out")
print("SBS_gain [1/(Wm)] PE contribution \n", masked_PE)
print("SBS_gain [1/(Wm)] MB contribution \n", masked_MB)
print("SBS_gain [1/(Wm)] total \n", masked)
# determining the location of the maximum gain
maxGainloc = 6  # note: sometimes it's necessary to specify this manually, as certain values are NOT possible by symmetry arguments
print("Plotting acoustic mode corresponding to maximum")
plotting.plt_mode_fields(sim_AC, EM_AC='AC', prefix_str=prefix_str, ivals=[maxGainloc],
num_ticks=3, quiver_steps=40, pdf_png='png',ticks=True, comps=['ut','uabs'], colorbar=True)
# Displaying results for the maximum found in the selection
print("-----------------")
print("Displaying results for maximum gain value found:")
print("Greatest SBS_gain [1/(Wm)] total \n", masked.data[maxGainloc])
print("displaying corresponding acoustic mode number (i.e., AC_field_#) for reference \n",maxGainloc )
print("EM Pump Power [Watts] \n", sim_EM_pump.EM_mode_power[EM_ival_pump] )
print("EM Stokes Power [Watts] \n", sim_EM_Stokes.EM_mode_power[EM_ival_Stokes] )
print("EM angular frequency [THz] \n", sim_EM_pump.omega_EM/1e12 )
print("AC Energy Density [J*m^{-1}] \n", sim_AC.AC_mode_energy_elastic[maxGainloc] )
print("AC loss alpha [1/s] \n", alpha[maxGainloc] )
print("AC frequency [GHz] \n", sim_AC.Omega_AC[maxGainloc]/(1e9*2*math.pi) )
print("AC linewidth [MHz] \n", linewidth_Hz[maxGainloc]/1e6)
# since the overlap is not returned directly, we'll have to deduce it
absQtot2 = (alpha[maxGainloc]*sim_EM_pump.EM_mode_power[EM_ival_pump]*sim_EM_Stokes.EM_mode_power[EM_ival_Stokes]*sim_AC.AC_mode_energy_elastic[maxGainloc]*masked.data[maxGainloc])/(2*sim_EM_pump.omega_EM*sim_AC.Omega_AC[maxGainloc])
absQtot = pow(absQtot2, 1/2)
print("Total coupling |Qtot| [W*m^{-1}*s] \n", absQtot )
end = time.time()
print("\n Simulation time (sec.)", (end - start))
|
Alkalit/silk
|
silk/config.py
|
Python
|
mit
| 1,268
| 0.000789
|
from copy import copy
import silk.utils.six as six
from silk.singleton import Singleton
def default_permissions(user):
if user:
return user.is_staff
return False
class SilkyConfig(six.with_metaclass(Singleton, object)):
defaults = {
'SILKY_DYNAMIC_PROFILING': [],
'SILKY_IGNORE_PATHS': [],
'SILKY_HIDE_COOKIES': True,
'SILKY_IGNORE_QUERIES': [],
        'SILKY_META': False,
'SILKY_AUTHENTICATION': False,
'SILKY_AUTHORISATION': False,
'SILKY_PERMISSIONS': default_permissions,
'SILKY_MAX_REQUEST_BODY_SIZE': -1,
'SILKY_MAX_RESPONSE_BODY_SIZE': -1,
        'SILKY_INTERCEPT_PERCENT': 100,
'SILKY_INTERCEPT_FUNC': None,
'SILKY_PYTHON_PROFILER': False,
}
def _setup(self):
from django.conf import settings
options = {option: getattr(settings, option) for option in dir(settings) if option.startswith('SILKY')}
self.attrs = copy(self.defaults)
self.attrs.update(options)
def __init__(self):
super(SilkyConfig, self).__init__()
self._setup()
def __getattr__(self, item):
return self.attrs.get(item, None)
def __setattribute__(self, key, value):
self.attrs[key] = value
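# Usage sketch: the Singleton metaclass makes every call return one shared
# instance, and unknown options resolve to None via __getattr__.
#     SilkyConfig() is SilkyConfig()        # True
#     SilkyConfig().SILKY_PYTHON_PROFILER   # False unless overridden in settings
#     SilkyConfig().SILKY_NO_SUCH_OPTION    # None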
|
SDoc/py-sdoc
|
sdoc/sdoc1/error.py
|
Python
|
mit
| 147
| 0.006803
|
class DataTypeError(RuntimeError):
"""
    Generic exception class for SDoc1 language errors with data types and expressions.
    """
pass
|
yujikato/DIRAC
|
src/DIRAC/Core/Utilities/Devloader.py
|
Python
|
gpl-3.0
| 2,365
| 0.011839
|
""" Here, we need some documentation...
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import sys
import os
import types
import threading
import time
import six
from DIRAC import gLogger
from DIRAC.Core.Utilities.DIRACSingleton import DIRACSingleton
@six.add_metaclass(DIRACSingleton)
class Devloader(object):
def __init__(self):
self.__log = gLogger.getSubLogger("Devloader")
self.__reloaded = False
self.__enabled = True
self.__reloadTask = False
self.__stuffToClose = []
self.__watchedFiles = []
self.__modifyTimes = {}
def addStuffToClose(self, stuff):
self.__stuffToClose.append(stuff)
@property
def enabled(self):
return self.__enabled
def watchFile(self, fp):
if os.path.isfile(fp):
self.__watchedFiles.append(fp)
return True
return False
def __restart(self):
self.__reloaded = True
for stuff in self.__stuffToClose:
try:
self.__log.always("Closing %s" % stuff)
sys.stdout.flush()
stuff.close()
except Exception:
gLogger.exception("Could not close %s" % stuff)
python = sys.executable
os.execl(python, python, * sys.argv)
def bootstrap(self):
if not self.__enabled:
return False
if self.__reloadTask:
return True
self.__reloadTask = threading.Thread(target=self.__reloadOnUpdate)
self.__reloadTask.setDaemon(1)
self.__reloadTask.start()
def __reloadOnUpdate(self):
while True:
time.sleep(1)
if self.__reloaded:
return
for modName in sys.modules:
modObj = sys.modules[modName]
if not isinstance(modObj, types.ModuleType):
continue
path = getattr(modObj, "__file__", None)
if not path:
continue
if path.endswith(".pyc") or path.endswith(".pyo"):
path = path[:-1]
self.__checkFile(path)
for path in self.__watchedFiles:
self.__checkFile(path)
def __checkFile(self, path):
try:
modified = os.stat(path).st_mtime
except Exception:
return
if path not in self.__modifyTimes:
self.__modifyTimes[path] = modified
return
if self.__modifyTimes[path] != modified:
self.__log.always("File system changed (%s). Restarting..." % (path))
self.__restart()
|
polypmer/obligarcy
|
obligarcy/migrations/0007_auto_20151010_2304.py
|
Python
|
gpl-3.0
| 696
| 0
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
from django.conf import settings
class Migration(migrations.Migration):
dependencies = [
('obligarcy', '0006_auto_20151009_1947'),
]
operations = [
migrations.AlterField(
model_name='contract',
name='users',
field=models.ManyToManyField(to=settings.AUTH_USER_MODEL),
),
migrations.AlterField(
            model_name='submission',
            name='user',
            field=models.ForeignKey(to=settings.AUTH_USER_MODEL),
        ),
        migrations.DeleteModel(
name='User',
),
]
|
mkeller0815/py65
|
py65/memory.py
|
Python
|
bsd-3-clause
| 2,382
| 0.00084
|
from collections import defaultdict
class ObservableMemory:
def __init__(self, subject=None, addrWidth=16):
self.physMask = 0xffff
if addrWidth > 16:
# even with 32-bit address space, model only 256k memory
self.physMask = 0x3ffff
if subject is None:
subject = (self.physMask + 1) * [0x00]
self._subject = subject
self._read_subscribers = defaultdict(list)
self._write_subscribers = defaultdict(list)
def __setitem__(self, address, value):
if isinstance(address, slice):
r = range(*address.indices(self.physMask + 1))
for n, v in zip(r, value):
self[n] = v
return
address &= self.physMask
callbacks = self._write_subscribers[address]
for callback in callbacks:
result = callback(address, value)
if result is not None:
value = result
self._subject[address] = value
    def __getitem__(self, address):
if isinstance(address, slice):
r = range(*address.indices(self.physMask + 1))
return [ self[n] for n in r ]
address &= self.physMask
callbacks = self._read_subscribers[address]
final_result = None
for callback in callbacks:
result = callback(address)
if result is not None:
final_result = result
if final_result is None:
return self._subject[address]
else:
return final_result
def __getattr__(self, attribute):
return getattr(self._subject, attribute)
def subscribe_to_write(self, address_range, callback):
for address in address_range:
address &= self.physMask
callbacks = self._write_subscribers.setdefault(address, [])
if callback not in callbacks:
callbacks.append(callback)
def subscribe_to_read(self, address_range, callback):
for address in address_range:
address &= self.physMask
callbacks = self._read_subscribers.setdefault(address, [])
if callback not in callbacks:
callbacks.append(callback)
def write(self, start_address, bytes):
start_address &= self.physMask
self._subject[start_address:start_address + len(bytes)] = bytes
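# Minimal demo (hypothetical addresses): subscribers observe, and may override,
# accesses within the mirrored address range.
if __name__ == "__main__":
    mem = ObservableMemory()

    def trace_write(address, value):
        print("write $%04X <- $%02X" % (address, value))
        # returning None keeps `value`; returning a number would replace it

    mem.subscribe_to_write(range(0xD000, 0xD004), trace_write)
    mem[0xD000] = 0xFF   # prints: write $D000 <- $FF
    print(mem[0xD000])   # 255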
|
lafranceinsoumise/api-django
|
agir/payments/migrations/0014_auto_20190726_1503.py
|
Python
|
agpl-3.0
| 364
| 0
|
# Generated by Django 2.2.3 on 2019-07-26 13:03
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [("payments", "0013_auto_20
|
190724_1628")]
operations = [
migrations.AlterModelOptions(
name="payment",
options={"get_latest_by": "created", "ordering": ("-created",)},
)
]
|
saydulk/django-wysiwyg
|
django_wysiwyg/templatetags/wysiwyg.py
|
Python
|
mit
| 2,413
| 0.002072
|
from django import template
from django.conf import settings
from django.template.loader import render_to_string
try:
from urlparse import urljoin
except ImportError:
from urllib.parse import urljoin
register = template.Library()
def get_settings():
"""Utility function to retrieve settings.py values with defaults"""
flavor = getattr(settings, "DJANGO_WYSIWYG_FLAVOR", "yui")
return {
"DJANGO_WYSIWYG_MEDIA_URL": getattr(settings, "DJANGO_WYSIWYG_MEDIA_URL", urljoin(settings.STATIC_URL, flavor) + '/'),
"DJANGO_WYSIWYG_FLAVOR": flavor,
}
@register.simple_tag
def wysiwyg_setup(protocol="http"):
"""
Create the <style> and <script> tags needed to initialize the rich text editor.
Create a local django_wysiwyg/includes.html template if you don't want to use Yahoo's CDN
"""
ctx = {
"protocol": protocol,
}
ctx.update(get_settings())
    return render_to_string(
"django_wysiwyg/%s/includes.html" % ctx['DJANGO_WYSIWYG_FLAVOR'],
ctx
)
@register.simple_tag
def wysiwyg_editor(field_id, editor_name=None, config=None):
"""
    Turn the textarea #field_id into a rich editor. If you do not specify the
JavaScript name of the editor, it will be derived from the field_id.
If you don't specify the editor_name then you'll have a JavaScript object
named "<field_id>_editor" in the global namespace. We give you control of
    this in case you have a complex JS environment.
"""
if not editor_name:
editor_name = "%s_editor" % field_id
ctx = {
'field_id': field_id,
'editor_name': editor_name,
'config': config
}
ctx.update(get_settings())
return render_to_string(
"django_wysiwyg/%s/editor_instance.html" % ctx['DJANGO_WYSIWYG_FLAVOR'],
ctx
)
@register.simple_tag
def wysiwyg_static_url(appname, prefix, default_path):
"""
    Automatically use a prefix if a given application is installed.
    For example, if django-ckeditor is installed, use its STATIC_URL/ckeditor folder to find the CKEditor distribution.
    When the application is not available, fall back to the default path.
This is a function for the internal templates of *django-wysiwyg*.
"""
if appname in settings.INSTALLED_APPS:
return urljoin(settings.STATIC_URL, prefix)
else:
return default_path
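# Typical template usage of the tags above (a sketch; "id_body" is whatever id
# your textarea has):
#     {% load wysiwyg %}
#     {% wysiwyg_setup %}
#     <textarea id="id_body" name="body"></textarea>
#     {% wysiwyg_editor "id_body" %}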
|
FrodeSolheim/fs-uae-launcher
|
launcher/ui/config/CustomOptionsPage.py
|
Python
|
gpl-2.0
| 2,973
| 0.000336
|
import fsui
from fsgamesys.context import fsgs
from launcher.i18n import gettext
from launcher.launcher_config import LauncherConfig
class CustomOptionsPage(fsui.Panel):
def __init__(self, parent):
fsui.Panel.__init__(self, parent)
self.layout = fsui.VerticalLayout()
label = fsui.MultiLineLabel(
self,
gettext(
"You can write key = value pairs here to set FS-UAE options "
"not currently supported by the user interface. This is only a "
"temporary feature until the GUI supports all options "
"directly."
),
760,
)
self.layout.add(label, fill=True, margin_bottom=10)
label = fsui.MultiLineLabel(
self,
gettext(
"The options specified here will apply to this configuration "
"only."
),
760,
)
self.layout.add(label, fill=True, margin_bottom=10)
hor_layout = fsui.HorizontalLayout()
self.layout.add(hor_layout, fill=True, expand=True)
# hor_layout.add_spacer(20)
self.text_area = fsui.TextArea(self, font_family="monospace")
self.text_area.set_min_width(760)
self.text_area.set_min_height(400)
self.text_area.set_text(initial_text())
hor_layout.add(self.text_area, fill=True, expand=True)
# hor_layout.add_spacer(20)
# self.layout.add_spacer(20)
self.get_window().add_close_listener(self.on_close_window)
def on_close_window(self):
self.update_config()
def on_close_button(self):
self.end_modal(0)
def update_config(self):
text = self.text_area.get_text()
update_config = {}
# First mark all unknown config values as cleared
        for key in list(fsgs.config.values.keys()):
if key not in LauncherConfig.default_config:
update_config[key] = ""
# Then we overwrite with specific values
for line in text.split("\n"):
line = line.strip()
parts = line.split("=", 1)
if len(parts) == 2:
key = parts[0].strip()
                # if key in Config.no_custom_config:
# continue
value = parts[1].strip()
update_config[key] = value
# Finally, set everything at once
LauncherConfig.set_multiple(update_config.items())
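# Example of the parsing in update_config above: a text area containing
#     floppy_drive_speed = 800
#     zorro_iii_memory = 65536
# produces {"floppy_drive_speed": "800", "zorro_iii_memory": "65536"}, on top
# of "" entries that clear previously-set custom options.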
def initial_text():
text = []
keys = fsgs.config.values.keys()
for key in sorted(keys):
# FIXME: Move to LauncherConfig as a method, maybe use
# is_custom_option.
if key in LauncherConfig.no_custom_config:
continue
if key.startswith("__implicit_"):
continue
value = fsgs.config.values[key]
if not value:
continue
text.append("{0} = {1}\n".format(key, value))
return "".join(text)
|
lukas/ml-class
|
examples/keras-smile/smile-server-1.py
|
Python
|
gpl-2.0
| 1,356
| 0
|
import flask
import keras
import numpy as np
import os
from keras.models import load_model
from PIL import Image
from flask import Flask, request
from jinja2 import Template
app = Flask(__name__)
model = load_model('smile.h5')
model._make_predict_function()
def predict_image(image):
image = image.convert(mode="L")
image = image.resize((32, 32))
im = np.asarray(image)
im = im.reshape(1, 32, 32, 1)
pred = model.predict(im)
return pred
@app.route("/predict", methods=["POST"])
def predict():
f = request.files['file']
image = Image.open(f.stream)
pred = predict_image(image)
template = Template("""
<html>
<body>
<p>Probability of Smiling: {{smile_prob}}</p>
<p>Probability of Not Smiling: {{no_smile_prob}}</p>
</body>
</html>
""")
    # model.predict returns a batch; assuming a two-class output, row 0 holds
    # the smile / no-smile probabilities (pred[1] would index past the batch)
    return template.render(smile_prob=pred[0][0], no_smile_prob=pred[0][1])
@app.route("/")
def index():
html = """
<html>
<body>
<form action="predict" method="POST" enctype="multipart/form-data">
<input type="file" name="file" accept="image/*;capture=camera">
<input type="submit"/>
</form>
</body>
</html>
"""
return(html)
if __name__ == '__main__' and not os.getenv("FLASK_DEBUG"):
app.run(port=8080)
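# Example request once the server is up (assuming the default port above):
#     curl -F "file=@face.jpg" http://localhost:8080/predict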
|
frederick623/pb
|
deltaone/d1_sbl_recon.py
|
Python
|
apache-2.0
| 7,739
| 0.030236
|
import re
import sqlite3
import csv
import ast
import os
import sys
import fnmatch
import datetime
import xlrd
import win32com.client
def question_marks(st):
question_marks = '?'
for i in range(0, len(st.split(','))-1):
question_marks = question_marks + ",?"
return question_marks
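# Example: question_marks("a,b,c") -> "?,?,?" -- one SQL placeholder per column
# of a comma-separated header, as used by create_tbl() below.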
def xlsx_to_arr(xlsx_file, worksheet=0, row_start=0, col_start=0, row_end=-1, col_end=-1):
arr = []
wb = xlrd.open_workbook(xlsx_file)
ws = wb.sheet_by_index(worksheet)
row_end = ws.nrows if row_end == -1 else row_end
col_end = ws.ncols if col_end == -1 else col_end
arr = [ws.row_values(row, start_colx=col_start, end_colx=col_end-1) for row in range(row_start, row_end)]
header = ','.join(arr[0])
return re.sub(r"[\*\.#/\$%\"\(\)&\- ]", "", header), arr[1:]
def csv_to_arr(csv_file, start=0, end=0, has_header=True, delim=',', ignore_col=""):
arr = []
with open(csv_file, 'rU') as f:
reader = csv.reader(f, delimiter=delim)
arr = list(reader)
header = ""
if has_header:
header = ','.join(arr[start])
if end == 0:
arr = arr[start+1:]
else:
arr = arr[start+1:end]
return re.sub(r"[\*\.#/\$%\d\" ]", "", header), arr
else:
return arr[start:]
return
def arr_to_csv(file_name, header, data_arr):
csv_file = open(file_name, 'wb')
wr = csv.writer(csv_file, quoting=csv.QUOTE_ALL)
wr.writerow(header.split(','))
for data_row in data_arr:
line = []
for ele in data_row:
line.append(str(ele))
wr.writerow(line)
csv_file.close()
return
def arrs_to_xlsx(filename, header=[], arr=[]):
i = 1
xl = win32com.client.Dispatch('Excel.Application')
wb = xl.Workbooks.Add()
for x in range(0, len(header)):
ws = wb.Worksheets(x+1)
for i, cell in enumerate(header[x].split(',')):
ws.Cells(1,i+1).Value = cell
for i, row in enumerate(arr[x]):
for j, cell in enumerate(row):
ws.Cells(i+2,j+1).Value = str(cell)
wb.Worksheets(1).Columns.AutoFit()
wb.Worksheets(1).UsedRange.FormatConditions.Add(win32com.client.constants.xlExpression, "", '=OR(AND(ISNUMBER($C1),$C1<>$D1),AND(ISNUMBER($E1),$E1<>$F1))')
wb.Worksheets(1).UsedRange.FormatConditions(1).Interior.ColorIndex = 6
wb.Worksheets(1).UsedRange.FormatConditions(1).StopIfTrue = False
wb.Worksheets(1).Columns("C:F").NumberFormat = "#,##0_);[Red](#,##0);0;@"
xl.DisplayAlerts = False
wb.SaveAs(filename)
xl.DisplayAlerts = True
wb.Close(True)
return
def db_cur(source = ":memory:"):
conn = sqlite3.connect(source, detect_types=sqlite3.PARSE_DECLTYPES)
# conn.row_factory = sqlite3.Row
cur = conn.cursor()
return conn, cur
def create_tbl(cur, tbl_name, header, arr = [], index_arr = []):
cur.execute("""select count(*) FROM sqlite_master WHERE type='table' AND name = '%s' """ % (tbl_name))
tbl_exists = cur.fetchone()
if tbl_exists[0] == 0:
cur.execute("CREATE TABLE " + tbl_name + " (" + header.replace("id,", "id PRIMARY KEY,") + " );")
for index in index_arr:
cur.execute("CREATE INDEX " + tbl_name + "_" + index + " ON " + tbl_name + " (" + index + ");")
if arr != []:
cur.executemany("INSERT INTO " + tbl_name + " VALUES ("+question_marks(header)+")", arr)
return
def files_lookup(tgt_dir, pattern, recur_list=False, sub_folder=False, most_recent=True):
filepath_arr = []
for fi in os.listdir(tgt_dir):
full_path = os.path.join(tgt_dir, fi)
if sub_folder and os.path.isdir(full_path):
filepath_arr += files_lookup(full_path, pattern, recur_list, sub_folder, most_recent)
if fnmatch.fnmatch(fi, pattern):
filepath_arr.append(full_path)
filepath_arr.sort(reverse=most_recent)
if recur_list:
return filepath_arr
else:
return filepath_arr[0]
def recon_sbl(cur):
sbl_header = "Contract,CPCode,Client,StockCode,G1 O/S,FA O/S,G1 Pending,FA Pending"
create_tbl(cur, "g1_inv", "SBLCODE,CPTY,STOCK,OS,PD")
create_tbl(cur, "fa_inv", "EXT,DESK,STOCK,OS,PD")
cur.execute("""
insert into fa_inv
select ExternalReference, ClientCode, StockCode, sum(case when date('now') > ValueDate then Qty else 0 end), sum(case when date('now') <= ValueDate then Qty else 0 end)
from fasbl
group by ExternalReference, ClientCode, StockCode
""")
cur.execute("""
insert into g1_inv
select SBLCODE, business, STOCK||' HK Equity', sum(case when source = 'os' then QTY else 0 end), sum(case when source = 'pd' then QTY else 0 end)
from (
select sblmap.SBLCode as SBLCODE, sblmap.Name as business, cast(STOCK as int) as STOCK, case when PTYPE = 'B' then -QTY else QTY end as QTY, 'os' as source
from os join sblmap on os.CPTY = sblmap.SBLCode
where cast(STOCK as int) <> 0
UNION ALL
select sblmap.SBLCode as SBLCODE, sblmap.Name as business, cast(STOCK as int) as STOCK, case when (BL = 'L' and STATUS = 'R') or (BL = 'B' and STATUS = 'L') then -QTY else QTY end as QTY, 'pd' as source
from pd join sblmap on pd.CPTY = sblmap.SBLCode
where cast(STOCK as int) <> 0
) aggrg
where STOCK <> ''
group by business, STOCK
""")
cur.execute("""
select EXT, SBLCode, CPTY, STOCK, sbl_os, fa_os, sbl_pd, fa_pd
from (
select EXT, SBLCODE, g1_inv.CPTY as CPTY, g1_inv.STOCK as STOCK, g1_inv.OS as sbl_os, ifnull(fa_inv.OS, 0) as fa_os, g1_inv.PD as sbl_pd, ifnull(fa_inv.PD, 0) as fa_pd
from g1_inv left join fa_inv
on g1_inv.CPTY = fa_inv.DESK
and g1_inv.STOCK = fa_inv.STOCK
union
select EXT, SBLCODE, fa_inv.DESK as CPTY, fa_inv.STOCK as STOCK, ifnull(g1_inv.OS, 0) as sbl_os, fa_inv.OS as fa_os, ifnull(g1_inv.PD, 0) as sbl_pd, fa_inv.PD as fa_pd
from fa_inv left join g1_inv
on g1_inv.CPTY = fa_inv.DESK
and g1_inv.STOCK = fa_inv.STOCK
) consol
where sbl_os <> 0 or fa_os <> 0 or sbl_pd <> 0 or fa_pd <> 0
""")
sbl_arr = cur.fetchall()
# for row in sbl_arr:
# print row
return sbl_header, sbl_arr
def conv_xl_dt(xl_dt):
dt = datetime.datetime.fromordinal(datetime.datetime(1900, 1, 1).toordinal() + int(xl_dt) - 2).date().strftime("%Y-%m-%d")
# tt = dt.timetuple()
return dt
def conv_xl_dt_arr(arr, cols):
return [ [ conv_xl_dt(ele) if idx in cols else ele for idx, ele in enumerate(row) ] for row in arr ]
def main():
conn, cur = db_cur()
pb_dir = os.path.dirname(os.path.abspath(__file__))
# pb_dir = "\\\\p7fs0003\\nd\\3033-Horizon-FA-Share\\PB_DeltaOne\\Daily_Data"
sbl_dir = os.path.dirname(os.path.abspath(__file__))
# sbl_dir = "\\\\P7FS0001\\ED\\SBL\\Reports\\Daily SBL Report\\ReportData"
output_dir = "\\\\p7fs0003\\nd\\3033-Horizon-
|
FA-Share\\PB_DeltaOne\\SBL FA Deltaone Recon"
sblmap_file = files_lookup(pb_dir, "ClientDetails_????????.xlsx")
fasbl_file = files_lookup(pb_dir, "RepoSBLTrade_????????.xlsx")
os_file = files_lookup(sbl_dir, "OS_Trades_Extract_*.CSV")
pd_file = files_lookup(sbl_dir, "Pending_Trades_Extract_*.CSV")
print (sblmap_file)
print (fasbl_file)
print (os_file)
print (pd_file)
trd_date = sblmap_file[-13:-5]
	inv_file = os.path.join(output_dir, "FA_G1_SBL_recon_"+trd_date+".xlsx")
sblmap_header, sblmap_arr = xlsx_to_arr(sblmap_file, row_start=1)
sblmap_header = sblmap_header.replace("ClientId", "ClientId1", 1)
fasbl_header, fasbl_arr = xlsx_to_arr(fasbl_file, row_start=1)
fasbl_arr = conv_xl_dt_arr(fasbl_arr, [3, 4])
os_header, os_arr = csv_to_arr(os_file, 1, -1, True, '\t')
pd_header, pd_arr = csv_to_arr(pd_file, 1, -1, True, '\t')
pd_header = pd_header.replace("BL","B_L",1)
create_tbl(cur, "sblmap", sblmap_header, sblmap_arr)
create_tbl(cur, "os", os_header, os_arr)
create_tbl(cur, "pd", pd_header, pd_arr)
create_tbl(cur, "fasbl", fasbl_header, fasbl_arr)
sbl_header, sbl_arr = recon_sbl(cur)
arrs_to_xlsx(inv_file, [sbl_header], [sbl_arr])
return
if __name__ == "__main__":
print ("D1 G1 SBL Recon")
try:
main()
except KeyboardInterrupt:
print ("Ctrl+C pressed. Stopping...")
|
jeremiah-c-leary/vhdl-style-guide
|
vsg/tests/ieee/test_rule_500.py
|
Python
|
gpl-3.0
| 2,648
| 0.003776
|
import os
import unittest
from vsg.rules import ieee
from vsg import vhdlFile
from vsg.tests import utils
sTestDir = os.path.dirname(__file__)
lFile, eError = vhdlFile.utils.read_vhdlfile(os.path.join(sTestDir, 'rule_500_test_input.vhd'))
lExpected_lower = []
lExpected_lower.append('')
utils.read_file(os.path.join(sTestDir, 'rule_500_test_input.fixed_lower.vhd'), lExpected_lower)
lExpected_upper = []
lExpected_upper.append('')
utils.read_file(os.path.join(sTestDir, 'rule_500_test_input.fixed_upper.vhd'), lExpected_upper)
class test_port_rule(unittest.TestCase):
def setUp(self):
self.oFile = vhdlFile.vhdlFile(lFile)
self.assertIsNone(eError)
def test_rule_500_lower(self):
oRule = ieee.rule_500()
self.assertTrue(oRule)
self.assertEqual(oRule.name, 'ieee')
self.assertEqual(oRule.identifier, '500')
self.assertEqual(oRule.groups, ['case', 'case::keyword'])
lExpected = [66, 67, 68, 69, 70]
lExpected.extend([73, 74, 76, 77, 78,79])
lExpected.extend(range(87, 89))
lExpected.extend([91])
lExpected.extend(range(93, 95))
lExpected.extend(range(100, 105))
lExpected.extend([107, 108, 110, 111, 112, 113])
oRule.analyze(self.oFile)
self.assertEqual(utils.extract_violation_lines_from_violation_object(oRule.violations), lExpected)
def test_rule_500_upper(self):
oRule = ieee.rule_500()
oRule.case = 'upper'
self.assertTrue(oRule)
self.assertEqual(oRule.name, 'ieee')
self.assertEqual(oRule.identifier, '500')
lExpected = []
lExpected.extend(range(5, 10))
lExpected.extend([12, 13, 15, 16, 17, 18])
lExpected.extend(range(26, 28))
lExpected.extend([30])
lExpected.extend(range(32, 34))
lExpected.extend(range(39, 44))
lExpected.extend([46, 47, 49, 50, 51, 52])
oRule.analyze(self.oFile)
self.assertEqual(utils.extract_violation_lines_from_violation_object(oRule.violations), lExpected)
    def test_fix_rule_500_lower(self):
oRule = ieee.rule_500()
oRule.fix(self.oFile)
lActual = self.oFile.get_lines()
self.assertEqual(lExpected_lower, lActual)
oRule.analyze(self.oFile)
        self.assertEqual(oRule.violations, [])
def test_fix_rule_500_upper(self):
oRule = ieee.rule_500()
oRule.case = 'upper'
oRule.fix(self.oFile)
lActual = self.oFile.get_lines()
self.assertEqual(lExpected_upper, lActual)
oRule.analyze(self.oFile)
self.assertEqual(oRule.violations, [])
|
fzuellich/urlmonitor
|
view.py
|
Python
|
gpl-3.0
| 4,690
| 0.002559
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from Tkinter import *
from ttk import *
class URLDialogView(object):
_LABEL_CONF = {'column': 0, 'padx': 5, 'pady': 5, 'sticky': W}
_ENTRY_CONF = {'padx': 5, 'pady': 5, 'sticky': E+W}
def __init__(self, parent, controller, data=None, edit=False):
self.parent = parent
self.edit = edit
# define callbacks for actions
self.submit_callback = controller.submit
self.cancel_callback = controller.cancel
# create elements
self._create_variables(data)
self._createWindow()
self._createWidgets()
def focus_set(self):
self.window.focus_set()
self.parent.wait_window(self.window)
def destroy(self):
"""Destroys the current window and returns the focus back to the
Treeview widget of the parent."""
self.parent.monitor_widget.focus_set()
self.window.destroy()
def _create_variables(self, data=None):
"""Create the variables and optionally set the data. Data retrieval is
only possible through these variables."""
self.url_var = StringVar()
self.label_var = StringVar()
self.user_var = StringVar()
self.password_var = StringVar()
if data is None:
return
keys = {'url': self.url_var, 'label': self.label_var}
for key, var in keys.iteritems():
if key in data and data[key] is not None:
var.set(data[key])
if 'auth' in data and data['auth'] is not None:
self.user_var.set(data['auth'][0])
self.password_var.set(data['auth'][1])
def _createWindow(self):
"""Create the main window for this dialog and set the instance variable
self.window."""
window = Toplevel(self.parent)
window.title('URL to watch')
window.transient(self.parent)
window.grab_set()
window.geometry("+%d+%d" % (self.parent.winfo_rootx()+50, self.parent.winfo_rooty()+50))
window.protocol("WM_DELETE_WINDOW", self.cancel_callback)
window.bind("<Return>", self.submit_callback)
window.bind("<Escape>", self.cancel_callback)
self.window = window
def _createWidgets(self):
"""Create all the widgets."""
# frame to pack everything
frame = Frame(self.window, padding=10)
frame.pack()
# define labels
        Label(frame, text='URL', anchor=W).grid(self._LABEL_CONF)
Label(frame, text='Label').grid(self._LABEL_CONF)
Label(frame, text='User').grid(self._LABEL_CONF)
Label(frame, text='Password').grid(self._LABEL_CONF)
# entries
url = Entry(frame, width=75, textvariable=self.url_var)
url.grid(column=1, row=0, padx=5, pady=5)
if self.edit is True:
url['state'] = DISABLED
else:
url.focus_set()
url_label = Entry(frame, textvariable=self.label_var)
url_label.grid(column=1, row=1, **self._ENTRY_CONF)
if self.edit is True:
url_label.focus_set()
user = Entry(frame, textvariable=self.user_var)
user.grid(column=1, row=2, **self._ENTRY_CONF)
password = Entry(frame, show="*", textvariable=self.password_var)
password.grid(column=1, row=3, **self._ENTRY_CONF)
# define buttons
ok = Button(frame, text="OK", command=self.submit_callback)
ok.grid(column=0, sticky=W, padx=5, pady=5)
cancel = Button(frame, text="Cancel", command=self.cancel_callback)
cancel.grid(column=1, row=4, sticky=W, padx=5, pady=5)
import widget
class ApplicationView(Frame):
"""Class to handle the application window logic."""
def __init__(self, parent, controller):
Frame.__init__(self, parent)
self.pack()
self.monitor_widget = widget.URLMonitorWidget(self, controller)
self.monitor_widget.grid(column=0, padx=5, pady=5, sticky=E+W)
self.statusbar = widget.StatusBar(self)
self.statusbar.grid(column=0, sticky=E+W)
# callback and shortcuts
self.add_url_callback = controller.add_url
self.quit_app_callback = controller.quit_app
self._register_keyboard_shortcuts()
Style().theme_use('xpnative')
def _register_keyboard_shortcuts(self):
self.bind_all('<Control-KeyPress-n>', self.add_url_callback)
self.bind_all('<Control-Shift-KeyPress-C>', self.quit_app_callback)
|
chrism0dwk/PyTado
|
setup.py
|
Python
|
gpl-3.0
| 1,442
| 0.001387
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import os
import sys
here = lambda *a: os.path.join(os.path.dirname(__file__), *a)
try:
from setuptools import setup, find_packages
except ImportError:
from distutils.core import setup
if sys.argv[-1] == 'publish':
os.system('python setup.py sdist upload')
sys.exit()
readme = open(here('README.md')).read()
requirements = [x.strip() for x in open(here('requirements.txt')).readlines()]
setup(name='python-tado',
version='0.2.9',
description='PyTado from chrism0dwk, modfied by w.malgadey',
long_description=readme,
keywords='tado',
author='chrism0dwk, w.malgadey',
author_email='chrism0dwk@gmail.com, w.malgadey@gmail.com',
url='https://github.com/wmalgadey/PyTado',
install_requires=requirements,
license="GPL3",
zip_safe=False,
platforms=["any"],
packages=find_packages(),
classifiers=[
'Development Status :: 3 - Alpha',
'Topic :: Home Automation',
'Intended Audience :: Developers',
'License :: OSI Approved :: GNU General Public License v3 (GPLv3)',
          'Natural Language :: English',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.5'
],
entry_points={
'console_scripts': [
'pytado = pytado.__main__:main'
]
},
)
|
stangls/omim
|
tools/run_desktop_tests.py
|
Python
|
apache-2.0
| 9,109
| 0.007355
|
#!/usr/bin/env python
"""
This script is mainly for running autotests on the build server, however, it
can also be used by engineers to run the tests locally on their machines.
It takes as optional parameters the path to the folder containing the test
executables (which must have names ending in _tests), and a list of tests that
need to be skipped, this list must be comma separated and contain no spaces. E.g.:
./run_desktop_tests.py -f ./omim-build-release -e drape_tests,some_other_tests
The script outputs the console output of the tests. It also checks the error
code of each test suite, and after all the tests are executed, it prints the
list of the failed tests, passed tests, skipped tests and tests that could not
be found, i.e. the tests that were specified in the skip list, but do not exist.
"""
from __future__ import print_function
from optparse import OptionParser
from os import listdir, remove
from random import shuffle
import random
import socket
import subprocess
import testserver
import time
import urllib2
import logging
TO_RUN = "to_run"
SKIP = "skip"
NOT_FOUND = "not_found"
FAILED = "failed"
PASSED = "passed"
WITH_SERVER = "with_server"
PORT = 34568
TESTS_REQUIRING_SERVER = ["downloader_tests", "storage_tests"]
class TestRunner:
def print_pretty(self, result, tests):
if not tests:
return
logging.info("\n{result}".format(result=result.upper()))
for test in tests:
logging.info("- {test}".format(test=test))
def set_global_vars(self):
parser = OptionParser()
parser.add_option("-o", "--output", dest="output", default="testlog.log", help="resulting log file. Default testlog.log")
parser.add_option("-f", "--folder", dest="folder", default="omim-build-release/out/release", help="specify the folder where the tests reside (absolute path or relative to the location of this script)")
parser.add_option("-d", "--data_path", dest="data_path", help="Path to data files (passed to the test executables as --data_path=<value>)")
parser.add_option("-u", "--user_resource_path", dest="resource_path", help="Path to resources, styles and classificators (passed to the test executables as --user_resource_path=<value>)")
parser.add_option("-i", "--include", dest="runlist", action="append", default=[], help="Include test into execution, comma separated list with no spaces or individual tests, or both. E.g.: -i one -i two -i three,four,five")
parser.add_option("-e", "--exclude", dest="skiplist", action="append", default=[], help="Exclude test from execution, comma separated list with no spaces or individual tests, or both. E.g.: -i one -i two -i three,four,five")
parser.add_option("-b", "--boost_tests", dest="boost_tests", action="store_true", default=False, help="Treat all the tests as boost tests (their output is different and it must be processed differently).")
parser.add_option("-k", "--keep_alive", dest="keep_alive", action="store_true", default=False, help="Keep the server alive after the end of the test. Because the server sometimes fails to start, this reduces the probability of false test failures on CI servers.")
(options, args) = parser.parse_args()
self.skiplist = set()
self.runlist = list()
for tests in options.skiplist:
for test in tests.split(","):
self.skiplist.add(test)
for tests in options.runlist:
self.runlist.extend(tests.split(","))
self.boost_tests = options.boost_tests
if self.runlist:
logging.warn("-i or -b option found, the -e option will be ignored")
self.workspace_path = options.folder
self.logfile = options.output
self.data_path = (" --data_path={0}".format(options.data_path) if options.data_path else "")
self.user_resource_path = (" --user_resource_path={0}".format(options.resource_path) if options.resource_path else "")
self.keep_alive = options.keep_alive
def start_server(self):
server = testserver.TestServer()
server.start_serving()
time.sleep(3)
def stop_server(self):
if self.keep_alive:
return
try:
urllib2.urlopen('http://localhost:{port}/kill'.format(port=PORT), timeout=5)
except (urllib2.URLError, socket.timeout):
logging.info("Failed to stop the server...")
def categorize_tests(self):
tests_to_run = list()
local_skiplist = list()
not_found = list()
test_files_in_dir = filter(lambda x: x.endswith("_tests"), listdir(self.workspace_path))
on_disk = lambda x: x in test_files_in_dir
not_on_disk = lambda x : not on_disk(x)
if not self.runlist:
local_skiplist = filter(on_disk, self.skiplist)
not_found = filter(not_on_disk, self.skiplist)
tests_to_run = filter(lambda x: x not in local_skiplist, test_files_in_dir)
else:
tests_to_run = filter(on_disk, self.runlist)
shuffle(tests_to_run)
            not_found = filter(not_on_disk, self.runlist)
# now let's move the tests that need a server either to the beginning or the end of the tests_to_run list
        tests_with_server = list(TESTS_REQUIRING_SERVER)
for test in TESTS_REQUIRING_SERVER:
if test in tests_to_run:
tests_to_run.remove(test)
else:
tests_with_server.remove(test)
return {TO_RUN:tests_to_run, SKIP:local_skiplist, NOT_FOUND:not_found, WITH_SERVER:tests_with_server}
def test_file_with_keys(self, test_file):
boost_keys = " --report_format=xml --report_level=detailed --log_level=test_suite --log_format=xml " if self.boost_tests else ""
return "{test_file}{boost_keys}{data}{resources}".format(test_file=test_file, boost_keys=boost_keys, data=self.data_path, resources=self.user_resource_path)
def run_tests(self, tests_to_run):
failed = list()
passed = list()
for test_file in tests_to_run:
self.log_exec_file(test_file)
test_file_with_keys = self.test_file_with_keys(test_file)
logging.info(test_file_with_keys)
process = subprocess.Popen("{tests_path}/{test_file} 2>> {logfile}".
format(tests_path=self.workspace_path, test_file=test_file_with_keys, logfile=self.logfile),
shell=True,
stdout=subprocess.PIPE)
logging.info("Pid: {0}".format(process.pid))
process.wait()
if process.returncode > 0:
failed.append(test_file)
else:
passed.append(test_file)
self.log_exec_file(test_file, result=process.returncode)
return {FAILED: failed, PASSED: passed}
def log_exec_file(self, filename, result=None):
if self.boost_tests:
return
logstring = "BEGIN" if result is None else "END" #can be 0 or None. If we omit the explicit check for None, we get wrong result
resstring = (" | result: {returncode}".format(returncode=result) if result is not None else "")
with open(self.logfile, "a") as logf:
logf.write("\n{logstring}: {filename}{resstring}\n".format(logstring=logstring, filename=filename, resstring=resstring))
def rm_log_file(self):
try:
remove(self.logfile)
except OSError:
pass
def __init__(self):
self.set_global_vars()
self.rm_log_file()
def merge_dicts_of_lists(self, one, two):
if not one:
return two
if not two:
return one
ret = one.copy()
for key, value in two.iteritems():
if key in one:
                ret[key] = ret[key] + two[key]  # list.append returns None; concatenate instead
else:
ret[key] = two[key]
return ret
def execute(self):
categorized_tests
|
Laurawly/tvm-1
|
python/tvm/relay/backend/contrib/ethosu/te/identity.py
|
Python
|
apache-2.0
| 2,862
| 0.001048
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# pylint: disable=invalid-name,unused-argument
"""Tensor Expression for identity"""
from tvm import te
from .dma import read_compute, write_compute
def identity_compute(
ifm: te.Tensor,
lut: te.Tensor,
ifm_scale: float,
ifm_zero_point: int,
ofm_scale: float,
ofm_zero_point: int,
activation: str,
) -> te.Tensor:
"""A compute operator for the NPU identity operator.
Parameters
----------
ifm : te.Tensor
The Input Feature Map tensor (IFM).
lut : te.Tensor
The look-up table values to use if activation is "LUT", "TANH" or "SIGMOID".
ifm_scale : float
The quantization scale for the Input Feature Map tensor.
ifm_zero_point : int
The quantization zero point for the Input Feature Map tensor.
ofm_scale : float
The quantization scale for the Output Feature Map tensor.
ofm_zero_point : int
The quantization zero point for the Output Feature Map tensor.
activation : str
The activation function to use.
"NONE" - no activation function.
"TANH" - tanh activation function.
"SIGMOID" - sigmoid activation function.
"LUT" - use a look-up table to perform the activation function.
Returns
-------
te.Tensor
The Output Feature Map tensor.
"""
dmaed_ifm = read_compute(ifm, ifm_zero_point, ifm_scale)
id_attrs = {"op": "ethosu_identity", "activation": activation}
has_lut = activation in ("TANH", "LUT", "SIGMOID")
# This is a trick to insert the LUT tensor into the TE graph if LUT is present
lut_expr = (lut[0] + lut[255]).astype(ifm.dtype) if has_lut else 0
# Add the LUT tensor to the attributes to be able to later tell which tensor is the LUT
if has_lut:
id_attrs["lut"] = lut
identity = te.compute(
ifm.shape,
lambda *i: (dmaed_ifm(*i) + lut_expr).astype(ifm.dtype),
name="ethosu_identity",
attrs=id_attrs,
)
dmaed_ofm = write_compute(identity, ofm_zero_point, ofm_scale)
return dmaed_ofm
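# Usage sketch (shapes and dtypes below are illustrative only):
#     ifm = te.placeholder((1, 8, 8, 4), dtype="int8", name="ifm")
#     lut = te.placeholder((256,), dtype="uint8", name="lut")
#     ofm = identity_compute(ifm, lut, 1.0, 0, 1.0, 0, activation="NONE")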
|
nasi/MyPy
|
MyPy/__init__.py
|
Python
|
bsd-3-clause
| 2,276
| 0.010984
|
import time
import datetime
from MyPy.core.exceptions import (
Warning, Error, InterfaceError, DatabaseError, DataError, OperationalError,
IntegrityError, InternalError, ProgrammingError, NotSupportedError
)
from MyPy.constants import fieldtypes
apilevel = '2.0'
threadsafety = 1
paramstyle = 'format'
def Connect(*args, **kwargs):
from MyPy.core.connection import Connection
return Connection(*args, **kwargs)
connect = Connection = Connect
Date = datetime.date
Time = datetime.time
Timestamp = datetime.datetime
def DateFromTicks(ticks):
return Date(*time.localtime(ticks)[:3])
def TimeFromTicks(ticks):
return Time(*time.localtime(ticks)[3:6])
def TimestampFromTicks(ticks):
return Timestamp(*time.localtime(ticks)[:6])
Binary = str
class DBAPITypeObject:
def __init__(self, *values):
self.values = values
def __cmp__(self, other):
if other in self.values:
return 0
if other < self.values:
return 1
else:
return -1
STRING = DBAPITypeObject(fieldtypes.FIELD_TYPE_ENUM, fieldtypes.FIELD_TYPE_STRING,
fieldtypes.FIELD_TYPE_VAR_STRING)
BINARY = DBAPITypeObject(fieldtypes.FIELD_TYPE_BLOB, fieldtypes.FIELD_TYPE_LONG_BLOB,
fieldtypes.FIELD_TYPE_MEDIUM_BLOB, fieldtypes.FIELD_TYPE_TINY_BLOB)
NUMBER = DBAPITypeObject(fieldtypes.FIELD_TYPE_DECIMAL, fieldtypes.FIELD_TYPE_DOUBLE,
fieldtypes.FIELD_TYPE_FLOAT, fieldtypes.FIELD_TYPE_INT24,
fieldtypes.FIELD_TYPE_LONG, fieldtypes.FIELD_TYPE_LONGLONG,
fieldtypes.FIELD_TYPE_TINY, fieldtypes.FIELD_TYPE_YEAR)
DATETIME = DBAPITypeObject(fieldtypes.FIELD_TYPE_DATETIME, fieldtypes.FIELD_TYPE_TIMESTAMP)
ROWID = DBAPITypeObject()
__all__ = [
'Connect', 'Connection', 'connect', 'apilevel', 'threadsafety', 'paramstyle',
'Error', 'Warning', 'InterfaceError', 'DatabaseError', 'DataError',
'OperationalError', 'IntegrityError', 'InternalError', 'ProgrammingError',
    'NotSupportedError', 'Date', 'Time', 'Timestamp', 'Binary', 'DateFromTicks',
    'TimeFromTicks', 'TimestampFromTicks', 'STRING', 'BINARY', 'NUMBER',
'DATETIME', 'ROWID',
]
|
lmazuel/azure-sdk-for-python
|
azure-mgmt-consumption/azure/mgmt/consumption/models/marketplace.py
|
Python
|
mit
| 7,474
| 0.000803
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from .resource import Resource
class Marketplace(Resource):
"""An marketplace resource.
Variables are only populated by the server, and will be ignored when
sending a request.
:ivar id: Resource Id.
:vartype id: str
:ivar name: Resource name.
:vartype name: str
:ivar type: Resource type.
:vartype type: str
:ivar tags: Resource tags.
:vartype tags: dict[str, str]
:ivar billing_period_id: The id of the billing period resource that the
usage belongs to.
:vartype billing_period_id: str
:ivar usage_start: The start of the date time range covered by the usage
detail.
:vartype usage_start: datetime
:ivar usage_end: The end of the date time range covered by the usage
detail.
:vartype usage_end: datetime
:ivar resource_rate: The marketplace resource rate.
:vartype resource_rate: decimal.Decimal
:ivar offer_name: The type of offer.
:vartype offer_name: str
:ivar resource_group: The name of resource group.
:vartype resource_group: str
:ivar order_number: The order number.
:vartype order_number: str
:ivar instance_name: The name of the resource instance that the usage is
about.
:vartype instance_name: str
:ivar instance_id: The uri of the resource instance that the usage is
about.
:vartype instance_id: str
:ivar currency: The ISO currency in which the meter is charged, for
example, USD.
:vartype currency: str
:ivar consumed_quantity: The quantity of usage.
:vartype consumed_quantity: decimal.Decimal
:ivar unit_of_measure: The unit of measure.
:vartype unit_of_measure: str
:ivar pretax_cost: The amount of cost before tax.
:vartype pretax_cost: decimal.Decimal
:ivar is_estimated: The estimated usage is subject to change.
:vartype is_estimated: bool
:ivar meter_id: The meter id.
:vartype meter_id: str
:ivar subscription_guid: Subscription guid.
:vartype subscription_guid: str
:ivar subscription_name: Subscription name.
:vartype subscription_name: str
:ivar account_name: Account name.
:vartype account_name: str
:ivar department_name: Department name.
:vartype department_name: str
:ivar consumed_service: Consumed service name.
:vartype consumed_service: str
:ivar cost_center: The cost center of this department if it is a
department and a costcenter exists
:vartype cost_center: str
:ivar additional_properties: Additional details of this usage item. By
default this is not populated, unless it's specified in $expand.
:vartype additional_properties: str
:ivar publisher_name: The name of publisher.
:vartype publisher_name: str
:ivar plan_name: The name of plan.
:vartype plan_name: str
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
'tags': {'readonly': True},
'billing_period_id': {'readonly': True},
'usage_start': {'readonly': True},
'usage_end': {'readonly': True},
'resource_rate': {'readonly': True},
'offer_name': {'readonly': True},
'resource_group': {'readonly': True},
'order_number': {'readonly': True},
'instance_name': {'readonly': True},
'instance_id': {'readonly': True},
'currency': {'readonly': True},
'consumed_quantity': {'readonly': True},
'unit_of_measure': {'readonly': True},
'pretax_cost': {'readonly': True},
'is_estimated': {'readonly': True},
'meter_id': {'readonly': True},
'subscription_guid': {'readonly': True},
'subscription_name': {'readonly': True},
'account_name': {'readonly': True},
'department_name': {'readonly': True},
'consumed_service': {'readonly': True},
'cost_center': {'readonly': True},
'additional_properties': {'readonly': True},
'publisher_name': {'readonly': True},
'plan_name': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'billing_period_id': {'key': 'properties.billingPeriodId', 'type': 'str'},
'usage_start': {'key': 'properties.usageStart', 'type': 'iso-8601'},
'usage_end': {'key': 'properties.usageEnd', 'type': 'iso-8601'},
        'resource_rate': {'key': 'properties.resourceRate', 'type': 'decimal'},
'offer_name': {'key': 'properties.offerName', 'type': 'str'},
'resource_group': {'key': 'properties.resourceGroup', 'type': 'str'},
'order_number': {'key': 'properties.orderNumber', 'type': 'str'},
'instance_name': {'key': 'properties.instanceName', 'type': 'str'},
'instance_id': {'key': 'properties.instanceId', 'type': 'str'},
        'currency': {'key': 'properties.currency', 'type': 'str'},
'consumed_quantity': {'key': 'properties.consumedQuantity', 'type': 'decimal'},
'unit_of_measure': {'key': 'properties.unitOfMeasure', 'type': 'str'},
'pretax_cost': {'key': 'properties.pretaxCost', 'type': 'decimal'},
'is_estimated': {'key': 'properties.isEstimated', 'type': 'bool'},
'meter_id': {'key': 'properties.meterId', 'type': 'str'},
'subscription_guid': {'key': 'properties.subscriptionGuid', 'type': 'str'},
'subscription_name': {'key': 'properties.subscriptionName', 'type': 'str'},
'account_name': {'key': 'properties.accountName', 'type': 'str'},
'department_name': {'key': 'properties.departmentName', 'type': 'str'},
'consumed_service': {'key': 'properties.consumedService', 'type': 'str'},
'cost_center': {'key': 'properties.costCenter', 'type': 'str'},
'additional_properties': {'key': 'properties.additionalProperties', 'type': 'str'},
'publisher_name': {'key': 'properties.publisherName', 'type': 'str'},
'plan_name': {'key': 'properties.planName', 'type': 'str'},
}
def __init__(self):
super(Marketplace, self).__init__()
self.billing_period_id = None
self.usage_start = None
self.usage_end = None
self.resource_rate = None
self.offer_name = None
self.resource_group = None
self.order_number = None
self.instance_name = None
self.instance_id = None
self.currency = None
self.consumed_quantity = None
self.unit_of_measure = None
self.pretax_cost = None
self.is_estimated = None
self.meter_id = None
self.subscription_guid = None
self.subscription_name = None
self.account_name = None
self.department_name = None
self.consumed_service = None
self.cost_center = None
self.additional_properties = None
self.publisher_name = None
self.plan_name = None
|
shownotes/snotes20-restapi
|
statistic/admin.py
|
Python
|
agpl-3.0
| 91
| 0.010989
|
from django.contrib import admin
# Register your models here.
from statistic import models
|
Orav/kbengine
|
kbe/src/lib/python/Lib/unittest/test/testmock/__main__.py
|
Python
|
lgpl-3.0
| 641
| 0.00156
|
import os
import unittest
def load_tests(loader, standard_tests, pattern):
# top level directory cached on loader instance
this_dir = os.path.dirname(__file__)
pattern = pattern or "
|
test*.py"
# We are inside unittest.test.testmock, so the top-level is three notches up
top_level_dir = os.path.dirname(os.path.dirname(os.path.dirname(this_dir)))
    package_tests = loader.discover(start_dir=this_dir, pattern=pattern,
                                    top_level_dir=top_level_dir)
standard_tests.addTests(package_tests)
return standard_tests
if __name__ == '__main__':
unittest.main()
|
VirusTotal/content
|
Packs/CofenseTriage/Scripts/CofenseTriageThreatEnrichment/CofenseTriageThreatEnrichment.py
|
Python
|
mit
| 1,197
| 0.002506
|
from CommonServerPython import *
''' STANDALONE FUNCTION '''
def get_threat_indicator_list(args: Dict[str, Any]) -> list:
"""
Executes the cofense-threat-indicator-list command for the given arguments.

:type args: ``Dict[str, Any]``
:param args: The script arguments provided by the user.
:return: List of responses.
:rtype: ``list``
"""
# Fetch threat indicators based on threat value provided in the argument.
# cofense-threat-indicator-list command will enrich the information based on value.
threat_indicator = execute_command('cofense-threat-indicator-list',
{'threat_value': f"{args.get('threat_value')}"},
extract_contents=False)
# Populate response
return threat_indicator
''' MAIN FUNCTION '''
def main():
try:
return_results(get_threat_indicator_list(demisto.args()))
except Exception as e:
demisto.error(traceback.format_exc()) # print the traceback
return_error(f'Failed to execute CofenseTriageThreatEnrichment. Error: {str(e)}')
''' ENTRY POINT '''
if __name__ in ('__main__', '__builtin__', 'builtins'):
main()
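A minimal local sketch (assumptions: running outside XSOAR, so execute_command is stubbed and its return shape is illustrative) showing how get_threat_indicator_list passes threat_value through:
def execute_command(name, args, extract_contents=False):
    # Stub standing in for the XSOAR server call; the payload shape is made up.
    return [{'Contents': {'threat_value': args.get('threat_value'), 'indicators': []}}]

def get_threat_indicator_list(args):
    return execute_command('cofense-threat-indicator-list',
                           {'threat_value': f"{args.get('threat_value')}"},
                           extract_contents=False)

print(get_threat_indicator_list({'threat_value': 'evil.example.com'}))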
|
jeffames-cs/nnot
|
pyfann/libfann.py
|
Python
|
mit
| 29,340
| 0.00426
|
# This file was automatically generated by SWIG (http://www.swig.org).
# Version 3.0.7
#
# Do not make changes to this file unless you know what you are doing--modify
# the SWIG interface file instead.
from sys import version_info
if version_info >= (2, 6, 0):
def swig_import_helper():
from os.path import dirname
import imp
fp = None
try:
fp, pathname, description = imp.find_module('_libfann', [dirname(__file__)])
except ImportError:
import _libfann
return _libfann
if fp is not None:
try:
_mod = imp.load_module('_libfann', fp, pathname, description)
finally:
fp.close()
return _mod
_libfann = swig_import_helper()
del swig_import_helper
else:
import _libfann
del version_info
try:
_swig_property = property
except NameError:
pass # Python < 2.2 doesn't have 'property'.
def _swig_setattr_nondynamic(self, class_type, name, value, static=1):
if (name == "thisown"):
return self.this.own(value)
if (name == "this"):
if type(value).__name__ == 'SwigPyObject':
self.__dict__[name] = value
return
method = class_type.__swig_setmethods__.get(name, None)
if method:
return method(self, value)
if (not static):
if _newclass:
object.__setattr__(self, name, value)
else:
self.__dict__[name] = value
else:
raise AttributeError("You cannot add attributes to %s" % self)
def _swig_setattr(self, class_type, name, value):
return _swig_setattr_nondynamic(self, class_type, name, value, 0)
def _swig_getattr_nondynamic(self, class_type, name, static=1):
if (name == "thisown"):
return self.this.own()
method = class_type.__swig_getmethods__.get(name, None)
if method:
return method(self)
if (not static):
return object.__getattr__(self, name)
else:
raise AttributeError(name)
def _swig_getattr(self, class_type, name):
return _swig_getattr_nondynamic(self, class_type, name, 0)
def _swig_repr(self):
try:
strthis = "proxy of " + self.this.__repr__()
except:
strthis = ""
return "<%s.%s; %s >" % (self.__class__.__module__, self.__class__.__name__, strthis,)
try:
_object = object
_newclass = 1
except AttributeError:
class _object:
pass
_newclass = 0
class SwigPyIterator(_object):
__swig_setmethods__ = {}
__setattr__ = lambda self, name, value: _swig_setattr(self, SwigPyIterator, name, value)
__swig_getmethods__ = {}
__getattr__ = lambda self, name: _swig_getattr(self, SwigPyIterator, name)
def __init__(self, *args, **kwargs):
raise AttributeError("No constructor defined - class is abstract")
__repr__ = _swig_repr
__swig_destroy__ = _libfann.delete_SwigPyIterator
__del__ = lambda self: None
def value(self):
return _libfann.SwigPyIterator_value(self)
def incr(self, n=1):
return _libfann.SwigPyIterator_incr(self, n)
def decr(self, n=1):
return _libfann.SwigPyIterator_decr(self, n)
def distance(self, x):
return _libfann.SwigPyIterator_distance(self, x)
def equal(self, x):
return _libfann.SwigPyIterator_equal(self, x)
def copy(self):
return _libfann.SwigPyIterator_copy(self)
def next(self):
return _libfann.SwigPyIterator_next(self)
def __next__(self):
return _libfann.SwigPyIterator___next__(self)
def previous(self):
return _libfann.SwigPyIterator_previous(self)
def advance(self, n):
return _libfann.SwigPyIterator_advance(self, n)
def __eq__(self, x):
return _libfann.SwigPyIterator___eq__(self, x)
def __ne__(self, x):
return _libfann.SwigPyIterator___ne__(self, x)
def __iadd__(self, n):
return _libfann.SwigPyIterator___iadd__(self, n)
def __isub__(self, n):
return _libfann.SwigPyIterator___isub__(self, n)
def __add__(self, n):
return _libfann.SwigPyIterator___add__(self, n)
def __sub__(self, *args):
return _libfann.SwigPyIterator___sub__(self, *args)
def __iter__(self):
return self
SwigPyIterator_swigregister = _libfann.SwigPyIterator_swigregister
SwigPyIterator_swigregister(SwigPyIterator)
_libfann.ERRORFUNC_LINEAR_swigconstant(_libfann)
ERRORFUNC_LINEAR = _libfann.ERRORFUNC_LINEAR
_libfann.ERRORFUNC_TANH_swigconstant(_libfann)
ERRORFUNC_TANH = _libfann.ERRORFUNC_TANH
_libfann.STOPFUNC_MSE_swigconstant(_libfann)
STOPFUNC_MSE = _libfann.STOPFUNC_MSE
_libfann.STOPFUNC_BIT_swigconstant(_libfann)
STOPFUNC_BIT = _libfann.STOPFUNC_BIT
_libfann.TRAIN_INCREMENTAL_swigconstant(_libfann)
TRAIN_INCREMENTAL = _libfann.TRAIN_INCREMENTAL
_libfann.TRAIN_BATCH_swigconstant(_libfann)
TRAIN_BATCH = _libfann.TRAIN_BATCH
_libfann.TRAIN_RPROP_swigconstant(_libfann)
TRAIN_RPROP = _libfann.TRAIN_RPROP
_libfann.TRAIN_QUICKPROP_swigconstant(_libfann)
TRAIN_QUICKPROP = _libfann.TRAIN_QUICKPROP
_libfann.LINEAR_swigconstant(_libfann)
LINEAR = _libfann.LINEAR
_libfann.THRESHOLD_swigconstant(_libfann)
THRESHOLD = _libfann.THRESHOLD
_libfann.THRESHOLD_SYMMETRIC_swigconstant(_libfann)
THRESHOLD_SYMMETRIC = _libfann.THRESHOLD_SYMMETRIC
_libfann.SIGMOID_swigconstant(_libfann)
SIGMOID = _libfann.SIGMOID
_libfann.SIGMOID_STEPWISE_swigconstant(_libfann)
SIGMOID_STEPWISE = _libfann.SIGMOID_STEPWISE
_libfann.SIGMOID_SYMMETRIC_swigconstant(_libfann)
SIGMOID_SYMMETRIC = _libfann.SIGMOID_SYMMETRIC
_libfann.SIGMOID_SYMMETRIC_STEPWISE_swigconstant(_libfann)
SIGMOID_SYMMETRIC_STEPWISE = _libfann.SIGMOID_SYMMETRIC_STEPWISE
_libfann.GAUSSIAN_swigconstant(_libfann)
GAUSSIAN = _libfann.GAUSSIAN
_libfann.GAUSSIAN_SYMMETRIC_swigconstant(_libfann)
GAUSSIAN_SYMMETRIC = _libfann.GAUSSIAN_SYMMETRIC
_libfann.GAUSSIAN_STEPWISE_swigconstant(_libfann)
GAUSSIAN_STEPWISE = _libfann.GAUSSIAN_STEPWISE
_libfann.ELLIOT_swigconstant(_libfann)
ELLIOT = _libfann.ELLIOT
_libfann.ELLIOT_SYMMETRIC_swigconstant(_libfann)
ELLIOT_SYMMETRIC = _libfann.ELLIOT_SYMMETRIC
_libfann.LINEAR_PIECE_swigconstant(_libfann)
LINEAR_PIECE = _libfann.LINEAR_PIECE
_libfann.LINEAR_PIECE_SYMMETRIC_swigconstant(_libfann)
LINEAR_PIECE_SYMMETRIC = _libfann.LINEAR_PIECE_SYMMETRIC
_libfann.SIN_SYMMETRIC_swigconstant(_libfann)
SIN_SYMMETRIC = _libfann.SIN_SYMMETRIC
_libfann.COS_SYMMETRIC_swigconstant(_libfann)
COS_SYMMETRIC = _libfann.COS_SYMMETRIC
_libfann.LAYER_swigconstant(_libfann)
LAYER = _libfann.LAYER
_libfann.SHORTCUT_swigconstant(_libfann)
SHORTCUT = _libfann.SHORTCUT
class training_data_parent(_object):
__swig_setmethods__ = {}
__setattr__ = lambda self, name, value: _swig_setattr(self, training_data_parent, name, value)
__swig_getmethods__ = {}
__getattr__ = lambda self, name: _swig_getattr(self, training_data_parent, name)
__repr__ = _swig_repr
def __init__(self, *args):
this = _libfann.new_training_data_parent(*args)
try:
self.this.append(this)
except:
self.this = this
__swig_destroy__ = _libfann.delete_training_data_parent
__del__ = lambda self: None
def destroy_train(self):
return _libfann.training_data_parent_destroy_train(self)
def read_train_from_file(self, filename):
return _libfann.training_data_parent_read_train_from_file(self, filename)
def save_train(self, filename):
return _libfann.training_data_parent_save_train(self, filename)
def save_train_to_fixed(self, filename, decimal_point):
return _libfann.training_data_parent_save_train_to_fixed(self, filename, decimal_point)
def shuffle_train_data(self):
return _libfann.training_data_parent_shuffle_train_data(self)
def merge_train_data(self, data):
return _libfann.training_data_parent_merge_train_data(self, data)
def length_train_data(self):
return _libfann.training_data_parent_length_train_data(self)
def num_input_train_data(self):
return _libfann.training_data_parent_num_input_train_data(self)
def num_output_train_data(self):
return _libfann.training_data_parent_num_output_train_data(self)
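A hedged usage sketch (assumptions: the classic pyfann neural_net API applies and a FANN-format 'xor.data' file exists) tying the constants above to a typical training run:
from pyfann import libfann

ann = libfann.neural_net()
ann.create_standard_array((2, 3, 1))  # 2 inputs, 3 hidden neurons, 1 output
ann.set_activation_function_hidden(libfann.SIGMOID_SYMMETRIC)
ann.set_activation_function_output(libfann.SIGMOID_SYMMETRIC)
ann.set_training_algorithm(libfann.TRAIN_RPROP)
ann.train_on_file("xor.data", 1000, 100, 0.001)  # max epochs, report interval, desired MSE
print(ann.run([1.0, -1.0]))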
|
azaghal/ansible
|
test/lib/ansible_test/_internal/coverage/analyze/targets/expand.py
|
Python
|
gpl-3.0
| 1,272
| 0.001572
|
"""Expand target names in an aggregated coverage file."""
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from .... import types as t
from ....io import (
SortedSetEncoder,
write_json_file,
)
from . import (
CoverageAnalyzeTargetsConfig,
expand_indexes,
format_arc,
read_report,
)
class CoverageAnalyzeTargetsExpandConfig(CoverageAnalyzeTargetsConfig):
"""Configuration for the `coverage analyze targets expand` command."""
def __init__(self, args): # type: (t.Any) -> None
super(CoverageAnalyzeTargetsExpandConfig, self).__init__(args)
self.input_file = args.input_file # type: str
self.output_file = args.output_file # type: str
def command_coverage_analyze_targets_expand(args): # type: (CoverageAnalyzeTargetsExpandConfig) -> None
"""Expand target names in an aggregated coverage file."""
covered_targets, covered_path_arcs, covered_path_lines = read_report(args.input_file)
report = dict(
arcs=expand_indexes(covered_path_arcs, covered_targets, format_arc),
lines=expand_indexes(covered_path_lines, covered_targets, str),
)
if not args.explain:
write_json_file(args.output_file, report, encoder=SortedSetEncoder)
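A hedged sketch (the real expand_indexes in the surrounding package may differ) of the expansion idea: aggregated reports store each target name once and reference it by index, and expansion maps every covered item back to readable names:
def expand_indexes_sketch(source, target_names, format_func):
    # source: covered item -> set of target indexes
    # target_names: index -> target name, as read from the aggregated report
    return {format_func(item): set(target_names[i] for i in indexes)
            for item, indexes in source.items()}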
|
btouchard/piserver
|
src/piserver.py
|
Python
|
gpl-3.0
| 320
| 0.009404
|
#!/usr/bin/python3
# -*- coding: utf-8 -*-
import os, locale, sys
from core import controller
locale.setlocale(locale.LC_ALL, 'fr_FR.UTF-8')
debug = False
if len(sys.argv) > 1: debug = sys.argv[1] == '-d'
# Initialize the main controller
ctrl = controller.Controller(debug)
# Start the server
ctrl.run()
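Usage note (an assumption: the script is run directly from the repository root):
# python3 src/piserver.py       # normal mode
# python3 src/piserver.py -d    # debug mode; any other argument is ignored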
|
dios-game/dios-cocos
|
src/oslibs/cocos/cocos-src/tools/cocos2d-console/plugins/project_compile/build_android.py
|
Python
|
mit
| 28,191
| 0.003689
|
#!/usr/bin/python
# build_native.py
# Build native codes
import sys
import os, os.path
import shutil
from optparse import OptionParser
import cocos
from MultiLanguage import MultiLanguage
import cocos_project
import json
import re
from xml.dom import minidom
import project_compile
BUILD_CFIG_FILE="build-cfg.json"
class AndroidBuilder(object):
CFG_KEY_COPY_TO_ASSETS = "copy_to_assets"
CFG_KEY_MUST_COPY_TO_ASSERTS = "must_copy_to_assets"
CFG_KEY_STORE = "key_store"
CFG_KEY_STORE_PASS = "key_store_pass"
CFG_KEY_ALIAS = "alias"
CFG_KEY_ALIAS_PASS = "alias_pass"
ANT_KEY_STORE = "key.store"
ANT_KEY_ALIAS = "key.alias"
ANT_KEY_STORE_PASS = "key.store.password"
ANT_KEY_ALIAS_PASS = "key.alias.password"
GRADLE_KEY_STORE = "RELEASE_STORE_FILE"
GRADLE_KEY_ALIAS = "RELEASE_KEY_ALIAS"
GRADLE_KEY_STORE_PASS = "RELEASE_STORE_PASSWORD"
GRADLE_KEY_ALIAS_PASS = "RELEASE_KEY_PASSWORD"
def __init__(self, verbose, app_android_root, no_res, proj_obj, use_studio=False):
self._verbose = verbose
self.app_android_root = app_android_root
self._no_res = no_res
self._project = proj_obj
self.use_studio = use_studio
# check environment variable
if self.use_studio:
self.ant_root = None
self.sign_prop_file = os.path.join(self.app_android_root, 'app', "gradle.properties")
else:
self.ant_root = cocos.check_environment_variable('ANT_ROOT')
self.sign_prop_file = os.path.join(self.app_android_root, "ant.properties")
self.sdk_root = cocos.check_environment_variable('ANDROID_SDK_ROOT')
self._parse_cfg()
def _run_cmd(self, command, cwd=None):
cocos.CMDRunner.run_cmd(command, self._verbose, cwd=cwd)
def _parse_cfg(self):
self.cfg_path = os.path.join(self.app_android_root, BUILD_CFIG_FILE)
try:
f = open(self.cfg_path)
cfg = json.load(f, encoding='utf8')
f.close()
except Exception:
raise cocos.CCPluginError(MultiLanguage.get_string('COMPILE_ERROR_PARSE_CFG_FAILED_FMT', self.cfg_path),
cocos.CCPluginError.ERROR_PARSE_FILE)
if cfg.has_key(project_compile.CCPluginCompile.CFG_KEY_MUST_COPY_RESOURCES):
if self._no_res:
self.res_files = cfg[project_compile.CCPluginCompile.CFG_KEY_MUST_COPY_RESOURCES]
else:
self.res_files = cfg[project_compile.CCPluginCompile.CFG_KEY_MUST_COPY_RESOURCES] + cfg[project_compile.CCPluginCompile.CFG_KEY_COPY_RESOURCES]
else:
self.res_files = cfg[project_compile.CCPluginCompile.CFG_KEY_COPY_RESOURCES]
self.ndk_module_paths = cfg['ndk_module_path']
# get the properties for sign release apk
if self.use_studio:
self.key_store_str = AndroidBuilder.GRADLE_KEY_STORE
self.key_alias_str = AndroidBuilder.GRADLE_KEY_ALIAS
self.key_store_pass_str = AndroidBuilder.GRADLE_KEY_STORE_PASS
self.key_alias_pass_str = AndroidBuilder.GRADLE_KEY_ALIAS_PASS
else:
self.key_store_str = AndroidBuilder.ANT_KEY_STORE
self.key_alias_str = AndroidBuilder.ANT_KEY_ALIAS
self.key_store_pass_str = AndroidBuilder.ANT_KEY_STORE_PASS
self.key_alias_pass_str = AndroidBuilder.ANT_KEY_ALIAS_PASS
move_cfg = {}
self.key_store = None
if cfg.has_key(AndroidBuilder.CFG_KEY_STORE):
self.key_store = cfg[AndroidBuilder.CFG_KEY_STORE]
move_cfg[self.key_store_str] = self.key_store
del cfg[AndroidBuilder.CFG_KEY_STORE]
self.key_store_pass = None
if cfg.has_key(AndroidBuilder.CFG_KEY_STORE_PASS):
self.key_store_pass = cfg[AndroidBuilder.CFG_KEY_STORE_PASS]
move_cfg[self.key_store_pass_str] = self.key_store_pass
del cfg[AndroidBuilder.CFG_KEY_STORE_PASS]
self.alias = None
if cfg.has_key(AndroidBuilder.CFG_KEY_ALIAS):
self.alias = cfg[AndroidBuilder.CFG_KEY_ALIAS]
move_cfg[self.key_alias_str] = self.alias
del cfg[AndroidBuilder.CFG_KEY_ALIAS]
self.alias_pass = None
if cfg.has_key(AndroidBuilder.CFG_KEY_ALIAS_PASS):
self.alias_pass = cfg[AndroidBuilder.CFG_KEY_ALIAS_PASS]
move_cfg[self.key_alias_pass_str] = self.alias_pass
del cfg[AndroidBuilder.CFG_KEY_ALIAS_PASS]
if len(move_cfg) > 0:
# move the config into ant.properties
self._move_cfg(move_cfg)
with open(self.cfg_path, 'w') as outfile:
json.dump(cfg, outfile, sort_keys = True, indent = 4)
outfile.close()
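# A hedged illustration (not from the source) of the build-cfg.json shape that
# _parse_cfg above expects; the key names are inferred from the constants it reads:
#   {
#       "ndk_module_path": ["../../../../cocos2d-x"],
#       "copy_resources": [{"from": "../../Resources", "to": ""}],
#       "key_store": "release.keystore",
#       "key_store_pass": "***",
#       "alias": "release",
#       "alias_pass": "***"
#   }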
def has_keystore_in_signprops(self):
keystore = None
if self.use_studio:
pattern = re.compile(r"^RELEASE_STORE_FILE=(.+)")
else:
pattern = re.compile(r"^key\.store=(.+)")
try:
file_obj = open(self.sign_prop_file)
for line in file_obj:
str1 = line.replace(' ', '')
str2 = str1.replace('\t', '')
match = pattern.match(str2)
if match is not None:
keystore = match.group(1)
break
file_obj.close()
except:
pass
if keystore is None:
return False
else:
return True
def _write_sign_properties(self, cfg):
file_obj = open(self.sign_prop_file, "a+")
for key in cfg.keys():
str_cfg = "%s=%s\n" % (key, cfg[key])
file_obj.write(str_cfg)
file_obj.close()
def _move_cfg(self, cfg):
if not self.has_keystore_in_signprops():
self._write_sign_properties(cfg)
def remove_c_libs(self, libs_dir):
for file_name in os.listdir(libs_dir):
lib_file = os.path.join(libs_dir, file_name)
if os.path.isfile(lib_file):
ext = os.path.splitext(lib_file)[1]
if ext == ".a" or ext == ".so":
os.remove(lib_file)
def update_project(self, android_platform):
if self.use_studio:
manifest_path = os.path.join(self.app_android_root, 'app')
else:
manifest_path = self.app_android_root
sdk_tool_path = os.path.join(self.sdk_root, "tools", "android")
# check the android platform
target_str = self.check_android_platform(self.sdk_root, android_platform, manifest_path, False)
# update project
command = "%s update project -t %s -p %s" % (cocos.CMDRunner.convert_path_to_cmd(sdk_tool_path), target_str, manifest_path)
self._run_cmd(command)
# update lib-projects
property_path = manifest_path
self.update_lib_projects(self.sdk_root, sdk_tool_path, android_platform, property_path)
if self.use_studio:
# copy the local.properties to the app_android_root
file_name = 'local.properties'
src_path = os.path.normpath(os.path.join(manifest_path, file_name))
dst_path = os.path.normpath(os.path.join(self.app_android_root, file_name))
if src_path != dst_path:
if os.path.isfile(dst_path):
os.remove(dst_path)
shutil.copy(src_path, dst_path)
def get_toolchain_version(self, ndk_root, compile_obj):
ret_version = "4.8"
version_file_path = os.path.join(ndk_root, "RELEASE.TXT")
try:
versionFile = open(version_file_path)
lines = versionFile.readlines()
versionFile.close()
version_num = None
version_char = None
pattern = r'^[a-zA-Z]+(\d+)(\w)'
for line in lines:
str_line = line.lstrip()
match = re.match(pattern, str_line)
if match:
version_num = int(match.group(1))
version_char = match.group(2)
break
i
|
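A minimal standalone sketch (assumptions: ant-style properties and an illustrative file name) of the sign-properties pattern used by has_keystore_in_signprops, _write_sign_properties and _move_cfg above: append the signing config only when no keystore entry exists yet:
import re

def has_keystore(prop_file, pattern=r"^key\.store=(.+)"):
    try:
        with open(prop_file) as f:
            return any(re.match(pattern, line.replace(' ', '').replace('\t', ''))
                       for line in f)
    except IOError:
        return False

def move_signing_cfg(prop_file, cfg):
    if not has_keystore(prop_file):
        with open(prop_file, 'a+') as f:
            for key, value in cfg.items():
                f.write("%s=%s\n" % (key, value))

move_signing_cfg("ant.properties", {"key.store": "release.keystore"})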