repo_name stringlengths 5 100 | path stringlengths 4 231 | language stringclasses 1 value | license stringclasses 15 values | size int64 6 947k | score float64 0 0.34 | prefix stringlengths 0 8.16k | middle stringlengths 3 512 | suffix stringlengths 0 8.17k |
|---|---|---|---|---|---|---|---|---|
MobileCloudNetworking/dssaas | dss_sm_so/sm/sm-0.3/sm/so_manager.py | Python | apache-2.0 | 24,446 | 0.002332 | # Copyright 2014-2015 Zuercher Hochschule fuer Angewandte Wissenschaften
# Copyright (c) 2013-2015, Intel Performance Learning Solutions Ltd, Intel Corporation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
__author__ = 'andy'
from distutils import dir_util
import json
from mako.template import Template
import os
import random
import shutil
import tempfile
from threading import Thread
from urlparse import urlparse
import uuid
from occi.core_model import Resource, Link
from sm.config import CONFIG
from sm.log import LOG
from sm.retry_http import http_retriable_request
HTTP = 'http://'
WAIT = int(CONFIG.get('cloud_controller', 'wait_time', 2000))
ATTEMPTS = int(CONFIG.get('cloud_controller', 'max_attempts', 5))
class ServiceParameters():
def __init__(self):
self.service_params = {}
service_params_file_path = CONFIG.get('service_manager', 'service_params', '')
if len(service_params_file_path) > 0:
try:
with open(service_params_file_path) as svc_params_content:
self.service_params = json.load(svc_params_content)
svc_params_content.close()
except ValueError as e:
LOG.error("Invalid JSON sent as service config file")
except IOError as e:
LOG.error('Cannot find the specified parameters file: ' + service_params_file_path)
else:
LOG.warn("No service parameters file found in config file, setting internal params to empty.")
def service_parameters(self, state='', content_type='text/occi'):
# takes the internal parameters defined for the lifecycle phase...
# and combines them with the client supplied parameters
if content_type == 'text/occi':
params = []
# get the state specific internal parameters
try:
params = self.service_params[state]
except KeyError as err:
LOG.warn('The requested states parameters are not available: "' + state + '"')
# get the client supplied parameters if any
try:
for p in self.service_params['client_params']:
params.append(p)
except KeyError as err:
LOG.info('No client params')
header = ''
for param in params:
if param['type'] == 'string':
value = '"' + param['value'] + '"'
else:
value = str(param['value'])
header = header + param['name'] + '=' + value + ', '
return header[0:-2]
else:
LOG.error('Content type not supported: ' + content_type)
def add_client_params(self, params={}):
# adds user supplied parameters from the instantiation request of a service
client_params = []
for k, v in params.items():
param_type = 'number'
if (v.startswith('"') or v.startswith('\'')) and (v.endswith('"') or v.endswith('\'')):
param_type = 'string'
v = v[1:-1]
param = {'name': k, 'value': v, 'type': param_type}
client_params.append(param)
self.service_params['client_params'] = client_params
if __name__ == '__main__':
sp = ServiceParameters()
cp = {
'test': '1',
'test.test': '"astring"'
}
sp.add_client_params(cp)
p = sp.service_parameters('initialise')
print p
class AsychExe(Thread):
"""
Only purpose of this thread is to execute a list of tasks sequentially
as a background "thread".
"""
def __init__(self, tasks, registry=None):
super(AsychExe, self).__init__()
self.registry = registry
self.tasks = tasks
def run(self):
super(AsychExe, self).run()
LOG.debug('Starting AsychExe thread')
for task in self.tasks:
entity, extras = task.run()
if self.registry:
LOG.debug('Updating entity in registry')
self.registry.add_resource(key=entity.identifier, resource=entity, extras=extras)
# XXX push common functionality here
class Task():
def __init__(self, entity, extras, state):
self.entity = entity
self.extras = extras
self.state = state
def run(self):
raise NotImplemented()
class InitSO(Task):
def __init__(self, entity, extras):
Task.__init__(self, entity, extras, state='initialise')
self.nburl = CONFIG.get('cloud_controller', 'nb_api', '')
if self.nburl[-1] == '/':
self.nburl = self.nburl[0:-1]
LOG.info('CloudController Northbound API: ' + self.nburl)
if len(entity.attributes) > 0:
LOG.info('Client supplied parameters: ' + entity.attributes.__repr__())
#TODO check that these parameters are valid according to the kind specification
self.extras['srv_prms'].add_client_params(entity.attributes)
else:
LOG.warn('No client supplied parameters.')
def run(self):
self.entity.attributes['mcn.service.state'] = 'initialise'
LOG.debug('Ensuring SM SSH Key...')
self.__ensure_ssh_key()
# create an app for the new SO instance
LOG.debug('Creating SO container...')
if not self.entity.extras:
self.entity.extras = {}
self.entity.extras['repo_uri'] = self.__create_app()
return self.entity, self.extras
def __create_app(self):
# name must be A-Za-z0-9 and <=32 chars
app_name = self.enti | ty.kind.term[0:4] + 'srvinst' + ''.join(random.choice('0123456789ABCDEF') for i in range(16))
heads = {
'Content-Type': 'text/occi',
'Category': 'app; scheme="http://schemas.ogf.org/occi/platform#", '
'python-2.7; scheme="http://schemas.openshift.com/template/app#", '
'small; scheme="http://schemas.openshift.com/template/app#"',
'X-OCCI-Attribute': 'occi.app.name=' | + app_name
}
url = self.nburl + '/app/'
LOG.debug('Requesting container to execute SO Bundle: ' + url)
LOG.info('Sending headers: ' + heads.__repr__())
r = http_retriable_request('POST', url, headers=heads, authenticate=True)
loc = r.headers.get('Location', '')
if loc == '':
raise AttributeError("No OCCI Location attribute found in request")
app_uri_path = urlparse(loc).path
LOG.debug('SO container created: ' + app_uri_path)
LOG.debug('Updating OCCI entity.identifier from: ' + self.entity.identifier + ' to: '
+ app_uri_path.replace('/app/', self.entity.kind.location))
self.entity.identifier = app_uri_path.replace('/app/', self.entity.kind.location)
LOG.debug('Setting occi.core.id to: ' + app_uri_path.replace('/app/', ''))
self.entity.attributes['occi.core.id'] = app_uri_path.replace('/app/', '')
# get git uri. this is where our bundle is pushed to
return self.__git_uri(app_uri_path)
def __git_uri(self, app_uri_path):
url = self.nburl + app_uri_path
headers = {'Accept': 'text/occi'}
LOG.debug('Requesting container\'s git URL ' + url)
LOG.info('Sending headers: ' + headers.__repr__())
r = http_retriable_request('GET', url, headers=headers, authenticate=True)
attrs = r.headers.get('X-OCCI-Attribute', '')
if attrs == '':
raise AttributeError("No occi attributes found in request")
repo_uri = ''
for attr in attr |
openjck/kuma | kuma/wiki/middleware.py | Python | mpl-2.0 | 2,332 | 0 | from django.http import HttpResponseRedirect
from django.shortcuts import render
from kuma.core.utils import urlparams
from .exceptions import ReadOnlyException
f | rom .jobs | import DocumentZoneURLRemapsJob
class ReadOnlyMiddleware(object):
"""
Renders a 403.html page with a flag for a specific message.
"""
def process_exception(self, request, exception):
if isinstance(exception, ReadOnlyException):
context = {'reason': exception.args[0]}
return render(request, '403.html', context, status=403)
return None
class DocumentZoneMiddleware(object):
"""
For document zones with specified URL roots, this middleware modifies the
incoming path_info to point at the internal wiki path
"""
def process_request(self, request):
# https://bugzil.la/1189222
# Don't redirect POST $subscribe requests to GET zone url
if request.method == 'POST' and '$subscribe' in request.path:
return None
remaps = DocumentZoneURLRemapsJob().get(request.LANGUAGE_CODE)
for original_path, new_path in remaps:
if (
request.path_info == original_path or
request.path_info.startswith(u''.join([original_path, '/']))
):
# Is this a request for the "original" wiki path? Redirect to
# new URL root, if so.
new_path = request.path_info.replace(original_path,
new_path,
1)
new_path = '/%s%s' % (request.LANGUAGE_CODE, new_path)
query = request.GET.copy()
if 'lang' in query:
query.pop('lang')
new_path = urlparams(new_path, query_dict=query)
return HttpResponseRedirect(new_path)
elif request.path_info.startswith(new_path):
# Is this a request for the relocated wiki path? If so, rewrite
# the path as a request for the proper wiki view.
request.path_info = request.path_info.replace(new_path,
original_path,
1)
break
|
cjaymes/expatriate | src/expatriate/model/xs/DayTimeDurationType.py | Python | lgpl-3.0 | 1,473 | 0.002037 | # Copyright 2016 Casey Jaymes
# This file is part of Expatriate.
#
# Expatriate is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version | 3 of the License, or
# (at your option) any later version.
| #
# Expatriate is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Expatriate. If not, see <http://www.gnu.org/licenses/>.
import logging
import re
from ..decorators import *
from .DurationType import DurationType
logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO)
class DayTimeDurationType(DurationType):
def parse_value(self, value):
m = re.fullmatch(r'-?P(\d+D)?(T(\d+H)?(\d+M)?(\d+(\.\d+)?S)?)?', value)
if not m or not re.fullmatch(r'.*[DHMS].*', value) or not re.fullmatch(r'.*[^T]', value):
raise ValueError('Unable to parse xs:DayTimeDurationType value')
return super().parse_value(value)
def produce_value(self, value):
months, seconds = value
if months != 0:
raise ValueError('xs:DayTimeDurationType requires 0 for months value')
return super().produce_value(value)
|
im0rtel/OpenBazaar | node/constants.py | Python | mit | 1,678 | 0.001192 | # ######## KADEMLIA CONSTANTS ###########
BIT_NODE_ID_LEN = 160
HEX_NODE_ID_LEN = BIT_NODE_ID_LEN // 4
# Small number representing the degree of
# parallelism in network calls
ALPHA = 3
# Maximum number of contacts stored in a bucket
# NOTE: Should be an even number.
K = 8 # pylint: disable=invalid-name
# Maximum number of contacts stored in the
# replacement cache of a bucket
# NOTE: Should be an even number.
CACHE_K = 32
# Timeout for network operations
# [seconds]
RPC_TIMEOUT = 0.1
# Delay between iterations of iterative node lookups
# (for loose parallelism)
# [seconds]
ITERAT | IVE_LOOKUP_DELAY = RPC_TIMEOUT / 2
# If a KBucket has not been used for this amount of time, refresh it.
# [seconds]
REFRESH_TIMEOUT = 60 * 60 * 1000 # 1 hour
# The interval at which nodes replicate (republish/refresh)
# the data they hold
# [seconds]
REPLICATE_INTERVAL = REFRESH_TIMEOUT
# The time it takes for data to expire in the network;
# the original publisher of the data will also republish
# the data at this time if it is still valid
# [seconds]
DATE_EXPIRE | _TIMEOUT = 86400 # 24 hours
# ####### IMPLEMENTATION-SPECIFIC CONSTANTS ###########
# The interval in which the node should check whether any buckets
# need refreshing or whether any data needs to be republished
# [seconds]
CHECK_REFRESH_INTERVAL = REFRESH_TIMEOUT / 5
# Max size of a single UDP datagram.
# Any larger message will be spread accross several UDP packets.
# [bytes]
UDP_DATAGRAM_MAX_SIZE = 8192 # 8 KB
DB_PATH = "db/ob.db"
VERSION = "0.3.1"
SATOSHIS_IN_BITCOIN = 100000000
# The IP of the default DNSChain Server used to validate namecoin addresses
DNSCHAIN_SERVER_IP = "192.184.93.146"
|
edineicolli/daruma-exemplo-python | scripts/fiscal/ui_fiscal_retornosstatusecf.py | Python | gpl-2.0 | 31,540 | 0.006603 | # -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'ui_fiscal_retornosstatusecf.ui'
#
# Created: Mon Nov 24 22:26:20 2014
# by: pyside-uic 0.2.15 running on PySide 1.2.2
#
# WARNING! All changes made in this file will be lost!
from PySide import QtCore, QtGui
class Ui_ui_FISCAL_RetornosStatusECF(QtGui.QMainWindow):
def __init__(self):
super(Ui_ui_FISCAL_RetornosStatusECF, self).__init__()
self.setupUi(self)
self.actionM_todo_rRetornarInformacao_ECF_Daruma.triggered.connect(self.on_rRetornarInformacao_ECF_Daruma_triggered)
self.actionM_todo_rRetornarInformacaoSeparador_ECF_Daruma.triggered.connect(self.on_rRetornarInformacaoSeparador_ECF_Daruma_triggered)
self.actionM_todo_rLerAliquotas_ECF_Daruma.triggered.connect(self.on_rLerAliquotas_ECF_Daruma_triggered)
self.actionM_todo_rLerMeiosPagto_ECF_Daruma.triggered.connect(self.on_rLerMeiosPagto_ECF_Daruma_triggered)
self.actionM_todo_rLerRG_ECF_Daruma.triggered.connect(self.on_rLerRG_ECF_Daruma_triggered)
self.actionM_todo_rLerCNF_ECF_Daruma.triggered.connect(self.on_rLerCNF_ECF_Daruma_triggered)
self.actionM_todo_rLerDecimais_ECF_Daruma.triggered.connect(self.on_rLerDecimais_ECF_Daruma_triggered)
self.actionM_todo_rLerDecimaisInt_ECF_Daruma.triggered.connect(self.on_rLerDecimaisInt_ECF_Daruma_triggered)
self.actionM_todo_rLerDecimaisStr_ECF_Daruma.triggered.connect(self.on_rLerDecimaisStr_ECF_Daruma_triggered)
self.actionM_todo_rDataHoraImpressora_ECF_Daruma.triggered.connect(self.on_rDataHoraImpressora_ECF_Daruma_triggered)
self.actionM_todo_rVerificaImpressoraLigada_ECF_Daruma.triggered.connect(self.on_rVerificaImpressoraLigada_ECF_Daruma_triggered)
self.actionM_todo_rVerificarReducaoZ_ECF_Daruma.triggered.connect(self.on_rVerificarReducaoZ_ECF_Daruma_triggered)
self.actionM_todo_rRetornarDadosReducaoZ_ECF_Daruma.triggered.connect(self.on_rRetornarDadosReducaoZ_ECF_Daruma_triggered)
self.actionM_todo_rTipoUltimoDocumentoInt_ECF_Daruma.triggered.connect(self.on_rTipoUltimoDocumentoInt_ECF_Daruma_triggered)
self.actionM_todo_rTipoUltimoDocumentoStr_ECF_Daruma.triggered.connect(self.on_rTipoUltimoDocumentoStr_ECF_Daruma_triggered)
self.actionM_todo_rUltimoCMDEnviado_ECF_Daruma.triggered.connect(self.on_rUltimoCMDEnviado_ECF_Daruma_triggered)
self.actionM_todo_rMinasLegal_ECF_Daruma.triggered.connect(self.on_rMinasLegal_ECF_Daruma_triggered)
self.actionM_todo_rCFSaldoAPagar_ECF_Daruma.triggered.connect(self.on_rCFSaldoAPagar_ECF_Daruma_triggered)
self.actionM_todo_rCFSubTotal_ECF_Daruma.triggered.connect(self.on_rCFSubTotal_ECF_Daruma_triggered)
self.actionM_todo_rCFVerificarStatus_ECF_Daruma.triggered.connect(self.on_rCFVerificarStatus_ECF_Daruma_triggered)
self.actionM_todo_rStatusImpr | essora_ECF_Daruma.triggered.connect(self.on_rStatusImpressora_ECF_Daruma_triggered)
self.actionM_todo_rStatusImpressoraBinario_ECF_Daruma.triggered.connect(self.on_rStatusImpressoraBinario_ECF_Daruma_triggered)
self.actionM_todo_rConsultaStatusImpressoraStr_ECF_Daruma.triggered.connect(self.on_rConsultaStatusImpressoraStr_ECF_Daruma_triggered)
self.actionM_todo_rConsultaStatusImpressoraInt_ECF_Daruma.triggered.connect(sel | f.on_rConsultaStatusImpressoraInt_ECF_Daruma_triggered)
self.actionM_todo_rStatusUltimoCmd_ECF_Daruma.triggered.connect(self.on_rStatusUltimoCmd_ECF_Daruma_triggered)
self.actionM_todo_rStatusUltimoCmdInt_ECF_Daruma.triggered.connect(self.on_rStatusUltimoCmdInt_ECF_Daruma_triggered)
self.actionM_todo_rStatusUltimoCmdStr_ECF_Daruma.triggered.connect(self.on_rStatusUltimoCmdStr_ECF_Daruma_triggered)
self.actionM_todo_rInfoEstendida_ECF_Daruma.triggered.connect(self.on_rInfoEstendida_ECF_Daruma_triggered)
self.actionM_todo_rInfoEstendida1_ECF_Daruma.triggered.connect(self.on_rInfoEstendida1_ECF_Daruma_triggered)
self.actionM_todo_rInfoEstendida2_ECF_Daruma.triggered.connect(self.on_rInfoEstendida2_ECF_Daruma_triggered)
self.actionM_todo_rInfoEstendida3_ECF_Daruma.triggered.connect(self.on_rInfoEstendida3_ECF_Daruma_triggered)
self.actionM_todo_rInfoEstendida4_ECF_Daruma.triggered.connect(self.on_rInfoEstendida4_ECF_Daruma_triggered)
self.actionM_todo_rInfoEstendida5_ECF_Daruma.triggered.connect(self.on_rInfoEstendida5_ECF_Daruma_triggered)
self.actionM_todo_eBuscarPortaVelocidade_ECF_Daruma.triggered.connect(self.on_eBuscarPortaVelocidade_ECF_Daruma_triggered)
self.actionM_todo_eVerificarVersaoDLL_Daruma.triggered.connect(self.on_eVerificarVersaoDLL_Daruma_triggered)
self.actionM_todo_eRetornarPortasCOM_ECF_Daruma.triggered.connect(self.on_eRetornarPortasCOM_ECF_Daruma_triggered)
self.actionM_todo_iRelatorioConfiguracao_ECF_Daruma.triggered.connect(self.on_iRelatorioConfiguracao_ECF_Daruma_triggered)
def on_rRetornarInformacao_ECF_Daruma_triggered(self):
pass
def on_rRetornarInformacaoSeparador_ECF_Daruma_triggered(self):
pass
def on_rLerAliquotas_ECF_Daruma_triggered(self):
pass
def on_rLerMeiosPagto_ECF_Daruma_triggered(self):
pass
def on_rLerRG_ECF_Daruma_triggered(self):
pass
def on_rLerCNF_ECF_Daruma_triggered(self):
pass
def on_rLerDecimais_ECF_Daruma_triggered(self):
pass
def on_rLerDecimaisInt_ECF_Daruma_triggered(self):
pass
def on_rLerDecimaisStr_ECF_Daruma_triggered(self):
pass
def on_rDataHoraImpressora_ECF_Daruma_triggered(self):
pass
def on_rVerificaImpressoraLigada_ECF_Daruma_triggered(self):
pass
def on_rVerificarReducaoZ_ECF_Daruma_triggered(self):
pass
def on_rRetornarDadosReducaoZ_ECF_Daruma_triggered(self):
pass
def on_rTipoUltimoDocumentoInt_ECF_Daruma_triggered(self):
pass
def on_rTipoUltimoDocumentoStr_ECF_Daruma_triggered(self):
pass
def on_rUltimoCMDEnviado_ECF_Daruma_triggered(self):
pass
def on_rMinasLegal_ECF_Daruma_triggered(self):
pass
def on_rCFSaldoAPagar_ECF_Daruma_triggered(self):
pass
def on_rCFSubTotal_ECF_Daruma_triggered(self):
pass
def on_rCFVerificarStatus_ECF_Daruma_triggered(self):
pass
def on_rStatusImpressora_ECF_Daruma_triggered(self):
pass
def on_rStatusImpressoraBinario_ECF_Daruma_triggered(self):
pass
def on_rConsultaStatusImpressoraStr_ECF_Daruma_triggered(self):
pass
def on_rConsultaStatusImpressoraInt_ECF_Daruma_triggered(self):
pass
def on_rStatusUltimoCmd_ECF_Daruma_triggered(self):
pass
def on_rStatusUltimoCmdInt_ECF_Daruma_triggered(self):
pass
def on_rStatusUltimoCmdStr_ECF_Daruma_triggered(self):
pass
def on_rInfoEstendida_ECF_Daruma_triggered(self):
pass
def on_rInfoEstendida1_ECF_Daruma_triggered(self):
pass
def on_rInfoEstendida2_ECF_Daruma_triggered(self):
pass
def on_rInfoEstendida3_ECF_Daruma_triggered(self):
pass
def on_rInfoEstendida4_ECF_Daruma_triggered(self):
pass
def on_rInfoEstendida5_ECF_Daruma_triggered(self):
pass
def on_eBuscarPortaVelocidade_ECF_Daruma_triggered(self):
pass
def on_eVerificarVersaoDLL_Daruma_triggered(self):
pass
def on_eRetornarPortasCOM_ECF_Daruma_triggered(self):
pass
def on_iRelatorioConfiguracao_ECF_Daruma_triggered(self):
pass
def setupUi(self, ui_FISCAL_RetornosStatusECF):
ui_FISCAL_RetornosStatusECF.setObjectName("ui_FISCAL_RetornosStatusECF")
ui_FISCAL_RetornosStatusECF.resize(576, 289)
self.centralwidget = QtGui.QWidget(ui_FISCAL_RetornosStatusECF)
self.centralwidget.setObjectName("centralwidget")
self.verticalLayout_3 = QtGui.QVBoxLayout(self.centralwidget)
self.verticalLayout_3.setObjectName("verticalLayout_3")
self.groupBox = QtGui.QGroupBox(self.centralwidget)
self.groupBox.setObjectName("groupB |
googleapis/python-datacatalog | google/cloud/datacatalog_v1beta1/services/data_catalog/transports/__init__.py | Python | apache-2.0 | 1,167 | 0.000857 | # -*- coding: utf-8 -*-
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the | "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LIC | ENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from collections import OrderedDict
from typing import Dict, Type
from .base import DataCatalogTransport
from .grpc import DataCatalogGrpcTransport
from .grpc_asyncio import DataCatalogGrpcAsyncIOTransport
# Compile a registry of transports.
_transport_registry = OrderedDict() # type: Dict[str, Type[DataCatalogTransport]]
_transport_registry["grpc"] = DataCatalogGrpcTransport
_transport_registry["grpc_asyncio"] = DataCatalogGrpcAsyncIOTransport
__all__ = (
"DataCatalogTransport",
"DataCatalogGrpcTransport",
"DataCatalogGrpcAsyncIOTransport",
)
|
spcui/autotest | client/deps/libaio/libaio.py | Python | gpl-2.0 | 809 | 0.001236 | #!/usr/bin/python
import os
from autotest.client import utils
version = 1
def setup(tarball, topdir):
srcdir = os.path.join(topdir, 'src')
utils.extract_tarball_to_dir(tarball, srcdir)
os.chdir(srcdir)
utils.system('patch -p1 < ../00_arches.patch')
utils.make()
utils.make('prefix=%s install' % topdir)
os.chdir(topdir)
# old source was
# http://www.kernel.org/pub/linux/kernel/people/bcrl/aio/libaio-0.3.92.tar.bz2
# now grabbing from debian
# http://ftp.debian.org/debian/pool/main/liba/libaio/libaio_0.3.106.orig.tar.gz
# architecture patch from here
# http://git.hadrons.org/?p=debian/pkgs/libaio.git;a=tree;f=debian/patches
pwd = os.getcwd()
tarball = os.path.join(pwd, 'libaio_0.3.106.orig.tar.gz')
utils. | update_version(pwd+'/src | ', False, version, setup, tarball, pwd)
|
Star2Billing/cdr-stats | cdr_stats/user_profile/views.py | Python | mpl-2.0 | 3,380 | 0.002663 | #
# CDR-Stats License
# http://www.cdr-stats.org
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
| # You can obtain one at http://mozilla.org/MPL/2.0/.
#
# Copyright (C) 2011-2015 Star2Billing S.L.
#
# The Initial Developer of the Or | iginal Code is
# Arezqui Belaid <info@star2billing.com>
#
from django.contrib.auth.models import User
from django.contrib.auth.decorators import login_required
from django.shortcuts import render_to_response, get_object_or_404
#from django.http import HttpResponseRedirect
from django.template.context import RequestContext
from django.utils.translation import ugettext_lazy as _
from user_profile.models import UserProfile
from user_profile.forms import UserChangeDetailForm, UserChangeDetailExtendForm, UserPasswordChangeForm
from cdr.decorators import check_user_detail
@login_required
@check_user_detail('accountcode')
def customer_detail_change(request):
"""User Detail change on Customer UI
**Attributes**:
* ``form`` - UserChangeDetailForm, UserChangeDetailExtendForm, UserPasswordChangeForm
* ``template`` - 'user_profile/user_detail_change.html'
**Logic Description**:
* User is able to change their details.
"""
user_detail = get_object_or_404(User, username=request.user)
user_detail_extened = UserProfile.objects.get(user=user_detail)
user_detail_form = UserChangeDetailForm(request.user, instance=user_detail)
user_detail_extened_form = UserChangeDetailExtendForm(request.user, instance=user_detail_extened)
user_password_form = UserPasswordChangeForm(user=request.user)
msg_detail = ''
msg_pass = ''
error_detail = ''
error_pass = ''
action = ''
if 'action' in request.GET:
action = request.GET['action']
if request.method == 'POST':
if request.POST['form-type'] == "change-detail":
user_detail_form = UserChangeDetailForm(request.user, request.POST, instance=user_detail)
user_detail_extened_form = UserChangeDetailExtendForm(request.user, request.POST, instance=user_detail_extened)
action = 'tabs-1'
if user_detail_form.is_valid() and user_detail_extened_form.is_valid():
user_detail_form.save()
user_detail_extened_form.save()
msg_detail = _('detail has been changed.')
else:
error_detail = _('please correct the errors below.')
else:
# change-password
user_password_form = UserPasswordChangeForm(user=request.user, data=request.POST)
action = 'tabs-2'
if user_password_form.is_valid():
user_password_form.save()
msg_pass = _('your password has been changed.')
else:
error_pass = _('please correct the errors below.')
data = {
'user_detail_form': user_detail_form,
'user_detail_extened_form': user_detail_extened_form,
'user_password_form': user_password_form,
'msg_detail': msg_detail,
'msg_pass': msg_pass,
'error_detail': error_detail,
'error_pass': error_pass,
'action': action,
}
return render_to_response('user_profile/user_detail_change.html', data, context_instance=RequestContext(request))
|
synw/django-chartflo | chartflo/templatetags/chartflo_tags.py | Python | mit | 374 | 0 | # -*- coding: utf-8 | -*-
import os
from django import template
register = template.Library()
@register.simple_tag
def get_altair_scripts(dashboard_slug):
path = "dashboards/" + dashboard_slug + "/altair_scripts"
scripts = os.listdir("templates/" + path)
includes = []
for script in scripts:
includes.append(path + "/" | + script)
return includes
|
ericmjl/bokeh | examples/models/file/trail.py | Python | bsd-3-clause | 4,605 | 0.004778 | from math import atan2, cos, radians, sin, sqrt
import numpy as np
import scipy.ndimage as im
from bokeh.document import Document
from bokeh.embed import file_html
from bokeh.models import (Column, ColumnDataSource, GMapOptions, GMapPlot,
Grid, Label, Line, LinearAxis, PanTool, Patches,
Plot, Range1d, ResetTool, WheelZoomTool,)
from bokeh.resources import INLINE
from bokeh.sampledata.mtb import obiszow_mtb_xcm
from bokeh.util.browser import view
def haversin(theta):
return sin(0.5 * theta) ** 2
def distance(p1, p2):
"""Distance between (lat1, lon1) and (lat2, lon2). """
R = 6371
lat1, lon1 = p1
lat2, lon2 = p2
phi1 = radians(lat1)
phi2 = radians(lat2)
delta_lat = radians(lat2 - lat1)
delta_lon = radians(lon2 - lon1)
a = haversin(delta_lat) + cos(phi1) * cos(phi2) * haversin(delta_lon)
return 2 * R * atan2(sqrt(a), sqrt(1 - a))
def prep_data(dataset):
df = dataset.copy()
latlon = list(zip(df.lat, df.lon))
dist = np.array([distance(latlon[i + 1], latlon[i]) for i in range(len((latlon[:-1])))])
df["dist"] = np.concatenate(([0], np.cumsum(dist)))
slope = np.abs(100 * np.diff(df.alt) / (1000 * dist))
slope[np.where( slope < 4) ] = 0 # "green"
slope[np.where((slope >= 4) & (slope < 6))] = 1 # "yellow"
slope[np.where((slope >= 6) & (slope < 10))] = 2 # "pink"
slope[np.where((slope >= 10) & (slope < 15))] = 3 # "orange"
slope[np.where( slope >= 15 )] = 4 # "red"
slope = im.median_filter(slope, 6)
colors = np.empty_like(slope, dtype=object)
colors[np.where(slope == 0)] = "green"
colors[np.where(slope == 1)] = "yellow"
colors[np.where(slope == 2)] = "pink"
colors[np.where(slope == 3)] = "orange"
colors[np.where(slope == 4)] = "red"
df["colors"] = list(colors) + [None] # NOTE: add [None] just make pandas happy
return df
name = "Obiszów MTB XCM"
# Google Maps now requires an API key. You can find out how to get one here:
# https://developers.google.com/maps/documentation/javascript/get-api-key
API_KEY = "GOOGLE_API_KEY"
def trail_map(data):
lon = (min(data.lon) + max(data.lon)) / 2
lat = (min(data.lat) + max(data.lat)) / 2
map_options = GMapOptions(lng=lon, lat=lat, zoom=13)
plot = GMapPlot(plot_width=800, plot_height=800, map_options=map_options, api_key=API_KEY)
plot.title.text = "%s - Trail Map" % name
plot.x_range = Range1d()
plot.y_range = Range1d()
plot.add_tools(PanTool(), WheelZoomTool(), ResetTool())
line_source = ColumnDataSource(dict(x=data.lon, y=data.lat, dist=data.dist))
line = Line(x="x", y="y", line_color="blue", line_width=2)
plot.add_glyph(line_source, line)
if plot.api_key == "GOOGLE_API_KEY":
plot.add_layout(Label(x=240, y=700, x_units='screen', y_units='screen',
text='Replace GOOGLE_API_KEY with your own key',
text_co | lor='red'))
return plot
def altitude_profile(data):
plot = Plot(plot_width=800, plot_height=400)
plot.title.text = "%s - Altitude Profile" % name
plot.y_range.range_padding = 0
xaxis = LinearAxis(axis_label="Distance (km)")
plot.add_layout(xaxis, 'below')
yaxis = LinearAxis(axis_ | label="Altitude (m)")
plot.add_layout(yaxis, 'left')
plot.add_layout(Grid(dimension=0, ticker=xaxis.ticker)) # x grid
plot.add_layout(Grid(dimension=1, ticker=yaxis.ticker)) # y grid
plot.add_tools(PanTool(), WheelZoomTool(), ResetTool())
X, Y = data.dist, data.alt
y0 = min(Y)
patches_source = ColumnDataSource(dict(
xs=[[X[i], X[i+1], X[i+1], X[i]] for i in range(len(X[:-1])) ],
ys=[[y0, y0, Y[i+1], Y[i]] for i in range(len(Y[:-1])) ],
color=data.colors[:-1]
))
patches = Patches(xs="xs", ys="ys", fill_color="color", line_color="color")
plot.add_glyph(patches_source, patches)
line_source = ColumnDataSource(dict(x=data.dist, y=data.alt))
line = Line(x='x', y='y', line_color="black", line_width=1)
plot.add_glyph(line_source, line)
return plot
data = prep_data(obiszow_mtb_xcm)
trail = trail_map(data)
altitude = altitude_profile(data)
layout = Column(children=[altitude, trail])
doc = Document()
doc.add_root(layout)
if __name__ == "__main__":
doc.validate()
filename = "trail.html"
with open(filename, "w") as f:
f.write(file_html(doc, INLINE, "Trail map and altitude profile"))
print("Wrote %s" % filename)
view(filename)
|
RedHatInsights/insights-core | insights/parsers/tests/test_ls_edac_mc.py | Python | apache-2.0 | 875 | 0.001143 | from insights.parsers import ls_edac_mc
from insights.parsers.ls_edac_mc import LsEdacMC
from insights.tests import context_wrap
import doctest
LS_EDAC_MC = """
/sys/devices/system/eda | c/mc:
total 37592
drwxr-xr-x. 3 0 0 0 Jan | 10 10:33 .
drwxr-xr-x. 3 0 0 0 Jan 10 10:33 ..
drwxr-xr-x. 2 0 0 0 Jan 10 10:33 power
drwxr-xr-x. 2 0 0 0 Jan 10 10:33 mc0
drwxr-xr-x. 2 0 0 0 Jan 10 10:33 mc1
drwxr-xr-x. 2 0 0 0 Jan 10 10:33 mc2
"""
def test_ls_edac_mc():
ls_edac_mc = LsEdacMC(context_wrap(LS_EDAC_MC))
assert '/sys/devices/system/edac/mc' in ls_edac_mc
assert ls_edac_mc.dirs_of('/sys/devices/system/edac/mc') == ['.', '..', 'power', 'mc0', 'mc1', 'mc2']
def test_ls_etc_documentation():
failed_count, tests = doctest.testmod(
ls_edac_mc,
globs={'ls_edac_mc': ls_edac_mc.LsEdacMC(context_wrap(LS_EDAC_MC))}
)
assert failed_count == 0
|
cherrygirl/micronaet7 | production_line_statistic/statistic.py | Python | agpl-3.0 | 4,479 | 0.005135 | # -*- coding: utf-8 -*-
###############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>). All Rights Reserved
# d$
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
###############################################################################
from openerp.osv import osv, fields
class mrp_workcenter_motive(osv.osv):
''' List of motive, work and not work, for the line
'''
_name = 'mrp.workcenter.work.motive'
_description = 'Workcenter work motive'
_order = 'sequence, name'
# -------------------------------------------------------------------------
# Database
# -------------------------------------------------------------------------
_columns = {
'sequence': fields.integer('Seq.'),
'name': fields.char('Motive', size=64 | , required=True),
'note': fields.text('Note'),
'uptime': fields.boolean('Uptime', help='Motivation that means up time'),
}
_defaults = {
'sequence': 100,
'uptime': False,
}
class mrp_workcenter_work(osv.osv):
    ''' Workcenter statistic (work and extra motives)
    '''
    _name = 'mrp.workcenter.work'
    _description = 'Workcenter work'
    _rec_name = 'workcenter_id'
    _order = 'date,workcenter_id'

    # -------------------------------------------------------------------------
    # Utility function
    # -------------------------------------------------------------------------
    def load_days_statistic(self, cr, uid, from_date, to_date, line=False, forced=False, context=None):
        ''' Load all statistic for range of date passed for all line (or line
            passed)
            If in a day there's no lavoration the day is considered not
            working except if forced
        '''
        domain = []
        # Accumulate [worked hours, not-worked hours] per date per workcenter:
        work_ids = self.search(cr, uid, [], context=context)
        statistic = {}
        for item in self.browse(cr, uid, work_ids, context=context):
            if item.date not in statistic:
                statistic[item.date] = {}
            # BUGFIX: entries were written with item.workcenter.id (nonexistent
            # field) while membership was tested with item.workcenter_id.id,
            # so the lookup below always raised at runtime.
            workcenter_id = item.workcenter_id.id
            if workcenter_id not in statistic[item.date]:
                statistic[item.date][workcenter_id] = [0.0, 0.0]  # work, dont work
            # NOTE(review): the motive model declares 'uptime', not 'work' --
            # confirm which flag marks a working motive before relying on this.
            if item.motive_id.work:
                statistic[item.date][workcenter_id][0] += item.hour
            else:
                statistic[item.date][workcenter_id][1] += item.hour
        wc_pool = self.pool.get('mrp.production.workcenter.line')
        wc_ids = wc_pool.search(cr, uid, domain, context=context)
        # TODO: unfinished draft kept from the original author:
        #for wc in wc_pool.browse(cr, uid, wc_ids, context=context):
        #    date = wc.real_date_planned[:10]
        #    work_ids = self.search(cr, uid, [
        #        ('workcenter_id', '=', wc.workcenter_id.id),
        #        ('date', '=', date),
        #        #('work', '=', True),
        #        ]
        return

    # -------------------------------------------------------------------------
    # Database
    # -------------------------------------------------------------------------
    _columns = {
        'workcenter_id': fields.many2one('mrp.workcenter', 'Line',
            required=True),
        'motive_id': fields.many2one('mrp.workcenter.work.motive', 'Motive'),
        'date': fields.date('Date'),
        'hour': fields.integer('Hour'),
        'work': fields.boolean('Work', help='If true the element if a working reason'),
    }
    _defaults = {
        'work': False,
    }
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
saltstack/salt | salt/returners/memcache_return.py | Python | apache-2.0 | 5,855 | 0.001366 | """
Return data to a memcache server
To enable this returner the minion will need the python client for memcache
installed and the following values configured in the minion or master
config, these are the defaults.
.. code-block:: yaml
memcache.host: 'localhost'
memcache.port: '11211'
Alternative configuration values can be used by prefacing the configuration.
Any values not found in the alternative configuration will be pulled from
the default location.
.. code-block:: yaml
alternative.memcache.host: 'localhost'
alternative.memcache.port: '11211'
python2-memcache uses 'localhost' and '11211' as syntax on connection.
To use the memcache returner, append '--return memcache' to the salt command.
.. code-block:: bash
salt '*' test.ping --return memcache
To use the alternative configuration, append '--return_config alternative' to the salt command.
.. versionadded:: 2015.5.0
.. code-block:: bash
salt '*' test.ping --return memcache --return_config alternative
To override individual configuration items, append --return_kwargs '{"key:": "value"}' to the salt command.
.. versionadded:: 2016.3.0
.. code-block:: bash
salt '*' test.ping --return memcache --return_kwargs '{"host": "hostname.domain.com"}'
"""
import logging
import salt.returners
import salt.utils.jid
import salt.utils.json
log = logging.getLogger(__name__)
try:
import memcache
HAS_MEMCACHE = True
except ImportError:
HAS_MEMCACHE = False
# Define the module's virtual name
__virtualname__ = "memcache"
def __virtual__():
    """Load this returner only when the python memcache client is importable."""
    if HAS_MEMCACHE:
        return __virtualname__
    return (
        False,
        "Could not import memcache returner; "
        "memcache python client is not installed.",
    )
def _get_options(ret=None):
    """
    Get the memcache options from salt.
    """
    option_attrs = {"host": "host", "port": "port"}
    return salt.returners.get_returner_options(
        __virtualname__, ret, option_attrs, __salt__=__salt__, __opts__=__opts__
    )
def _get_serv(ret):
    """
    Return a memcache server object
    """
    conf = _get_options(ret)
    host = conf.get("host")
    port = conf.get("port")
    log.debug("memcache server: %s:%s", host, port)
    if not host or not port:
        log.error("Host or port not defined in salt config")
        return
    # python-memcache expects "host:port" connection strings.
    return memcache.Client(["{}:{}".format(host, port)], debug=0)

# # TODO: make memcacheoptions cluster aware
# Servers can be passed in two forms:
#    1. Strings of the form C{"host:port"}, which implies a default weight of 1
#    2. Tuples of the form C{("host:port", weight)}, where C{weight} is
#    an integer weight value.
def _get_list(serv, key):
value = serv.get(key)
if value:
return value.strip(",").split(",")
return []
def _append_list(serv, key, value):
    """Append *value* to the comma-separated list at *key*, skipping duplicates."""
    if value in _get_list(serv, key):
        return
    entry = "{},".format(value)
    # append() returns falsy when the key does not exist yet; create it then.
    if not serv.append(key, entry):
        serv.add(key, entry)
def prep_jid(nocache=False, passed_jid=None):  # pylint: disable=unused-argument
    """
    Do any work necessary to prepare a JID, including sending a custom id
    """
    if passed_jid is not None:
        return passed_jid
    return salt.utils.jid.gen_jid(__opts__)
def returner(ret):
    """
    Return data to a memcache data store
    """
    serv = _get_serv(ret)
    jid = ret["jid"]
    minion = ret["id"]
    fun = ret["fun"]
    serialized = salt.utils.json.dumps(ret)
    serv.set("{}:{}".format(jid, minion), serialized)  # cache for get_jid
    serv.set("{}:{}".format(fun, minion), serialized)  # cache for get_fun
    # The following operations are neither efficient nor atomic.
    # If there is a way to make them so, this should be updated.
    _append_list(serv, "minions", minion)
    _append_list(serv, "jids", jid)
def save_load(jid, load, minions=None):
    """
    Save the load to the specified jid
    """
    payload = salt.utils.json.dumps(load)
    serv = _get_serv(ret=None)
    serv.set(jid, payload)
    _append_list(serv, "jids", jid)
def save_minions(jid, minions, syndic_id=None):  # pylint: disable=unused-argument
    """
    Included for API consistency

    Intentionally a no-op: this returner does not track minions per jid.
    """
def get_load(jid):
    """
    Return the load data that marks a specified jid
    """
    serv = _get_serv(ret=None)
    raw = serv.get(jid)
    if not raw:
        return {}
    return salt.utils.json.loads(raw)
def get_jid(jid):
    """
    Return the information returned when the specified job id was executed
    """
    # BUGFIX: removed a stray " | " artifact that split "serv" in the original.
    serv = _get_serv(ret=None)
    minions = _get_list(serv, "minions")
    # returns = {minion: return, minion: return, ...}
    returns = serv.get_multi(minions, key_prefix="{}:".format(jid))
    ret = {}
    for minion, data in returns.items():
        ret[minion] = salt.utils.json.loads(data)
    return ret
def get_fun(fun):
    """
    Return a dict of the last function called for all minions
    """
    # BUGFIX: removed a stray " | " artifact that corrupted this line.
    serv = _get_serv(ret=None)
    minions = _get_list(serv, "minions")
    # returns = {minion: return, minion: return, ...}
    returns = serv.get_multi(minions, key_prefix="{}:".format(fun))
    ret = {}
    for minion, data in returns.items():
        ret[minion] = salt.utils.json.loads(data)
    return ret
def get_jids():
    """
    Return a list of all job ids
    """
    serv = _get_serv(ret=None)
    jids = _get_list(serv, "jids")
    # get_multi returns {jid: serialized_load, ...}
    return {
        jid: salt.utils.jid.format_jid_instance(jid, salt.utils.json.loads(raw))
        for jid, raw in serv.get_multi(jids).items()
    }
def get_minions():
    """
    Return a list of minions
    """
    return _get_list(_get_serv(ret=None), "minions")
|
tedsunnyday/Tweet2DowJones | json/dev/gen.py | Python | mit | 683 | 0.01757 | #!/usr/bin/python
import json
from random import randint
INPUT = "Tweet.size1000page1cnt849.json"
OUTPUT = 'new.json'
objs = json.load(open(INPUT,'r'))
print len(objs)
# for k,v in objs[ | 0].items():
# print "%s=\n\t%s | " % (str(k),str(v))
def date_handler(obj):
    """json.dumps default= hook: format datetimes, pass everything else through.

    BUGFIX: restored "strftime", which was corrupted by a stray " | " artifact.
    """
    import datetime
    return obj.strftime("%Y-%m-%d %H:%M:%S") if isinstance(obj, datetime.datetime) else obj
# Build the series: one [timestamp, random placeholder value] pair per tweet.
se = []
for o in objs:
    # se.append( {'x':o['created_at'],'y':randint(0,1000)} )
    se.append([o['created_at'],randint(0,1000)])
di = {'name':'LA','series':se}
# print json.dumps(di)
# print type(di['series'][0][0])
f = open(OUTPUT,'w+')
f.write(json.dumps(di,default=date_handler))
f.close()
|
GoogleCloudPlatform/declarative-resource-client-library | python/services/cloudresourcemanager/project.py | Python | apache-2.0 | 5,106 | 0.001371 | # Copyright 2022 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from connector import channel
from google3.cloud.graphite.mmv2.services.google.cloud_resource_manager import (
project_pb2,
)
from google3.cloud.graphite.mmv2.services.google.cloud_resource_manager import (
project_pb2_grpc,
)
from typing import List
class Project(object):
    """DCL client wrapper for a Cloud Resource Manager Project resource."""

    def __init__(
        self,
        labels: dict = None,
        lifecycle_state: str = None,
        displayName: str = None,
        parent: str = None,
        name: str = None,
        project_number: int = None,
        service_account_file: str = "",
    ):
        channel.initialize()
        # lifecycle_state and project_number are server-assigned (read-only);
        # they are populated by apply() rather than stored here.
        self.labels = labels
        self.displayName = displayName
        self.parent = parent
        self.name = name
        self.service_account_file = service_account_file

    def apply(self):
        """Apply the desired state and refresh this object from the response."""
        stub = project_pb2_grpc.CloudresourcemanagerProjectServiceStub(
            channel.Channel()
        )
        request = project_pb2.ApplyCloudresourcemanagerProjectRequest()
        # BUGFIX: removed stray " | " artifacts that corrupted the labels and
        # parent assignments below.
        if Primitive.to_proto(self.labels):
            request.resource.labels = Primitive.to_proto(self.labels)

        if Primitive.to_proto(self.displayName):
            request.resource.displayName = Primitive.to_proto(self.displayName)

        if Primitive.to_proto(self.parent):
            request.resource.parent = Primitive.to_proto(self.parent)

        if Primitive.to_proto(self.name):
            request.resource.name = Primitive.to_proto(self.name)

        request.service_account_file = self.service_account_file

        response = stub.ApplyCloudresourcemanagerProject(request)
        self.labels = Primitive.from_proto(response.labels)
        self.lifecycle_state = ProjectLifecycleStateEnum.from_proto(
            response.lifecycle_state
        )
        self.displayName = Primitive.from_proto(response.displayName)
        self.parent = Primitive.from_proto(response.parent)
        self.name = Primitive.from_proto(response.name)
        self.project_number = Primitive.from_proto(response.project_number)

    def delete(self):
        """Delete the project identified by this object's fields."""
        stub = project_pb2_grpc.CloudresourcemanagerProjectServiceStub(
            channel.Channel()
        )
        request = project_pb2.DeleteCloudresourcemanagerProjectRequest()
        request.service_account_file = self.service_account_file
        if Primitive.to_proto(self.labels):
            request.resource.labels = Primitive.to_proto(self.labels)

        if Primitive.to_proto(self.displayName):
            request.resource.displayName = Primitive.to_proto(self.displayName)

        if Primitive.to_proto(self.parent):
            request.resource.parent = Primitive.to_proto(self.parent)

        if Primitive.to_proto(self.name):
            request.resource.name = Primitive.to_proto(self.name)

        response = stub.DeleteCloudresourcemanagerProject(request)

    @classmethod
    def list(self, service_account_file=""):
        """List all projects visible to the given service account."""
        stub = project_pb2_grpc.CloudresourcemanagerProjectServiceStub(
            channel.Channel()
        )
        request = project_pb2.ListCloudresourcemanagerProjectRequest()
        request.service_account_file = service_account_file
        return stub.ListCloudresourcemanagerProject(request).items

    def to_proto(self):
        """Serialize the settable fields into a proto resource message."""
        resource = project_pb2.CloudresourcemanagerProject()
        if Primitive.to_proto(self.labels):
            resource.labels = Primitive.to_proto(self.labels)
        if Primitive.to_proto(self.displayName):
            resource.displayName = Primitive.to_proto(self.displayName)
        if Primitive.to_proto(self.parent):
            resource.parent = Primitive.to_proto(self.parent)
        if Primitive.to_proto(self.name):
            resource.name = Primitive.to_proto(self.name)
        return resource
class ProjectLifecycleStateEnum(object):
    # Proto enum names carry this prefix; the DCL string form omits it.
    @classmethod
    def to_proto(self, resource):
        """Map a bare enum string onto the proto enum value (falsy passes through)."""
        if not resource:
            return resource
        return project_pb2.CloudresourcemanagerProjectLifecycleStateEnum.Value(
            "CloudresourcemanagerProjectLifecycleStateEnum%s" % resource
        )

    @classmethod
    def from_proto(self, resource):
        """Map a proto enum value back to the bare enum string (falsy passes through)."""
        if not resource:
            return resource
        full_name = project_pb2.CloudresourcemanagerProjectLifecycleStateEnum.Name(
            resource
        )
        return full_name[len("CloudresourcemanagerProjectLifecycleStateEnum"):]
len("CloudresourcemanagerProjectLifecycleStateEnum") :
]
class Primitive(object):
    @classmethod
    def to_proto(self, s):
        """Coerce falsy values to the proto default (empty string)."""
        return s if s else ""

    @classmethod
    def from_proto(self, s):
        """Primitives share a representation with their proto form; pass through."""
        return s
|
kaplun/Invenio-OpenAIRE | modules/bibformat/lib/elements/bfe_cited_by.py | Python | gpl-2.0 | 2,193 | 0.013224 | # -*- coding: utf-8 -*-
##
## This file is part of Invenio.
## Copyright (C) 2006, 2007, 2008, 2009, 2010, 2011 CERN.
##
## Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""BibFormat element - Prints reference to documents citing this one
"""
__revision__ = "$Id$"
import cgi
def format_element(bfo, separator='; '):
    """
    Prints a list of records citing this record

    @param bfo: BibFormat object for the record being formatted
    @param separator: a separator between citations
    """
    # BUGFIX: restored "format_element" / "separator", both corrupted by
    # stray " | " artifacts in the original.
    from urllib import quote
    from invenio.config import CFG_SITE_URL

    def make_link(report_number):
        # Each report number links to a reference search on the site.
        return '<a href="' + CFG_SITE_URL + \
               '/search?f=reference&p=' + quote(report_number) + \
               '&ln=' + bfo.lang + '">' + \
               cgi.escape(report_number) + '</a>'

    # Primary (037__a) citations first, then additional (088__a) ones.
    report_numbers = bfo.fields('037__a') + bfo.fields('088__a')
    return separator.join(make_link(rn) for rn in report_numbers)
citations.extend(additional_citations)
return separator.join(citations)
def escape_values(bfo):
    """
    Called by BibFormat in order to check if output of this element
    should be escaped.
    """
    # 0: never escape -- this element emits HTML links on purpose.
    return 0
|
m0re4u/LeRoT-SCLP | lerot/evaluation/DcgEval.py | Python | gpl-3.0 | 2,915 | 0 | # This file is part of Lerot.
#
# Lerot is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Lerot is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Lerot. If not, see <http://www.gnu.org/licenses/>.
import numpy as np
from .AbstractEval import AbstractEval
class DcgEval(AbstractEval):
    """Compute DCG (with gain = 2**rel-1 and log2 discount)."""
    def get_dcg(self, ranked_labels, cutoff=-1):
        """
        Get the dcg value of a list ranking.
        Does not check if the number of ranked labels is smaller than cutoff.
        """
        if (cutoff == -1):
            cutoff = len(ranked_labels)
        # DCG@k = sum_i (2**rel_i - 1) / log2(i + 2), ranks starting at 0.
        rank = np.arange(cutoff)
        return ((np.power(2, np.asarray(ranked_labels[:cutoff])) - 1) /
            np.log2(2 + rank)).sum()
    def evaluate_ranking(self, ranking, query, cutoff=-1):
        """Compute NDCG for the provided ranking. The ranking is expected
        to contain document ids in rank order."""
        if cutoff == -1 or cutoff > len(ranking):
            cutoff = len(ranking)
        # Ideal DCG is cached on the query object to avoid recomputing it.
        if query.has_ideal():
            ideal_dcg = query.get_ideal()
        else:
            ideal_labels = list(reversed(sorted(query.get_labels())))[:cutoff]
            ideal_dcg = self.get_dcg(ideal_labels, cutoff)
            query.set_ideal(ideal_dcg)
        if ideal_dcg == .0:
            # return 0 when there are no relevant documents. This is consistent
            # with letor evaluation tools; an alternative would be to return
            # 0.5 (e.g., used by the yahoo learning to rank challenge tools)
            return 0.0
        # get labels for the sorted docids
        sorted_labels = [0] * cutoff
        for i in range(cutoff):
            sorted_labels[i] = query.get_label(ranking[i])
        dcg = self.get_dcg(sorted_labels, cutoff)
        return dcg / ideal_dcg
    def get_value(self, ranking, labels, orientations, cutoff=-1):
        """
        Compute the value of the metric
        - ranking contains the list of documents to evaluate
        - labels are the relevance labels for all the documents, even those
        that are not in the ranking; labels[doc.get_id()] is the relevance of
        doc
        - orientations contains orientation values for the verticals;
        orientations[doc.get_type()] is the orientation value for the
        doc (from 0 to 1).
        """
        return self.get_dcg([labels[doc.get_id()] for doc in ranking], cutoff)
|
2015fallhw/cdw2 | users/s2b/g4/scrum5_task1.py | Python | agpl-3.0 | 3,680 | 0.005582 | 各組分別在各自的 .py 程式中建立應用程式 (第1步/總共3步)
from flask import Blueprint, render_template
# Create the "scrum5_task1" Blueprint with URL prefix /bg4 and set its template folder.
scrum5_task1 = Blueprint('scrum5_task1', __name__, url_prefix='/bg4', template_folder='templates')
# scrum5_task1 serves a complete, standalone drawing program.
@scrum5_task1.route('/scrum5_task1')
def task1():
    """Serve a self-contained Brython/Cango 2D drawing page.

    The returned HTML embeds a text/python script that runs client-side in
    Brython; the server only ships the page verbatim.
    """
    outstring = '''
<!DOCTYPE html>
<html>
<head>
<meta charset="UTF-8">
<title>網際 2D 繪圖</title>
<!-- IE 9: display inline SVG -->
<meta http-equiv="X-UA-Compatible" content="IE=9">
<script type="text/javascript" src="http://brython.info/src/brython_dist.js"></script>
<script type="text/javascript" src="http://cptocadp-2015fallhw.rhcloud.com/static/Cango-8v03.js"></script>
<script type="text/javascript" src="http://cptocadp-2015fallhw.rhcloud.com/static/Cango2D-6v13.js"></script>
<script type="text/javascript" src="http://cptocadp-2015fallhw.rhcloud.com/static/CangoAxes-1v33.js"></script>
</head>
<body>
<script>
window.onload=function(){
brython(1);
}
</script>
<canvas id="plotarea" width="800" height="800"></canvas>
<script type="text/python">
from javascript import JSConstructor
from browser import window
import math
cango = JSConstructor(window.Cango)
cobj = JSConstructor(window.Cobj)
shapedefs = window.shapeDefs
obj2d = JSConstructor(window.Obj2D)
cgo = cango("plotarea")
cgo.setWorldCoords(-250, -250, 500, 500)
# 決定要不要畫座標軸線
cgo.drawAxes(0, 240, 0, 240, {
    "strokeColor":"#aaaaaa",
    "fillColor": "#aaaaaa",
    "xTickInterval": 20,
    "xLabelInterval": 20,
    "yTickInterval": 20,
    "yLabelInterval": 20})
#cgo.drawText("使用 Cango 繪圖程式庫!", 0, 0, {"fontSize":60, "fontWeight": 1200, "lorg":5 })
deg = math.pi/180
def O(x, y, rx, ry, rot, color, border, linewidth):
    # 旋轉必須要針對相對中心 rot not working yet
    chamber = "M -6.8397, -1.4894 \
        A 7, 7, 0, 1, 0, 6.8397, -1.4894 \
        A 40, 40, 0, 0, 1, 6.8397, -18.511 \
        A 7, 7, 0, 1, 0, -6.8397, -18.511 \
        A 40, 40, 0, 0, 1, -6.8397, -1.4894 z"
    cgoChamber = window.svgToCgoSVG(chamber)
    cmbr = cobj(cgoChamber, "SHAPE", {
        "fillColor": color,
        "border": border,
        "strokeColor": "tan",
        "lineWidth": linewidth })
    # 複製 cmbr, 然後命名為 basic1
    basic1 = cmbr.dup()
    # basic1 轉 120 度
    basic1.rotate(120)
    basic2 = cmbr.dup()
    basic2.rotate(60)
    basic2.translate(0, -20)
    basic3 = cmbr.dup()
    basic3.rotate(60)
    basic3.translate(20*math.cos(30*deg), 20*math.sin(30*deg))
    basic4 = cmbr.dup()
    basic4.rotate(120)
    basic4.translate(20*math.cos(30*deg), -20*math.sin(30*deg)-20)
    basic5 = cmbr.dup()
    basic5.translate(2*20*math.cos(30*deg), 0)
    cmbr.appendPath(basic1)
    cmbr.appendPath(basic2)
    cmbr.appendPath(basic3)
    cmbr.appendPath(basic4)
    cmbr.appendPath(basic5)
    # hole 為原點位置
    hole = cobj(shapedefs.circle(4), "PATH")
    cmbr.appendPath(hole)
    # 表示放大 3 倍
    #cgo.render(cmbr, x, y, 3, rot)
    # 放大 5 倍
    cgo.render(cmbr, x, y, 5, rot)
O(0, 0, 0, 0, 0, "lightyellow", True, 4)
</script>
<!-- 以協同方式加上 ag100 的 scrum-2 組員所寫的 task1 程式碼 -->
<!-- <script type="text/python" src="/ag100/scrum2_task1"></script>
<!-- 以協同方式加上 ag100 的 scrum-3 組員所寫的 task1 程式碼 -->
<!-- <script type="text/python" src="/ag100/scrum3_task1"></script>
</body>
</html>
'''
    return outstring
spahan/unixdmoain | wwwlib/rest.py | Python | bsd-3-clause | 2,782 | 0.009346 | from mod_python import apache
from mod_python import util
import os.path
import urllib
import logging
debug = True
def handler(req):
"""
This is called by Apache and maps the request to the resource class.
Process of maping:
1. Try import a python script which handles this resource.
The name will be determined by the *path_info* (see mod_python or apache cgi docs for details). while the last path part is treated as the resource ID.
If no script was found, we return HTTP_NOT_FOUND
2. Check if the request method is in the allowedMethodes list of the imported script.
If not, we set the allowed Methodes and return HTTP_METHOD_NOT_ALLOWED
If the imported script does not define a allowedMethodes list, we return HTTP_NOT_FOUND
assuming this is not a script to call, but some other thing.
3. Parse the form data.
#TODO: add support for JSON and XML. Currently only url-form-data is supported.
4. Call METHOD(req, id, args) |
req is the request object,
id is the parsed id or None
args is the mp_table object (may be empty)
returns the return code from the function
if the method is not defined, we return HTTP_NOT_IMPLEMENTED
"""
#Set log level here. For Production, disable both lines
logging.basicConfig(level=logging.DEBUG) #Used for debug, lot of data, not recommend | ed for simple error search.
#logging.basicConfig(level=logging.INFO) #Used for error search with config.
# 1.
try:
(mtype, mid) = req.path_info.lstrip('/').split('/',1)
except ValueError, err:
mtype = req.path_info.lstrip('/')
mid = ''
try:
resourceModule = apache.import_module(mtype.strip('/').replace('/','.'), path=os.path.dirname(__file__))
except Exception, err:
if debug: raise
return apache.HTTP_NOT_FOUND
# 2.
try:
allowedMethodes = resourceModule.allowedMethodes
except AttributeError, err:
if debug: raise
return apache.HTTP_HTTP_NOT_FOUND
if not req.method in allowedMethodes:
req.allow_methods(resourceModule.allowedMethodes, 1)
return apache.HTTP_METHOD_NOT_ALLOWED
# 3.
if not 'form' in dir(req):
req.form = util.FieldStorage(req, True)
# 4.
try:
return getattr(resourceModule, req.method)(req, urllib.unquote(mid))
except AttributeError, err:
if debug: raise
return apache.HTTP_NOT_IMPLEMENTED
def writeError(req, error, message):
    """Send a plain-text error body to the client, replacing the stock HTTP error page."""
    req.content_type = 'text/plain'
    req.status = apache.HTTP_FORBIDDEN
    req.write(message)
    return apache.OK
kzganesan/you-get | src/you_get/common.py | Python | mit | 30,127 | 0.006738 | #!/usr/bin/env python
import getopt
import json
import locale
import os
import re
import sys
from urllib import request, parse
import platform
import threading
from .version import __version__
from .util import log, sogou_proxy_server, get_filename, unescape_html
# Global runtime options (toggled from command-line flags elsewhere).
dry_run = False
force = False
player = None
sogou_proxy = None
sogou_env = None
cookies_txt = None

# Browser-like request headers used to avoid naive bot blocking.
fake_headers = {
    'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
    'Accept-Charset': 'UTF-8,*;q=0.5',
    'Accept-Encoding': 'gzip,deflate,sdch',
    'Accept-Language': 'en-US,en;q=0.8',
    'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64; rv:13.0) Gecko/20100101 Firefox/13.0'
}

# Console output encoding: terminal encoding on a tty, locale default otherwise.
if sys.stdout.isatty():
    default_encoding = sys.stdout.encoding.lower()
else:
    default_encoding = locale.getpreferredencoding().lower()
def tr(s):
    """Return *s* when printable in the console encoding, else a byte-repr fallback."""
    try:
        s.encode(default_encoding)
    except:
        # Strip the b'...' wrapper from the bytes repr.
        return str(s.encode('utf-8'))[2:-1]
    else:
        return s
# DEPRECATED in favor of match1()
def r1(pattern, text):
    """Return the first capture group of *pattern* in *text*, or None."""
    found = re.search(pattern, text)
    return found.group(1) if found else None
# DEPRECATED in favor of match1()
def r1_of(patterns, text):
    """Return the first truthy r1() match among *patterns*, or None."""
    for pat in patterns:
        found = r1(pat, text)
        if found:
            return found
def match1(text, *patterns):
    """Scans through a string for substrings matched some patterns (first-subgroups only).

    Args:
        text: A string to be scanned.
        patterns: Arbitrary number of regex patterns.

    Returns:
        When only one pattern is given, returns a string (None if no match found).
        When more than one pattern are given, returns a list of strings ([] if no match found).
    """
    if len(patterns) == 1:
        found = re.search(patterns[0], text)
        return found.group(1) if found else None
    # Multiple patterns: collect first-group matches, skipping misses.
    matches = []
    for pattern in patterns:
        found = re.search(pattern, text)
        if found:
            matches.append(found.group(1))
    return matches
def launch_player(player, urls):
    """Spawn the configured media player on *urls* and wait for it to exit."""
    import subprocess
    import shlex
    command = shlex.split(player)
    command.extend(urls)
    subprocess.call(command)
def parse_query_param(url, param):
    """Parses the query string of a URL and returns the value of a parameter.

    Args:
        url: A URL.
        param: A string representing the name of the parameter.

    Returns:
        The value of the parameter, or None when it is absent.
    """
    try:
        return parse.parse_qs(parse.urlparse(url).query)[param][0]
    except KeyError:
        # Narrowed from a bare except: only a missing parameter is expected
        # here (parse_qs never produces empty value lists).
        return None
def unicodize(text):
    """Replace literal \\uXXXX escape sequences in *text* with their characters."""
    def _to_char(m):
        # m.group(0) is the whole "\uXXXX" token; skip the two-char prefix.
        return chr(int(m.group(0)[2:], 16))
    return re.sub(r'\\u([0-9A-Fa-f]{4})', _to_char, text)
# DEPRECATED in favor of util.legitimize()
# DEPRECATED in favor of util.legitimize()
def escape_file_path(path):
    """Replace characters unsafe in file names ('/', '\\', '*', '?') with dashes."""
    for unsafe in '/\\*?':
        path = path.replace(unsafe, '-')
    return path
# DEPRECATED in favor of util.legitimize()
# DEPRECATED in favor of util.legitimize()
def filenameable(text):
    """Converts a string to a legal filename through various OSes.
    """
    # All POSIX systems: drop NULs, replace path separators.
    text = text.translate({
        0: None,
        ord('/'): '-',
    })
    if platform.system() == 'Windows': # For Windows
        # Characters forbidden (or troublesome) in NTFS/FAT file names.
        text = text.translate({
            ord(':'): '-',
            ord('*'): '-',
            ord('?'): '-',
            ord('\\'): '-',
            ord('\"'): '\'',
            ord('<'): '-',
            ord('>'): '-',
            ord('|'): '-',
            ord('+'): '-',
            ord('['): '(',
            ord(']'): ')',
        })
    else:
        # Avoid creating hidden files on POSIX.
        if text.startswith("."):
            text = text[1:]
        if platform.system() == 'Darwin': # For Mac OS
            # ':' is the legacy path separator on Mac OS.
            text = text.translate({
                ord(':'): '-',
            })
    return text
def ungzip(data):
    """Decompresses data for Content-Encoding: gzip.
    """
    import gzip
    from io import BytesIO
    with gzip.GzipFile(fileobj=BytesIO(data)) as gz:
        return gz.read()
def undeflate(data):
    """Decompresses data for Content-Encoding: deflate.
    (the zlib compression is used.)
    """
    import zlib
    # Negative wbits selects a raw deflate stream (no zlib header/trailer).
    inflater = zlib.decompressobj(-zlib.MAX_WBITS)
    return inflater.decompress(data) + inflater.flush()
# DEPRECATED in favor of get_content()
def get_response(url, faker = False):
    """Fetch *url* and attach the (decompressed) body as ``response.data``."""
    if faker:
        response = request.urlopen(request.Request(url, headers=fake_headers), None)
    else:
        response = request.urlopen(url)
    body = response.read()
    encoding = response.info().get('Content-Encoding')
    if encoding == 'gzip':
        body = ungzip(body)
    elif encoding == 'deflate':
        body = undeflate(body)
    response.data = body
    return response
# DEPRECATED in favor of get_content()
def get_html(url, encoding = None, faker = False):
    """Fetch *url* and decode the body as UTF-8 (decode errors ignored)."""
    return str(get_response(url, faker).data, 'utf-8', 'ignore')
# DEPRECATED in favor of get_content()
def get_decoded_html(url, faker = False):
    """Fetch *url* and decode using the charset advertised in Content-Type, if any."""
    response = get_response(url, faker)
    charset = r1(r'charset=([\w-]+)', response.headers['content-type'])
    if charset:
        return response.data.decode(charset, 'ignore')
    # No charset declared: hand back the raw bytes unchanged.
    return response.data
def get_content(url, headers={}, decoded=True):
    """Gets the content of a URL via sending a HTTP GET request.

    Args:
        url: A URL.
        headers: Request headers used by the client.
        decoded: Whether decode the response body using UTF-8 or the charset specified in Content-Type.

    Returns:
        The content as a string.
    """
    req = request.Request(url, headers=headers)
    if cookies_txt:
        cookies_txt.add_cookie_header(req)
        req.headers.update(req.unredirected_hdrs)
    response = request.urlopen(req)
    data = response.read()

    # Handle HTTP compression for gzip and deflate (zlib)
    content_encoding = response.getheader('Content-Encoding')
    if content_encoding == 'gzip':
        data = ungzip(data)
    elif content_encoding == 'deflate':
        data = undeflate(data)

    # Decode the response body, preferring the server-declared charset.
    if decoded:
        charset = match1(response.getheader('Content-Type'), r'charset=([\w-]+)')
        data = data.decode(charset if charset is not None else 'utf-8')
    return data
def url_size(url, faker = False):
    """Return the Content-Length of *url*, in bytes."""
    if faker:
        response = request.urlopen(request.Request(url, headers=fake_headers), None)
    else:
        response = request.urlopen(url)
    return int(response.headers['content-length'])
def urls_size(urls):
    """Total Content-Length of several URLs, in bytes."""
    return sum(url_size(u) for u in urls)
def url_info(url, faker = False):
    """Probe *url* with a GET and return (mime_type, file_extension, size)."""
    if faker:
        response = request.urlopen(request.Request(url, headers = fake_headers), None)
    else:
        response = request.urlopen(request.Request(url))

    headers = response.headers

    # NOTE: "type" shadows the builtin; kept for compatibility with callers.
    type = headers['content-type']
    # Known media MIME types mapped to their usual file extensions.
    mapping = {
        'video/3gpp': '3gp',
        'video/f4v': 'flv',
        'video/mp4': 'mp4',
        'video/MP2T': 'ts',
        'video/quicktime': 'mov',
        'video/webm': 'webm',
        'video/x-flv': 'flv',
        'video/x-ms-asf': 'asf',
        'audio/mpeg': 'mp3'
    }
    if type in mapping:
        ext = mapping[type]
    else:
        type = None
        # Fall back to the filename advertised in Content-Disposition, if any.
        if headers['content-disposition']:
            try:
                filename = parse.unquote(r1(r'filename="?([^"]+)"?', headers['content-disposition']))
                if len(filename.split('.')) > 1:
                    ext = filename.split('.')[-1]
                else:
                    ext = None
            except:
                ext = None
        else:
            ext = None

    # Chunked responses advertise no total size.
    if headers['transfer-encoding'] != 'chunked':
        size = int(headers['content-length'])
    else:
        size = None

    return type, ext, size
def url_locations(urls, faker = False):
    """Resolve each URL and return the final (post-redirect) locations."""
    final_urls = []
    for url in urls:
        if faker:
            resp = request.urlopen(request.Request(url, headers=fake_headers), None)
        else:
            resp = request.urlopen(request.Request(url))
        final_urls.append(resp.url)
    return final_urls
def url_save(url, filepath, bar, refer = N |
apuigsech/BittrexAPI | BittrexAPI/__init__.py | Python | gpl-3.0 | 1,505 | 0.000664 | #!/usr/bin/env python
# BittrexAPI: Python Bittrex API implementation
#
# Copyright (c) 2014 - Albert Puigsech Galicia (albert@puigsech.com)
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
from BittrexAPI import *

# Package metadata.
__title__ = 'BittrexAPI'
__description__ = 'Python Bittrex API implementation'
__version__ = '1.0'
__url__ = ''
__author__ = 'Albert Puigsech Galicia'
__author_email__ = 'albert@puigsech.com'
__license__ = 'GPL 3.0'
__copyright__ = 'Copyright (c) 2014 - Albert Puigsech Galicia'
buffer/peepdf | tests/test_pee.py | Python | gpl-3.0 | 1,359 | 0.000736 | # peepdf is a tool to analyse and modify PDF files
# http://peepdf.eternal-todo.com
# By Jose Miguel Esparza <jesparza AT eternal-todo.com>
#
# Copyright (C) 2016 Jose Miguel Esparza
#
# This file is part of peepdf.
#
# peepdf is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# peepdf is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with peepdf. If not, see <http://www.gnu.org/licenses/>.
#
import peepdf
def test_whitespace_after_opening():
p = peepdf.PDFCore.PDFParser()
r, f = p.parse(
"tests/files/BB-1-Overview.pdf",
forceMode=True, looseMode=True, manualAnalysis=False
)
assert not r
for obj in f.body[1].objects.values():
if obj.object.type == "stream":
| assert obj.object.errors != [
"Decoding error: Error de | compressing string"
]
|
metabrainz/listenbrainz-server | listenbrainz/db/tests/test_stats.py | Python | gpl-2.0 | 16,778 | 0.004232 | import json
from copy import deepcopy
from datetime import datetime, timezone
import listenbrainz.db.stats as db_stats
import listenbrainz.db.user as db_user
from data.model.common_stat import StatRange
from data.model.user_artist_map import UserArtistMapRecord
from data.model.user_daily_activity import DailyActivityRecord
from data.model.user_entity import EntityRecord
from data.model.user_listening_activity import ListeningActivityRecord
from listenbrainz.db.testing import DatabaseTestCase
class StatsDatabaseTestCase(DatabaseTestCase):
def setUp(self):
DatabaseTestCase.setUp(self)
self.user = db_user.get_or_create(1, 'stats_user')
self.create_user_with_id(db_stats.SITEWIDE_STATS_USER_ID, 2, "listenbrainz-stats-user")
self.maxDiff = None
def test_insert_user_artists(self):
""" Test if artist stats are inserted correctly """
with open(self.path_to_data_file('user_top_artists_db.json')) as f:
artists_data = json.load(f)
db_stats.insert_user_jsonb_data(user_id=self.user['id'], stats_type='artists',
stats=StatRange[EntityRecord](**artists_data))
result = db_stats.get_user_stats(user_id=self.user['id'], stats_range='all_time', stats_type='artists')
self.assertDictEqual(result.dict(exclude={'user_id', 'last_updated'}), artists_data)
def test_insert_user_releases(self):
""" Test if release stats are inserted correctly """
with open(self.path_to_data_file('user_top_releases_db.json')) as f:
releases_data = json.load(f)
db_stats.insert_user_jsonb_data(user_id=self.user['id'], stats_type='releases',
stats=StatRange[EntityRecord](**releas | es_data))
result = db_stats.get_user_stats(user_id=self.user['id'], stats_range='all_time', stats_type='releases')
self.assertDictEqual(result.dict(exclude={'user_id', 'last_updated'}), releases_data)
|
def test_insert_user_recordings(self):
""" Test if recording stats are inserted correctly """
with open(self.path_to_data_file('user_top_recordings_db.json')) as f:
recordings_data = json.load(f)
db_stats.insert_user_jsonb_data(user_id=self.user['id'], stats_type='recordings',
stats=StatRange[EntityRecord](**recordings_data))
result = db_stats.get_user_stats(user_id=self.user['id'], stats_range='all_time', stats_type='recordings')
self.assertDictEqual(result.dict(exclude={'user_id', 'last_updated'}), recordings_data)
def test_insert_user_listening_activity(self):
""" Test if listening activity stats are inserted correctly """
with open(self.path_to_data_file('user_listening_activity_db.json')) as f:
listening_activity_data = json.load(f)
db_stats.insert_user_jsonb_data(
user_id=self.user['id'], stats_type='listening_activity',
stats=StatRange[ListeningActivityRecord](**listening_activity_data)
)
def test_insert_user_daily_activity(self):
""" Test if daily activity stats are inserted correctly """
with open(self.path_to_data_file('user_daily_activity_db.json')) as f:
daily_activity_data = json.load(f)
db_stats.insert_user_jsonb_data(
user_id=self.user['id'], stats_type='daily_activity',
stats=StatRange[DailyActivityRecord](**daily_activity_data)
)
result = db_stats.get_user_daily_activity(user_id=self.user['id'], stats_range='all_time')
self.assertDictEqual(result.dict(exclude={'user_id', 'last_updated', 'count'}), daily_activity_data)
def test_insert_user_artist_map(self):
""" Test if daily activity stats are inserted correctly """
with open(self.path_to_data_file('user_artist_map_db.json')) as f:
artist_map_data = json.load(f)
db_stats.insert_user_jsonb_data(
user_id=self.user['id'], stats_type='artist_map',
stats=StatRange[UserArtistMapRecord](**artist_map_data)
)
result = db_stats.get_user_artist_map(user_id=self.user['id'], stats_range='all_time')
self.assertDictEqual(result.dict(exclude={'user_id', 'last_updated', 'count'}), artist_map_data)
def test_insert_user_stats_mult_ranges_artist(self):
""" Test if multiple time range data is inserted correctly """
with open(self.path_to_data_file('user_top_artists_db.json')) as f:
artists_data = json.load(f)
artists_data_year = deepcopy(artists_data)
artists_data_year['stats_range'] = 'year'
db_stats.insert_user_jsonb_data(user_id=self.user['id'], stats_type='artists',
stats=StatRange[EntityRecord](**artists_data))
db_stats.insert_user_jsonb_data(user_id=self.user['id'], stats_type='artists',
stats=StatRange[EntityRecord](**artists_data_year))
result = db_stats.get_user_stats(user_id=self.user['id'], stats_range='all_time', stats_type='artists')
self.assertDictEqual(result.dict(exclude={'user_id', 'last_updated'}), artists_data)
result = db_stats.get_user_stats(user_id=self.user['id'], stats_range='year', stats_type='artists')
self.assertDictEqual(result.dict(exclude={'user_id', 'last_updated'}), artists_data_year)
def test_insert_user_stats_mult_ranges_release(self):
""" Test if multiple time range data is inserted correctly """
with open(self.path_to_data_file('user_top_releases_db.json')) as f:
releases_data = json.load(f)
releases_data_year = deepcopy(releases_data)
releases_data_year['stats_range'] = 'year'
db_stats.insert_user_jsonb_data(user_id=self.user['id'], stats_type='releases',
stats=StatRange[EntityRecord](**releases_data))
db_stats.insert_user_jsonb_data(user_id=self.user['id'], stats_type='releases',
stats=StatRange[EntityRecord](**releases_data_year))
result = db_stats.get_user_stats(user_id=self.user['id'], stats_range='all_time', stats_type='releases')
self.assertDictEqual(result.dict(exclude={'user_id', 'last_updated'}), releases_data)
result = db_stats.get_user_stats(user_id=self.user['id'], stats_range='year', stats_type='releases')
self.assertDictEqual(result.dict(exclude={'user_id', 'last_updated'}), releases_data_year)
def test_insert_user_stats_mult_ranges_recording(self):
""" Test if multiple time range data is inserted correctly """
with open(self.path_to_data_file('user_top_recordings_db.json')) as f:
recordings_data = json.load(f)
recordings_data_year = deepcopy(recordings_data)
recordings_data_year['stats_range'] = 'year'
db_stats.insert_user_jsonb_data(user_id=self.user['id'], stats_type='recordings',
stats=StatRange[EntityRecord](**recordings_data))
db_stats.insert_user_jsonb_data(user_id=self.user['id'], stats_type='recordings',
stats=StatRange[EntityRecord](**recordings_data_year))
result = db_stats.get_user_stats(user_id=self.user['id'], stats_range='all_time', stats_type='recordings')
self.assertDictEqual(result.dict(exclude={'user_id', 'last_updated'}), recordings_data)
result = db_stats.get_user_stats(user_id=self.user['id'], stats_range='year', stats_type='recordings')
self.assertDictEqual(result.dict(exclude={'user_id', 'last_updated'}), recordings_data_year)
def test_insert_user_stats_mult_ranges_listening_activity(self):
""" Test if multiple time range data is inserted correctly """
with open(self.path_to_data_file('user_listening_activity_db.json')) as f:
listening_activity_data = json.load(f)
listening_activity_data_year = deepcopy(listening_activity_data)
listening_activity_data_year['stats_range'] = 'year'
db_stats.insert_user_jsonb_data(
|
enthought/pikos | pikos/monitors/function_monitor.py | Python | bsd-3-clause | 3,191 | 0.000627 | # -*- coding: utf-8 -*-
#------------------------------------------------------------------------------
# Package: Pikos toolkit
# File: monitors/function_monitor.py
# License: LICENSE.TXT
#
# Copyright (c) 2012, Enthought, Inc.
# All rights reserved.
#------------------------------------------------------------------------------
from __future__ import absolute_import
from pikos._internal.profile_function_manager import ProfileFunctionManager
from pikos._internal.keep_track import KeepTrack
from pikos.monitors.monitor import Monitor
from pikos.monitors.records import FunctionRecord
class FunctionMonitor(Monitor):
""" Record python function events.
The class hooks on the setprofile function to receive function events and
record them.
"""
def __init__(self, recorder, record_type=None):
""" Initialize the monitoring class.
Parameters
----------
recorder : object
A subclass of :class:`~.AbstractRecorder` or a class that
implements the same interface to handle the values to be logged.
| record_type : type
A class object to be used for r | ecords. Default is
:class:`~.FunctionRecord`.
"""
self._recorder = recorder
self._record = recorder.record
self._profiler = ProfileFunctionManager()
self._index = 0
self._call_tracker = KeepTrack()
if record_type is None:
self._record_type = FunctionRecord
else:
self._record_type = record_type
self._use_tuple = self._record_type is tuple
def enable(self):
""" Enable the monitor.
The first time the method is called (the context is entered) it will
set the setprofile hooks and initialize the recorder.
"""
if self._call_tracker('ping'):
self._recorder.prepare(self._record_type)
self._profiler.replace(self.on_function_event)
def disable(self):
""" Disable the monitor.
The last time the method is called (the context is exited) it will
unset the setprofile hooks and finalize the recorder.
"""
if self._call_tracker('pong'):
self._profiler.recover()
self._recorder.finalize()
def on_function_event(self, frame, event, arg):
""" Record the current function event.
Called on function events, it will retrieve the necessary information
from the `frame`, create a :class:`FunctionRecord` and send it to the
recorder.
"""
record = self.gather_info(frame, event, arg)
if not self._use_tuple:
record = self._record_type(*record)
self._record(record)
self._index += 1
def gather_info(self, frame, event, arg):
""" Gather information for the record.
"""
if '_' == event[1]:
return (
self._index, event, arg.__name__,
frame.f_lineno, frame.f_code.co_filename)
else:
code = frame.f_code
return (
self._index, event, code.co_name,
frame.f_lineno, code.co_filename)
|
jballanc/openmicroscopy | components/tools/OmeroPy/test/integration/scriptstest/test_make_movie.py | Python | gpl-2.0 | 1,111 | 0.0036 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Integration test which checks the various parameters for makemovie.py
Copyright 2010-2013 Glencoe Software, Inc. All rights reserved.
Use is subject to license terms supplied in LICENSE.txt
"""
import test.integration.library as lib
import os, sys
import omero.processor
class TestMakeMovie(lib.ITest):
"""
Requires Pillow being installed
| """
def setup_method(self, method):
lib.ITest.setup_method(self, method)
self.svc = self.client.sf.getScriptService()
def testNoParams(self):
makeMovieID = self.svc.getScriptID("/omero/export_scripts/Make_Movie.py")
imported_pix = ",".join(self.import_image())
imported_img = self.query.findByQuery("select i from Image i join fetch i.pixels pixels where pixels.id in | (%s)" % imported_pix, None)
inputs = {"IDs": omero.rtypes.rlist([imported_img.id])}
impl = omero.processor.usermode_processor(self.root)
try:
process = self.svc.runScript(makeMovieID, inputs, None)
finally:
impl.cleanup()
|
vengefuldrx/dillonhicks.io | render.py | Python | mit | 4,597 | 0.003482 | import argparse
import os
import re
from pathlib import Path, PurePath
from jinja2 import Template, FileSystemLoader, Environment
from collections import namedtuple, defaultdict, MappingView, OrderedDict
PhotoCredit = namedtuple('PhotoCredit', ('link', 'name'))
Link = namedtuple('Link', ('link', 'caption'))
Project = namedtuple('Project', ('date', 'name', 'link', 'image', 'description', 'photo_credit'))
excludes = set((
'base.html',
))
navigation = (
Link('index.html', 'Dillon'),
Link('dillonhicks-resume.pdf', 'Résumé'),
Link('projects.html', 'Projects'),
Link('contact.html', 'Contact'),
)
ctx_by_match = defaultdict(lambda: dict)
ctx_by_name = defaultdict(lambda: dict)
def register_named_ctx(name, func):
if name in ctx_by_name:
raise RuntimeError(
'function {}() duplicates context for template {}'
.format(func.__name__, name))
ctx_by_name[name] = func
return func
def register_matched_ctx(pattern, func):
if pattern in ctx_by_match:
raise RuntimeError(
'function {}() duplicates context for templates matching {}'
.format(func.__name__, pattern))
ctx_by_match[pattern] = func
return func
class context_for(object):
"""register context generator methods by template name with decorators"""
def __init__(self, string, regex=False):
self.string = string
self.regex = regex
def __call__(self, func):
if self.regex:
retur | n register_matched_ctx(self.string, func)
return register_named_ctx(self.string, func)
def create_context(name):
ctx_funcs = []
for pattern, func in ctx_by_match.items():
if re.match(pattern, name):
ctx_funcs.append(func)
ctx_funcs.append(ctx_by_name[name])
context = {}
for f in ctx_funcs:
context.update(f())
return context
@context_for( | '^.+html$', regex=True)
def base_ctx():
return {
'navigation' : navigation
}
@context_for('projects.html')
def proj_ctx():
dillonio = Project(
date='April 2016',
name='DillonHicks.io',
link='https://github.com/vengefuldrx/dillonhicks.io',
image='img/dillonhicksioflavicon.jpg',
description='My own personal website to use as a portfolio and a centralized place to prototype.',
photo_credit=None)
breakout = Project(
date='April 2016',
name='Breakout',
link='proj-breakout.html',
image='img/breakout.png',
description='A game I developed on a Saturday morning to learn more about JavaScript.',
photo_credit=None)
chatserver = Project(
date='April 2016',
name='Golang Chat Server',
link='proj-chatserver.html',
image='https://golang.org/doc/gopher/talks.png',
description='Definitely a work in progress. Dedicating some time to learning golang by making a '
'websocket powered chat server. This is modified version of '
'the heroku <a href=\"https://github.com/heroku-examples/go-websocket-chat-demo\">'
'websocket chat demo</a>. If the heroku dyno on which it is running sleeps then it causes the'
'system to fall over when it wakes up.',
photo_credit=PhotoCredit(link='http://golang.org', name='golang.org'))
return {
'projects' : (
dillonio,
breakout,
chatserver,
)
}
def parse_args():
parser = argparse.ArgumentParser(
description='Render templates',
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument('-i', '--in', type=str, action='store',
required=True, help='Source directory',
dest='src_path')
parser.add_argument('-o', '--out', type=Path, action='store',
required=True, help='Output directory',
dest='dest_path')
args = parser.parse_args()
return args
def main():
args = parse_args()
env = Environment(loader=FileSystemLoader(args.src_path))
template_names = env.list_templates(filter_func=lambda t: t not in excludes)
for t_name in template_names:
path = args.dest_path / PurePath(t_name)
context = create_context(t_name)
try:
path.parent.mkdir(parents=True)
except FileExistsError: pass
print('Writing: {}'.format(path))
with path.open('w') as outfile:
outfile.write(env.get_template(t_name).render(context))
if __name__ == '__main__':
main()
|
CartoDB/cartodb-python | examples/import_from_database.py | Python | bsd-3-clause | 2,669 | 0.001499 | import argparse
import json
import logging
import os
import re
import warnings
from carto.auth import APIKeyAuthClient
from carto.datasets import DatasetManager
warni | ngs.filterwarnings('ignore')
# python import_from_database.py --connection='{ \
# "connector": { \
# "provider": "hive", \
# "conn | ection": { \
# "server":"YOUR_SERVER_IP", \
# "database":"default", \
# "username":"cloudera", \
# "password":"cloudera" \
# },
# "schema": "default", \
# "table": "order_items" \
# }
# }'
# Logger (better than print)
logging.basicConfig(
level=logging.INFO,
format=' %(asctime)s - %(levelname)s - %(message)s',
datefmt='%I:%M:%S %p')
logger = logging.getLogger()
# set input arguments
parser = argparse.ArgumentParser(
description='External database connector')
parser.add_argument('--connection', type=str, dest='connection',
help='An external database connection JSON object')
parser.add_argument('--organization', type=str, dest='organization',
default=os.environ['CARTO_ORG'] if 'CARTO_ORG' in os.environ else '',
help='Set the name of the organization' +
' account (defaults to env variable CARTO_ORG)')
parser.add_argument('--base_url', type=str, dest='CARTO_BASE_URL',
default=os.environ['CARTO_API_URL'] if 'CARTO_API_URL' in os.environ else '',
help='Set the base URL. For example:' +
' https://username.carto.com/ ' +
'(defaults to env variable CARTO_API_URL)')
parser.add_argument('--api_key', dest='CARTO_API_KEY',
default=os.environ['CARTO_API_KEY'] if 'CARTO_API_KEY' in os.environ else '',
help='Api key of the account' +
' (defaults to env variable CARTO_API_KEY)')
args = parser.parse_args()
# Set authentification to CARTO
if args.CARTO_BASE_URL and args.CARTO_API_KEY and args.organization:
auth_client = APIKeyAuthClient(
args.CARTO_BASE_URL, args.CARTO_API_KEY, args.organization)
else:
logger.error('You need to provide valid credentials, run with -h parameter for details')
import sys
sys.exit(1)
# get username from base_url
substring = re.search('https://(.+?).carto.com', args.CARTO_BASE_URL)
if substring:
username = substring.group(1)
# Dataset manager
dataset_manager = DatasetManager(auth_client)
connection = json.loads(args.connection.replace("\\", ""))
logger.info(connection)
table = dataset_manager.create(None, None, connection=connection)
logger.info(
'Table imported: {table}'.format(table=table.name))
|
matteocereda/RNAmotifs | m3_light/utils/__init__.py | Python | mit | 2,230 | 0.003139 | def merge_int | ervals(data):
"""
data = [(10,20), (15,30), (100, 200)]
out = [(10,30), (100,200)]
"""
if len(data)==0:
return data
result = []
saved = list(data[0])
for st, en in sorted([sorted(t) for t in data]):
if st <= saved[1]:
saved[1] = max(saved[1], en)
| else:
result.append((saved[0], saved[1]))
saved[0] = st
saved[1] = en
result.append(saved)
return result
class Bedgraph():
def __init__(self, filename):
self.data = {}
self.sites = 0
self.strand_file = {"+":"", "-": "-"}
self.load(filename)
def load(self, filename):
if filename.endswith(".gz"):
f = gzip.open(filename, "rb")
else:
f = open(filename, "rb")
r = f.readline()
while r:
if r.startswith("#"):
r = f.readline()
continue
r = r.rstrip("\r").rstrip("\n").split("\t")
chr = r[0]
start = int(r[1])
stop = int(r[2])
cDNA = float(r[3])
strand = "+" if cDNA>=0 else "-"
self.data.setdefault(chr, {}).setdefault(strand, {})
for p in xrange(start, stop):
self.data[chr][strand][p] = abs(cDNA)
r = f.readline()
f.close()
def chromosomes(self):
return self.data.keys()
def get_value(self, chr, strand, pos):
return self.data.get(chr, {}).get(strand, {}).get(pos, 0)
def region(self, chr, strand, pos_from, pos_to):
return sum([self.get_value(chr, strand, i) for i in xrange(pos_from, pos_to+1)])
def cluster(self, hws):
data_window = {}
self.data_sum = 0
for chr, chr_data in self.data.iteritems():
data_window[chr] = {}
for strand, strand_data in chr_data.iteritems():
data_window[chr][strand] = {}
for pos, value in strand_data.iteritems():
pos_from = max(0, pos-hws)
pos_to = max(0, pos+hws)
data_window[chr][strand][pos] = self.region(chr, strand, pos_from, pos_to)
self.data = data_window |
dimagi/commcare-hq | corehq/ex-submodules/dimagi/utils/couch/database.py | Python | bsd-3-clause | 6,069 | 0.001648 | from time import sleep
from django.conf import settings
from couchdbkit import ResourceConflict
from couchdbkit.client import Database
from memoized import memoized
from requests.models import Response
from requests.exceptions import RequestException
from dimagi.ext.couchdbkit import Document
from dimagi.utils.chunked import chunked
from dimagi.utils.couch.bulk import BulkFetchException, get_docs
from ..retry import retry_on
class DocTypeMismatchException(Exception):
pass
class DesignDoc(object):
"""Data structure representing a design doc"""
def __init__(self, database, id):
self.id = id
self._doc = database.get(id)
self.name = id.replace("_design/", "")
@property
def views(self):
views = []
if "views" in self._doc:
for view_name, _ in self._doc["views"].items():
views.append(view_name)
return views
def get_db(postfix=None):
"""
Get the couch database.
"""
# this is a bit of a hack, since it assumes all the models talk to the same
# db. that said a lot of our code relies on that assumption.
# this import is here because of annoying dependencies
db_url = settings.COUCH_DATABASE
if postfix:
db_url = settings.EXTRA_COUCHDB_DATABASES[postfix]
return Database(db_url, create=True)
def get_design_docs(database):
design_doc_rows = database.view("_all_docs", startkey="_design/",
endkey="_design/zzzz")
ret = []
for row in design_doc_rows:
ret.append(DesignDoc(database, row["id"]))
return ret
def iter_docs(database, ids, chunksize=100, **query_params):
for doc_ids i | n chunked(ids, chunksize):
for doc in get_docs(database, keys=doc_ids, **query_params):
yield doc
def iter_bulk_delete(database, ids, chunksize=100, doc_callback=None, wait_time=None,
max_fetch_attempts=1):
total_count = 0
for doc_ids in chunked(ids, chunksize):
for i in range(max_fetch_attempts):
try:
doc_dicts = get_d | ocs(database, keys=doc_ids)
break
except RequestException:
if i == (max_fetch_attempts - 1):
raise
sleep(30)
if doc_callback:
for doc in doc_dicts:
doc_callback(doc)
total_count += len(doc_dicts)
database.bulk_delete(doc_dicts)
if wait_time:
sleep(wait_time)
return total_count
def iter_bulk_delete_with_doc_type_verification(database, ids, doc_type, chunksize=100, wait_time=None,
max_fetch_attempts=1):
def verify_doc_type(doc):
actual_doc_type = doc.get('doc_type')
if actual_doc_type != doc_type:
raise DocTypeMismatchException("Expected %s, got %s" % (doc_type, actual_doc_type))
return iter_bulk_delete(database, ids, chunksize=chunksize, doc_callback=verify_doc_type, wait_time=wait_time,
max_fetch_attempts=max_fetch_attempts)
def is_bigcouch():
# this is a bit of a hack but we'll use it for now
return 'cloudant' in settings.COUCH_DATABASE or getattr(settings, 'BIGCOUCH', False)
def bigcouch_quorum_count():
"""
The number of nodes to force an update/read in bigcouch to make sure
we have a quorum. Should typically be the number of copies of a doc
that end up in the cluster.
"""
return (3 if not hasattr(settings, 'BIGCOUCH_QUORUM_COUNT')
else settings.BIGCOUCH_QUORUM_COUNT)
def get_safe_write_kwargs():
return {'w': bigcouch_quorum_count()} if is_bigcouch() else {}
def get_safe_read_kwargs():
return {'r': bigcouch_quorum_count()} if is_bigcouch() else {}
class SafeSaveDocument(Document):
"""
A document class that overrides save such that any time it's called in bigcouch
mode it saves with the maximum quorum count (unless explicitly overridden).
"""
def save(self, **params):
if is_bigcouch() and 'w' not in params:
params['w'] = bigcouch_quorum_count()
return super(SafeSaveDocument, self).save(**params)
def safe_delete(db, doc_or_id):
if not isinstance(doc_or_id, str):
doc_or_id = doc_or_id._id
db.delete_doc(doc_or_id, **get_safe_write_kwargs())
def apply_update(doc, update_fn, max_tries=5):
"""
A function for safely applying a change to a couch doc. For getting around ResourceConflict
errors that stem from the distributed cloudant nodes
"""
tries = 0
while tries < max_tries:
try:
update_fn(doc)
doc.save()
return doc
except ResourceConflict:
doc = doc.__class__.get(doc._id)
tries += 1
raise ResourceConflict("Document update conflict. -- Max Retries Reached")
def _is_couch_error(err):
if isinstance(err, BulkFetchException):
return True
request = err.request
if request is None:
request = _get_request_from_traceback(err.__traceback__)
return request and request.url.startswith(_get_couch_base_urls())
# Decorator to retry function call on Couch error
#
# Retry up to 5 times with exponential backoff. Raise the last
# received error from Couch if all calls fail.
retry_on_couch_error = retry_on(
BulkFetchException,
RequestException,
should_retry=_is_couch_error,
)
def _get_request_from_traceback(tb):
# Response.iter_content() raises errors without request context.
# Maybe https://github.com/psf/requests/pull/5323 will get merged?
while tb.tb_next is not None:
tb = tb.tb_next
if "self" in tb.tb_frame.f_locals:
obj = tb.tb_frame.f_locals["self"]
if isinstance(obj, Response) and obj.request:
return obj.request
return None
@memoized
def _get_couch_base_urls():
urls = set()
for config in settings.COUCH_DATABASES.values():
protocol = 'https' if config['COUCH_HTTPS'] else 'http'
urls.add(f"{protocol}://{config['COUCH_SERVER_ROOT']}")
return tuple(urls)
|
EdLogan18/logan-repository | plugin.video.exodus/resources/lib/sources/uflix_mv_tv.py | Python | gpl-2.0 | 5,626 | 0.009598 | # | -*- coding: utf-8 -*-
'''
Exodus Add-on
Copyright (C) 2016 Exodus
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it w | ill be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
import re,urllib,urlparse,base64
from resources.lib.modules import cleantitle
from resources.lib.modules import client
class source:
def __init__(self):
self.domains = ['uflix.ws']
self.base_link = 'http://uflix.ws'
self.search_link = '/index.php?menu=search&query=%s'
def movie(self, imdb, title, year):
try:
query = self.search_link % urllib.quote_plus(cleantitle.query(title))
query = urlparse.urljoin(self.base_link, query)
t = cleantitle.get(title)
r = client.request(query).decode('iso-8859-1').encode('utf-8')
r = client.parseDOM(r, 'div', attrs = {'id': 'movies'})[0]
r = client.parseDOM(r, 'figcaption')
r = [(client.parseDOM(i, 'a', ret='href'), client.parseDOM(i, 'a', ret='title'), client.parseDOM(i, 'a')) for i in r]
r = [(i[0][0], i[1][0], i[2][0]) for i in r if len(i[0]) > 0 and len(i[1]) > 0 and len(i[2]) > 0]
r = [(i[0], re.findall('(?:^Watch |)(.+?)(?: Online|)$', i[1]), re.findall('(\d{4})', i[2])) for i in r]
r = [(i[0], i[1][0], i[2][0]) for i in r if len(i[1]) > 0 and len(i[2]) > 0]
r = [(i[0], i[1].replace(i[2], ''), i[2]) for i in r]
r = [i[0] for i in r if t == cleantitle.get(i[1]) and year == i[2]][0]
url = re.findall('(?://.+?|)(/.+)', r)[0]
url = client.replaceHTMLCodes(url)
url = url.encode('utf-8')
return url
except:
return
def tvshow(self, imdb, tvdb, tvshowtitle, year):
try:
query = self.search_link % urllib.quote_plus(cleantitle.query(tvshowtitle))
query = urlparse.urljoin(self.base_link, query)
t = cleantitle.get(tvshowtitle)
r = client.request(query).decode('iso-8859-1').encode('utf-8')
r = client.parseDOM(r, 'div', attrs = {'id': 'series'})[0]
r = client.parseDOM(r, 'figcaption')
r = [(client.parseDOM(i, 'a', ret='href'), client.parseDOM(i, 'a', ret='title'), client.parseDOM(i, 'a')) for i in r]
r = [(i[0][0], i[1][0], i[2][0]) for i in r if len(i[0]) > 0 and len(i[1]) > 0 and len(i[2]) > 0]
r = [(i[0], re.findall('(?:^Watch |)(.+?)(?: Online|)$', i[1]), re.findall('(\d{4})', i[2])) for i in r]
r = [(i[0], i[1][0], i[2][0]) for i in r if len(i[1]) > 0 and len(i[2]) > 0]
r = [(i[0], i[1].replace(i[2], ''), i[2]) for i in r]
r = [i[0] for i in r if t == cleantitle.get(i[1]) and year == i[2]][0]
url = re.findall('(?://.+?|)(/.+)', r)[0]
url = client.replaceHTMLCodes(url)
url = url.encode('utf-8')
return url
except:
return
def episode(self, url, imdb, tvdb, title, premiered, season, episode):
try:
if url == None: return
url = urlparse.urljoin(self.base_link, url)
r = client.request(url).decode('iso-8859-1').encode('utf-8')
r = zip(client.parseDOM(r, 'a', ret='href', attrs = {'class': 'link'}), client.parseDOM(r, 'a', attrs = {'class': 'link'}))
r = [i for i in r if '/season/%01d/episode/%01d' % (int(season), int(episode)) in i[0]][0]
t = client.parseDOM(r[1], 'span')[0]
if not cleantitle.get(title) == cleantitle.get(t): raise Exception()
url = re.findall('(?://.+?|)(/.+)', r[0])[0]
url = client.replaceHTMLCodes(url)
url = url.encode('utf-8')
return url
except:
return
def sources(self, url, hostDict, hostprDict):
try:
sources = []
if url == None: return sources
url = urlparse.urljoin(self.base_link, url)
r = client.request(url).decode('iso-8859-1').encode('utf-8')
if not ('/episode/' in url or 'fullhdbr.png' in r or 'Blu-Ray.gif' in r): raise Exception()
links = re.findall('url=(.+?)&', r)
links = [x for y,x in enumerate(links) if x not in links[:y]]
for i in links:
try:
url = base64.b64decode(i)
url = client.replaceHTMLCodes(url)
url = url.encode('utf-8')
host = re.findall('([\w]+[.][\w]+)$', urlparse.urlparse(url.strip().lower()).netloc)[0]
if not host in hostDict: raise Exception()
host = client.replaceHTMLCodes(host)
host = host.encode('utf-8')
sources.append({'source': host, 'quality': 'SD', 'provider': 'uFlix', 'url': url, 'direct': False, 'debridonly': False})
except:
pass
return sources
except:
return sources
def resolve(self, url):
return url
|
novirael/arkanoid-pygame | levels.py | Python | gpl-2.0 | 1,317 | 0.025816 | #-------------------------------------------------------------------------------
# Name: levels
# Purpose:
#
# Author: novirael
#
# Created: 17-04-2012
# Copyright: (c) novirael 2012
# Licence: <your licence>
#-------------------------------------------------------------------------------
#!/usr/bin/env python
# Import
from sprites import Kafel
# Blocks
kafelek = [ "img/blue.png", "img/green.png", "img/red.png", "img/yellow.png",
"img/grey.png", "img/purple.png" ]
# Colors
black = (0,0,0)
white = (255,255,255)
blue = (0,100,200)
green = (0,200,0)
red = (255,0,0)
yellow = (235,235,0)
purple = (113,0,185)
|
# Variables
SW, SH = 900, 600
k_width, k_height = 45, 20
def draw_level(n):
if n == 1:
# The top of the block (y position)
top = 80
for i in range(15):
block = Kafel(blue, kafelek[0], i*(k_width+2), top)
blocks.add(block)
allsprites.add(block)
return alls | prites, blocks
# --- Create blocks
"""
# Five rows of blocks
for row in range(2):
for column in range(0,20):
block = Kafel(blue, kafelek[0], column*(k_width+2), top)
blocks.add(block)
allsprites.add(block)
# Move the top of the next row down
top += k_height + 2
"""
|
UPPMAX/nsscache | nss_cache/util/timestamps_test.py | Python | gpl-2.0 | 2,605 | 0.003839 | # Copyright 2011 Google Inc.
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
"""Unit tests for nss_cache/util/timestamps.py."""
__author__ = 'jaq@google.com (Jamie Wilkinson)'
import os
import shutil
import tempfile
import time
import unittest
import mox
from nss_cache.util import timestamps
class TestTimestamps(mox.MoxTestBase):
def setUp(self):
super(TestTimestamps, self).setUp()
self.workdir = tempfile.mkdtemp()
def tearDown(self):
super(TestTimestamps, self).tearDown()
shutil.rmtree(self.workdir)
def testReadTimestamp(self):
ts_filename = os.path.join(self.workdir, 'tsr')
ts_file = open(ts_filename | , 'w')
ts_file.write('1970-01-01T00:00:01Z\n')
ts_file.close()
ts = timestamps.ReadTimestamp(ts_filename)
self.assertEqual(time.gmtime(1), ts)
def testReadTimestamp(self):
# TZ=UTC date -d @1306428781
# Thu May 26 16:53:01 UTC 2011
ts_filename | = os.path.join(self.workdir, 'tsr')
ts_file = open(ts_filename, 'w')
ts_file.write('2011-05-26T16:53:01Z\n')
ts_file.close()
ts = timestamps.ReadTimestamp(ts_filename)
self.assertEqual(time.gmtime(1306428781), ts)
def testReadTimestampInFuture(self):
ts_filename = os.path.join(self.workdir, 'tsr')
ts_file = open(ts_filename, 'w')
ts_file.write('2011-05-26T16:02:00Z')
ts_file.close()
now = time.gmtime(1)
self.mox.StubOutWithMock(time, 'gmtime')
time.gmtime().AndReturn(now)
self.mox.ReplayAll()
ts = timestamps.ReadTimestamp(ts_filename)
self.assertEqual(now, ts)
def testWriteTimestamp(self):
ts_filename = os.path.join(self.workdir, 'tsw')
good_ts = time.gmtime(1)
timestamps.WriteTimestamp(good_ts, ts_filename)
self.assertEqual(good_ts, timestamps.ReadTimestamp(ts_filename))
ts_file = open(ts_filename, 'r')
self.assertEqual('1970-01-01T00:00:01Z\n', ts_file.read())
if __name__ == '__main__':
unittest.main()
|
nsnam/ns-3-dev-git | src/internet/test/examples-to-run.py | Python | gpl-2.0 | 607 | 0.003295 | #! /usr/bin/env python3
## -*- Mode | : python; py-indent-offset: 4; indent-tabs-mode: nil; coding: utf-8; -*-
# A list of C++ examples to run in order to ensure that they remain
# buildable and runnable over time. Each tuple in the list contains
#
# (example_name, do_run, do_valgrind_run).
#
# See test.py for more information.
cpp_examples = [
("main-simple", "True", "True"),
]
# A list of Python examples to run in order to ensure that they rem | ain
# runnable over time. Each tuple in the list contains
#
# (example_name, do_run).
#
# See test.py for more information.
python_examples = []
|
MikeDacre/mike_tools | bin/wasp_pipeline.py | Python | unlicense | 13,352 | 0.000899 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Run the entire WASP pipeline with either tophat or STAR.
============================================================================
FILE: wasp_pipeline.py
DIR: /scratch/users/dacre
AUTHOR: Michael D Dacre, mike.dacre@gmail.com
ORGANIZATION: Stanford University
LICENSE: MIT License, property of Stanford, use as you wish
VERSION: 0.2
CREATED: 2016-07-12 09:02
Last modified: 2016-03-27 08:20
============================================================================
"""
import os
import sys
import argparse
import slurmy
import pickle
import logme
MAX_JOBS = 2000
PARTITION = 'hbfraser'
STAR_MEM = '40258653917'
STAR_CORES = 28
logme.MIN_LEVEL = 'info' # Change to debug for verbose logging
logme.LOGFILE = 'wasp_submit_log.log'
def run_mapping(name, infiles, genome, algorithm='STAR', gtf=None,
dependency=None):
"""Run read mapping using either tophat or STAR.
:name: A name prefix to use for the output.
:infiles: List of fastqs, space separated for paired end, comma
separated for batches. Must be a string.
Note: if gzipped and using STAR, they will be unzipped
and rezipped during mapping
:genome: The genome or STAR genome index.
:algorithm: STAR or tophat. Case ignored.
:gtf: A GTF of genes for tophat, not required.
:dependency: The job number of the remapping step.
:returns: Job number of mapping step and name of output bam.
"""
if algorithm.lower() == 'star':
cmnd = []
new_list = []
zipped = False
for fl in infiles.split(' '):
b = []
for i in fl.split(','):
if i.endswith('.gz'):
zipped = True
cmnd.append('/home/dacre/usr/bin/unpigz -p 16 ' + i)
b.append(i[:-3])
else:
b.append(i)
new_list.append(','.join(b))
infiles = ' '.join(new_list)
cmnd.append('/home/dacre/usr/bin/STAR --runThreadN 16 ' +
'--genomeDir {} '.format(genome) +
'--readFilesIn {} '.format(infiles) +
'--outFilterMultimapNmax 1 ' +
'--outFileNamePrefix {} '.format(name) +
'--outSAMtype BAM SortedByCoordinate ' +
'--outSAMattributes MD NH ' +
'--clip5pNbases 6 ' +
'--limitBAMsortRAM {}'.format(STAR_MEM))
if zipped:
for fl in new_list:
for i in fl.split(','):
cmnd.append(
'/home/dacre/usr/bin/pigz -p 16 {}'.format(i))
command = '\n'.join(cmnd)
outbam = name + 'Aligned.sortedByCoord.out.bam'
modules = ['STAR']
elif algorithm.lower() == 'tophat':
command = 'tophat --microexon-search -o {}'.f | ormat(name + '_tophat')
command = command + ' -G ' + gtf if gtf else command
command = command + ' -p 16 {} {}\n'.format | (genome, infiles)
outbam = name + '_accepted_hits.bam'
command = command + 'mv {}/accepted_hits.bam {}'.format(
name + '_tophat', outbam)
modules = ['python/2.7.5', 'tophat']
else:
raise Exception('Invalid algorithm: {}'.format(algorithm))
return (slurmy.monitor_submit(slurmy.make_job_file(
command, name, '24:00:00', STAR_CORES, partition=PARTITION, modules=modules),
dependency, MAX_JOBS), outbam)
def wasp_step_1(fl, snp_dir, pipeline=None, dependency=None):
"""Run find_intersecting_snps.py on fl.
:fl: The sam or bam file to run on.
:snp_dir: The SNP directory required by WASP.
:pipeline: The path to the WASP pipeline.
:dependency: The job number of the remapping step.
:returns: The job number.
"""
command = os.path.join(os.path.abspath(pipeline),
'find_intersecting_snps.py') \
if pipeline else 'find_intersecting_snps.py'
logme.log('Submitting wasp step 1 for {}'.format(fl), level='debug')
return slurmy.monitor_submit(slurmy.make_job_file(
'python2 {} -m 1000000 {} {}'.format(command, fl, snp_dir),
fl + '_step1', '16:00:00', 8, '30000', partition=PARTITION,
modules=['python/2.7.5']), dependency, MAX_JOBS)
def wasp_step_2(name, remapped, pipeline=None, dependency=None):
"""Run filter_remapped_reads.py following second mapping.
:name: The name of the original mapped bam or sam, used to make file
names
:remapped: The file created by the second mapping.
:pipeline: The path to the WASP pipeline.
:dependency: The job number of the remapping step.
:returns: The job number.
"""
command = os.path.join(os.path.abspath(pipeline),
'filter_remapped_reads.py') \
if pipeline else 'filter_remapped_reads.py'
# Trim the name
shortname = '.'.join(name.split('.')[:-1]) if name.endswith('.bam') \
or name.endswith('.sam') else name
logme.log('Submitting wasp step 2 for {}'.format(shortname), level='debug')
return slurmy.monitor_submit(slurmy.make_job_file(
'python2 {} {} {} {} {}'.format(command,
shortname + '.to.remap.bam',
remapped,
shortname + '.remap.keep.bam',
shortname + '.to.remap.num.gz'),
shortname + '_step2', '16:00:00', 8, '30000', partition=PARTITION,
modules=['python/2.7.5']), dependency, MAX_JOBS)
def merge_bams(name, dependency=None):
"""Use samtools to merge two bam files."""
shortname = '.'.join(name.split('.')[:-1]) if name.endswith('.bam') \
or name.endswith('.sam') else name
orig_reads = shortname + '.keep.bam'
remapped = shortname + '.remap.keep.bam'
uname = shortname + '_wasp_final_unsorted.bam'
final_name = shortname + '_wasp_final.bam'
return slurmy.monitor_submit(slurmy.make_job_file(
'samtools merge -f {} {} {}\n'.format(uname, orig_reads, remapped) +
'samtools sort -o {} {}'.format(final_name, uname),
shortname + '_merge', '16:00:00', 4, '26000', partition=PARTITION,
modules='samtools'), dependency, MAX_JOBS)
def run_wasp(files, snp_dir, genome, algorithm='star', gtf=None, pipeline=None,
step=1, remapped_bam=None):
"""Run the complete WASP pipeline.
:files: All the files to run on, can be fastq or sam/bam. If fastq, or
directory, an initial mapping is done.
:snp_dir: The SNP directory required by WASP.
:genome: A genome directory for tophat, or the STAR index directory for
STAR
:algorithm: 'star' or 'tophat'
:gtf: A GTF of genes for tophat, not required.
:pipeline: The location of the WASP pipeline
:step: Start at steps 1, 2, 3, or 4 instead of at the beginning,
ignored if files are fastq.
:returns: None.
"""
all_jobs = {}
save_file = 'wasp_jobs.pickle'
# Detect if need to run mapping
if files[0].endswith('.fq') or files[0].endswith('.fastq') \
or os.path.isdir(files[0]):
logme.log('File contains fastq, running initial mapping',
also_write='stderr')
initial_map = True
else:
initial_map = False
initial_step = step
if step == 2:
step_1 = None
elif step == 3:
remapped = None
remap = None
elif step == 4:
step_2 = None
# Loop through every file and run all steps of the pipeline.
for fl in files:
step = initial_step
map_job = None
# Initial mapping
if initial_map:
if os.path.isdir(fl):
fl = []
for i in os.listdir(fl):
if os.path.isfile(i):
if 'fq' in i.split('.') \
|
olivergs/pyevo | pyevo/api/googletranslate.py | Python | mit | 730 | 0.017833 | # -*- coding: utf-8 -*-
"""
===============================================
.. module::
:platform: Unix, Windows
:synopsis:
:deprecated:
.. moduleauthor:: (C) 2014 Oliver Gutiérrez
"""
from pyevo.http import nvp_request
def translate(apikey,text,from_lang,to_lang):
"""
Translation to english function
"""
if text:
data={
"key": apikey,
"q": text,
"source": from_lang,
"target": to_lang,
}
try:
resp=nvp_request("https://www.googleapis.com/language/translate/v2",data,json=True)
return resp['data']['translations'][0]['translatedText']
except:
| pass
retur | n None |
davidt/reviewboard | reviewboard/hostingsvcs/tests/test_hosting_service_auth_form.py | Python | mit | 16,079 | 0 | from __future__ import unicode_literals
from reviewboard.hostingsvcs.errors import (AuthorizationError,
TwoFactorAuthCodeRequiredError)
from reviewboard.hostingsvcs.forms import HostingServiceAuthForm
from reviewboard.hostingsvcs.models import HostingServiceAccount
from reviewboard.hostingsvcs.service import (register_hosting_service,
unregister_hosting_service)
from reviewboard.scmtools.models import Tool
from reviewboard.site.models import LocalSite
from reviewboard.testing import TestCase
from reviewboard.testing.hosting_services import (SelfHostedTestService,
TestService)
class HostingServiceAuthFormTests(TestCase):
"""Unit tests for reviewboard.hostingsvcs.forms.HostingServiceAuthForm."""
fixtures = ['test_scmtools']
def setUp(self):
super(HostingServiceAuthFormTests, self).setUp()
register_hosting_service('test', TestService)
register_hosting_service('self_hosted_test', SelfHostedTestService)
self.git_tool_id = Tool.objects.get(name='Git').pk
def tearDown(self):
super(HostingServiceAuthFormTests, self).tearDown()
unregister_hosting_service('self_hosted_test')
unregister_hosting_service('test')
def test_override_help_texts(self):
"""Testing HostingServiceAuthForm subclasses overriding help texts"""
class MyAuthForm(HostingServiceAuthForm):
class Meta:
help_texts = {
'hosting_account_username': 'My help text.',
}
form = MyAuthForm(hosting_service_cls=TestService)
self.assertEqual(form.fields['hosting_account_username'].help_text,
'My help text.')
def test_override_labels(self):
"""Testing HostingServiceAuthForm subclasses overriding labels"""
class MyAuthForm(HostingServiceAuthForm):
class Meta:
labels = {
'hosting_account_username': 'My label.',
}
form = MyAuthForm(hosting_service_cls=TestService)
self.assertEqual(form.fields['hosting_account_username'].label,
'My label.')
def test_get_credentials_default(self):
"""Testing HostingServiceAuthForm.get_credentials default behavior"""
form = HostingServiceAuthForm(
{
'hosting_account_username': 'myuser',
'hosting_account_password': 'mypass',
},
hosting_service_cls=TestService)
self.assertTrue(form.is_valid())
self.assertEqual(
form.get_credentials(),
{
'username': 'myuser',
'password': 'mypass',
})
def test_get_credentials_default_with_2fa_code(self):
"""Testing HostingServiceAuthForm.get_credentials default behavior
with two-factor auth code
"""
form = HostingServiceAuthForm(
{
'hosting_account_username': 'myuser',
'hosting_account_password': 'mypass',
'hosting_account_two_factor_auth_code': '123456',
},
hosting_service_cls=TestService)
self.assertTrue(form.is_valid())
self.assertEqual(
form.get_credentials(),
{
'username': 'myuser',
'password': 'mypass',
'two_factor_auth_code': '123456',
})
def test_get_credentials_with_form_prefix(self):
"""Testing HostingServiceAuthForm.get_credentials default behavior
with form prefix
"""
form = HostingServiceAuthForm(
{
'myservice-hosting_account_username': 'myuser',
'myservice-hosting_account_password': 'mypass',
'myservice-hosting_account_two_factor_auth_code': '123456',
},
hosting_service_cls=TestService,
prefix='myservice')
self.assertTrue(form.is_valid())
self.assertEqual(
form.get_credentials(),
{
'username': 'myuser',
'password': 'mypass',
'two_factor_auth_code': '123456',
})
def test_save_new_account(self):
"""Testing Host | ingServiceAuthForm.save with new account"""
form = HostingServiceAuthForm(
{
'hosting_account_username': 'myuser',
'ho | sting_account_password': 'mypass',
},
hosting_service_cls=TestService)
self.assertTrue(form.is_valid())
hosting_account = form.save()
self.assertIsNotNone(hosting_account.pk)
self.assertEqual(hosting_account.service_name, 'test')
self.assertEqual(hosting_account.username, 'myuser')
self.assertEqual(hosting_account.data['password'], 'mypass')
self.assertIsNone(hosting_account.hosting_url)
self.assertIsNone(hosting_account.local_site)
def test_save_new_account_with_existing_stored(self):
"""Testing HostingServiceAuthForm.save with new account matching
existing stored account information
"""
form = HostingServiceAuthForm(
{
'hosting_account_username': 'myuser',
'hosting_account_password': 'mypass',
},
hosting_service_cls=TestService)
self.assertTrue(form.is_valid())
orig_account = HostingServiceAccount.objects.create(
service_name='test',
username='myuser')
hosting_account = form.save()
self.assertIsNotNone(hosting_account.pk)
self.assertEqual(hosting_account.pk, orig_account.pk)
self.assertEqual(hosting_account.service_name, 'test')
self.assertEqual(hosting_account.username, 'myuser')
self.assertEqual(hosting_account.data['password'], 'mypass')
self.assertIsNone(hosting_account.hosting_url)
self.assertIsNone(hosting_account.local_site)
def test_save_new_account_with_hosting_url(self):
"""Testing HostingServiceAuthForm.save with new account and hosting URL
"""
form = HostingServiceAuthForm(
{
'hosting_account_username': 'myuser',
'hosting_account_password': 'mypass',
'hosting_url': 'example.com',
},
hosting_service_cls=SelfHostedTestService)
self.assertTrue(form.is_valid())
hosting_account = form.save()
self.assertIsNotNone(hosting_account.pk)
self.assertEqual(hosting_account.service_name, 'self_hosted_test')
self.assertEqual(hosting_account.username, 'myuser')
self.assertEqual(hosting_account.data['password'], 'mypass')
self.assertEqual(hosting_account.hosting_url, 'example.com')
self.assertIsNone(hosting_account.local_site)
def test_save_new_account_with_hosting_url_not_self_hosted(self):
"""Testing HostingServiceAuthForm.save with new account and hosting URL
with non-self-hosted service
"""
form = HostingServiceAuthForm(
{
'hosting_account_username': 'myuser',
'hosting_account_password': 'mypass',
'hosting_url': 'example.com',
},
hosting_service_cls=TestService)
self.assertTrue(form.is_valid())
self.assertNotIn('hosting_url', form.cleaned_data)
hosting_account = form.save()
self.assertIsNone(hosting_account.hosting_url)
def test_save_new_account_without_hosting_url_self_hosted(self):
"""Testing HostingServiceAuthForm.save with new account and no
hosting URL with a self-hosted service
"""
form = HostingServiceAuthForm(
{
'hosting_account_username': 'myuser',
'hosting_account_password': 'mypass',
},
hosting_service_cls=SelfHostedTestService)
self.assertFalse(form.is_valid())
self.assertEqual(
|
timqian/sms-tools | lectures/4-STFT/plots-code/window-size.py | Python | agpl-3.0 | 1,498 | 0.012016 | import math
import matplotlib.pyplot as plt
import numpy as np
import time, os, sys
sys.path.append(os.path.join(os.path.dirname(os.path.realpath(__file__)), '../../../software/models/'))
import dftModel as DF
import utilFunctions as UF
(fs, x) = UF.wavread('../../../sounds/oboe-A4.wav')
N = 128
start = .81*fs
x1 = x[start:start+N]
plt.figure(1, figsize=(9.5, 6))
plt.subplot(321)
plt.plot(np.arange(start, (start+N), 1.0)/fs, x1*np.hamming(N), ' | b', lw=1.5)
plt.axis([start/fs, (start+N)/fs, min(x1*np.hamming(N)), max(x1*np.hamming(N))])
plt.title('x1, M = 128')
mX, pX = DF.dftAnal(x1, np.hamming(N), N)
plt.subplot(323)
plt.plot((fs/2.0)*np.arange(mX.size)/float(mX.size), mX, 'r', lw=1.5)
plt.axis([0,fs/2.0,-90,max(mX)])
plt.title('mX1')
plt.subplot(325)
plt.plot((fs/2.0)*np.arange(mX.size)/float(mX.size), pX, 'c', lw=1.5)
plt.axis([0,fs/2.0,min(pX),max(pX)])
plt.title('pX1')
N = 1024
start = .81*fs
x2 | = x[start:start+N]
mX, pX = DF.dftAnal(x2, np.hamming(N), N)
plt.subplot(322)
plt.plot(np.arange(start, (start+N), 1.0)/fs, x2*np.hamming(N), 'b', lw=1.5)
plt.axis([start/fs, (start+N)/fs, min(x2), max(x2)])
plt.title('x2, M = 1024')
plt.subplot(324)
plt.plot((fs/2.0)*np.arange(mX.size)/float(mX.size), mX, 'r', lw=1.5)
plt.axis([0,fs/2.0,-90,max(mX)])
plt.title('mX2')
plt.subplot(326)
plt.plot((fs/2.0)*np.arange(mX.size)/float(mX.size), pX, 'c', lw=1.5)
plt.axis([0,fs/2.0,min(pX),max(pX)])
plt.title('pX2')
plt.tight_layout()
plt.savefig('window-size.png')
plt.show()
|
Syncano/syncano-cli | syncano_cli/base/prompter.py | Python | mit | 536 | 0.003731 | import click
from syncano_cli.base.options import ColorSchema
class Promp | ter(object):
indent = ' '
def prompt(self, text, color=None, **kwargs):
styles_kwargs = {'fg': color or ColorSchema.PROMPT}
return click.prompt(click.style("{}{}".format(self.indent, text), **styles_kwargs), **kwargs)
def confirm(self, text, color=None, | **kwargs):
styles_kwargs = {'fg': color or ColorSchema.PROMPT}
return click.confirm(click.style("{}{}".format(self.indent, text), **styles_kwargs), **kwargs)
|
gonicus/gosa | backend/src/tests/backend/components/test_jsonrpc_service.py | Python | lgpl-2.1 | 6,629 | 0.00181 | # This file is part of the GOsa framework.
#
# http://gosa-project.org
#
# Copyright:
# (C) 2016 GONICUS GmbH, Germany, http://www.gonicus.de
#
# See the LICENSE file in the project's top-level directory for details.
import unittest.mock
from tornado.web import Application
from gosa.common.gjson import dumps, loads
from gosa.backend.components.jsonrpc_service import JsonRpcHandler, AUTH_SUCCESS, AUTH_FAILED, AUTH_LOCKED
from gosa.common.components import PluginRegistry
from tests.GosaTestCase import slow
from tests.RemoteTestCase import RemoteTestCase
@slow
class JsonRpcHandlerTestCase(RemoteTestCase):
def get_app(self):
return Application([('/rpc', JsonRpcHandler)], cookie_secret='TecloigJink4', xsrf_cookies=True)
def setUp(self):
super(JsonRpcHandlerTestCase, self).setUp()
self.mocked_resolver = unittest.mock.MagicMock()
self.mocked_resolver.return_value.check.return_value = True
self.patcher = unittest.mock.patch.dict(PluginRegistry.modules, {'ACLResolver': self.mocked_resolver})
self.patcher.start()
def tearDown(self):
super(JsonRpcHandlerTestCase, self).tearDown()
| self.patcher.stop()
def test_login(self):
# successful login
with unittest.mock.patch.object(JsonRpcHandler, 'authenticate', return_value='cn=System Administrator,ou=people,dc=example,'
| 'dc=net') as m:
response = self.login()
assert response.code == 200
json = loads(response.body)
assert json['result']['state'] == AUTH_SUCCESS
assert json['error'] is None
assert json['id'] == 0
# failed login
with unittest.mock.patch.object(JsonRpcHandler, 'authenticate', return_value=False) as m:
response = self.login()
json = loads(response.body)
assert json['result']['state'] == AUTH_LOCKED
# reset lock
JsonRpcHandler._JsonRpcHandler__dos_manager = {}
def test_bad_method_name(self):
# fetch the xsrf cookie
self.fetch('/rpc', method='GET')
data = dumps({
"id": 1,
"method": "_somemethod",
"params": []
})
response = self.fetch('/rpc',
method='POST',
body=data
)
assert response.code == 403
def test_xsrf(self):
data = dumps({
"id": 3,
"method": "login",
"params": ["admin", "tester"]
})
response = self.fetch('/rpc',
method='POST',
body=data
)
# without requesting the xsrf cookie we get the 403 code
assert response.code == 403
def test_logout(self):
# fetch the xsrf cookie
self.fetch('/rpc', method='GET')
data = dumps({
"id": 3,
"method": "logout",
"params": []
})
response = self.fetch('/rpc',
method='POST',
body=data
)
# logging out before beeing logged in is not allowed
assert response.code == 401
self.login()
response = self.fetch('/rpc',
method='POST',
body=data
)
assert response.code == 200
json = loads(response.body)
assert json['result'] is True
assert json['error'] is None
assert json['id'] == 3
# check if we are logged out
data = dumps({
"id": 3,
"method": "getSessionUser",
"params": []
})
response = self.fetch('/rpc',
method='POST',
body=data
)
assert response.code == 401
def test_unknown(self):
self.login()
data = dumps({
"id": 1,
"method": "unknownmethod",
"params": []
})
response = self.fetch('/rpc',
method='POST',
body=data
)
assert response.code == 500
json = loads(response.body)
assert json['error']['code'] == 100
assert json['error']['name'] == "JSONRPCError"
def test_missingparameter(self):
# fetch the xsrf cookie
self.fetch('/rpc', method='GET')
data = dumps({
"id": 1,
"params": []
})
response = self.fetch('/rpc',
method='POST',
body=data
)
assert response.code == 400
def test_invalidjson(self):
# fetch the xsrf cookie
self.fetch('/rpc', method='GET')
response = self.fetch('/rpc',
method='POST',
body="this is no json://"
)
assert response.code == 400
def test_wrong_parameter_format(self):
# fetch the xsrf cookie
self.fetch('/rpc', method='GET')
data = dumps({
"id": 1,
"method": "login",
"params": 'no list or dict'
})
response = self.fetch('/rpc',
method='POST',
body=data
)
assert response.code == 400
def test_getSessionUser(self):
self.login()
data = dumps({
"id": 1,
"method": "getSessionUser",
"params": []
})
response = self.fetch('/rpc',
method='POST',
body=data
)
assert response.code == 200
json = loads(response.body)
assert json['result'] == "admin"
def test_exception(self):
self.login()
data = dumps({
"id": 1,
"method": "getSessionUser",
"params": {'test': 'test'}
})
response = self.fetch('/rpc',
method='POST',
body=data
)
assert response.code == 500
json = loads(response.body)
assert json['error']['code'] == 500
assert json['error']['name'] == "JSONRPCError"
|
makinacorpus/django | tests/httpwrappers/tests.py | Python | bsd-3-clause | 23,131 | 0.001601 | # -*- encoding: utf-8 -*-
from __future__ import unicode_literals
import copy
import os
import pickle
import unittest
import warnings
from django.core.exceptions import SuspiciousOperation
from django.core.signals import request_finished
from django.db import close_old_connections
from django.http import (QueryDict, HttpResponse, HttpResponseRedirect,
HttpResponsePermanentRedirect, HttpResponseNotAllowed,
HttpResponseNotModified, StreamingHttpResponse,
SimpleCookie, BadHeaderError,
parse_cookie)
from django.test import TestCase
from django.utils.encoding import smart_str
from django.utils._os import upath
from django.utils import six
class QueryDictTests(unittest.TestCase):
def test_missing_key(self):
q = QueryDict(str(''))
self.assertRaises(KeyError, q.__getitem__, 'foo')
def test_immutability(self):
q = QueryDict(str(''))
self.assertRaises(AttributeError, q.__setitem__, 'something', 'bar')
self.assertRaises(AttributeError, q.setlist, 'foo', ['bar'])
self.assertRaises(AttributeError, q.appendlist, 'foo', ['bar'])
self.assertRaises(AttributeError, q.update, {'foo': 'bar'})
self.assertRaises(AttributeError, q.pop, 'foo')
self.assertRaises(AttributeError, q.popitem)
self.assertRaises(AttributeError, q.clear)
def test_immutable_get_with_default(self):
q = QueryDict(str(''))
self.assertEqual(q.get('foo', 'default'), 'default')
def test_immutable_basic_operations(self):
q = QueryDict(str(''))
self.assertEqual(q.getlist('foo'), [])
if not six.PY3:
self.assertEqual(q.has_key('foo'), False)
self.assertEqual('foo' in q, False)
self.assertEqual(list(six.iteritems(q)), [])
self.assertEqual(list(six.iterlists(q)), [])
self.assertEqual(list(six.iterkeys(q)), [])
self.assertEqual(list(six.itervalues(q)), [])
self.assertEqual(len(q), 0)
self.assertEqual(q.urlencode(), '')
def test_single_key_value(self):
"""Test QueryDict with one key/value pair"""
q = QueryDict(str('foo=bar'))
self.assertEqual(q['foo'], 'bar')
self.assertRaises(KeyError, q.__getitem__, 'bar')
self.assertRaises(AttributeError, q.__setitem__, 'something', 'bar')
self.assertEqual(q.get('foo', 'default'), 'bar')
self.assertEqual(q.get('bar', 'default'), 'default')
self.assertEqual(q.getlist('foo'), ['bar'])
self.assertEqual(q.getlist('bar'), [])
self.assertRaises(AttributeError, q.setlist, 'foo', ['bar'])
self.assertRaises(AttributeError, q.appendlist, 'foo', ['bar'])
if not six.PY3:
self.assertTrue(q.has_key('foo'))
self.assertTrue('foo' in q)
if not six.PY3:
self.assertFalse(q.has_key('bar'))
self.assertFalse('bar' in q)
self.assertEqual(list(six.iteritems(q)), [('foo', 'bar')])
self.assertEqual(list(six.iterlists(q)), [('foo', ['bar'])])
self.assertEqual(list(six.iterkeys(q)), ['foo'])
self.assertEqual(list(six.itervalues(q)), ['bar'])
self.assertEqual(len(q), 1)
self.assertRaises(AttributeError, q.update, {'foo': 'bar'})
self.assertRaises(AttributeError, q.pop, 'foo')
self.assertRaises(AttributeError, q.popitem)
self.assertRaises(AttributeError, q.clear)
self.assertRaises(AttributeError, q.setdefault, 'foo', 'bar')
self.assertEqual(q.urlencode(), 'foo=bar')
def test_urlencode(self):
q = QueryDict(str(''), mutable=True)
q['next'] = '/a&b/'
self.assertEqual(q.urlencode(), 'next=%2Fa%26b%2F')
self.assertEqual(q.urlencode(safe='/'), 'next=/a%26b/')
q = QueryDict(str(''), mutable=True)
q['next'] = '/t\xebst&key/'
self.assertEqual(q.urlencode(), 'next=%2Ft%C3%ABst%26key%2F')
self.assertEqual(q.urlencode(safe='/'), 'next=/t%C3%ABst%26key/')
def test_mutable_copy(self):
"""A copy of a QueryDict is mutable."""
q = QueryDict(str('')).copy()
self.assertRaises(KeyError, q.__getitem__, "foo")
q['name'] = 'john'
self.assertEqual(q['name'], 'john')
def test_mutable_delete(self):
q = QueryDict(str('')).copy()
q['name'] = 'john'
del q['name']
self.assertFalse('name' in q)
def test_basic_mutable_operations(self):
q = QueryDict(str('')).copy()
q['name'] = 'john'
self.assertEqual(q.get('foo', 'default'), 'default')
self.assertEqual(q.get('name', 'default'), 'john')
self.assertEqual(q.getlist('name'), ['john'])
self.assertEqual(q.getlist('foo'), [])
q.setlist('foo', ['bar', 'baz'])
self.assertEqual(q.get('foo', 'default'), 'baz')
self.assertEqual(q.getlist('foo'), ['bar', 'baz'])
q.appendlist('foo', 'another')
self.assertEqual(q.getlist('foo'), ['bar', 'baz', 'another'])
self.assertEqual(q['foo'], 'another')
if not six.PY3:
self.assertTrue(q.has_key('foo'))
self.assertTrue('foo' in q)
self.assertListEqual(sorted(list(six.iteritems(q))),
[('foo', 'another'), ('name', 'john')])
self.assertListEqual(sorted(list(six.iterlists(q))),
[('foo', ['bar', 'baz', 'another']), ('name', ['john'])])
self.assertListEqual(sorted(list(six.iterkeys(q))),
['foo', 'name'])
self.assertListEqual(sorted(list(six.itervalues(q))),
['another', 'john'])
q.update({'foo': 'hello'})
self.assertEqual(q['foo'], 'hello')
self.assertEqual(q.get('foo', 'not available'), 'hello')
self.assertEqual(q.getlist('foo'), ['bar', 'baz', 'another', 'hello'])
self.assertEqual(q.pop('foo'), ['bar', 'baz', 'another', 'hello'])
self.assertEqual(q.pop('foo', 'not there'), 'not there')
self.assertEqual(q.get('foo', 'not there'), 'not there')
self.assertEqual(q.setdefault('foo', 'bar'), 'bar')
self.assertEqual(q['foo'], 'bar')
self.assertEqual(q.getlist('foo'), ['bar'])
self.assertIn(q.urlencode(), ['foo=bar&name=john', 'name=john&foo=bar'])
q.clear()
self.assertEqual(len(q), 0)
def test_multiple_keys(self):
"""Test QueryDict with two key/value pairs with same keys."""
q = QueryDict(str('vote=yes&vote=no'))
self.assertEqual(q['vote'], 'no')
self.assertRaises(AttributeError, q.__setitem__, 'something', 'bar')
self.assertEqual(q.get('vote', 'default'), 'no')
self.assertEqual(q.get('foo', 'default'), 'default')
self.assertEqual(q.getlist('vote'), ['yes', 'no'])
self.assertEqual(q.getlist('foo'), [])
self.assertRaises(AttributeError, q.setlist, 'foo', ['bar', 'baz'])
self.assertRaises(AttributeError, q.setlist, 'foo', ['bar', 'baz'])
self.assertRaises(AttributeError, q.appendlist, 'foo', ['bar'])
if not six.PY3:
self.assertEqual(q.has_key('vote'), True)
self.assertEqual('vote' in q, True)
if not six.PY3:
self.assertEqual(q.has_key('foo'), False)
self.assertEqual('foo' in q, False)
self.assertEqual(list(six.iteritems(q)), [('vote', 'no')])
self.assertEqual(list(six.iterlists(q)), [('vote', ['yes', 'no'])])
self.assertEqual(list(six.iterkeys(q)), ['vote'])
self.assertEqual(list(six.itervalues(q)), ['no'])
self.assertEqual(len(q), 1)
self.assertRaises(AttributeError, q.update, {'foo': 'bar'})
self.assertRaises(AttributeError, q.pop, 'foo')
self.assert | Raises(AttributeError, q.popitem)
self.assertRaises(AttributeError, q.clear)
self.assertRaises(AttributeError, q.setdefault, 'foo', 'bar')
self.assertRaises(AttributeError, q.__ | delitem__, 'vote')
if not six.PY3:
def test_invalid_input_encoding(self):
"""
Query |
tylere/earthengine-api | python/ee/tests/_cloud_api_utils_test.py | Python | apache-2.0 | 13,942 | 0.003228 | #!/usr/bin/env python
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import json
import warnings
import unittest
from ee import _cloud_api_utils
from ee import ee_exception
class CloudApiUtilsTest(unittest.TestCase):
def setUp(self):
super(CloudApiUtilsTest, self).setUp()
_cloud_api_utils.set_cloud_api_user_project('earthengine-legacy')
def test_convert_dict_simple(self):
result = _cloud_api_utils._convert_dict({
'x': 99,
'y': 2
}, {
'x': 'a',
'y': ('c', lambda x: x + 1)
})
self.assertEqual({'a': 99, 'c': 3}, result)
def test_convert_dict_with_defaults(self):
result = _cloud_api_utils._convert_dict(
{
'x': 99
}, {'x': 'a'}, defaults={
'q': 'r',
'a': 'aaa'
})
self.assertEqual({'a': 99, 'q': 'r'}, result)
def test_convert_dict_with_warnings(self):
with warnings.catch_warnings(record=True) as w:
result = _cloud_api_utils._convert_dict({
'x': 1,
'y': 2
}, {
'x': 'flan',
'y': 'flan'
})
self.assertEqual(1, len(w))
self.assertEqual('Multiple request parameters converted to flan',
str(w[0].message))
with warnings.catch_warnings(record=True) as w:
result = _cloud_api_utils._convert_dict({'x': 1, 'y': 2}, {'x': 'flan'})
self.assertEqual({'flan': 1}, result)
self.assertEqual(0, len(w))
with warnings.catch_warnings(record=True) as w:
result = _cloud_api_utils._convert_dict(
{
'x': 1,
'y': 2
}, {'x': 'flan'}, key_warnings=True)
self.assertEqual({'flan': 1}, result)
self.assertEqual(1, len(w))
self.assertEqual('Unrecognized key y ignored', str(w[0].message))
def test_convert_value(self):
self.assertEqual('x', _cloud_api_utils._convert_value('a', {'a': 'x'}, 'z'))
self.assertEqual('z', _cloud_api_utils._convert_value('b', {'a': 'x'}, 'z'))
def test_convert_msec_to_timestamp(self):
self.assertEqual('2018-07-31T20:46:28.888000Z',
_cloud_api_utils._convert_msec_to_timestamp(1533069988888))
def test_convert_timestamp_to_msec(self):
self.assertEqual(
1533069988888,
_cloud_api_utils._convert_timestamp_to_msec(
'2018-07-31T20:46:28.888000Z'))
self.assertEqual(
1533069988000,
_cloud_api_utils._convert_timestamp_to_msec(
'2018-07-31T20:46:28Z'))
def test_convert_bounding_box_to_geo_json(self):
geo_json = _cloud_api_utils._convert_bounding_box_to_geo_json(
[-105, 45, 10, 60])
geo_json_parsed = json.loads(geo_json)
self.assertEqual({
'type':
'Polygon',
'coordinates': [[[-105, | 45], [10, 45], [10, 60], [-105, 60], [-105, 45]]
]
}, geo_json_parsed)
def test_convert_asset_type_for_cre | ate_asset(self):
self.assertEqual(
'IMAGE_COLLECTION',
_cloud_api_utils.convert_asset_type_for_create_asset('ImageCollection'))
self.assertEqual(
'IMAGE_COLLECTION',
_cloud_api_utils.convert_asset_type_for_create_asset(
'IMAGE_COLLECTION'))
self.assertEqual(
'FOLDER',
_cloud_api_utils.convert_asset_type_for_create_asset('Folder'))
self.assertEqual(
'FOLDER',
_cloud_api_utils.convert_asset_type_for_create_asset('FOLDER'))
  def test_convert_asset_id_to_asset_name(self):
    """Bare IDs are prefixed with the right project; full names pass through."""
    # Unrooted path -> public project.
    self.assertEqual(
        'projects/earthengine-public/assets/path/to/asset',
        _cloud_api_utils.convert_asset_id_to_asset_name('path/to/asset'))
    # users/... and projects/... legacy IDs -> legacy project.
    self.assertEqual(
        'projects/earthengine-legacy/assets/users/path/to/asset',
        _cloud_api_utils.convert_asset_id_to_asset_name('users/path/to/asset'))
    self.assertEqual(
        'projects/earthengine-legacy/assets/projects/path/to/asset',
        _cloud_api_utils.convert_asset_id_to_asset_name(
            'projects/path/to/asset'))
    # Already a full asset name: unchanged.
    self.assertEqual(
        'projects/foobar/assets/baz',
        _cloud_api_utils.convert_asset_id_to_asset_name(
            'projects/foobar/assets/baz'))
  def test_split_asset_name(self):
    """split_asset_name separates the project parent from the asset ID."""
    parent, asset_id = _cloud_api_utils.split_asset_name(
        'projects/earthengine-legacy/assets/users/path/to/asset')
    self.assertEqual('projects/earthengine-legacy', parent)
    self.assertEqual('users/path/to/asset', asset_id)
  def test_convert_operation_name_to_task_id(self):
    """The task ID is whatever follows the last 'operations/' segment."""
    self.assertEqual(
        'taskId',
        _cloud_api_utils.convert_operation_name_to_task_id(
            'operations/taskId'))
    self.assertEqual(
        'taskId',
        _cloud_api_utils.convert_operation_name_to_task_id(
            'projects/test/operations/taskId'))
    # 'operations' appearing as a project name must not confuse the parse.
    self.assertEqual(
        'taskId',
        _cloud_api_utils.convert_operation_name_to_task_id(
            'projects/operations/operations/taskId'))
    # A bare task ID passes through unchanged.
    self.assertEqual(
        'taskId', _cloud_api_utils.convert_operation_name_to_task_id('taskId'))
  def test_convert_task_id_to_operation_name(self):
    """A task ID expands to a legacy-project operation name."""
    self.assertEqual(
        'projects/earthengine-legacy/operations/taskId',
        _cloud_api_utils.convert_task_id_to_operation_name('taskId'))
  def test_encode_number_as_cloud_value(self):
    """Numbers use constantValue unless they'd lose precision as a double."""
    self.assertEqual({
        'constantValue': 112233
    }, _cloud_api_utils.encode_number_as_cloud_value(112233))
    # Large integer not representable as double.
    self.assertEqual({
        'integerValue': '112233445566778899'
    }, _cloud_api_utils.encode_number_as_cloud_value(112233445566778899))
    # Large integer representable as double.
    self.assertEqual({
        'constantValue': 3.402823669209385e+38
    }, _cloud_api_utils.encode_number_as_cloud_value(1 << 128))
  def test_convert_algorithms(self):
    """convert_algorithms reshapes the v1 'algorithms' list into a dict.

    Each entry is keyed by its short name (the part after 'algorithms/');
    missing descriptions become empty strings, argument metadata is
    renamed, and hidden/deprecated flags are carried across.
    """
    result = _cloud_api_utils.convert_algorithms({
        'algorithms': [
            {
                'name': 'algorithms/algNoArgs',
                'description': 'desc',
                'returnType': 'ret'
            },
            {
                'name': 'algorithms/algNoDesc',
                'arguments': [
                    {
                        'argumentName': 'argNoDesc'
                    },
                    {
                        'argumentName': 'argOptional',
                        'description': 'descArg',
                        'type': 't',
                        'optional': True,
                        'defaultValue': [1, 2]
                    }]
            },
            {
                'name': 'algorithms/algHidden',
                'description': 'desc',
                'returnType': 'ret',
                'hidden': True
            },
            {
                'name': 'algorithms/algDeprecated',
                'description': 'desc',
                'returnType': 'ret',
                'deprecated': True,
                'deprecationReason': 'reason'
            }]
    })
    self.assertEqual({
        'algNoArgs': {
            'description': 'desc',
            'returns': 'ret',
            'args': []
        },
        'algNoDesc': {
            'description': '',
            'returns': '',
            'args': [
                {
                    'name': 'argNoDesc',
                    'type': '',
                    'description': ''
                },
                {
                    'name': 'argOptional',
                    'description': 'descArg',
                    'type': 't',
                    'optional': True,
                    'default': [1, 2]
                }]
        },
        'algHidden': {
            'description': 'desc',
            'returns': 'ret',
            'args': [],
            'hidden': True
        },
        'algDeprecated': {
            'description': 'desc',
            'returns': 'ret',
            'args': [],
            'deprecated': 'reason'
        }
    }, result)
def test_convert_to_image_file_format(self):
self.assertEqual('AUTO_JPEG_PNG',
_cloud_api_utils.convert_to_image_file_format(None))
self.assertEqual('AUTO_JPEG_PNG',
|
FlashXT/XJTU_WorkLog | 2017.10/Engineering/PythonCrawler/Crawler_cookielib2.py | Python | gpl-3.0 | 692 | 0.018456 | #coding=utf-8
#2017.10.6,Flash,cookielib
# Fetches http://www.baidu.com and persists the cookies it sets to a
# Mozilla-format cookie file (Python 2 / urllib2 script).
import urllib2
import cookielib
filename="cookiebd.txt"
# Create a MozillaCookieJar instance to hold the cookies (backed by `filename`).
cookie = cookielib.MozillaCookieJar(filename)
# Build a cookie handler from urllib2's HTTPCookieProcessor.
handler = urllib2.HTTPCookieProcessor(cookie)
# Build an opener from the handler.
opener = urllib2.build_opener(handler)
# opener.open() behaves like urllib2.urlopen(); a Request object also works.
response = opener.open("http://www.baidu.com")
#for item in cookie:
#    print "Name = " +item.name
#    print "Value = " + item.value
# Save the collected cookies to the file (keep session/expired entries too).
cookie.save(ignore_discard=True, ignore_expires=True)
print "OK !"
|
guanxiLi/django_check | d3jscluster/cluster/rewardprocess.py | Python | apache-2.0 | 373 | 0.034853 | imp | ort threading
import time
import os
class CheckThread(threading.Thread):
    """Background thread that resets and rewrites the check log file.

    The thread's work is done in :meth:`run`, which delegates to
    :meth:`check_reward` with the ``files`` list passed at construction.
    """

    # Single place for the log location used by check_reward().
    LOG_PATH = "./cluster/log/checklog.log"

    def __init__(self, files):
        threading.Thread.__init__(self)
        # NOTE(review): `files` is stored but not yet used by check_reward;
        # presumably reward data to process — confirm intended use.
        self.files = files

    def check_reward(self, files):
        """Truncate the check log and write a marker line.

        Fixes two defects in the original: ``os.remove`` raised ``OSError``
        when the log did not exist yet, and the opened file handle was
        never closed.
        """
        try:
            os.remove(self.LOG_PATH)
        except OSError:
            # First run (or already removed): nothing to delete.
            pass
        # 'with' guarantees the handle is flushed and closed.
        with open(self.LOG_PATH, 'a') as check_log:
            check_log.write("hehehehhfffff\n")

    def run(self):
        # Thread entry point: perform one check pass.
        self.check_reward(self.files)
alerta/alerta-contrib | integrations/consul/consulalerta.py | Python | mit | 2,770 | 0.002888 | #!/usr/bin/env python
import json
import os
import consul
import sys
import time
from alertaclient.api import Client
# Connection parameters for the local Consul agent (overridable via env).
CONSUL_HOST = os.environ.get('CONSUL_HOST', '127.0.0.1')
CONSUL_PORT = int(os.environ.get('CONSUL_PORT', 8500))
client = consul.Consul(host=CONSUL_HOST, port=CONSUL_PORT, token=None, scheme='http', consistency='default', dc=None, verify=True)
# The Consul watch handler pipes the check results as JSON on stdin.
j = json.load(sys.stdin)
print("Request:")
print(j)
# Required settings: Alerta endpoint and API key must exist in the KV store.
# kv.get() returns an (index, value) pair; value is None for a missing key,
# so indexing it raises and we exit.
try:
    url = client.kv.get('alerta/apiurl')[1]['Value']
except Exception:
    print("No URL defined, exiting")
    sys.exit(1)
try:
    key = client.kv.get('alerta/apikey')[1]['Value']
except Exception:
    print("No key defined, exiting")
    sys.exit(1)
# Optional settings: a missing key yields None[1] -> TypeError, so each
# falls back to a hard-coded default.
try:
    max_retries = int(client.kv.get('alerta/max_retries')[1]['Value'])
except TypeError:
    print("No value defined, using default")
    max_retries = 3
try:
    sleep = int(client.kv.get('alerta/sleep')[1]['Value'])
except TypeError:
    print("No value defined, using default")
    sleep = 2
try:
    timeout = int(client.kv.get('alerta/timeout')[1]['Value'])
except TypeError:
    print("No value defined, using default")
    timeout = 900
try:
    origin = client.kv.get('alerta/origin')[1]['Value']
except TypeError:
    print("No value defined, using default")
    origin = "consul"
try:
    alerttype = client.kv.get('alerta/alerttype')[1]['Value']
except TypeError:
    print("No value defined, using default")
    alerttype = "ConsulAlert"
api = Client(endpoint=url, key=key)
# Map Consul check statuses onto Alerta severities.
SEVERITY_MAP = {
    'critical': 'critical',
    'warning': 'warning',
    'passing': 'ok',
}
def createalert( data ):
    """Send one Alerta alert for a single Consul check result.

    ``data`` is one entry of the stdin payload; the keys read here are
    'Node', 'CheckId', 'Status' and 'Output'. The target environment is
    looked up per-node in Consul, then falls back to the configured
    default, then to "Production". The send is retried up to
    ``max_retries`` times with ``sleep`` seconds between attempts.
    """
    try:
        environment = client.kv.get('alerta/env/{0}'.format(data['Node']))[1]['Value']
    except Exception:
        try:
            environment = client.kv.get('alerta/defaultenv')[1]['Value']
        except Exception:
            environment = "Production"
    for _ in range(max_retries):
        try:
            print("Response:")
            response = api.send_alert(
                resource=data['Node'],
                event=data['CheckId'],
                value=data['Status'],
                correlate=SEVERITY_MAP.keys(),
                environment=environment,
                service=[data['CheckId']],
                severity=SEVERITY_MAP[data['Status']],
                text=data['Output'],
                timeout=timeout,
                origin=origin,
                type=alerttype
            )
            print(response)
        except Exception as e:
            # Send failed: log, back off, and retry.
            print("HTTP Error: {}".format(e))
            time.sleep(sleep)
            continue
        else:
            # Success: stop retrying.
            break
    else:
        # All retries exhausted without a successful send.
        print("api is down")
def main():
    """Create one Alerta alert for every Consul check result read from stdin.

    Iterates the parsed JSON payload directly instead of the original
    ``enumerate()``/index dance, which re-indexed ``j[i]`` for each position.
    """
    for check in j:
        createalert(check)
|
gencer/python-phonenumbers | python/phonenumbers/shortdata/region_KM.py | Python | apache-2.0 | 547 | 0.009141 | """Auto-generated file, do not edit by hand. KM m | etadata"""
from ..phonemetadata import NumberFormat, PhoneNumberDesc, PhoneMetadata
# Short-number metadata for the Comoros (KM): two-digit codes only, with
# 17 and 18 recognized as emergency/short codes.
PHONE_METADATA_KM = PhoneMetadata(id='KM', country_code=None, international_prefix=None,
    general_desc=PhoneNumberDesc(national_number_pattern='1\\d', possible_length=(2,)),
    emergency=PhoneNumberDesc(national_number_pattern='1[78]', example_number='17', possible_length=(2,)),
    short_code=PhoneNumberDesc(national_number_pattern='1[78]', example_number='17', possible_length=(2,)),
    short_data=True)
|
haystack/eyebrowse-server | api/migrations/0046_auto__add_unique_ratings_user_page.py | Python | mit | 16,679 | 0.008394 | # -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    def forwards(self, orm):
        """Apply: enforce at most one Ratings row per (user, page) pair."""
        # Adding unique constraint on 'Ratings', fields ['user', 'page']
        db.create_unique('api_ratings', ['user_id', 'page_id'])
    def backwards(self, orm):
        """Revert: drop the (user, page) unique constraint again."""
        # Removing unique constraint on 'Ratings', fields ['user', 'page']
        db.delete_unique('api_ratings', ['user_id', 'page_id'])
models = {
'api.blacklistitem': {
'Meta': {'unique_together': "(('user', 'url'),)", 'object_name': 'BlackListItem'},
'date_created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2018, 5, 11, 0, 0)'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'port': ('django.db.models.fields.IntegerField', [], {'default': '80'}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '200'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'api.chatmessage': {
'Meta': {'object_name': 'ChatMessage'},
'author': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'author'", 'to': "orm['auth.User']"}),
'date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'message': ('django.db.models.fields.CharField', [], {'max_length': '2000'}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '300'})
},
'api.domain': {
'Meta': {'object_name': 'Domain'},
'agg_score': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '100'}),
'url': ('django.db.models.fields.URLField', [], {'unique': 'True', 'max_length': '200'})
},
'api.eyehistory': {
'Meta': {'object_name': 'EyeHistory'},
'domain': ('django.db.models.fields.URLField', [], {'default': "''", 'max_length': '2000'}),
'end_event': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '40'}),
'end_time': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'favIconUrl': ('django.db.models.fields.URLField', [], {'default': "''", 'max_length': '2000'}),
'favicon_url': ('django.db.models.fields.TextField', [], {'default': "''"}),
'humanize_time': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '200'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'page': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['api.Page']", 'null': 'True', 'on_delete': 'models.SET_NULL'}),
'src': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '40'}),
'start_event': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '40'}),
'start_time': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '2000'}),
'total_time': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'url': ('django.db.models.fields.URLField', [], {'default': "''", 'max_length': '2000'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'api.eyehistorymessage': {
'Meta': {'ordering': "['-post_time']", 'object_name': 'EyeHistoryMessage'},
'eyehistory': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['api.EyeHistory']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
'highlight': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['api.Highlight']", 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'message': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '300'}),
'parent_comment': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'po | st_time': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'})
},
' | api.eyehistoryraw': {
'Meta': {'object_name': 'EyeHistoryRaw'},
'domain': ('django.db.models.fields.URLField', [], {'default': "''", 'max_length': '2000'}),
'end_event': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '40'}),
'end_time': ('django.db.models.fields.DateTimeField', [], {}),
'favIconUrl': ('django.db.models.fields.URLField', [], {'default': "''", 'max_length': '2000'}),
'favicon_url': ('django.db.models.fields.TextField', [], {'default': "''"}),
'humanize_time': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '200'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'src': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '40'}),
'start_event': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '40'}),
'start_time': ('django.db.models.fields.DateTimeField', [], {}),
'title': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '2000'}),
'total_time': ('django.db.models.fields.IntegerField', [], {}),
'url': ('django.db.models.fields.URLField', [], {'default': "''", 'max_length': '2000'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'api.highlight': {
'Meta': {'object_name': 'Highlight'},
'date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'highlight': ('django.db.models.fields.CharField', [], {'max_length': '10000'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'page': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['api.Page']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True', 'blank': 'True'})
},
'api.mutelist': {
'Meta': {'object_name': 'MuteList'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '300', 'null': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}),
'word': ('django.db.models.fields.URLField', [], {'max_length': '300', 'null': 'True'})
},
'api.page': {
'Meta': {'object_name': 'Page'},
'agg_score': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'default': "''"}),
'domain': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['api.Domain']"}),
'favIconUrl': ('django.db.models.fields.URLField', [], {'default': "''", 'max_length': '2000'}),
'favicon_url': ('django.db.models.fields.TextField', [], {'default': "''"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'img_url': ('django.db.models.fields.URLField', [], {'default': "''", 'max_length': '2000'}),
'title': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '2000'}),
'url': ('django.db.models.fields.URLF |
alejandrobernardis/tornado-heroku | main.py | Python | mit | 385 | 0 | #!/usr/bin/env python2.7
# -*- coding: utf-8 -*-
# Copyright (c) 2015 Asumi Kamikaze Inc.
# Licensed under | the MIT License.
# Author: Alejandro M. Bernardis
# Email: alejandro (dot) bernardis (at) asumikamikaze (dot) com
# Created: 08/Jun/2015 12:42
import os
# Global settin | gs
os.environ.setdefault('SERVER_SETTINGS_MODULE', 'settings_example')
if __name__ == '__main__':
pass
|
memtoko/django | django/db/utils.py | Python | bsd-3-clause | 9,585 | 0.000939 | import os
import pkgutil
from importlib import import_module
from threading import local
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.utils import six
from django.utils._os import upath
from django.utils.functional import cached_property
from django.utils.module_loading import import_string
DEFAULT_DB_ALIAS = 'default'
DJANGO_VERSION_PICKLE_KEY = '_django_version'
# Django's common database exception hierarchy, mirroring PEP 249. On
# Python 2 the root derives from StandardError for backward compatibility.
class Error(Exception if six.PY3 else StandardError):
    """Base class for all Django database exceptions (PEP 249 Error)."""
    pass
class InterfaceError(Error):
    """Error related to the database interface rather than the database."""
    pass
class DatabaseError(Error):
    """Error related to the database itself."""
    pass
class DataError(DatabaseError):
    """Problems with the processed data (e.g. division by zero)."""
    pass
class OperationalError(DatabaseError):
    """Error in the database's operation, not necessarily the programmer's fault."""
    pass
class IntegrityError(DatabaseError):
    """Relational integrity violated (e.g. unique or foreign key constraint)."""
    pass
class InternalError(DatabaseError):
    """The database encountered an internal error."""
    pass
class ProgrammingError(DatabaseError):
    """Programming error (e.g. table not found, SQL syntax error)."""
    pass
class NotSupportedError(DatabaseError):
    """A method or API is not supported by the database."""
    pass
class DatabaseErrorWrapper(object):
    """
    Context manager and decorator that re-throws backend-specific database
    exceptions using Django's common wrappers.
    """
    def __init__(self, wrapper):
        """
        wrapper is a database wrapper.
        It must have a Database attribute defining PEP-249 exceptions.
        """
        self.wrapper = wrapper
    def __enter__(self):
        # Nothing to set up; all work happens on exit.
        pass
    def __exit__(self, exc_type, exc_value, traceback):
        if exc_type is None:
            return
        # Walk the Django exception types from most to least specific and
        # re-raise the first whose backend counterpart matches. Order
        # matters: DatabaseError/Error would shadow their subclasses.
        for dj_exc_type in (
                DataError,
                OperationalError,
                IntegrityError,
                InternalError,
                ProgrammingError,
                NotSupportedError,
                DatabaseError,
                InterfaceError,
                Error,
        ):
            db_exc_type = getattr(self.wrapper.Database, dj_exc_type.__name__)
            if issubclass(exc_type, db_exc_type):
                dj_exc_value = dj_exc_type(*exc_value.args)
                dj_exc_value.__cause__ = exc_value
                # Only set the 'errors_occurred' flag for errors that may make
                # the connection unusable.
                if dj_exc_type not in (DataError, IntegrityError):
                    self.wrapper.errors_occurred = True
                # Re-raise as the Django type, preserving the traceback.
                six.reraise(dj_exc_type, dj_exc_value, traceback)
    def __call__(self, func):
        # Note that we are intentionally not using @wraps here for performance
        # reasons. Refs #21109.
        def inner(*args, **kwargs):
            with self:
                return func(*args, **kwargs)
        return inner
def load_backend(backend_name):
    """Import and return the ``base`` module of a database backend.

    ``backend_name`` is a dotted path such as 'django.db.backends.sqlite3'.
    If the import fails and the name is not one of the built-in backends,
    raise ImproperlyConfigured with the list of valid built-ins; otherwise
    re-raise the original ImportError (a genuine bug in the backend).
    """
    # Look for a fully qualified database backend name
    try:
        return import_module('%s.base' % backend_name)
    except ImportError as e_user:
        # The database backend wasn't found. Display a helpful error message
        # listing all possible (built-in) database backends.
        backend_dir = os.path.join(os.path.dirname(upath(__file__)), 'backends')
        try:
            builtin_backends = [
                name for _, name, ispkg in pkgutil.iter_modules([backend_dir])
                if ispkg and name != 'dummy']
        except EnvironmentError:
            builtin_backends = []
        if backend_name not in ['django.db.backends.%s' % b for b in
                                builtin_backends]:
            backend_reprs = map(repr, sorted(builtin_backends))
            error_msg = ("%r isn't an available database backend.\n"
                         "Try using 'django.db.backends.XXX', where XXX "
                         "is one of:\n    %s\nError was: %s" %
                         (backend_name, ", ".join(backend_reprs), e_user))
            raise ImproperlyConfigured(error_msg)
        else:
            # If there's some other error, this must be an error in Django
            raise
class ConnectionDoesNotExist(Exception):
    """Raised when an alias is requested that is not in settings.DATABASES."""
    pass
class ConnectionHandler(object):
    """Lazy registry of database connections, keyed by alias.

    Connections are created on first ``handler[alias]`` access and stored
    in a ``threading.local``, so each thread gets its own connection per
    alias.
    """
    def __init__(self, databases=None):
        """
        databases is an optional dictionary of database definitions (structured
        like settings.DATABASES).
        """
        self._databases = databases
        # Thread-local storage: connections are never shared across threads.
        self._connections = local()
    @cached_property
    def databases(self):
        """Return the database definitions, defaulting to settings.DATABASES.

        An empty dict is replaced by a single 'default' dummy backend;
        a missing 'default' alias is a configuration error.
        """
        if self._databases is None:
            self._databases = settings.DATABASES
        if self._databases == {}:
            self._databases = {
                DEFAULT_DB_ALIAS: {
                    'ENGINE': 'django.db.backends.dummy',
                },
            }
        if DEFAULT_DB_ALIAS not in self._databases:
            raise ImproperlyConfigured("You must define a '%s' database" % DEFAULT_DB_ALIAS)
        return self._databases
    def ensure_defaults(self, alias):
        """
        Puts the defaults into the settings dictionary for a given connection
        where no settings is provided.
        """
        try:
            conn = self.databases[alias]
        except KeyError:
            raise ConnectionDoesNotExist("The connection %s doesn't exist" % alias)
        conn.setdefault('ATOMIC_REQUESTS', False)
        conn.setdefault('AUTOCOMMIT', True)
        conn.setdefault('ENGINE', 'django.db.backends.dummy')
        # An empty or bare-prefix ENGINE falls back to the dummy backend.
        if conn['ENGINE'] == 'django.db.backends.' or not conn['ENGINE']:
            conn['ENGINE'] = 'django.db.backends.dummy'
        conn.setdefault('CONN_MAX_AGE', 0)
        conn.setdefault('OPTIONS', {})
        conn.setdefault('TIME_ZONE', 'UTC' if settings.USE_TZ else settings.TIME_ZONE)
        for setting in ['NAME', 'USER', 'PASSWORD', 'HOST', 'PORT']:
            conn.setdefault(setting, '')
    def prepare_test_settings(self, alias):
        """
        Makes sure the test settings are available in the 'TEST' sub-dictionary.
        """
        try:
            conn = self.databases[alias]
        except KeyError:
            raise ConnectionDoesNotExist("The connection %s doesn't exist" % alias)
        test_settings = conn.setdefault('TEST', {})
        for key in ['CHARSET', 'COLLATION', 'NAME', 'MIRROR']:
            test_settings.setdefault(key, None)
    def __getitem__(self, alias):
        """Return the connection for ``alias``, creating it on first access."""
        if hasattr(self._connections, alias):
            return getattr(self._connections, alias)
        self.ensure_defaults(alias)
        self.prepare_test_settings(alias)
        db = self.databases[alias]
        backend = load_backend(db['ENGINE'])
        conn = backend.DatabaseWrapper(db, alias)
        # Cache on the thread-local so subsequent lookups reuse it.
        setattr(self._connections, alias, conn)
        return conn
    def __setitem__(self, key, value):
        setattr(self._connections, key, value)
    def __delitem__(self, key):
        delattr(self._connections, key)
    def __iter__(self):
        # Iterating the handler yields the configured aliases.
        return iter(self.databases)
    def all(self):
        """Return (creating if necessary) the connections for every alias."""
        return [self[alias] for alias in self]
    def close_all(self):
        """Close every connection already opened on the current thread."""
        for alias in self:
            try:
                connection = getattr(self._connections, alias)
            except AttributeError:
                # Never opened on this thread: nothing to close.
                continue
            connection.close()
class ConnectionRouter(object):
    def __init__(self, routers=None):
        """
        If routers is not specified, will default to settings.DATABASE_ROUTERS.
        """
        # Stored raw; resolution of dotted paths happens lazily in `routers`.
        self._routers = routers
    @cached_property
    def routers(self):
        """Return the router instances, resolving dotted-path strings.

        Each entry of DATABASE_ROUTERS may be either an already-constructed
        router object or an import path to a router class, which is imported
        and instantiated here. Computed once thanks to @cached_property.
        """
        if self._routers is None:
            self._routers = settings.DATABASE_ROUTERS
        routers = []
        for r in self._routers:
            if isinstance(r, six.string_types):
                router = import_string(r)()
            else:
                router = r
            routers.append(router)
        return routers
def _router_func(action):
def _route_db(self, model, **hints):
chosen_db = None
for router in self.routers:
try:
method = getattr(router, action)
except AttributeError:
# If the router doesn't have a method, skip to the next one.
pass
else:
chosen_db = method(model, **hints)
if chosen_db:
return chosen_db
instance = hints.get('instance')
if instance is not None and instance._state.db:
return instance._state.db
|
WubbaDuck/OverwatchTracker | graph.py | Python | gpl-3.0 | 3,048 | 0.00164 | import sys
from PyQt4.QtGui import *
from PyQt4.QtCore import *
import pyqtgraph as pg
import numpy as np
class SRGraph(QWidget):
    """Widget that displays an SR (skill-rating) plot using pyqtgraph.

    Currently plots 1000 random (x, y) points as placeholder data.
    """

    def __init__(self):
        super(SRGraph, self).__init__()
        # Initialize the RGB/white channel values.
        self.redVal = 0
        self.greenVal = 0
        self.blueVal = 0
        self.whiteVal = 0
        # Build the GUI.
        self.home()

    def home(self):
        """Build the widget layout: a single pyqtgraph plot of demo data.

        NOTE(review): a large prototype block (save button, IP-address box,
        LED pulse controls) previously sat here inside a no-op triple-quoted
        string literal; it was never executed and has been removed.
        """
        self.grid = QGridLayout()
        self.grid.setSpacing(10)
        x = np.random.normal(size=1000)
        y = np.random.normal(size=1000)
        plotWidget = pg.PlotWidget()
        plotWidget.plot(x, y)
        self.grid.addWidget(plotWidget)
        self.setLayout(self.grid)  # Set the GUI layout.

    def ledPulseState(self, state):
        """Enable/disable the LED pulse setting widgets together.

        NOTE(review): self.ledPulseLabel / self.ledPulseSpeed are only
        created by the removed prototype code; calling this as-is would
        raise AttributeError — confirm before wiring it up.
        """
        self.ledPulseLabel.setEnabled(state)
        self.ledPulseSpeed.setEnabled(state)

    def saveSettings(self):
        """Button-press handler placeholder; persistence not implemented."""
        pass
|
elainenaomi/sciwonc-dataflow-examples | dissertation2017/Experiment 2/instances/11_2_wikiflow_1sh_1s_annot/sessioncompute_3/SessionCompute_3.py | Python | gpl-3.0 | 2,784 | 0.001796 | #!/usr/bin/env python
from sciwonc.dataflow.DataStoreClient import DataStoreClient
import ConfigDB_SessionCompute_3
imp | ort pprint
# connector and config
# Groups each contributor's Wikipedia edits into "sessions": consecutive
# edits less than one hour apart belong to the same session. One summary
# document per session is written back to the datastore. (Python 2 script.)
# connector and config
client = DataStoreClient("mongodb", ConfigDB_SessionCompute_3)
config = ConfigDB_SessionCompute_3
# according to config
dataList = client.getData() # return an array of docs (like a csv reader)
output = []
ONE_HOUR_IN_SECONDS = 3600
if(dataList):
    for i in dataList:
        # One entry per contributor; i['data'] is an iterator of edit docs.
        contributor_username = i[config.COLUMN]
        current_user = contributor_username
        start_time = None
        end_time = None
        duration = None
        last_start_timestamp = None
        count = 1
        if contributor_username:
            print "\n\n"
            print contributor_username.encode('utf-8')
            while True:
                # NOTE(review): assumes the iterator yields None when
                # exhausted rather than raising StopIteration — confirm
                # against DataStoreClient.
                doc = i['data'].next()
                if doc is None:
                    break;
                print doc["timestamp"]
                if start_time is None:
                    # First edit opens a new session.
                    start_time = float(doc["timestamp"])
                if end_time is None:
                    end_time = start_time + ONE_HOUR_IN_SECONDS
                else:
                    if float(doc["timestamp"]) <= end_time:
                        # Within one hour of the previous edit: extend the
                        # current session window.
                        end_time = float(doc["timestamp"]) + ONE_HOUR_IN_SECONDS
                        count += 1
                    else:
                        # Gap exceeded one hour: close the session and
                        # start a new one at this edit.
                        new_doc = {}
                        new_doc["start time"] = start_time
                        new_doc["end time"] = end_time
                        new_doc["duration"] = (end_time - start_time)
                        new_doc["edition_counts"] = count
                        new_doc["contributor_username"] = contributor_username
                        output.append(new_doc)
                        start_time = float(doc["timestamp"])
                        end_time = start_time + ONE_HOUR_IN_SECONDS
                        count = 1
            if start_time:
                # Flush the trailing (still-open) session, if any.
                new_doc = {}
                new_doc["start time"] = start_time
                new_doc["end time"] = end_time
                new_doc["duration"] = (end_time - start_time)
                new_doc["edition_counts"] = count
                new_doc["contributor_username"] = contributor_username
                output.append(new_doc)
pprint.pprint(output)
clientOutput = DataStoreClient("mongodb", ConfigDB_SessionCompute_3)
clientOutput.saveData(output)
# import datetime
# print(
# datetime.datetime.fromtimestamp(
# int("1176585742")
# ).strftime('%Y-%m-%d %H:%M:%S')
# )
# {
# start time:
# end time:
# duration:
# user:
# }
# import time
# timestamp2 = time.mktime(d.timetuple()) # DO NOT USE IT WITH UTC DATE
# datetime.fromtimestamp(timestamp2)
# datetime.datetime(2011, 1, 1, 0, 0)
|
simula67/Coding | python/AAD PoC/naive_fibinocci.py | Python | gpl-3.0 | 241 | 0.024896 | #!/usr/bin/python
import sys
def fib(n):
    """Return the (n-1)-th Fibonacci number by naive double recursion.

    Base cases: fib(1) == 0 and fib(2) == 1 (and, as in the original,
    n <= 0 yields n - 1). Runtime is intentionally exponential — this
    script is a proof-of-concept for algorithm analysis.
    """
    return n - 1 if n <= 2 else fib(n - 1) + fib(n - 2)
# CLI entry point: expects exactly one argument, the term index to compute.
if ( len(sys.argv) == 2 ):
    print fib(int(sys.argv[1]))
else:
    print "Usage : "+sys.argv[0]+" <term required>"
|
baigk/compass-core | compass/hdsdiscovery/vendors/appliance/plugins/mac.py | Python | apache-2.0 | 1,508 | 0 | # Copyright 2014 Huawei Technologies Co. Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Compass Appliance Mac module."""
from compass.hdsdiscovery import base
from compass.utils import setting_wrapper as setting
from compass.utils import util
import logging
CLASS_NAME = "Mac"
class Mac(base.BaseSnmpMacPlugin):
    """Processes MAC address."""
    def __init__(self, host, credential):
        # Only the host is needed; credentials are unused for the appliance.
        self.host = host
        # self.credential = credential
        # return
    def scan(self):
        """Implemnets the scan method in BasePlugin class.
        .. note::
           Dummy scan function for compass appliance.
           Returns fixed mac addresses.
        """
        mac_list = None
        machine_lists = util.load_configs(setting.MACHINE_LIST_DIR)
        # Search every config file's MACHINE_LIST for an entry keyed by this
        # host; the loop does not break, so the last match wins. Returns
        # None when no config mentions the host.
        for items in machine_lists:
            for item in items['MACHINE_LIST']:
                for k, v in item.items():
                    if k == self.host:
                        mac_list = v
        return mac_list
|
0todd0000/spm1d | spm1d/stats/normality/__init__.py | Python | gpl-3.0 | 637 | 0.032967 |
'''
Normality tests for 1D data
'''
# Copyright (C) 2016 Todd Pataky
from . import k2,sw
def dagostinoK2(x):
    """Run the D'Agostino-Pearson K2 normality test on residuals of x."""
    return k2.residuals(x)
def shapirowilk(x):
    """Run the Shapiro-Wilk normality test on residuals of x."""
    return sw.residuals(x)
# Convenience aliases: expose the K2 variants of every design directly
# from this package, so callers can write e.g. normality.anova1(...).
residuals = k2.residuals
anova1 = k2.anova1
anova1rm = k2.anova1rm
anova2 = k2.anova2
anova2nested = k2.anova2nested
anova2onerm = k2.anova2onerm
anova2rm = k2.anova2rm
anova3 = k2.anova3
anova3nested = k2.anova3nested
anova3onerm = k2.anova3onerm
anova3tworm = k2.anova3tworm
anova3rm = k2.anova3rm
regress = k2.regress
ttest = k2.ttest
ttest_paired = k2.ttest_paired
ttest2 = k2.ttest2
|
nop33/indico | indico/modules/rb/models/room_bookable_hours.py | Python | gpl-3.0 | 1,910 | 0 | # This file is part of Indico.
# Copyright (C) 2002 - 2017 European Organization for Nuclear Research (CERN).
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# Indico is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without | even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PAR | TICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Indico; if not, see <http://www.gnu.org/licenses/>.
from datetime import time
from indico.core.db import db
from indico.util.string import return_ascii
class BookableHours(db.Model):
    """A daily time window during which a room may be booked.

    Composite primary key: (start_time, end_time, room_id), so a room can
    have several bookable windows per day.
    """
    __tablename__ = 'room_bookable_hours'
    __table_args__ = {'schema': 'roombooking'}
    # Window start (time of day).
    start_time = db.Column(
        db.Time,
        nullable=False,
        primary_key=True
    )
    # Window end (time of day); midnight is treated as end-of-day.
    end_time = db.Column(
        db.Time,
        nullable=False,
        primary_key=True
    )
    # Owning room.
    room_id = db.Column(
        db.Integer,
        db.ForeignKey('roombooking.rooms.id'),
        primary_key=True,
        nullable=False
    )
    # relationship backrefs:
    # - room (Room.bookable_hours)
    @return_ascii
    def __repr__(self):
        return u'<BookableHours({0}, {1}, {2})>'.format(
            self.room_id,
            self.start_time,
            self.end_time
        )
    def fits_period(self, st, et):
        """Return True if the [st, et] period lies entirely inside this window."""
        st = _tuplify(st, False)
        et = _tuplify(et, True)
        period_st = _tuplify(self.start_time, False)
        period_et = _tuplify(self.end_time, True)
        return period_st <= st and period_et >= et
def _tuplify(t, end):
if end and t == time(0):
return 24, 0
return t.hour, t.minute
|
NilsGuo/nilsblog | app/main/__init__.py | Python | mit | 243 | 0.00823 | # -*- coding:utf-8 -*-
from flask import Blueprint
main = Blue | print('main', __name__)
from . import views, errors
from ..models import Permission
@main.app_context_processor
def inject_permissions():
    """Make the Permission class available in every template context."""
    return dict(Permission=Permission)
|
florensacc/snn4hrl | algos/npo_snn_rewards.py | Python | mit | 16,007 | 0.004123 | import collections
import numpy as np
import theano
import theano.tensor as TT
import rllab.misc.logger as logger
from rllab.algos.batch_polopt import BatchPolopt, BatchSampler
from rllab.algos.npo import NPO
from rllab.misc import ext
from rllab.misc.overrides import overrides
from rllab.sampler import parallel_sampler
from sandbox.snn4hrl.distributions.categorical import from_index, from_onehot
from sandbox.snn4hrl.regressors.latent_regressor import Latent_regressor
from sandbox.snn4hrl.sampler.utils import rollout
from sandbox.snn4hrl.sampler.utils_snn import rollout_snn
class BatchSampler_snn(BatchSampler):
"""
Allows giving bonus for MI and other bonus_evaluators, hallucinate if needed (not used in the paper)
and switching latent every certain number of time-steps.
"""
def __init__(self,
*args, # this collects algo, passing it to BatchSampler in the super __init__
bonus_evaluator=None, # list of bonus evals
reward_coef_bonus=None, # this is the total bonus from the bonus evaluator. it's a LIST
latent_regressor=None, # Latent_regressor object for MI. Provides logging AND bonus if needed
reward_regressor_mi=0, # this is for the regressor bonus, not the grid
self_normalize=False, # this is for the hallucinated samples importance weight
switch_lat_every=0,
**kwargs
):
super(BatchSampler_snn, self).__init__(*args, **kwargs) # this should be giving a self.algo
self.bonus_evaluator = bonus_evaluator if bonus_evaluator else []
self.reward_coef_bonus = reward_coef_bonus if reward_coef_bonus else [0] * len(self.bonus_evaluator)
self.reward_regressor_mi = reward_regressor_mi
self.latent_regressor = latent_regressor
self.self_normalize = self_normalize
self.switch_lat_every = switch_lat_every
def _worker_collect_one_path_snn(self, G, max_path_length, switch_lat_every=0, scope=None):
G = parallel_sampler._get_scoped_G(G, scope)
path = rollout_snn(G.env, G.policy, max_path_length, switch_lat_every=switch_lat_every)
return path, len(path["rewards"])
def sample_paths(
self,
policy_params,
max_samples,
max_path_length=np.inf,
env_params=None,
scope=None):
"""
:param policy_params: parameters for the policy. This will be updated on each worker process
:param max_samples: desired maximum number of samples to be collected. The actual number of collected samples
might be greater since all trajectories will be rolled out either until termination or until max_path_length is
reached
:param max_path_length: horizon / maximum length of a single trajectory
:return: a list of collected paths
"""
parallel_sampler.singleton_pool.run_each(
parallel_sampler._worker_set_policy_params,
[(policy_params, scope)] * parallel_sampler.singleton_pool.n_parallel
)
if env_params is not None:
parallel_sampler.singleton_pool.run_each(
parallel_sampler._worker_set_env_params,
[(env_params, scope)] * parallel_sampler.singleton_pool.n_parallel
)
return parallel_sampler.singleton_pool.run_collect(
| # parallel_sampler._worker_collect_one_path_snn, # now this is defined | in parallel_sampler also!
self._worker_collect_one_path_snn, # now this is defined in parallel_sampler also!
threshold=max_samples,
args=(max_path_length, self.switch_lat_every, scope),
show_prog_bar=True
)
def obtain_samples(self, itr):
cur_params = self.algo.policy.get_param_values()
paths = self.sample_paths( # use the sample function above
policy_params=cur_params,
max_samples=self.algo.batch_size,
max_path_length=self.algo.max_path_length,
scope=self.algo.scope,
)
if self.algo.whole_paths:
return paths
else:
paths_truncated = parallel_sampler.truncate_paths(paths, self.algo.batch_size)
return paths_truncated
@overrides
def process_samples(self, itr, paths):
# count visitations or whatever the bonus wants to do. This should not modify the paths
for b_eval in self.bonus_evaluator:
logger.log("fitting bonus evaluator before processing...")
b_eval.fit_before_process_samples(paths)
logger.log("fitted")
# save real undiscounted reward before changing them
undiscounted_returns = [sum(path["rewards"]) for path in paths]
logger.record_tabular('TrueAverageReturn', np.mean(undiscounted_returns))
for path in paths:
path['true_rewards'] = list(path['rewards'])
# If using a latent regressor (and possibly adding MI to the reward):
if isinstance(self.latent_regressor, Latent_regressor):
with logger.prefix(' Latent_regressor '):
self.latent_regressor.fit(paths)
if self.reward_regressor_mi:
for i, path in enumerate(paths):
path['logli_latent_regressor'] = self.latent_regressor.predict_log_likelihood(
[path], [path['agent_infos']['latents']])[0] # this is for paths usually..
path['rewards'] += self.reward_regressor_mi * path[
'logli_latent_regressor'] # the logli of the latent is the variable of the mutual information
# for the extra bonus
for b, b_eval in enumerate(self.bonus_evaluator):
for i, path in enumerate(paths):
bonuses = b_eval.predict(path)
path['rewards'] += self.reward_coef_bonus[b] * bonuses
real_samples = ext.extract_dict(
BatchSampler.process_samples(self, itr, paths),
# I don't need to process the hallucinated samples: the R, A,.. same!
"observations", "actions", "advantages", "env_infos", "agent_infos"
)
real_samples["importance_weights"] = np.ones_like(real_samples["advantages"])
return real_samples
def log_diagnostics(self, paths):
for b_eval in self.bonus_evaluator:
b_eval.log_diagnostics(paths)
if isinstance(self.latent_regressor, Latent_regressor):
with logger.prefix(' Latent regressor logging | '):
self.latent_regressor.log_diagnostics(paths)
class NPO_snn(NPO):
"""
Natural Policy Optimization for SNNs:
- differentiable reward bonus for L2 or KL between conditional distributions (commented out: not used in paper).
- allows to give rewards for serveral divergence metrics among conditional distributions (through BatchSampler_snn)
- logg individually for every latent as well as some "hierarchy" metric or the deterministic policy
"""
def __init__(
self,
# some extra logging. What of this could be included in the sampler?
log_individual_latents=False, # to log the progress of each individual latent
log_deterministic=False, # log the performance of the policy with std=0 (for each latent separate)
log_hierarchy=False,
bonus_evaluator=None,
reward_coef_bonus=None,
latent_regressor=None,
reward_regressor_mi=0, # kwargs to the sampler (that also processes)
switch_lat_every=0,
**kwargs):
# some logging
self.log_individual_latents = log_individual_latents
self.log_deterministic = log_deterministic
self.log_hierarchy = log_hierarchy
sampler_cls = BatchSampler_snn
sampler_args = {'switch_lat_every': switch_lat_every,
'latent_regressor': latent_regressor,
'bonus_evaluator': bonus_evaluator,
'reward_coef_bonus': reward_coef_bonus,
|
gentoo/layman | layman/overlays/overlay.py | Python | gpl-2.0 | 27,365 | 0.003545 | #!/usr/bin/python
# -*- coding: utf-8 -*-
################################################################################
# LAYMAN OVERLAY BASE CLASS
################################################################################
# File: overlay.py
#
# Base class for the different overlay types.
#
# Copyright:
# (c) 2005 - 2009 Gunnar Wrobel
# (c) 2009 Sebastian Pipping
# (c) 2009 Christian Groschupp
# (c) 2015 Devan Franchini
# Distributed under the terms of the GNU General Public License v2
#
# Author(s):
# Gunnar Wrobel <wrobel@gentoo.org>
# Sebastian Pipping <sebastian@pipping.org>
# Christian Groschupp <christian@groschupp.org>
# Devan Franchini <twitch153@gentoo.org>
#
'''Basic overlay class.'''
from __future__ import unicode_literals
__version__ = "0.2"
#===============================================================================
#
# Dependencies
#
#-------------------------------------------------------------------------------
import codecs
import locale
import os
import os.path
import re
import sys
import xml.etree.ElementTree as ET # Python 2.5
from layman.compatibility import encode
from layman.module import Modules, InvalidModuleName
from layman.utils import pad, terminal_width, get_encoding, encoder
#===============================================================================
#
# Constants
#
#-------------------------------------------------------------------------------
MOD_PATH = path = os.path.join(os.path.dirname(__file__), 'modules')
QUALITY_LEVELS = 'core|stable|testing|experimental|graveyard'.split('|')
WHITESPACE_REGEX = re.compile('\s+')
class Overlay(object):
''' Derive the real implementations from this.'''
def __init__(self, config, json=None, ovl_dict=None, xml=None, ignore=0):
self.config = config
self.output = config['output']
self.module_controller = Modules(path=MOD_PATH,
namepath='layman.overlays.modules',
output=self.output)
self._encoding_ = get_encoding(self.output)
if xml is not None:
self.from_xml(xml, ignore)
elif ovl_dict is not None:
self.from_dict(ovl_dict, ignore)
elif json is not None:
self.from_json(json, ignore)
def __eq__(self, other):
for i in ('descriptions', 'homepage', 'name', 'owners', 'priority',
'status'):
if getattr(self, i) != getattr(other, i):
return False
for i in self.sources + other.sources:
if not i in self.sources:
return False
if not i in other.sources:
return False
return True
def __ne__(self, other):
return not self.__eq__(other)
def add(self, base):
res = 1
first_s = True
self.output.debug('Overlay.add()', 5)
self.sources = self.filter_protocols(self.sources)
self.output.debug('Overlay.add(), filtered protocols, sources:' + str(self.sources), 5)
if not self.sources:
msg = 'Overlay.add() error: overlay "%(name)s" does not support '\
' the given\nprotocol(s) %(protocol)s and cannot be '\
'installed.'\
% {'name': self.name,
'protocol': str(self.config['protocol_filter'])}
self.output.error(msg)
return 1
for s in self.sources:
if not first_s:
self.output.info('\nTrying next source of listed sources...', 4)
try:
self.output.debug('Overlay.add(), s.add(base)', 5)
res = s.add(base)
if res == 0:
# Worked, throw other sources away
self.sources = [s]
self.output.debug('Overlay.add(), back from s.add(base)', 5)
break
except Exception as error:
self.output.warn(str(error), 4)
first_s = False
return res
def delete(self, base):
assert len(self.sources) == 1
return self.sources[0].delete(base)
def filter_protocols(self, sources):
'''
Filters any protocols not specified in self.config['protocol_filter']
from the overlay's sources.
'''
_sources = []
self.output.debug('Overlay.filter_protocols()', 5)
self.output.debug('Overlay.filter_protocols() filters:' + str(type(self.config['protocol_filter'])), 5)
if not self.config['protocol_filter'] and not self.config['protocol_filter'] == []:
self.output.debug('Overlay.filter_protocols() no protocol_filter, returning', 5)
return sources
self.output.debug('Overlay.filter_protocols() sources:' + str(sources), 5)
for source in sources:
self.output.debug('Overlay.filter_protocols() source:' + str(type(source)), 5)
self.output.debug('Overlay.filter_protocols() filters:' + str(self.config['protocol_filter']), 5)
for protocol in self.config['protocol_filter']:
self.output.debug('Overlay.filter_protocols() protocol: ' + protocol + ' ' + str(type(protocol)), 5)
protocol = protocol.lower()
#re.search considers "\+" as the literal "+".
if protocol == 'git+ssh':
protocol = 'git\+ssh'
protocol += '://'
if re.search('^' + protocol, source.src):
_sources.append(source)
self.output.debug('Overlay.filter_protocols(), returning sources' + str(_sources), 5)
return _sources
| def from_dict(self, overlay, ignore):
'''
Process an overlay dictionary definition
'''
msg = 'Overlay from_dict(); overlay %(ovl)s' % {'ovl': str(overlay)}
self.ou | tput.debug(msg, 6)
_name = overlay['name']
if _name != None:
self.name = encode(_name)
else:
msg = 'Overlay from_dict(), "name" entry missing from dictionary!'
raise Exception(msg)
if 'source' in overlay:
_sources = overlay['source']
else:
_sources = None
if _sources == None:
msg = 'Overlay from_dict(), "%(name)s" is missing a "source" '\
'entry!' % {'name': self.name}
raise Exception(msg)
def create_dict_overlay_source(source_):
_src, _type, _sub = source_
self.ovl_type = _type
try:
_class = self.module_controller.get_class(_type)
except InvalidModuleName:
_class = self.module_controller.get_class('stub')
_location = encode(_src)
if _sub:
self.branch = encode(_sub)
else:
self.branch = None
return _class(parent=self, config=self.config,
_location=_location, ignore=ignore)
self.sources = [create_dict_overlay_source(e) for e in _sources]
self.owners = []
if 'owner' in overlay:
for _owner in overlay['owner']:
owner = {}
if 'name' in _owner and _owner['name']:
owner['name'] = encode(_owner['name'])
else:
owner['name'] = None
if 'email' in _owner:
owner['email'] = encode(_owner['email'])
else:
owner['email'] = None
msg = 'Overlay from_dict(), "%(name)s" is missing an '\
'"owner.email" entry!' % {'name': self.name}
if not ignore:
raise Exception(msg)
elif ignore == 1:
self.output.warn(msg, 4)
self.owners.append(owner)
if 'description' in overlay:
self.descriptions = []
_descs = |
pombredanne/marshmallow-polyfield | tests/shapes.py | Python | apache-2.0 | 2,889 | 0 | # -*- coding: utf-8 -*-
from marshmallow import Schema, fields, post_load
class Shape(object):
def __init__(self, color):
self.color = color
def __eq__(self, other):
return self.__dict__ == other.__dict__
class ShapeSchema(Schema):
color = fields.Str(allow_none=True)
class Triangle(Shape):
def __init__(self, color, base, height):
super(Triangle, self).__init__(color)
self.base = base
self.height = height
class TriangleSchema(ShapeSchema):
base = fields.Int(required=True)
height = fields.Int(required=True)
@post_load
def make_object(self, data):
return Triangle(
color=data['color'],
base=data['base'],
height=data['height']
)
class Rectangle(Shape):
def __init__(self, color, length, width):
super(Rectangle, self).__init__(color)
self.length = length
self.width = width
class RectangleSchema(ShapeSchema):
length = fields.Int(required=True)
width = fields.Int(required=True)
@post_load
def make_object(self, data):
return Rectangle(
color=data['color'],
length=data['length'],
width=data['width']
)
def shape_schema_serialization_disambiguation(base_object, _):
class_to_schema = {
Rectangle.__name__: RectangleSchema,
Triangle.__name__: TriangleSchema
}
try:
return class_to_schema[base_object.__class__.__name__]()
except KeyError:
pass
raise TypeError("Could not detect type. "
"Did not have a base or a length. "
"Are you sure this is a shape?")
def shape_property_schema_serialization_disambiguation(base_object, obj):
type_to_schema = {
'rectangle': RectangleSchema,
'triangle': TriangleSchema
}
try:
return type_to_schema[obj.type]()
except KeyError:
pass
raise TypeError("Could not detect type. "
"Did not have a base or a length. "
"Are you sure this is a shape?")
def shape_schema_deserialization_disambiguation(object_dict, _):
if object_dict.get("base"):
return TriangleSchema()
elif object | _dict.get("length"):
return RectangleSchema()
raise TypeError("Could not detect type. "
"Did not have a base or a length. "
"Are yo | u sure this is a shape?")
def shape_property_schema_deserialization_disambiguation(object_dict, data):
type_to_schema = {
'rectangle': RectangleSchema,
'triangle': TriangleSchema
}
try:
return type_to_schema[data['type']]()
except KeyError:
pass
raise TypeError("Could not detect type. "
"Did not have a base or a length. "
"Are you sure this is a shape?")
|
liruhua99/awesome-python3-webapp | www/config_override.py | Python | gpl-3.0 | 135 | 0.014815 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
__author__ = 'Li Ruhua'
configs = {
'db' : {
| 'host' : ' | 127.0.0.1'
}
}
|
abougouffa/node-gyp | gyp/pylib/gyp/easy_xml_test.py | Python | mit | 3,271 | 0.008254 | #!/usr/bin/env python2
# Copyright (c) 2011 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
""" Unit tests for the easy_xml.py file. """
import gyp.easy_xml as easy_xml
import unittest
import StringIO
class TestSequenceFunctions(unittest.TestCase):
def setUp(self):
self.stderr = StringIO.StringIO()
def test_EasyXml_simple(self):
self.assertEqual(
easy_xml.XmlToString(['test']),
'<?xml version="1.0" encoding="utf-8"?><test/>')
self.assertEqual(
easy_xml.XmlToString(['test'], encoding='Windows-1252'),
'<?xml version="1.0" encoding="Windows-1252"?><test/>')
def test_EasyXml_simple_with_attributes(self):
self.assertEqual(
easy_xml.XmlToString(['test2', {'a': 'value1', 'b': 'value2'}]),
'<?xml version="1.0" encoding="utf-8"?><test2 a="value1" b="value2"/>')
def test_EasyXml_escaping(self):
original = '<test>\'"\r&\nfoo'
converted = '<test>\'"
&
foo'
converted_apos = converted.replace("'", ''')
self.assertEqual(
easy_xml.XmlToString(['test3', {'a': original}, original]),
'<?xml version="1.0" encoding="utf-8"?><test3 a="%s">%s</test3>' %
(converted, converted_apos))
def test_EasyXml_pretty(self):
self.assertEqual(
easy_xml.XmlToString(
['test3',
['GrandPar | ent',
['Parent1',
['Child']
],
['Parent2']
]
],
pretty=True),
| '<?xml version="1.0" encoding="utf-8"?>\n'
'<test3>\n'
' <GrandParent>\n'
' <Parent1>\n'
' <Child/>\n'
' </Parent1>\n'
' <Parent2/>\n'
' </GrandParent>\n'
'</test3>\n')
def test_EasyXml_complex(self):
# We want to create:
target = (
'<?xml version="1.0" encoding="utf-8"?>'
'<Project>'
'<PropertyGroup Label="Globals">'
'<ProjectGuid>{D2250C20-3A94-4FB9-AF73-11BC5B73884B}</ProjectGuid>'
'<Keyword>Win32Proj</Keyword>'
'<RootNamespace>automated_ui_tests</RootNamespace>'
'</PropertyGroup>'
'<Import Project="$(VCTargetsPath)\\Microsoft.Cpp.props"/>'
'<PropertyGroup '
'Condition="\'$(Configuration)|$(Platform)\'=='
'\'Debug|Win32\'" Label="Configuration">'
'<ConfigurationType>Application</ConfigurationType>'
'<CharacterSet>Unicode</CharacterSet>'
'</PropertyGroup>'
'</Project>')
xml = easy_xml.XmlToString(
['Project',
['PropertyGroup', {'Label': 'Globals'},
['ProjectGuid', '{D2250C20-3A94-4FB9-AF73-11BC5B73884B}'],
['Keyword', 'Win32Proj'],
['RootNamespace', 'automated_ui_tests']
],
['Import', {'Project': '$(VCTargetsPath)\\Microsoft.Cpp.props'}],
['PropertyGroup',
{'Condition': "'$(Configuration)|$(Platform)'=='Debug|Win32'",
'Label': 'Configuration'},
['ConfigurationType', 'Application'],
['CharacterSet', 'Unicode']
]
])
self.assertEqual(xml, target)
if __name__ == '__main__':
unittest.main()
|
gangadhar-kadam/verve_test_erp | erpnext/shopping_cart/test_shopping_cart.py | Python | agpl-3.0 | 7,585 | 0.026236 | # Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import unittest
import frappe
from erpnext.shopping_cart import get_quotation, set_item_in_cart
class TestShoppingCart(unittest.TestCase):
"""
Note:
Shopping Cart == Quotation
"""
def setUp(self):
frappe.set_user("Administrator")
self.enable_shopping_cart()
def tearDown(self):
frappe.set_user("Administrator")
self.disable_shopping_cart()
def test_get_cart_new_user(self):
self.login_as_new_user()
# test if lead is created and quotation with new lead is fetched
quotation = get_quotation()
self.assertEquals(quotation.quotation_to, "Lead")
self.assertEquals(frappe.db.get_value("Lead", quotation.lead, "email_id"), "test_cart_user@example.com")
self.assertEquals(quotation.customer, None)
self.assertEquals(quotation.contact_email, frappe.session.user)
return quotation
def test_get_cart_lead(self):
self.login_as_lead()
# test if quotation with lead is fetched
quotation = get_quotation()
self.assertEquals(quotation.quotation_to, "Lead")
self.assertEquals(quotation.lead, frappe.db.get_value("Lead", {"email_id": "test_cart_lead@example.com"}))
self.assertEquals(quotation.customer, None)
self.assertEquals(quotation.contact_email, frappe.session.user)
return quotation
def test_get_cart_customer(self):
self.login_as_customer()
# test if quotation with customer is fetched
quotation = get_quotation()
self.assertEquals(quotation.quotation_to, "Customer")
self.assertEquals(quotation.customer, "_Test Customer")
self.assertEquals(quotation.lead, None)
self.assertEquals(quotation.contact_email, frappe.session.user)
return quotation
def test_add_to_cart(self):
self.login_as_lead()
# remove from cart
self.remove_all_items_from_cart()
# add first item
set_item_in_cart("_Test Item", 1)
quotation = self.test_get_cart_lead()
self.assertEquals(quotation.get("items")[0].item_code, "_Test Item")
self.assertEquals(quotation.get("items")[0].qty, 1)
self.assertEquals(quotation.get("items")[0].amount, 10)
# add second item
set_item_in_cart("_Test Item 2", 1)
quotation = self.test_get_cart_lead()
self.assertEquals(quotation.get("items")[1].item_code, "_Test Item 2")
self.assertEquals(quotation.get("items")[1].qty, 1)
self.assertEquals(quotation.get("items")[1].amount, 20)
self.assertEquals(len(quotation.get("items")), 2)
def test_update_cart(self):
# first, add to cart
self.test_add_to_cart()
# update first item
set_item_in_cart("_Test Item", 5)
quotation = self.test_get_cart_lead()
self.assertEquals(quotation.get("items")[0].item_code, "_Test Item")
self.assertEquals(quotation.get("items")[0].qty, 5)
self.assertEquals(quotation.get("items")[0].amount, 50)
self.assertEquals(quotation.net_total, 70)
self.assertEquals(len(quotation.get("items")), 2)
def test_remove_from_cart(self):
# first, add to cart
self.test_add_to_cart()
# remove first item
set_item_in_cart("_Test Item", 0)
quotation = self.test_get_cart_lead()
self.assertEquals(quotation.get("items")[0].item_code, "_Test Item 2")
self.assertEquals(quotation.get("items")[0].qty, 1)
self.assertEquals(quotation.get("items")[0].amount, 20)
self.assertEquals(quotation.net_total, 20)
self.assertEquals(len(quotation.get("items")), 1)
# remove second item
set_item_in_cart("_Test Item 2", 0)
quotation = self.test_get_cart_lead()
self.assertEquals(quotation.net_total, 0)
self.assertEquals(len(quotation.get("items")), 0)
def test_set_billing_address(self):
return
# first, add to cart
self.test_add_to_cart()
quotation = self.test_get_cart_lead()
default_address = frappe.get_doc("Address", {"lead": quotation.lead, "is_primary_address": 1})
self.assertEquals("customer_address", default_address.name)
def test_set_shipping_address(self):
# first, add to cart
self.test_add_to_cart()
def test_shipping_rule(self):
self.test_set_shipping_address()
# check if shipping rule changed
pass
def test_price_list(self):
self.test_set_billing_address()
# check if price changed
pass
def test_place_order(self):
pass
# helper functions
def enable_shopping_cart(self):
settings = frappe.get_doc("Shopping Cart Settings", "Shopping Cart Settings")
if settings.get("price_lists"):
settings.enabled = 1
else:
settings.update({
"enabled": 1,
"company": "_Test Company",
"default_territory": "_Test Territory Rest Of The World",
"default_customer_group": "_Test Customer Group",
"quotation_series": "_T-Quotation-"
})
settings.set("price_lists", [
# price lists
{"doctype": "Shopping Cart Price List", "parentfield": "price_lists",
"selling_price_list": "_Test Price List India"},
{"doctype": "Shopping Cart Price List", "parentfield": "price_lists",
"selling_price_list": "_Test Price List Rest of the World"}
])
settings.set("sales_taxes_and_charges_masters", [
# tax masters
{"doctype": "Shopping Cart Taxes and Charges Master", "parentfield": "sales_taxes_and_charges_masters",
"sales_taxes_and_charges_master": "_Test India Tax Master"},
{"doctype": "Shopping Cart Taxes and Charges Master", "parentfield": "sales_taxes_and_charges_masters",
"sales_taxes_and_charges_master": "_Test Sales Taxes and Charges Master - Rest of the World"},
])
settings.set("shipping_rules", {"doctype": "Shopping Cart Shipping Rule", "parentfield": "shipping_rules",
"shipping_rule": "_Test Shipping Rule - India"})
settings.save()
frappe.local.shopping_cart_settings = None
def disable_shopping_cart(self):
settings = frappe.get_doc("Shopping Cart Settings", "Shopping Cart Settings")
settings.enabled = 0
settings.save()
frappe.local.shopping_cart_settings = None
def login_as_new_user(self):
frappe.set_user("test_cart_user@example.com")
def login_as_lead(self):
self.create_lead()
frappe.set_user("test_cart_lead@example.com")
def login_as_customer(self):
frappe.set_user("test_contact_customer@example.com")
def create_lead(self):
if frappe.db.get_value("Lead", {"email_id": "test_cart_lead@example.com"}):
return
lead = frappe.get_doc({
"doctype": "Lead",
"email_id": "test_cart_lead@example.com",
"lead_name": "_Test Website Lead",
"status": "Open",
"territory": "_Test Territory Rest Of The World"
})
lead.insert(ignore_permissions=True)
frappe.get_doc({
"doctype": "Address",
"address_line1": "_Test Address Line 1",
"address | _title": "_Test Cart Lead Address",
"address_type": "Office",
"city": "_Test City",
"country": "United States",
"lead": lead.name,
"lead_name": "_Test Website Lead",
"is_primary_address": 1,
"phone": "+91 0000000000"
}).insert(ignore_permissions=True)
frappe.get_doc({
"doctype": "Address",
"address_line1": "_Test Address Line 1",
"address_title": "_Test Cart Lead Address", |
"address_type": "Personal",
"city": "_Test City",
"country": "India",
"lead": lead.name,
"lead_name": "_Test Website Lead",
"phone": "+91 0000000000"
}).insert(ignore_permissions=True)
def remove_all_items_from_cart(self):
quotation = get_quotation()
quotation.set("items", [])
quotation.save(ignore_permissions=True)
test_dependencies = ["Sales Taxes and Charges Master", "Price List", "Item Price", "Shipping Rule", "Currency Exchange",
"Customer Group", "Lead", "Customer", "Contact", "Address", "Item"]
|
portfoliome/pgawedge | pgawedge/ddl.py | Python | mit | 509 | 0 | """
Compiler objects for p | ostgres data definition langua | ge.
"""
from sqlalchemy.sql.expression import Executable, ClauseElement
from sqlalchemy.ext.compiler import compiles
class CreateTableAs(Executable, ClauseElement):
def __init__(self, name, query):
self.name = name
self.query = query
@compiles(CreateTableAs, "postgresql")
def _create_table_as(element, compiler, **kw):
return "CREATE TABLE %s AS %s" % (
element.name,
compiler.process(element.query)
)
|
home-assistant/home-assistant | homeassistant/components/hassio/sensor.py | Python | apache-2.0 | 3,165 | 0 | """Sensor platform for Hass.io addons."""
from __future__ import annotations
from homeassistant.components.sensor import (
SensorEntity,
SensorEntityDescription,
SensorStateClass,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import PERCENTAGE
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from . import ADDONS_COORDINATOR
from .const import (
ATTR_CPU_PERCENT,
ATTR_MEMORY_PERCENT,
ATTR_VERSION,
ATTR_VERSION_LATEST,
DATA_KEY_ADDONS,
DATA_KEY_OS,
)
from .entity import HassioAddonEntity, HassioOSEntity
COMMON_ENTITY_DESCRIPTIONS = (
SensorEntityDescription(
entity_registry_enabled_default=False,
key=ATTR_VERSION,
name="Version",
),
SensorEntityDescription(
entity_registry_enabled_default=False,
key=ATTR_VERSION_LATEST,
name="Newest Version",
),
)
ADDON_ENTITY_DESCRIPTIONS = COMMON_ENTITY_DESCRIPTIONS + (
SensorEntityDescription(
entity_registry_enabled_default=False,
key=ATTR_CPU_PERCENT,
name="CPU Percent",
icon="mdi:cpu-64-bit",
native_unit_of_measurement=PERCENTAGE,
state_class=SensorStateClass.MEASUREMENT,
),
SensorEntityDescription(
entity_registry_enabled_default=False,
key=ATTR_MEMORY_PERCENT,
name="Memory Percent",
icon="mdi:memory",
native_unit_of_measurement=PERCENTAGE,
state_class=SensorStateClass.MEASUREMENT,
),
)
OS_ENTITY_DESCRIPTIONS = COMMON_ENTITY_DESCRIPTIONS
async def async_setup_entry(
hass: HomeAssistant,
config_entry: ConfigEntry,
async_add_entities: AddEntitiesCallback,
) -> None:
"""Sensor set up for Hass.io config entry."""
coordinator = hass.data[ADDONS_COORDINATOR]
entities = []
| for addon in coordinator.data[DATA_KEY_ADDONS].values():
for entity_description in ADDON_ENTITY_DESCRIPTIONS:
entities.append(
HassioAddonSensor(
addon=addon,
| coordinator=coordinator,
entity_description=entity_description,
)
)
if coordinator.is_hass_os:
for entity_description in OS_ENTITY_DESCRIPTIONS:
entities.append(
HassioOSSensor(
coordinator=coordinator,
entity_description=entity_description,
)
)
async_add_entities(entities)
class HassioAddonSensor(HassioAddonEntity, SensorEntity):
"""Sensor to track a Hass.io add-on attribute."""
@property
def native_value(self) -> str:
"""Return native value of entity."""
return self.coordinator.data[DATA_KEY_ADDONS][self._addon_slug][
self.entity_description.key
]
class HassioOSSensor(HassioOSEntity, SensorEntity):
"""Sensor to track a Hass.io add-on attribute."""
@property
def native_value(self) -> str:
"""Return native value of entity."""
return self.coordinator.data[DATA_KEY_OS][self.entity_description.key]
|
bumper-app/bumper-bianca | bianca/cas_manager.py | Python | mit | 7,075 | 0.033074 | """
file: cas_manager.py
authors: Christoffer Rosen <cbr4830@rit.edu>
date: Jan. 2014
description: This module contains the CAS_manager class, which is a thread that continously checks if there
is work that needs to | be done. Also contains supporting classes of Worker and ThreadPool used by
the CAS_Manager.
"""
from analyzer.analyzer import *
from ingest | er.ingester import *
from orm.repository import *
import calendar # to convert datetime to unix time
from caslogging import logging
from queue import *
import threading
import time
from monthdelta import *
BACKGROUND_INTERVAL = 60 # wait 1 minutes
class CAS_Manager(threading.Thread):
"""
Thread that continiously checks if there is work to be done and adds it to
the thread pool work queue
"""
def __init__(self):
"""Constructor"""
threading.Thread.__init__(self)
numOfWorkers = int(config['system']['workers'])
self.workQueue = ThreadPool(numOfWorkers)
self.modelQueue = Queue()
def checkIngestion(self):
"""Check if any repo needs to be ingested"""
session = Session()
repo_update_freq = int(config['repoUpdates']['freqInDays'])
refresh_date = str(datetime.utcnow() - timedelta(days=repo_update_freq))
repos_to_get = (session.query(Repository)
.filter(
(Repository.status == "Waiting to be Ingested") |
(Repository.ingestion_date < refresh_date) &
(Repository.status != "Error") &
(Repository.status != "Analyzing"))
.all())
for repo in repos_to_get:
logging.info("Adding repo " + repo.id + " to work queue for ingesting")
repo.status = "In Queue to be Ingested"
session.commit() # update the status of repo
self.workQueue.add_task(ingest,repo.id)
session.close()
def checkAnalyzation(self):
"""Checks if any repo needs to be analyzed"""
session = Session()
repo_update_freq = int(config['repoUpdates']['freqInDays'])
refresh_date = str(datetime.utcnow() - timedelta(days=repo_update_freq))
repos_to_get = (session.query(Repository)
.filter( (Repository.status == "Waiting to be Analyzed") )
.all()
)
for repo in repos_to_get:
logging.info("Adding repo " + repo.id + " to work queue for analyzing.")
repo.status = "In Queue to be Analyzed"
session.commit() # update the status of repo
self.workQueue.add_task(analyze, repo.id)
session.close()
def checkModel(self):
"""Check if any repo needs metrics to be generated"""
session = Session()
repos_to_get = (session.query(Repository)
.filter(
(Repository.status == "In Queue to Build Model") )
.all())
for repo in repos_to_get:
logging.info("Adding repo " + repo.id + " to model queue to finish analyzing")
repo.status = "Building Model"
session.commit() # update status of repo
self.modelQueue.put(repo.id)
session.close()
def checkBuildModel(self):
""" Checks if any repo is awaiting to build model.
We are using a queue because we can't concurrently access R """
session = Session()
if self.modelQueue.empty() != True:
repo_id = self.modelQueue.get()
repo = (session.query(Repository).filter(Repository.id == repo_id).first())
# use data only up to X months prior we won't have sufficent data to build models
# as there may be bugs introduced in those months that haven't been fixed, skewing
# our model.
glm_model_time = int(config['glm_modeling']['months'])
data_months_datetime = datetime.utcnow() - monthdelta(glm_model_time)
data_months_unixtime = calendar.timegm(data_months_datetime.utctimetuple())
# all commits for repo prior to current time - glm model time
training_commits = (session.query(Commit)
.filter(
( Commit.repository_id == repo_id ) &
( Commit.author_date_unix_timestamp < str(data_months_unixtime))
)
.order_by( Commit.author_date_unix_timestamp.desc() )
.all())
# all commits for repo after or on current time - glm model time
testing_commits = (session.query(Commit)
.filter(
( Commit.repository_id == repo_id ) &
( Commit.author_date_unix_timestamp >= str(data_months_unixtime)))
.all())
try:
metrics_generator = MetricsGenerator(repo_id, training_commits, testing_commits)
metrics_generator.buildAllModels()
# montly data dump - or rather, every 30 days.
dump_refresh_date = str(datetime.utcnow() - timedelta(days=30))
if repo.last_data_dump == None or repo.last_data_dump < dump_refresh_date:
logging.info("Generating a monthly data dump for repository: " + repo_id)
# Get all commits for the repository
all_commits = (session.query(Commit)
.filter(
( Commit.repository_id == repo_id )
)
.order_by( Commit.author_date_unix_timestamp.desc() )
.all())
metrics_generator.dumpData(all_commits)
repo.last_data_dump = str(datetime.now().replace(microsecond=0))
# Notify user if repo has never been analyzed previously
if repo.analysis_date is None:
self.notify(repo)
logging.info("Repo " + repo_id + " finished analyzing.")
repo.analysis_date = str(datetime.now().replace(microsecond=0))
repo.status = "Analyzed"
session.commit() # update status of repo
session.close()
# uh-oh
except Exception as e:
logging.exception("Got an exception building model for repository " + repo_id)
repo.status = "Error"
session.commit() # update repo status
session.close()
def notify(self, repo):
    """Send e-mail notifications for a repository (used by checkBuildModel).

    Builds a Notifier from the configured gmail credentials and notifies
    the repo's subscriber (plus the gmail account itself).

    :param repo: repository record; reads ``id``, ``name`` and ``email``.
    """
    # Dropped the dead locals `notify = False` (shadowed this method's
    # name and was never read) and the redundant `notifier = None`.
    logging.info("Notifying subscribed users for repository " + repo.id)
    # Create the Notifier from the configured gmail account.
    gmail_user = config['gmail']['user']
    gmail_pass = config['gmail']['pass']
    notifier = Notifier(gmail_user, gmail_pass, repo.name)
    # Subscribe the repo owner's address when one was provided.
    if repo.email is not None:
        notifier.addSubscribers([repo.email, gmail_user])
    else:
        notifier.addSubscribers([gmail_user])
    notifier.notify()
def run(self):
    """Background worker loop: poll the repository table for pending work."""
    while True:
        # Run every maintenance check, then sleep until the next cycle.
        for check in (self.checkIngestion, self.checkAnalyzation,
                      self.checkModel, self.checkBuildModel):
            check()
        time.sleep(BACKGROUND_INTERVAL)
class Worker(threading.Thread):
    """Daemon thread that consumes and executes tasks from a shared queue."""

    def __init__(self, tasks):
        """Start immediately as a daemon, pulling work from *tasks*."""
        threading.Thread.__init__(self)
        self.tasks = tasks
        self.daemon = True
        self.start()

    def run(self):
        """Loop forever: fetch a (func, args, kwargs) triple and invoke it."""
        while True:
            func, args, kwargs = self.tasks.get()
            try:
                func(*args, **kwargs)
            except Exception as exc:
                # A failing task must not kill the worker; report and move on.
                print(exc)
            # Always mark the task done so Queue.join() can unblock.
            self.tasks.task_done()
class ThreadPool:
    """Fixed-size pool of Worker threads fed from a bounded queue."""

    def __init__(self, num_threads):
        # Bounded queue: add_task blocks once num_threads tasks are pending,
        # which provides natural back-pressure on producers.
        self.tasks = Queue(num_threads)
        for _ in range(num_threads):
            Worker(self.tasks)

    def add_task(self, func, *args, **kwargs):
        """Enqueue func(*args, **kwargs) for execution by a worker."""
        self.tasks.put((func, args, kwargs))

    def wait_completion(self):
        """Block until every queued task has been processed."""
        self.tasks.join()
|
SimpleTax/merchant | example/app/urls.py | Python | bsd-3-clause | 2,389 | 0.005442 |
from django.conf.urls import *

from billing import get_integration

# Payment-gateway integration objects, shared by the URL includes below.
google_checkout_obj = get_integration("google_checkout")
pay_pal_obj = get_integration("pay_pal")
amazon_fps_obj = get_integration("fps")
fps_recur_obj = get_integration("fps")
world_pay_obj = get_integration("world_pay")  # fixed: identifier was corrupted
braintree_obj = get_integration("braintree_payments")  # fixed: call name was corrupted
stripe_obj = get_integration("stripe_example")
samurai_obj = get_integration("samurai_example")

# On-site payment views.
urlpatterns = patterns('app.views',
    url(r'^$', 'index', name='app_index'),
    url(r'^authorize/$', 'authorize', name='app_authorize'),
    url(r'^paypal/$', 'paypal', name='app_paypal'),
    url(r'^eway/$', 'eway', name='app_eway'),
    url(r'^braintree/$', 'braintree', name='app_braintree'),
    url(r'^stripe/$', 'stripe', name='app_stripe'),
    url(r'^samurai/$', 'samurai', name='app_samurai'),
)

# offsite payments
urlpatterns += patterns('app.views',
    url(r'offsite/paypal/$', 'offsite_paypal', name='app_offsite_paypal'),
    url(r'offsite/google-checkout/$', 'offsite_google_checkout', name='app_offsite_google_checkout'),
    url(r'offsite/world_pay/$', 'offsite_world_pay', name='app_offsite_world_pay'),
    url(r'offsite/amazon_fps/$', 'offsite_amazon_fps', name='app_offsite_amazon_fps'),
    url(r'offsite/braintree/$', 'offsite_braintree', name='app_offsite_braintree'),
    url(r'offsite/stripe/$', 'offsite_stripe', name='app_offsite_stripe'),
    url(r'offsite/samurai/$', 'offsite_samurai', name='app_offsite_samurai'),
)

# paypal payment notification handler
urlpatterns += patterns('',
    (r'^paypal-ipn-handler/', include(pay_pal_obj.urls)),
)
urlpatterns += patterns('',
    (r'^', include(google_checkout_obj.urls)),
)
urlpatterns += patterns('',
    (r'^fps/', include(amazon_fps_obj.urls)),
)
urlpatterns += patterns('',
    (r'^braintree/', include(braintree_obj.urls)),
)
urlpatterns += patterns('',
    (r'^stripe/', include(stripe_obj.urls)),
)
urlpatterns += patterns('',
    (r'^samurai/', include(samurai_obj.urls)),
)

# Static "payment done" pages for the offsite flows.
urlpatterns += patterns('django.views.generic.simple',
    url(r'offsite/paypal/done/$',
        'direct_to_template',
        {'template': 'app/payment_done.html'},
        name='app_offsite_paypal_done'),
    url(r'offsite/google-checkout/done/$',
        'direct_to_template',
        {'template': 'app/payment_done.html'},
        name='app_offsite_google_checkout_done'),
)
|
thismachinechills/minesqeeper | mines_pyglet.py | Python | gpl-3.0 | 4,042 | 0.026719 | import pyglet
from random import random as r
from board import Board
from math import floor
# --- Game configuration ------------------------------------------------------
cursor = None  # NOTE(review): appears unused in this file
xi, yi = 30, 30              # board dimensions in cells
x, y = xi, yi
lvl = 5                      # difficulty value passed to Board
starts = True
mines = Board(xi, yi, lvl)   # the single global game board
flag_mode = False            # when True, left-click places flags (see on_mouse_press)
brd = {(_x,_y) for _x in range(x) for _y in range(y)}  # NOTE(review): unused here
scale = 20                   # pixel size of one board cell
# --- Colours (RGB / RGBA component tuples) -----------------------------------
black = 0, 0, 0, 255
white = 255, 255, 255
purple = 0.5, 0, 0.5
orange = 1, 0.5, 0
red = 255, 0, 0
default = white
number_sq = 0.5, 0.5, 0.5        # revealed numbered square
zero_sq = 0.25, 0.25, 0.25       # revealed empty square
bomb_sq = red
unclicked_sq = 0.75, 0.75, 0.75
flag_sq = orange
# --- OpenGL / window setup ---------------------------------------------------
set_live_color = pyglet.gl.glColor3f
gl_draw_sq = pyglet.graphics.draw_indexed
window = pyglet.window.Window(width=(x+1)*scale, height=(y+1)*scale)
four, gl_flag, indices = 4, 'v2i2', (0, 1, 2, 0, 2, 3)
vertices = 0,0, x*scale+scale,0, x*scale+scale,y*scale+scale, 0,y*scale+scale
# Paint the whole window in the unclicked colour once at import time.
set_live_color(*unclicked_sq)
gl_draw_sq(four, pyglet.gl.GL_TRIANGLES, indices, (gl_flag, vertices))
def draw_square(point, size=1, color=white):
    """Fill one board cell at grid *point*, scaled by *size*, in *color*."""
    set_live_color(*color)
    left = point[0] * size
    bottom = point[1] * size
    right = left + size
    top = bottom + size
    quad = (left, bottom, right, bottom, right, top, left, top)
    gl_draw_sq(four, pyglet.gl.GL_TRIANGLES, indices, (gl_flag, quad))
def draw_text(label="Txt", point=(10,10), size=1, color=black):
    """Render *label* inside the board cell at grid *point*."""
    # Offset by a quarter cell so the text sits inside the square.
    x, y = point[0] * scale + scale/4, point[1] * scale + scale/4
    font = 'Sans'
    label = pyglet.text.Label(label, font_name=font, font_size=size, x=x, y=y, color=color)
    label.draw()
def process_board(board, start):
    """Draw newly revealed squares and flags; reveal bombs when the game ends.

    *start* is accepted for interface compatibility but unused here.
    """
    # (Removed two commented-out GL calls that duplicated the module-level
    # background paint; they were dead code.)
    these_nums, these_zeros = get_visible_sqs(board)
    _draw_zeros(board, these_zeros)
    _draw_nums(board, these_nums)
    _draw_flags(board)
    # No unclicked squares left means the round is over: show the bombs.
    if not board.unclicked_squares:
        _draw_bombs(board)
def _draw_nums(board, pts):
    """Draw each numbered square in *pts* and drop it from board.numbers."""
    for pt in pts:
        number = board.numbers[pt]
        draw_square(pt, scale, number_sq)
        draw_text(str(number), pt, size=8, color=black)
        # Remove so the same number is not redrawn on later frames.
        board.numbers.pop(pt)
def _draw_zeros(board, pts):
    """Reveal the empty squares in *pts* and forget them on the board."""
    for cell in pts:
        draw_square(cell, scale, zero_sq)
        board.blank_board.remove(cell)
def _draw_bombs(board):
    """Paint every bomb square in the bomb colour."""
    for cell in board.bombs:
        draw_square(cell, scale, bomb_sq)
def _draw_flags(board):
    """Paint every flagged square in the flag colour."""
    for cell in board.flags:
        draw_square(cell, scale, flag_sq)
def _draw_unclicked(board):
    """Paint every not-yet-revealed square in the unclicked colour."""
    # Restored the iterable: the source line was corrupted by a stray " | ".
    for sq in board.unclicked_squares:
        draw_square(sq, scale, unclicked_sq)
def get_visible_sqs(board):
    """Return (numbered, zero) grid points that are revealed and need drawing.

    A square counts as visible once it is no longer in board.unclicked_squares.
    """
    draw_these_nums = board.numbers.keys() - board.unclicked_squares
    # Restored `blank_board`: the attribute name was corrupted by a stray " | ".
    draw_these_zeros = board.blank_board - board.unclicked_squares
    return draw_these_nums, draw_these_zeros
def update_title(dt, board):
    """Clock callback: advance the game timer and refresh the window caption."""
    if not board.start:
        return
    if board.round:
        board.time += 1
    caption = "Mines: %d Flags: %d Time: %s" % (
        len(board.bombs), len(board.flags), board.time)
    window.set_caption(caption)
def draw_clicks(board):
    """Redraw the board after a click and update end-of-game captions."""
    # NOTE(review): redraws via the global `mines`/`starts` even though a
    # `board` argument is received — presumably always the same object;
    # confirm before reusing with a different board.
    if not board.round:
        board.start_game()
    _draw_unclicked(board)
    process_board(mines, starts)
    if not board.start:
        if board.win:
            window.set_caption(":) You win. Time: " + str(board.time))
        else:
            window.set_caption(":( You lose.")
@window.event
def on_mouse_motion(x, y, dx, dy):
    # Mouse movement is intentionally ignored.
    pass
def on_draw(*args): pass  # no-op placeholder; not registered as a handler
@window.event
def on_key_press(symbol, modifiers):
    """Toggle flag-placement mode when the F key is pressed.

    Bug fix: the original took ``flag_mode`` as a default argument and only
    rebound the local name, so the module-level ``flag_mode`` (read by
    on_mouse_press) never actually changed. Use the global instead.
    """
    global flag_mode
    if symbol == pyglet.window.key.F:
        flag_mode = not flag_mode
@window.event
def on_mouse_press(x, y, button, modifiers, board=mines):
    """Handle a click: button 4 reveals, button 1 flags, button 2 restarts.

    NOTE(review): pyglet reports LEFT=1, MIDDLE=2, RIGHT=4, so as written
    the LEFT button flags and the RIGHT button reveals — confirm intended.
    """
    # Convert pixel coordinates to board-grid coordinates.
    point = floor(x / scale), floor(y / scale)
    if flag_mode and button == 1:
        start_button = 4  # NOTE(review): assigned but never used
    if button == 4:
        if point not in mines.flags:
            mines.select_sq(point)
    elif button == 1:
        truth = mines.set_flag(point)
        if truth:
            draw_square(point, scale, unclicked_sq)
    elif button == 2:
        # NOTE(review): passes xi twice — likely meant (xi, yi, lvl).
        mines.__init__(xi, xi, lvl)
    draw_clicks(mines)
def main():
    """Schedule the background paint and title updates, then run pyglet."""
    set_live_color(*unclicked_sq)
    # One-shot repaint of the background shortly after startup.
    pyglet.clock.schedule_once(lambda args: gl_draw_sq(four, pyglet.gl.GL_TRIANGLES, indices, (gl_flag, vertices)), 0.1)
    # Refresh the caption (mines/flags/time) once per second.
    pyglet.clock.schedule_interval(update_title, 1, mines)
    pyglet.app.run()
if __name__ == "__main__":
    # Entry point: start the pyglet application loop.
    #pyglet.clock.schedule(on_draw)
    main()
|
denys-duchier/Scolar | sco_bulletins_json.py | Python | gpl-2.0 | 13,221 | 0.011951 | # -*- mode: python -*-
# -*- coding: iso8859-15 -*-
##############################################################################
#
# Gestion scolarite IUT
#
# Copyright (c) 2001 - 2013 Emmanuel Viennet. All rights reserved.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
# Emmanuel Viennet emmanuel.viennet@viennet.net
#
##############################################################################
"""Génération du bulletin en format JSON (beta, non completement testé)
"""
from notes_table import *
import sco_photos
import ZAbsences
import sco_bulletins
# -------- Bulletin en JSON
import mx
class ScoDocJSONEncoder(json.JSONEncoder):
    """JSON encoder that also serializes mx.DateTime values as ISO strings."""

    def default(self, o):
        # Ugly but deliberate: compare the type's repr because mx.DateTime
        # offers no stable class to isinstance() against here.
        if str(type(o)) != "<type 'mx.DateTime.DateTime'>":
            log('not mx: %s' % type(o))
            return json.JSONEncoder.default(self, o)
        return o.strftime("%Y-%m-%dT%H:%M:%S")
def make_json_formsemestre_bulletinetud(
    context, formsemestre_id, etudid, REQUEST=None,
    xml_with_decisions=False, version='long',
    force_publishing=False # force publication even if the semester is not published on the "portal"
    ):
    """Return the student's grade report (bulletin) as a JSON string."""
    d = formsemestre_bulletinetud_published_dict(
        context, formsemestre_id, etudid,
        force_publishing=force_publishing,
        REQUEST=REQUEST,
        xml_with_decisions=xml_with_decisions, version=version)
    # Set the JSON content type on the Zope response when one is available.
    if REQUEST:
        REQUEST.RESPONSE.setHeader('content-type', JSON_MIMETYPE)
    return json.dumps(d, cls=ScoDocJSONEncoder, encoding=SCO_ENCODING)
# (fonction séparée: n'utilise pas formsemestre_bulletinetud_dict()
# pour simplifier le code, mais attention a la maintenance !)
#
def formsemestre_bulletinetud_published_dict(
context, formsemestre_id, etudid,
force_publishing=False,
xml_nodate=False,
REQUEST=None,
| xml_with_decisions=False, # inclue les decisions même si non publiées
version='long'
):
"""Dictionnaire representant les informations _publiees_ du bulletin de notes
Utilisé pour JSON, devrait l'être aussi pour XML. (todo)
"""
d = {}
sem = context.get_formsemestre(formsemestre_id)
if sem['bul_hide_xml'] == '0' or force_publishing:
published=1
else:
published=0
if xml_nodate:
docdat | e = ''
else:
docdate = datetime.datetime.now().isoformat()
d.update( etudid=etudid, formsemestre_id=formsemestre_id,
date=docdate,
publie=published,
etape_apo=sem['etape_apo'] or '',
etape_apo2=sem['etape_apo2'] or '',
etape_apo3=sem['etape_apo3'] or '',
etape_apo4=sem['etape_apo4'] or ''
)
# Infos sur l'etudiant
etudinfo = context.getEtudInfo(etudid=etudid,filled=1)[0]
d['etudiant'] = dict(
etudid=etudid, code_nip=etudinfo['code_nip'], code_ine=etudinfo['code_ine'],
nom=quote_xml_attr(etudinfo['nom']),
prenom=quote_xml_attr(etudinfo['prenom']),
sexe=quote_xml_attr(etudinfo['sexe']),
photo_url=quote_xml_attr(sco_photos.etud_photo_url(context, etudinfo)),
email=quote_xml_attr(etudinfo['email']))
# Disponible pour publication ?
if not published:
return d # stop !
# Groupes:
partitions = sco_groups.get_partitions_list(context, formsemestre_id, with_default=False)
partitions_etud_groups = {} # { partition_id : { etudid : group } }
for partition in partitions:
pid=partition['partition_id']
partitions_etud_groups[pid] = sco_groups.get_etud_groups_in_partition(context, pid)
nt = context._getNotesCache().get_NotesTable(context, formsemestre_id) #> toutes notes
ues = nt.get_ues()
modimpls = nt.get_modimpls()
nbetuds = len(nt.rangs)
mg = fmt_note(nt.get_etud_moy_gen(etudid))
if nt.get_moduleimpls_attente() or context.get_preference('bul_show_rangs', formsemestre_id) == 0:
# n'affiche pas le rang sur le bulletin s'il y a des
# notes en attente dans ce semestre
rang = ''
rang_gr = {}
ninscrits_gr = {}
else:
rang = str(nt.get_etud_rang(etudid))
rang_gr, ninscrits_gr, gr_name = sco_bulletins.get_etud_rangs_groups(
context, etudid, formsemestre_id, partitions, partitions_etud_groups, nt)
d['note'] = dict( value=mg, min=fmt_note(nt.moy_min), max=fmt_note(nt.moy_max), moy=fmt_note(nt.moy_moy) )
d['rang'] = dict( value=rang, ninscrits=nbetuds )
d['rang_group'] = []
if rang_gr:
for partition in partitions:
d['rang_group'].append( dict(
group_type=partition['partition_name'],
group_name=gr_name[partition['partition_id']],
value=rang_gr[partition['partition_id']],
ninscrits=ninscrits_gr[partition['partition_id']] ))
d['note_max'] = dict( value=20 ) # notes toujours sur 20
d['bonus_sport_culture'] = dict( value=nt.bonus[etudid] )
# Liste les UE / modules /evals
d['ue'] = []
d['ue_capitalisee'] = []
for ue in ues:
ue_status = nt.get_etud_ue_status(etudid, ue['ue_id'])
u = dict( id=ue['ue_id'],
numero=quote_xml_attr(ue['numero']),
acronyme=quote_xml_attr(ue['acronyme']),
titre=quote_xml_attr(ue['titre']),
note = dict(value=fmt_note(ue_status['cur_moy_ue']),
min=fmt_note(ue['min']), max=fmt_note(ue['max'])),
rang = str(nt.ue_rangs[ue['ue_id']][0][etudid]),
effectif = str(nt.ue_rangs[ue['ue_id']][1] - nt.nb_demissions)
)
d['ue'].append(u)
u['module'] = []
# Liste les modules de l'UE
ue_modimpls = [ mod for mod in modimpls if mod['module']['ue_id'] == ue['ue_id'] ]
for modimpl in ue_modimpls:
mod_moy = fmt_note(nt.get_etud_mod_moy(modimpl['moduleimpl_id'], etudid))
if mod_moy == 'NI': # ne mentionne pas les modules ou n'est pas inscrit
continue
mod = modimpl['module']
#if mod['ects'] is None:
# ects = ''
#else:
# ects = str(mod['ects'])
modstat = nt.get_mod_stats(modimpl['moduleimpl_id'])
m = dict(
id=modimpl['moduleimpl_id'], code=mod['code'],
coefficient=mod['coefficient'],
numero=mod['numero'],
titre=quote_xml_attr(mod['titre']),
abbrev=quote_xml_attr(mod['abbrev']),
# ects=ects, ects des modules maintenant inutilisés
note = dict( value=mod_moy )
)
m['note'].update(modstat)
for k in ('min', 'max', 'moy'): # formatte toutes les notes
m['note'][k] = fmt_note(m['note'][k])
u['module'].append(m)
if context.get_preference('bul_show_mod_rangs', formsemestre_id):
m['rang'] = dict( value=nt.mod_rangs[modimpl['moduleimpl_id']][0][etudid] )
m['effectif'] = dict( value=nt.mod_rangs[modimpl['moduleimpl_id']][1] )
# --- notes de chaque eval:
evals = nt.get_evals_in_mod(modimpl['moduleimpl_id'])
|
Rapptz/discord.py | discord/types/activity.py | Python | mit | 2,827 | 0 | """
The MIT License (MIT)
Copyright (c) 2015-present Rapptz
Permission is hereby granted, free of charge, to any person obtaining a
copy of this software and associated documentation files (the "Software"),
to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense,
and/or sell copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
"""
from __future__ import annotations
from typing import List, Literal, Optional, TypedDict
from .user import User
from .snowflake import Snowflake
StatusType = Literal['idle', 'dnd', 'online', 'offline']
class PartialPresenceUpdate(TypedDict):
    """Gateway presence-update payload; all keys are required."""
    user: User
    guild_id: Snowflake
    status: StatusType
    activities: List[Activity]
    client_status: ClientStatus
class ClientStatus(TypedDict, total=False):
    """Per-platform status; every key is optional (total=False)."""
    desktop: StatusType
    mobile: StatusType
    web: StatusType
class ActivityTimestamps(TypedDict, total=False):
    """Optional start/end timestamps of an activity."""
    start: int
    end: int
class ActivityParty(TypedDict, total=False):
    """Optional party info; size is a two-int list (presumably [current, max])."""
    id: str
    size: List[int]
class ActivityAssets(TypedDict, total=False):
    """Optional image/text assets attached to an activity."""
    large_image: str
    large_text: str
    small_image: str
    small_text: str
class ActivitySecrets(TypedDict, total=False):
    """Optional secrets for joining, spectating or matching an activity."""
    join: str
    spectate: str
    match: str
class _ActivityEmojiOptional(TypedDict, total=False):
    # Optional fields of an activity emoji. The `animated` line was corrupted
    # by a stray " | " in the source and has been restored.
    id: Snowflake
    animated: bool
class ActivityEmoji(_ActivityEmojiOptional):
    """Emoji attached to an activity; only `name` is required."""
    name: str
class ActivityButton(TypedDict):
    """Clickable button on an activity; both keys are required."""
    label: str
    url: str
class _SendableActivityOptional(TypedDict, total=False):
    # Optional URL field of a sendable activity.
    url: Optional[str]
ActivityType = Literal[0, 1, 2, 4, 5]
class SendableActivity(_SendableActivityOptional):
    """Activity shape a client may send; name and type are required."""
    name: str
    type: ActivityType
class _BaseActivity(SendableActivity):
    # Received activities additionally carry a creation timestamp.
    created_at: int
class Activity(_BaseActivity, total=False):
    """Full activity payload; all keys below are optional (total=False).

    The `state` line was corrupted by a stray " | " in the source and has
    been restored.
    """
    state: Optional[str]
    details: Optional[str]
    timestamps: ActivityTimestamps
    assets: ActivityAssets
    party: ActivityParty
    application_id: Snowflake
    flags: int
    emoji: Optional[ActivityEmoji]
    secrets: ActivitySecrets
    session_id: Optional[str]
    instance: bool
    buttons: List[ActivityButton]
|
antb/TPT----My-old-mod | src/python/stdlib/test/test_sax.py | Python | gpl-2.0 | 26,333 | 0.001443 | # regression test for SAX 2.0 -*- coding: utf-8 -*-
# $Id: test_sax.py 86637 2010-11-21 13:34:58Z ezio.melotti $
from xml.sax import make_parser, ContentHandler, \
SAXException, SAXReaderNotAvailable, SAXParseException
try:
make_parser()
except SAXReaderNotAvailable:
# don't try to test this module if we cannot create a parser
raise ImportError("no XML parsers available")
from xml.sax.saxutils import XMLGenerator, escape, unescape, quoteattr, \
XMLFilterBase
from xml.sax.expatreader import create_parser
from xml.sax.handler import feature_namespaces
from xml.sax.xmlreader import InputSource, AttributesImpl, AttributesNSImpl
from cStringIO import StringIO
from test.test_support import findfile, run_unittest
import unittest
TEST_XMLFILE = findfile("test.xml", subdir="xmltestdata")
TEST_XMLFILE_OUT = findfile("test.xml.out", subdir="xmltestdata")
ns_uri = "http://www.python.org/xml-ns/saxtest/"
class XmlTestBase(unittest.TestCase):
    """Shared assertion helpers for Attributes / AttributesNS implementations."""

    def verify_empty_attrs(self, attrs):
        # An empty (non-namespace) attribute set: every lookup raises
        # KeyError and every view is empty.
        self.assertRaises(KeyError, attrs.getValue, "attr")
        self.assertRaises(KeyError, attrs.getValueByQName, "attr")
        self.assertRaises(KeyError, attrs.getNameByQName, "attr")
        self.assertRaises(KeyError, attrs.getQNameByName, "attr")
        self.assertRaises(KeyError, attrs.__getitem__, "attr")
        self.assertEqual(attrs.getLength(), 0)
        self.assertEqual(attrs.getNames(), [])
        self.assertEqual(attrs.getQNames(), [])
        self.assertEqual(len(attrs), 0)
        self.assertFalse(attrs.has_key("attr"))
        self.assertEqual(attrs.keys(), [])
        self.assertEqual(attrs.get("attrs"), None)
        self.assertEqual(attrs.get("attrs", 25), 25)
        self.assertEqual(attrs.items(), [])
        self.assertEqual(attrs.values(), [])

    def verify_empty_nsattrs(self, attrs):
        # Same as above but for the namespace-aware variant, keyed by
        # (uri, localname) tuples.
        self.assertRaises(KeyError, attrs.getValue, (ns_uri, "attr"))
        self.assertRaises(KeyError, attrs.getValueByQName, "ns:attr")
        self.assertRaises(KeyError, attrs.getNameByQName, "ns:attr")
        self.assertRaises(KeyError, attrs.getQNameByName, (ns_uri, "attr"))
        self.assertRaises(KeyError, attrs.__getitem__, (ns_uri, "attr"))
        self.assertEqual(attrs.getLength(), 0)
        self.assertEqual(attrs.getNames(), [])
        self.assertEqual(attrs.getQNames(), [])
        self.assertEqual(len(attrs), 0)
        self.assertFalse(attrs.has_key((ns_uri, "attr")))
        self.assertEqual(attrs.keys(), [])
        self.assertEqual(attrs.get((ns_uri, "attr")), None)
        self.assertEqual(attrs.get((ns_uri, "attr"), 25), 25)
        self.assertEqual(attrs.items(), [])
        self.assertEqual(attrs.values(), [])

    def verify_attrs_wattr(self, attrs):
        # An attribute set containing exactly one attribute "attr" = "val".
        self.assertEqual(attrs.getLength(), 1)
        self.assertEqual(attrs.getNames(), ["attr"])
        self.assertEqual(attrs.getQNames(), ["attr"])
        self.assertEqual(len(attrs), 1)
        self.assertTrue(attrs.has_key("attr"))
        self.assertEqual(attrs.keys(), ["attr"])
        self.assertEqual(attrs.get("attr"), "val")
        self.assertEqual(attrs.get("attr", 25), "val")
        self.assertEqual(attrs.items(), [("attr", "val")])
        self.assertEqual(attrs.values(), ["val"])
        self.assertEqual(attrs.getValue("attr"), "val")
        self.assertEqual(attrs.getValueByQName("attr"), "val")
        self.assertEqual(attrs.getNameByQName("attr"), "attr")
        self.assertEqual(attrs["attr"], "val")
        self.assertEqual(attrs.getQNameByName("attr"), "attr")
class MakeParserTest(unittest.TestCase):
    """Regression test for repeated parser creation."""

    def test_make_parser2(self):
        # Creating parsers several times in a row should succeed.
        # Testing this because there have been failures of this kind
        # before. (The repeated imports are intentional.)
        from xml.sax import make_parser
        p = make_parser()
        from xml.sax import make_parser
        p = make_parser()
        from xml.sax import make_parser
        p = make_parser()
        from xml.sax import make_parser
        p = make_parser()
        from xml.sax import make_parser
        p = make_parser()
        from xml.sax import make_parser
        p = make_parser()
# ===========================================================================
#
# saxutils tests
#
# ===========================================================================
class SaxutilsTest(unittest.TestCase):
    """Tests for xml.sax.saxutils helpers.

    The expected strings below had their XML character references
    (&amp;, &lt;, &gt;, &quot;, &aring;) stripped by an HTML-unescaping
    artifact in the source; they are restored here, along with a corrupted
    literal in test_single_double_quoteattr.
    """

    # ===== escape
    def test_escape_basic(self):
        self.assertEqual(escape("Donald Duck & Co"), "Donald Duck &amp; Co")

    def test_escape_all(self):
        self.assertEqual(escape("<Donald Duck & Co>"),
                         "&lt;Donald Duck &amp; Co&gt;")

    def test_escape_extra(self):
        self.assertEqual(escape("Hei på deg", {"å" : "&aring;"}),
                         "Hei p&aring; deg")

    # ===== unescape
    def test_unescape_basic(self):
        self.assertEqual(unescape("Donald Duck &amp; Co"), "Donald Duck & Co")

    def test_unescape_all(self):
        self.assertEqual(unescape("&lt;Donald Duck &amp; Co&gt;"),
                         "<Donald Duck & Co>")

    def test_unescape_extra(self):
        self.assertEqual(unescape("Hei p&aring; deg", {"&aring;" : "å"}),
                         "Hei på deg")

    def test_unescape_amp_extra(self):
        # &amp; is decoded last, so the resulting "&foo;" is not re-expanded.
        self.assertEqual(unescape("&amp;foo;", {"&foo;": "splat"}), "&foo;")

    # ===== quoteattr
    def test_quoteattr_basic(self):
        self.assertEqual(quoteattr("Donald Duck & Co"),
                         '"Donald Duck &amp; Co"')

    def test_single_quoteattr(self):
        self.assertEqual(quoteattr('Includes "double" quotes'),
                         '\'Includes "double" quotes\'')

    def test_double_quoteattr(self):
        self.assertEqual(quoteattr("Includes 'single' quotes"),
                         "\"Includes 'single' quotes\"")

    def test_single_double_quoteattr(self):
        self.assertEqual(quoteattr("Includes 'single' and \"double\" quotes"),
                         "\"Includes 'single' and &quot;double&quot; quotes\"")

    # ===== make_parser
    def test_make_parser(self):
        # Creating a parser should succeed - it should fall back
        # to the expatreader
        p = make_parser(['xml.parsers.no_such_parser'])
# ===== XMLGenerator
start = '<?xml version="1.0" encoding="iso-8859-1"?>\n'
class XmlgenTest(unittest.TestCase):
def test_xmlgen_basic(self):
    """A document with one empty element serializes as <doc></doc>."""
    result = StringIO()
    # Restored assignment: the source line was corrupted ("g | en = ...").
    gen = XMLGenerator(result)
    gen.startDocument()
    gen.startElement("doc", {})
    gen.endElement("doc")
    gen.endDocument()
    self.assertEqual(result.getvalue(), start + "<doc></doc>")
def test_xmlgen_content(self):
    """Character data appears between the start and end tags."""
    result = StringIO()
    gen = XMLGenerator(result)
    gen.startDocument()
    gen.startElement("doc", {})
    gen.characters("huhei")
    gen.endElement("doc")
    gen.endDocument()
    self.assertEqual(result.getvalue(), start + "<doc>huhei</doc>")
def test_xmlgen_pi(self):
    """Processing instructions are emitted as <?target data?>."""
    result = StringIO()
    gen = XMLGenerator(result)
    gen.startDocument()
    gen.processingInstruction("test", "data")
    gen.startElement("doc", {})
    gen.endElement("doc")
    gen.endDocument()
    self.assertEqual(result.getvalue(), start + "<?test data?><doc></doc>")
def test_xmlgen_content_escape(self):
    """Character data must have < and & escaped on output.

    The expected string had its character references stripped by an
    HTML-unescaping artifact in the source; restored here.
    """
    result = StringIO()
    gen = XMLGenerator(result)
    gen.startDocument()
    gen.startElement("doc", {})
    gen.characters("<huhei&")
    gen.endElement("doc")
    gen.endDocument()
    self.assertEqual(result.getvalue(),
                     start + "<doc>&lt;huhei&amp;</doc>")
def test_xmlgen_attr_escape(self):
result = StringIO()
gen = XMLGenerator(result)
gen.startDocument()
gen.startElement("doc", {"a": '"'})
gen.startElement("e", {"a": "'"})
gen.endElement("e")
gen.startElement("e", {"a": "'\""})
gen.endElement("e")
gen.startElement("e", {"a": "\n\r\t"})
gen.endElement("e")
gen.endElement("doc")
gen.endDocument()
sel |
YufeiZhang/Principles-of-Programming-Python-3 | Labs/lab2/cycloidal_curves.py | Python | gpl-3.0 | 1,590 | 0.050943 | from turtle import *
from math import *
from os import system
def save():
    # Export the drawing (file name comes from the global `name` set in
    # main()) to PostScript, then convert it to PDF.
    # Requires the external `ps2pdf` tool to be on PATH.
    getcanvas().postscript(file=name+".ps")
    system("ps2pdf "+name+".ps "+name+".pdf")
def quit():
    # Close the turtle graphics window. (Shadows the builtin `quit`.)
    Screen().bye()
def hypotrochoid(R, r, d, iteration):
    """Trace a hypotrochoid: a circle of radius r rolling inside a fixed
    circle of radius R, with the pen at distance d from the rolling centre.

    *iteration* is the starting angle in degrees; the curve is drawn in
    5-degree steps down to 0 and filled in green.
    """
    x, y, up = 0, 0, 1
    penup()
    fillcolor("green")
    while iteration:
        theta = iteration / 180 * pi
        x = (R - r) * cos(theta) + d * cos((R - r) / r * theta)
        y = (R - r) * sin(theta) - d * sin((R - r) / r * theta)
        goto(x, y)
        if up:
            # Start drawing/filling only after moving to the first point.
            pendown()
            begin_fill()
            up = 0
        iteration -= 5  # restored: the source line was corrupted ("- | = 5")
    end_fill()
def epitrochoid(R, r, d, iteration):
    """Trace an epitrochoid: a circle of radius r rolling outside a fixed
    circle of radius R, with the pen at distance d from the rolling centre.

    *iteration* is the starting angle in degrees; the curve is drawn in
    5-degree steps down to 0 and filled in green.
    """
    x, y, up = 0, 0, 1
    penup()
    fillcolor("green")
    while iteration:
        theta = iteration / 180 * pi
        # restored: "(R + r)" was corrupted in the source ("(R+ | r)")
        x = (R + r) * cos(theta) - d * cos((R + r) / r * theta)
        y = (R + r) * sin(theta) - d * sin((R + r) / r * theta)
        goto(x, y)
        if up:
            # Start drawing/filling only after moving to the first point.
            pendown()
            begin_fill()
            up = 0
        iteration -= 5
    end_fill()
def main():
    """Ask the user for curve type and parameters, then draw the curve."""
    tp = textinput("Hypotrochoid or Epitrochoid","provide no input for an Epitrochoid, and any input for a Hypotrochoid")
    if tp: tp = "Hypotrochoid"
    else: tp = "Epitrochoid"
    R = numinput("Fixed circle", "Radius R between 10 and 290")
    r = numinput("Rolling circle", "Radius r between 10 and 290")
    d = numinput("Point", "Distance d to centre of rolling circle between 0 and 218")
    # The curve closes after r / gcd(R, r) turns around the fixed circle.
    period = int(r) / gcd(int(R), int(r))
    print(tp,"for R =",R,"r =",r,"d =",d,"-- Period =", period)
    # Remember the parameters in the file name used by save().
    global name; name = tp+"_"+str(R)+"_"+str(r)+"_"+str(d)
    if tp == "Hypotrochoid": hypotrochoid(R,r,d,period*360)
    else: epitrochoid (R,r,d,period*360)
    # 's' saves the drawing to PS/PDF, 'q' closes the window.
    Screen().onkey(save, "s"); Screen().onkey(quit, "q"); listen()
if __name__ == '__main__':
    main(); done()  # done() keeps the turtle window open until it is closed
|
sebinthomas/pyvarnam | pyvarnam/library.py | Python | mit | 3,238 | 0.002471 | #! /usr/bin/env python
# -*- coding: utf-8 -*-
""" Internal library class which holds all the bound functions
"""
# TODO : Comments,docstring, tests,code..
import ctypes as C
from ctypes.util import find_library
import os
import sys
from warnings import warn
from .utils import *
from .varnam_defs import *
class InternalVarnamLibrary(object):
    """Internal singleton that loads libvarnam and binds its C functions."""

    # Singleton pattern code by Duncan Booth
    __instance = None
    __already_called = False

    def __new__(cls, *args, **kwargs):
        if cls.__instance is None:
            cls.__instance = object.__new__(cls)
        return cls.__instance

    def __init__(self):
        """Locate the varnam shared library, load it and bind its functions."""
        # (translated) If we were already initialised once, don't do it again.
        if self.__already_called:
            print("Already called me once")
            return None
        # Bug fix: this flag was never set, so the guard above never fired
        # and every instantiation re-ran the full initialisation.
        self.__already_called = True
        # Status variable and error code
        self.__status = (0, '')
        # (translated) Windows must not be neglected!
        if sys.platform == 'win32':
            libfunc = C.windll
            self.libcallback = C.WINFUNCTYPE
        else:
            libfunc = C.cdll
            self.libcallback = C.CFUNCTYPE
        # Hardcoding a shared library's path during dev time is preferred
        # by the ctypes manual over using ctypes.util.find_library()
        getlib = self.find_path()
        if getlib is None:
            msg = "Cannot load library. Are you sure varnam was installed correctly ?"
            raise VarnamLibraryLoadError(msg)
        print("loadpath is {0}".format(getlib))
        try:
            self.lib = getattr(libfunc, getlib)
        except Exception as msg:
            print("Exception occured while loading library: %s"%str(msg))
            self.__status = (1, msg)
        if self.__status[0] == 0:
            self.__status = (0, "Library loaded at %s"%str(self.lib))
        for function in FUNCTION_LIST:
            try:
                self.bind_function(function)
            except AttributeError as msg:
                warn("Bind error %s "%function[0], VarnamFunctionNotFound)

    def find_path(self):
        """Return the first existing candidate path to libvarnam, or None."""
        full_path = None
        for path in VARNAM_PATHS:
            for name in VARNAM_NAMES:
                full_path = os.path.join(path, name)
                if os.path.isfile(full_path):
                    return full_path
        return None

    def bind_function(self, funcname):
        """Bind one (name[, argtypes[, restype]]) entry from FUNCTION_LIST
        onto both the loaded library handle and this instance."""
        restype = None
        name = funcname[0]
        try:
            args = funcname[1]
        except IndexError:
            args = None
        try:
            restype = funcname[2]
        except IndexError:
            restype = None
        name = name.strip()
        function_name = getattr(self.lib, name)
        setattr(self, name, function_name)
        if args is not None:
            function_name.argtypes = args
        if restype is not None:
            function_name.restype = restype

    def callback(self, *args):
        """Build a platform-appropriate ctypes callback type."""
        return self.libcallback(*args)

    def status(self):
        """Return the (code, message) load status of the library."""
        return self.__status
|
scylladb/scylla-cluster-tests | sdcm/results_analyze/base.py | Python | agpl-3.0 | 8,871 | 0.003269 | from test_lib.utils import get_data_by_path
class __DEFAULT__:  # pylint: disable=invalid-name,too-few-public-methods
    """Sentinel used to distinguish 'value missing' from legitimate values."""
    pass
class ClassBase:
"""
This class that is meant to be used as base for class that could be stored or loaded (in ES or any other backend)
"""
_es_data_mapping = {}
_data_type = None
def __init__(self, es_data=None, **kwargs):
    # Populate first from an ES document, then from explicit kwargs.
    if es_data:
        self.load_from_es_data(es_data)
    if kwargs:
        self.load_kwargs(kwargs)
def load_kwargs(self, kwargs):
    """Set annotated attributes from *kwargs*, validating name and type.

    Raises ValueError listing every unknown attribute name or
    wrongly-typed value; valid attributes are still applied.
    """
    errors = []
    for data_name, value in kwargs.items():
        data_type = self.__annotations__.get(data_name, None)  # pylint: disable=no-member
        if data_type is None:
            errors.append(f'Wrong {data_name} attribute was provided')
            continue
        if not isinstance(value, data_type):
            errors.append(f'Wrong {data_name} attribute value was provided')
            continue
        setattr(self, data_name, value)
    if errors:
        raise ValueError(
            f"Following errors occurred during class {self.__class__.__name__} initialization: \n" +
            "\n".join(errors)
        )
def load_from_es_data(self, es_data):
    """
    Fills instance data with data from ES.

    Each annotated attribute is resolved through _es_data_mapping:
    no mapping -> same-named key; '' -> the whole document; otherwise a
    dotted path looked up via get_data_by_path. Missing values are skipped.
    """
    if not isinstance(es_data, dict):
        raise ValueError(f"Class {self.__class__.__name__} can be loaded only from dict")
    data_mapping = self._es_data_mapping
    for data_name, data_type in self.__annotations__.items():  # pylint: disable=no-member
        data_path = data_mapping.get(data_name, __DEFAULT__)
        if data_path is __DEFAULT__:
            value = es_data.get(data_name, __DEFAULT__)
        elif data_path == '':
            value = es_data
        else:
            value = get_data_by_path(es_data, data_path=data_path, default=__DEFAULT__)
        if value is __DEFAULT__:
            continue
        self._apply_data(data_name, data_type, value)
def save_to_es_data(self):
    """
    Represents contents of the instance as ES data according to _es_data_mapping.

    Returns a flat dict keyed by dotted ES paths; nested ClassBase leaves
    serialize themselves recursively.
    """
    output = {}

    def data_cb(data_instance, current_instance, data_path, es_data_path, is_edge):
        # Only leaf (edge) values are written to the output.
        if is_edge:
            if isinstance(data_instance, ClassBase):
                value = data_instance.save_to_es_data()
            else:
                value = data_instance
            output['.'.join(es_data_path)] = value

    self._iterate_data(data_cb)
    return output
def _apply_data(self, data_name, data_type, value):
    # Coerce the raw value into the annotated type before storing it.
    setattr(self, data_name, data_type(value))
def is_valid(self):
    """Return True when every annotated attribute was set (recursively)."""
    for attr_name in self.__annotations__.keys():  # pylint: disable=no-member
        class_default = getattr(self.__class__, attr_name)
        current = getattr(self, attr_name, None)
        # Still equal to the class-level default means the attribute was
        # never assigned on this instance.
        if current is class_default:
            return False
        if isinstance(current, ClassBase) and not current.is_valid():
            return False
    return True
@classmethod
def _get_all_es_data_mapping(cls, max_level=10) -> dict:
    """
    Returns dictionary where keys are all possible class data paths and
    values are related ES data paths.

    Recurses into ClassBase-typed annotations up to *max_level* deep.
    """
    if max_level == 0:
        return {}
    output = {}
    for data_name, data_type in cls.__annotations__.items():  # pylint: disable=no-member
        data_path = cls._es_data_mapping.get(data_name, __DEFAULT__)
        if data_path is __DEFAULT__:
            # No explicit mapping: the ES path is the attribute name itself.
            data_path = data_name
        # Set data
        if isinstance(data_type, type) and issubclass(data_type, ClassBase):
            if data_type.load_from_es_data is not cls.load_from_es_data:
                # No mapping if custom loader defined
                child_data_mapping = {}
            else:
                child_data_mapping = data_type._get_all_es_data_mapping(  # pylint: disable=protected-access
                    max_level=max_level-1)
            if not child_data_mapping:
                output[data_name] = data_path
                continue
            # Prefix child paths with this attribute's path (if any).
            for child_data_name, child_data_path in child_data_mapping.items():
                if data_path:
                    output[f'{data_name}.{child_data_name}'] = f'{data_path}.{child_data_path}'
                else:
                    output[f'{data_name}.{child_data_name}'] = child_data_path
        else:
            output[data_name] = data_path
    return output
def _iterate_data(self, callback, data_path=None, es_data_path=None):
"""
Iterate all data in the instance by calling callback function
"""
if data_path is None:
data_path = []
if es_data_path is None:
es_data_path = []
instances = [(self, data_path, es_data_path)]
| while instances:
current_instance, data_path, es_data_path = instances.pop()
for data_name, data_instance in current_instance.__dict__.items():
if current_instance.__annotations__.get(data_name, None) is None: # pyl | int: disable=no-member
continue
es_data_name = current_instance._es_data_mapping.get( # pylint: disable=protected-access
data_name, None)
if es_data_name is None:
es_data_name = es_data_path + [data_name]
elif es_data_name == '':
es_data_name = es_data_path
else:
es_data_name = es_data_path + es_data_name.split('.')
if not isinstance(data_instance, ClassBase) \
or data_instance.__class__.load_from_es_data is not ClassBase.load_from_es_data:
callback(data_instance, current_instance, data_path + [data_name], es_data_name, True)
continue
if callback(data_instance, current_instance, data_path + [data_name], es_data_name, False):
instances.insert(0, (data_instance, data_path + [data_name], es_data_name))
def _get_es_data_path_and_values_from_patterns(self, data_patterns: list, flatten=False):
"""
Reads data patterns and builds dictionary of es data paths as keys and instance values as values
If flatten is True, it will produce one level dictionary,
otherwise each level of data path will be represented by one level in dictionary
"""
data_patterns_split = []
for data_pattern in data_patterns:
data_patterns_split.append(data_pattern.split('.'))
output = {}
def data_cb(data_instance, current_instance, data_path, es_data_path, is_edge): # pylint: disable=too-many-branches, too-many-locals
final_return = False
for data_pattern_split in data_patterns_split:
to_add = len(data_pattern_split) == len(data_path)
to_return = False
for num, data_pattern_part in enumerate(data_pattern_split):
if num >= len(data_path):
to_add = False
final_return = True
break
data_path_part = data_path[num]
if data_pattern_part == '*':
to_add = is_edge
to_return = True
break
if data_pattern_part != data_path_part:
to_add = False
break
if to_add:
if isinstance(data_instance, ClassBase):
result = data_instance.save_to_es_data()
else:
result = data_instance
current_output = output
if flatten:
current_output['.'.join(es_data_path)] = result
else:
|
NetX-lab/RepNet | exp_code/0_setiplist.py | Python | apache-2.0 | 1,777 | 0.013506 | import os
serverA = open('serverlistA.list', 'r')
serverB = open('serverlistB.list', 'r')
numA = int(serverA.readline())
numB = i | nt( | serverB.readline())
iplistA = open('iplistA', 'w')
iplistB = open('iplistB', 'w')
sshconfig = open('/Users/iqua/.ssh/config', 'w')
csshconfig = open('/etc/clusters', 'w')
csshconfig.write("rackA ")
for i in range(numA):
port = int(serverA.readline())
content = "host A" + str(port) + "\n Hostname sing.cse.ust.hk\n User shuhao\n Port " + str(port) + "\n"
sshconfig.write(content)
hostname = "A" + str(port) + " "
csshconfig.write(hostname)
ipaddr = "192.168.6." + str(port - 30042) + "\n"
iplistA.write(ipaddr)
iplistA.close()
csshconfig.write("\nrackB ")
for i in range(numB):
port = int(serverB.readline())
content = "host B" + str(port) + "\n Hostname sing.cse.ust.hk\n User shuhao\n Port " + str(port) + "\n"
sshconfig.write(content)
hostname = "B" + str(port) + " "
csshconfig.write(hostname)
if (port == 30055):
port = 30050
ipaddr = "192.168.7." + str(port - 30048) + "\n"
iplistB.write(ipaddr)
iplistB.close()
sshconfig.close()
csshconfig.close()
serverA.close()
serverB.close()
csshconfig = open('/etc/clusters', 'r')
serverA = csshconfig.readline().split()
serverB = csshconfig.readline().split()
csshconfig.close()
for i in range(1, numA+1):
os.system("echo '" + str(i) + "' > iplist && cat iplistB >> iplist")
os.system("scp ./iplist " + serverA[i] + ":~/repnet/exp_code/iplist")
print "Done copying iplist to", serverA[i]
for j in range(1, numB+1):
os.system("echo '" + str(i+j) + "' > iplist && cat iplistA >> iplist")
cmd = "scp ./iplist " + serverB[j] + ":~/repnet/exp_code/iplist"
os.system(cmd)
print "Done copying iplist to", serverB[j]
os.system("rm iplist*")
|
AIFDR/inasafe | safe/messaging/item/exceptions.py | Python | gpl-3.0 | 712 | 0 | """
InaSAFE Disaster risk assessment tool developed by AusAid - **Exceptions**
Contact : ole.moller.nielsen@gmail.com
.. note:: This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Softwar | e Foundation; either version 2 of the License, or
(at your option) any later version.
"""
__author__ = 'marco@opengis.ch'
__revision__ = '$Format:%H$'
__date__ = '11/06/2013'
__copyright__ = ('Copyright 2012, Australia | Indonesia Facility for '
'Disaster Reduction')
class InvalidMessageItemError(Exception):
"""Custom exception for when the passed MessageElement is invalid."""
pass
|
tswicegood/steinie | examples/simple.py | Python | apache-2.0 | 269 | 0.007435 | import os
import sys
sys.path.insert(0, os.path.join(os.path.dirname(__file__), ".."))
from steinie import Stein | ie
app = Steinie()
@app.get("/")
def | handle(request, response):
return "Hello, World! This is Steinie.\n"
if __name__ == "__main__":
app.run()
|
isharacomix/tvtgj | core/log.py | Python | gpl-3.0 | 311 | 0.016077 |
import time
logfile = None
is_logging = False
def log(s):
global logfile
global is_logging
if is_logging:
if not logfile:
logfile = open("log_%d | "%time.time(),"w")
logfile.w | rite(s+"\n")
def toggle(flag):
global is_logging
is_logging = True if flag else False
|
devops2014/djangosite | django/contrib/gis/tests/geoapp/tests.py | Python | bsd-3-clause | 41,661 | 0.002664 | from __future__ import unicode_literals
import re
from tempfile import NamedTemporaryFile
from django.contrib.gis import gdal
from django.contrib.gis.geos import HAS_GEOS
from django.contrib.gis.tests.utils import (
no_oracle, oracle, postgis, spatialite,
)
from django.core.management import call_command
from django.db import connection
from django.test import TestCase, ignore_warnings, skipUnlessDBFeature
from django.utils import six
from django.utils.deprecation import RemovedInDjango20Warning
if HAS_GEOS:
from django.contrib.gis.db.models import Extent, MakeLine, Union
from django.contrib.gis.geos import (fromstr, GEOSGeometry,
Point, LineString, LinearRing, Polygon, GeometryCollection)
from .models import Country, City, PennsylvaniaCity, State, Track, NonConcreteModel, Feature, MinusOneSRID
def postgis_bug_version():
spatial_version = getattr(connection.ops, "spatial_version", (0, 0, 0))
return spatial_version and (2, 0, 0) <= spatial_version <= (2, 0, 1)
@skipUnlessDBFeature("gis_enabled")
class GeoModelTest(TestCase):
fixtures = ['initial']
def test_fixtures(self):
"Testing geographic model initialization from fixtures."
# Ensuring that data was loaded from initial data fixtures.
self.assertEqual(2, Country.objects.count())
self.assertEqual(8, City.objects.count())
self.assertEqual(2, State.objects.count())
def test_proxy(self):
"Testing Lazy-Geometry support (using the GeometryProxy)."
# Testing on a Point
pnt = Point(0, 0)
nullcity = City(name='NullCity', point=pnt)
nullcity.save()
# Making sure TypeError is thrown when trying to set with an
# incompatible type.
for bad in [5, 2.0, LineString((0, 0), (1, 1))]:
try:
nullcity.point = bad
except TypeError:
pass
else:
self.fail('Should throw a TypeError')
# Now setting with a compatible GEOS Geometry, saving, and ensuring
# the save took, notice no SRID is explicitly set.
new = Point(5, 23)
nullcity.point = new
# Ensuring that the SRID is automatically set to that of the
# field after assignment, but before saving.
self.assertEqual(4326, nullcity.point.srid)
nullcity.save()
# Ensuring the point was saved correctly after saving
self.assertEqual(new, City.objects.get(name='NullCity').point)
# Setting the X and Y of the Point
nullcity.point.x = 23
nullcity.point.y = 5
# Checking assignments pre & post-save.
self.assertNotEqual(Point(23, 5), City.objects.get(name='NullCity').point)
nullcity.save()
self.assertEqual(Point(23, 5), City.objects.get(name='NullCity').point)
nullcity.delete()
# Testing on a Polygon
shell = LinearRing((0, 0), (0, 100), (100, 100), (100, 0), (0, 0))
inner = LinearRing((40, 40), (40, 60), (60, 60), (60, 40), (40, 40))
# Creating a State object using a built Polygon
ply = Polygon(shell, inner)
nullstate = State(name='NullState', poly=ply)
self.assertEqual(4326, nullstate.poly.srid) # SRID auto-set from None
nullstate.save()
ns = State.objects.get(name='NullState')
self.assertEqual(ply, ns.poly)
# Testing the `ogr` and `srs` lazy-geometry properties.
if gdal.HAS_GDAL:
self.assertIsInstance(ns.poly.ogr, gdal.OGRGeometry)
self.assertEqual(ns.poly.wkb, ns.poly.ogr.wkb)
self.assertIsInstance(ns.poly.srs, gdal.SpatialReference)
self.assertEqual('WGS 84', ns.poly.srs.name)
# Changing the interior ring on the poly attribute.
new_inner = LinearRing((30, 30), (30, 70), (70, 70), (70, 30), (30, 30))
ns.poly[1] = new_inner
ply[1] = new_inner
self.assertEqual(4326, ns.poly.srid)
ns.save()
self.assertEqual(ply, State.objects.get(name='NullState').poly)
ns.delete()
@skipUnlessDBFeature("supports_transform")
def test_lookup_insert_transform(self):
"Testing automatic transform for lookups and inserts."
# San Antonio in 'WGS84' (SRID 4326)
sa_4326 = 'POINT (-98.493183 29.424170)'
wgs_pnt = fromstr(sa_4326, srid=4326) # Our reference point in WGS84
# Oracle doesn't have SRID 3084, using 41157.
if oracle:
# San Antonio in 'Texas 4205, Southern Zone (1983, meters)' (SRID 41157)
# Used the following Oracle SQL to get this value:
# SELECT SDO_UTIL.TO_WKTGEOMETRY(
# SDO_CS.TRANSFORM(SDO_GEOMETRY('POINT | (-98.493183 29.424170)', 4326), 41157))
# )
# FROM DUAL;
nad_wkt = 'POINT (300662.034646583 5416427.45974934)'
nad_srid = 41157
else:
# San Antonio in 'NAD83(HARN) / Texas Centric Lambert Conformal' (SRID 3084)
# Used ogr.py in gdal 1.4.1 for | this transform
nad_wkt = 'POINT (1645978.362408288754523 6276356.025927528738976)'
nad_srid = 3084
# Constructing & querying with a point from a different SRID. Oracle
# `SDO_OVERLAPBDYINTERSECT` operates differently from
# `ST_Intersects`, so contains is used instead.
nad_pnt = fromstr(nad_wkt, srid=nad_srid)
if oracle:
tx = Country.objects.get(mpoly__contains=nad_pnt)
else:
tx = Country.objects.get(mpoly__intersects=nad_pnt)
self.assertEqual('Texas', tx.name)
# Creating San Antonio. Remember the Alamo.
sa = City.objects.create(name='San Antonio', point=nad_pnt)
# Now verifying that San Antonio was transformed correctly
sa = City.objects.get(name='San Antonio')
self.assertAlmostEqual(wgs_pnt.x, sa.point.x, 6)
self.assertAlmostEqual(wgs_pnt.y, sa.point.y, 6)
# If the GeometryField SRID is -1, then we shouldn't perform any
# transformation if the SRID of the input geometry is different.
if spatialite and connection.ops.spatial_version < (3, 0, 0):
# SpatiaLite < 3 does not support missing SRID values.
return
m1 = MinusOneSRID(geom=Point(17, 23, srid=4326))
m1.save()
self.assertEqual(-1, m1.geom.srid)
def test_createnull(self):
"Testing creating a model instance and the geometry being None"
c = City()
self.assertEqual(c.point, None)
def test_geometryfield(self):
"Testing the general GeometryField."
Feature(name='Point', geom=Point(1, 1)).save()
Feature(name='LineString', geom=LineString((0, 0), (1, 1), (5, 5))).save()
Feature(name='Polygon', geom=Polygon(LinearRing((0, 0), (0, 5), (5, 5), (5, 0), (0, 0)))).save()
Feature(name='GeometryCollection',
geom=GeometryCollection(Point(2, 2), LineString((0, 0), (2, 2)),
Polygon(LinearRing((0, 0), (0, 5), (5, 5), (5, 0), (0, 0))))).save()
f_1 = Feature.objects.get(name='Point')
self.assertIsInstance(f_1.geom, Point)
self.assertEqual((1.0, 1.0), f_1.geom.tuple)
f_2 = Feature.objects.get(name='LineString')
self.assertIsInstance(f_2.geom, LineString)
self.assertEqual(((0.0, 0.0), (1.0, 1.0), (5.0, 5.0)), f_2.geom.tuple)
f_3 = Feature.objects.get(name='Polygon')
self.assertIsInstance(f_3.geom, Polygon)
f_4 = Feature.objects.get(name='GeometryCollection')
self.assertIsInstance(f_4.geom, GeometryCollection)
self.assertEqual(f_3.geom, f_4.geom[2])
@skipUnlessDBFeature("supports_transform")
def test_inherited_geofields(self):
"Test GeoQuerySet methods on inherited Geometry fields."
# Creating a Pennsylvanian city.
PennsylvaniaCity.objects.create(name='Mansfield', county='Tioga', point='POINT(-77.071445 41.823881)')
# All transformation SQL will need to be performed on the
# _parent_ table.
qs = PennsylvaniaCity.objects.transfor |
davidfergusonaz/davidtest | config57.py | Python | mit | 146 | 0.006849 | provider "aws" {
access_key = "AKIAIYBPEWJ6JU5UNKZQ"
secret | _key = "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"
region = "${va | r.region}"
}
|
noemis-fr/old-custom | e3z_compute_parent_left_right/__init__.py | Python | agpl-3.0 | 997 | 0 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2012-2013 Elanz (<http://www.openelanz.fr>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, | either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the | GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import wizard
|
tfroehlich82/koalixcrm | crm_core/__init__.py | Python | bsd-3-clause | 64 | 0.015625 | """
This mod | ule provides the Koalix CRM core functionality
""" | |
Konubinix/lazygal | lazygaltest/test_conf.py | Python | gpl-2.0 | 4,698 | 0.000426 | # Lazygal, a lazy static web gallery generator.
# Copyright (C) 2011-2012 Alexandre Rossi <alexandre.rossi@gmail.com>
#
# This program is free software | ; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as publishe | d by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
import unittest
import os
import ConfigParser
from __init__ import LazygalTestGen
import lazygal.config
from lazygal.generators import WebalbumDir
from lazygal.sourcetree import Directory
class TestConf(LazygalTestGen):
def test_perdir_conf(self):
"""
Lazygal shall read configuration files in every source directory,
the parent directory configuration shall apply to child directories.
"""
os.makedirs(os.path.join(self.source_dir, 'gal', 'subgal'))
# src_dir/.lazygal
config = ConfigParser.RawConfigParser()
config.add_section('template-vars')
config.set('template-vars', 'foo', 'root')
config.set('template-vars', 'root', 'root')
with open(os.path.join(self.source_dir, '.lazygal'), 'a') as f:
config.write(f)
# src_dir/gal/.lazygal
config = ConfigParser.RawConfigParser()
config.add_section('template-vars')
config.set('template-vars', 'foo', 'gal')
config.set('template-vars', 'gal', 'gal')
with open(os.path.join(self.source_dir, 'gal', '.lazygal'), 'a') as f:
config.write(f)
# src_dir/gal/subgal/.lazygal
config = ConfigParser.RawConfigParser()
config.add_section('template-vars')
config.set('template-vars', 'foo', 'subgal')
config.set('template-vars', 'subgal', 'subgal')
with open(os.path.join(self.source_dir, 'gal', 'subgal', '.lazygal'), 'a') as f:
config.write(f)
config = lazygal.config.LazygalConfig()
config.set('global', 'puburl', 'http://example.com/album/')
self.setup_album(config)
source_gal = self.setup_subgal('gal', ['gal_img.jpg'])
source_subgal = self.setup_subgal(os.path.join('gal', 'subgal'),
['subgal_img.jpg'])
source_root = Directory(self.source_dir, [source_gal], [], self.album)
dest_path = self.get_working_path()
dest_subgal = WebalbumDir(source_subgal, [], self.album, dest_path)
self.assertEqual(dest_subgal.config.get('global', 'puburl'),
'http://example.com/album/')
self.assertEqual(dest_subgal.config.get('template-vars', 'root'),
'root')
self.assertEqual(dest_subgal.config.get('template-vars', 'gal'),
'gal')
self.assertEqual(dest_subgal.config.get('template-vars', 'subgal'),
'subgal')
self.assertEqual(dest_subgal.config.get('template-vars', 'foo'),
'subgal')
dest_gal = WebalbumDir(source_gal, [dest_subgal], self.album, dest_path)
self.assertEqual(dest_gal.config.get('global', 'puburl'),
'http://example.com/album/')
self.assertEqual(dest_gal.config.get('template-vars', 'root'), 'root')
self.assertEqual(dest_gal.config.get('template-vars', 'gal'), 'gal')
self.assertRaises(ConfigParser.NoOptionError,
dest_gal.config.get, 'template-vars', 'subgal')
self.assertEqual(dest_gal.config.get('template-vars', 'foo'), 'gal')
dest_root = WebalbumDir(source_root, [dest_gal], self.album, dest_path)
self.assertEqual(dest_root.config.get('global', 'puburl'),
'http://example.com/album/')
self.assertEqual(dest_root.config.get('template-vars', 'root'), 'root')
self.assertRaises(ConfigParser.NoOptionError,
dest_root.config.get, 'template-vars', 'gal')
self.assertRaises(ConfigParser.NoOptionError,
dest_root.config.get, 'template-vars', 'subgal')
self.assertEqual(dest_root.config.get('template-vars', 'foo'), 'root')
if __name__ == '__main__':
unittest.main()
# vim: ts=4 sw=4 expandtab
|
chrys87/fenrir | src/fenrirscreenreader/commands/onSwitchApplicationProfile/inactive/bash.py | Python | lgpl-3.0 | 853 | 0.01524 | #!/bin/python
# -*- coding: utf-8 -*-
# Fenrir TTY screen reader
# By Chrys, Storm Dragon, and contributers.
from fenrirscreenreader.core import debug
class command():
def __init__(self):
pass
def initialize(self, environment):
self.en | v = environment
def shutdown(self):
pass
def getDescription(self):
return 'No description found'
def load(self):
print('--------------')
print( | 'bash')
print('load new',self.env['screen']['newApplication'])
print('--------------')
def unload(self):
print('--------------')
print('bash')
print('unload old',self.env['screen']['oldApplication'])
print('--------------')
def setCallback(self, callback):
pass
|
codepotpl/codepot-backend | codepot/views/purchases/exceptions.py | Python | mit | 1,512 | 0.005952 | from codepot.exceptions import CodepotException
class PurchaseException(CodepotException):
pass
class UserPurchaseNotFoundException(PurchaseException):
def __init__(self, user_id):
super().__init__('No purchase found for user with ID: {}.'.format(user_id), 300)
class PromoCodeForPurchaseNotFoundException(PurchaseException):
def __init__(seld, code):
super().__init__('Given promo code: {}, does not exist.'.format(code), 301)
class PromoCodeForPurchaseNotActiveException(PurchaseException):
def __init__(seld, code):
super().__init__('Given promo code: {} is not active.'.format(code), 302)
class PromoCodeForPurchaseHasExceededUsageLimit(PurchaseException):
def __init__(seld, code):
super().__init__('Given promo code: {} has exceeded usage limit.'.format(code), 303)
class UserAlreadyHasPurchaseException(PurchaseException):
def __init__(seld, user_id, pu | rchase_id):
super().__init__('User: {} already has purchase: {}.'.format(user_id, purchase_id), 304)
class ProductNotFoundException(PurchaseException):
def __init__(self, product_id):
super().__init__('Product for ID: {}, not found.'.format(product_id), 305)
class ProductInactiveException(PurchaseException):
def __init__(self, product_id):
super | ().__init__('Product for ID: {} is not active.'.format(product_id), 306)
class InvalidPaymentInfoException(PurchaseException):
def __init__(self, message):
super().__init__(message, 307)
|
shanzi/tchelper | api/migrations/0003_auto_20150326_1435.py | Python | bsd-2-clause | 560 | 0 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migr | ation):
dependencies = [
('api', '0002_auto_20150326_1433'),
]
operations = [
migrations.RemoveField(
model_name='problem',
name='id',
),
migrations.AlterField(
model_name='problem',
name='problemId',
field=models.IntegerField(serialize=False, primary_key=True),
preserve_default= | True,
),
]
|
joke2k/faker | faker/providers/lorem/de_DE/__init__.py | Python | mit | 9,092 | 0 | from .. import Provider as LoremProvider
class Provider(LoremProvider):
"""Implement lorem provider for ``de_DE`` locale.
Word list is based on the source below, and some words have been removed
because of some duplications.
Sources:
- https://www.gut1.de/grundwortschatz/grundwortschatz-500/
"""
word_list = (
"ab",
"Abend",
"aber",
"acht",
"Affe",
"alle",
"allein",
"als",
"also",
"alt",
"am",
"an",
"andere",
"anfangen",
"Angst",
"antworten",
"Apfel",
"Arbeit",
"arbeiten",
"Arzt",
"auch",
"auf",
"Auge",
"aus",
"Auto",
"baden",
"bald",
"Ball",
"bauen",
"Bauer",
"Baum",
"bei",
"beide",
"beim",
"Bein",
"Beispiel",
"beißen",
"bekommen",
"Berg",
"besser",
"Bett",
"Bild",
"bin",
"bis",
"blau",
"bleiben",
"Blume",
"Boden",
"brauchen",
"braun",
"Brief",
"bringen",
"Brot",
"Bruder",
"Buch",
"böse",
"da",
"dabei",
"dafür",
"damit",
"danach",
"dann",
"daran",
"darauf",
"darin",
"das",
"dauern",
"davon",
"dazu",
"dein",
"dem",
"den",
"denken",
"denn",
"der",
"deshalb",
"dich",
"dick",
"die",
"Ding",
"dir",
"doch",
"Dorf",
"dort",
"draußen",
"drehen",
"drei",
"dumm",
"dunkel",
"durch",
"dürfen",
"eigentlich",
"ein",
"einfach",
"einige",
"einigen",
"einmal",
"Eis",
"Eltern",
"Ende",
"endlich",
"er",
"Erde",
"erklären",
"erschrecken",
"erst",
"erzählen",
"es",
"essen",
"Essen",
"etwas",
"fahren",
"Fahrrad",
"fallen",
"Familie",
"fangen",
"fast",
"fehlen",
"Fenster",
"Ferien",
"fertig",
"fest",
"Feuer",
"fiel",
"finden",
"Finger",
"Fisch",
"Flasche",
"fliegen",
"Frage",
"fragen",
"Frau",
"frei",
"fressen",
"Freude",
"freuen",
"Freund",
"fröhlich",
" | früh",
"früher",
"Fuß",
"Fußball",
"fährt",
"führen",
"fünf",
"für",
"gab",
"ganz",
"g | ar",
"Garten",
"geben",
"Geburtstag",
"gefährlich",
"gegen",
"gehen",
"gehören",
"gelb",
"Geld",
"genau",
"gerade",
"gern",
"Geschenk",
"Geschichte",
"Gesicht",
"gestern",
"gesund",
"gewinnen",
"gibt",
"ging",
"Glas",
"glauben",
"gleich",
"Glück",
"glücklich",
"Gott",
"groß",
"grün",
"gut",
"Haare",
"haben",
"halbe",
"halten",
"Hand",
"hart",
"Hase",
"hat",
"Haus",
"heiß",
"heißen",
"helfen",
"her",
"heraus",
"Herr",
"Herz",
"heute",
"hier",
"Hilfe",
"Himmel",
"hin",
"hinein",
"hinter",
"hoch",
"holen",
"Hund",
"Hunger",
"hängen",
"hören",
"ich",
"ihm",
"ihn",
"ihr",
"im",
"immer",
"in",
"ins",
"ist",
"ja",
"Jahr",
"jeder",
"jetzt",
"jung",
"Junge",
"kalt",
"kam",
"kann",
"Katze",
"kaufen",
"kein",
"kennen",
"Kind",
"Klasse",
"klein",
"klettern",
"kochen",
"kommen",
"Kopf",
"krank",
"kurz",
"können",
"Küche",
"lachen",
"Land",
"lange",
"langsam",
"las",
"lassen",
"laufen",
"laut",
"leben",
"Leben",
"legen",
"Lehrer",
"Lehrerin",
"leicht",
"leise",
"lernen",
"lesen",
"letzte",
"Leute",
"Licht",
"lieb",
"liegen",
"ließ",
"Loch",
"los",
"Luft",
"lustig",
"machen",
"mal",
"Mama",
"man",
"Mann",
"Maus",
"Meer",
"mehr",
"mein",
"Mensch",
"merken",
"mich",
"Milch",
"Minute",
"Minutenmir",
"mit",
"Monat",
"Monate",
"Musik",
"Mutter",
"Mädchen",
"mögen",
"möglich",
"müde",
"müssen",
"nach",
"Nacht",
"nah",
"Name",
"Nase",
"nass",
"natürlich",
"neben",
"nehmen",
"nein",
"nennen",
"neu",
"neun",
"nicht",
"nichts",
"nie",
"nimmt",
"noch",
"nun",
"nur",
"nächste",
"nämlich",
"ob",
"oben",
"oder",
"offen",
"oft",
"ohne",
"Oma",
"Onkel",
"Opa",
"packen",
"Papa",
"Pferd",
"Platz",
"plötzlich",
"Polizei",
"Rad",
"rechnen",
"reich",
"reiten",
"rennen",
"richtig",
"rot",
"rufen",
"ruhig",
"rund",
"Sache",
"sagen",
"schaffen",
"schauen",
"scheinen",
"schenken",
"schicken",
"Schiff",
"schlafen",
"schlagen",
"schlecht",
"schlimm",
"Schluss",
"Schnee",
"schnell",
"schon",
"schreiben",
"schreien",
"Schuh",
"Schule",
"schwarz",
"schwer",
"Schwester",
"schwimmen",
"schön",
"Schüler",
"sechs",
"See",
"sehen",
"sehr",
"sein",
"seit",
"Seite",
"selbst",
"setzen",
"sich",
"sicher",
"sie",
"sieben",
"sieht",
"sind",
"singen",
"sitzen",
"so",
"sofort",
"Sohn",
"sollen",
"Sommer",
"Sonne",
"Sonntag",
"sonst",
"Spaß",
"Spiel",
"spielen",
"sprechen",
"springen",
"spät",
"später",
"Stadt",
"stark",
"stehen",
"steigen",
"Stein",
"Stelle",
"stellen",
"Straße",
"Stunde",
"Stück",
"suchen",
"Tag",
"Tante",
"Teller",
"tief",
"Tier",
"Tisch",
"tot",
"tragen",
"traurig",
"treffen",
"trinken",
"tun",
"turnen",
"Tür",
"Uhr",
"um",
"und",
"uns",
"unser",
"unten",
"unter",
"Vater",
"vergessen",
"verkaufen",
"verlieren",
"verstecken",
"verstehen",
"versuchen",
"viel",
"vielleicht",
"vier",
"Vogel",
"voll",
"vom",
"von",
"vor",
"vorbei",
"Wagen",
"wahr",
"Wald",
"war",
"warm",
"warten",
"warum",
"was",
"waschen",
"Wasser",
"weg",
"Weg",
|
MingdaMingda/WE060001-NMJKL | TT_io_weixin_auth.py | Python | unlicense | 1,327 | 0.008289 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import sys
import logging
import io_weixin_auth
def test_get_user_token_by_code():
#code = '0010bc8ec77f6972b66ad5cb143ceb4i'
code = '041709e7b1cff6ece362b81e8e071a1j'
token_info = io_weixin_auth.get_user_token_by_code(code)
return token_info
def test_get_user_info_by_token(openid, token):
#openid = 'occ4ovxUED8OWFpV0q5XM98S48wE'
#token = 'OezXcEiiBSKSxW0eoylIeI6rrU2VFUNTsnm9tD9ZhVfu2jw0MMJ_Osu48gmTq5RUuk2-qh_4W2wZ_EbAkQplGHFihR_p5kmIinEqxK5s-VAwrxi09xB02QuiZLVdMArWhwQ6WLI_wBNOS8ucIZt7uw'
return io_weixin_auth.get_user_info_by_token(openid, token)
if __name__ == "__main__":
logging.basicConfig(level=logging.DEBUG)
logging.info('Begin')
token_info = test_get_user_token_by_code()
if (not token_info is None) and ('openid' in token_info):
user_info = test_get_user_info_by_tok | en(token_info['openid'], token_info['access_token'])
if (not user_info is Non | e) and ('nickname' in user_info):
logging.info('openid: %s' % user_info['openid'])
logging.info('nickname: %s' % user_info['nickname'])
logging.info('headimgurl: %s' % user_info['headimgurl'])
else:
logging.error('bad token')
logging.info('Done')
#/* vim: set ai expandtab ts=4 sw=4 sts=4 tw=100: */
|
coco-project/contract | src/coco/contract/errors.py | Python | bsd-3-clause | 3,806 | 0.001314 | class Error(Exception):
"""
Base error for all coco exceptions.
Only when throwing an instance of this error one can be sure it
will get caught by the application. All other exceptions might
be unhandled leading to crashes.
"""
pass
class BackendError(Error):
"""
Base error for all backend exceptions.
Backend errors are meant to be raised instead of letting the backend's
real exception/err | or pass up the stack. Every error thrown from the backend
should be wrapped.
"""
pass
class Co | nnectionError(BackendError):
"""
Generic backend error meant to be thrown then a connection to the backend cannot be established.
"""
pass
class NotFoundError(BackendError):
"""
Generic (backend record) not found error.
Error meant to be raised when an operation can not be performed
because the resource on which the method should act does not exist.
"""
pass
class ContainerBackendError(BackendError):
"""
Backend error type for container backends.
"""
pass
class ContainerNotFoundError(NotFoundError, ContainerBackendError):
"""
Error meant to be raised when a container does not exist.
A reason for such a failure could be that the container on which a method should act, does not exist.
"""
pass
class IllegalContainerStateError(ContainerBackendError):
"""
Error for problems due to a container's state.
Error meant to be raised when an operation can not be performed
because the container on which the method should act is in an
illegal state (e.g. exec method and the container is stopped).
"""
pass
class ContainerImageNotFoundError(NotFoundError, ContainerBackendError):
"""
Error meant to be raised when an image (container template) does not exist.
"""
pass
class ContainerSnapshotNotFoundError(ContainerBackendError):
"""
Error for non-existing container snapshots.
Meant to be raised when an operation can not be performed
because the snapshot on which the method should act does not exist.
"""
pass
class GroupBackendError(BackendError):
"""
Backend error type for user backends.
"""
pass
class GroupNotFoundError(NotFoundError, GroupBackendError):
"""
Error meant to be raised when a group does not exist.
"""
pass
class StorageBackendError(BackendError):
"""
Backend error type for storage backends.
"""
pass
class DirectoryNotFoundError(NotFoundError, StorageBackendError):
"""
Error to be raised when the directory on which an operation should be performed does not exist.
"""
pass
class UserBackendError(BackendError):
"""
Backend error type for user backends.
"""
pass
class AuthenticationError(UserBackendError):
"""
Error meant to be raised when there is a problem while authenticating.
"""
pass
class ReadOnlyError(UserBackendError):
"""
Error indicating that a user cannot be updated because the backend is read-only.
"""
pass
class UserNotFoundError(NotFoundError, UserBackendError):
"""
Error meant to be raised when a user does not exist.
"""
pass
class ServiceError(Exception):
"""
Base exception class for errors raised by service implementations.
"""
pass
class EncryptionServiceError(ServiceError):
"""
Service error type for encryption services.
"""
pass
class IntegrityServiceError(ServiceError):
"""
Service error type for integrity services.
"""
pass
class IntegrityValidationError(IntegrityServiceError):
"""
Error to be raised when an integrity cannot be verified or the integrity check fails.
"""
pass
|
fake-name/ReadableWebProxy | WebMirror/management/rss_parser_funcs/feed_parse_extractWwwFruitydeerCom.py | Python | bsd-3-clause | 548 | 0.034672 |
def extra | ctWwwFruitydeerCom(item):
'''
Parser for 'www.fruitydeer.com'
'''
vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
if not (chp or vol) or "preview" in item['title'].lower():
return None
tagmap = [
('PRC', 'PRC', 'translated'),
('Loiterous', 'Loiterous', 'oel'),
]
for tagname, name, tl_type in tagmap:
if tagname in item['tags']:
return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type | =tl_type)
return False
|
andris210296/andris-projeto | backend/venv/test/lib/python2.7/site-packages/gaeforms/__init__.py | Python | mit | 20 | 0.05 | __version__ | = '0.14' | |
alex/djangobench | djangobench/benchmarks/query_values/benchmark.py | Python | bsd-3-clause | 254 | 0.015748 | from djangobench.utils import run_benchmark
from query_values.mode | ls import Book
def benchmark():
list(Book.objects.values('title'))
run_benchmark(
benchmark,
| meta = {
'description': 'A simple Model.objects.values() call.',
}
)
|
jmgc/swift | utils/gyb_syntax_support/DeclNodes.py | Python | apache-2.0 | 37,155 | 0.000081 | # flake8: noqa I201
from .Child import Child
from .Node import Node
DECL_NODES = [
# type-assignment -> '=' type
Node('TypeInitializerClause', kind='Syntax',
children=[
Child('Equal', kind='EqualToken'),
Child('Value', kind='Type'),
]),
# typealias-declaration -> attributes? access-level-modifier? 'typealias'
# typealias-name generic-parameter-clause?
# type-assignment
# typealias-name -> identifier
Node('TypealiasDecl', kind='Decl', traits=['IdentifiedDecl'],
children=[
Child('Attributes', kind='AttributeList',
collection_element_name='Attribute', is_optional=True),
Child('Modifiers', kind='ModifierList',
collection_element_name='Modifier', is_optional=True),
Child('TypealiasKeyword', kind='TypealiasToken'),
Child('Identifier', kind='IdentifierToken'),
Child('GenericParameterClause', kind='GenericParameterClause',
is_optional=True),
Child('Initializer', kind='TypeInitializerClause',
is_optional=True),
Child('GenericWhereClause', kind='GenericWhereClause',
is_optional=True),
]),
# associatedtype-declaration -> attributes? access-level-modifier?
# 'associatedtype' associatedtype-name
# inheritance-clause? type-assignment?
# generic-where-clause?
# associatedtype-name -> identifier
Node('AssociatedtypeDecl', kind='Decl', traits=['IdentifiedDecl'],
children=[
Child('Attributes', kind='AttributeList',
collection_element_name='Attribute', is_optional=True),
Child('Modifiers', kind='ModifierList',
collection_element_name='Modifier', is_optional=True),
Child('AssociatedtypeKeyword', kind='AssociatedtypeToken'),
Child('Identifier', kind='IdentifierToken'),
Child('InheritanceClause', kind='TypeInheritanceClause',
is_optional=True),
Child('Initializer', kind='TypeInitializerClause',
is_optional=True),
Child('GenericWhereClause', kind='GenericWhereClause',
is_optional=True),
]),
Node('FunctionParameterList', kind='SyntaxCollection',
element='FunctionParameter'),
Node('ParameterClause', kind='Syntax',
traits=['Parenthesized'],
children=[
Child('LeftParen', kind='LeftParenToken'),
Child('ParameterList', kind='FunctionParameterList',
collection_element_name='Parameter'),
Child('RightParen', kind='RightParenToken'),
]),
# -> Type
Node('ReturnClause', kind='Syntax',
children=[
Child('Arrow', kind='ArrowToken'),
Child('ReturnType', kind='Type'),
]),
# function-signature ->
# '(' parameter-list? ')' async? (throws | rethrows)? '->'? type?
Node('FunctionSignature', kind='Syntax',
children=[
Child('Input', kind='ParameterClause'),
Child('AsyncKeyword', kind='IdentifierToken',
classification='Keyword',
text_choices=['async'], is_optional=True),
Child('ThrowsOrRethrowsKeyword', kind='Token',
is_optional=True,
token_choices=[
'ThrowsToken',
'RethrowsToken',
]),
Child('Output', kind='ReturnClause', is_optional=True),
]),
# if-config-clause ->
# ('#if' | '#elseif' | '#else') expr? (stmt-list | switch-case-list)
Node('IfConfigClause', kind='Syntax',
children=[
Child('PoundKeyword', kind='Token',
classification='BuildConfigId',
token_choices=[
'PoundIfToken',
'PoundElseifToken',
'PoundElseToken',
]),
Child('Condition', kind='Expr', classification='BuildConfigId',
is_optional=True),
Child('Elements', kind='Syntax',
node_choices=[
Child('Statements', kind='CodeBlockItemList'),
Child('SwitchCases', kind='SwitchCaseList'),
Child('Decls', kind='MemberDeclList'),
]),
]),
Node('IfConfigClauseList', kind='SyntaxCollection',
element='IfConfigClause'),
# if-config-decl -> '#if' expr stmt-list else-if-directive-clause-list
# else-clause? '#endif'
Node('IfConfigDecl', kind='Decl',
children=[
Child('Clauses', kind='IfConfigClauseList',
collection_element_name='Clause'),
Child('PoundEndif', kind='PoundEndifToken',
classification='BuildConfigId'),
]),
Node('PoundErrorDecl', kind='Decl',
traits=['Parenthesized'],
children=[
Child('PoundError', kind='PoundErrorToken'),
Child('LeftParen', kind='LeftParenToken'),
Child('Message', kind='StringLiteralExpr'),
              Child('RightParen', kind='RightParenToken')
]),
Node('PoundWarningDecl', kind='Decl',
         traits=['Parenthesized'],
children=[
Child('PoundWarning', kind='PoundWarningToken'),
Child('LeftParen', kind='LeftParenToken'),
Child('Message', kind='StringLiteralExpr'),
Child('RightParen', kind='RightParenToken')
]),
Node('PoundSourceLocation', kind='Decl',
traits=['Parenthesized'],
children=[
Child('PoundSourceLocation', kind='PoundSourceLocationToken'),
Child('LeftParen', kind='LeftParenToken'),
Child('Args', kind='PoundSourceLocationArgs', is_optional=True),
Child('RightParen', kind='RightParenToken')
]),
Node('PoundSourceLocationArgs', kind='Syntax',
children=[
Child('FileArgLabel', kind='IdentifierToken',
text_choices=['file']),
Child('FileArgColon', kind='ColonToken'),
Child('FileName', kind='StringLiteralToken'),
Child('Comma', kind='CommaToken'),
Child('LineArgLabel', kind='IdentifierToken',
text_choices=['line']),
Child('LineArgColon', kind='ColonToken'),
Child('LineNumber', kind='IntegerLiteralToken'),
]),
Node('DeclModifier', kind='Syntax',
children=[
Child('Name', kind='Token', classification='Attribute',
text_choices=[
'class', 'convenience', 'dynamic', 'final', 'infix',
'lazy', 'optional', 'override', 'postfix', 'prefix',
'required', 'static', 'unowned', 'weak', 'private',
'fileprivate', 'internal', 'public', 'open',
'mutating', 'nonmutating', 'indirect', '__consuming',
'actor', 'async'
]),
Child('DetailLeftParen', kind='LeftParenToken', is_optional=True),
Child('Detail', kind='IdentifierToken', is_optional=True),
Child('DetailRightParen', kind='RightParenToken', is_optional=True),
]),
Node('InheritedType', kind='Syntax',
traits=['WithTrailingComma'],
children=[
Child('TypeName', kind='Type'),
Child('TrailingComma', kind='CommaToken', is_optional=True),
]),
Node('InheritedTypeList', kind='SyntaxCollection',
element='InheritedType'),
# type-inheritance-clause -> ':' type
Node('TypeInheritanceClause', kind='Syntax',
children=[
Child('Colon', kind='ColonToken'),
Child('InheritedTypeCollection', kind='InheritedTypeLis |
martinschaef/grovepi | grovepi.py | Python | mit | 13,742 | 0.016737 | # grovepi.py
# v1.2.2
# This file provides the basic functions for using the GrovePi
#
# Karan Nayan
# Initial Date: 13 Feb 2014
# Last Updated: 22 Jan 2015
# http://www.dexterindustries.com/
#
# These files have been made available online through
# a Creative Commons Attribution-ShareAlike 3.0 license.
# (http://creativecommons.org/licenses/by-sa/3.0/)
###############################################################################
import smbus
import time
import math
import RPi.GPIO as GPIO
import struct
rev = GPIO.RPI_REVISION
if rev == 2 or rev == 3:
bus = smbus.SMBus(1)
else:
bus = smbus.SMBus(0)
# I2C Address of Arduino
address = 0x04
# Command Format
# digitalRead() command format header
dRead_cmd = [1]
# digitalWrite() command format header
dWrite_cmd = [2]
# analogRead() command format header
aRead_cmd = [3]
# analogWrite() command format header
aWrite_cmd = [4]
# pinMode() command format header
pMode_cmd = [5]
# Ultrasonic read
uRead_cmd = [7]
# Get firmware version
version_cmd = [8]
# Accelerometer (+/- 1.5g) read
acc_xyz_cmd = [20]
# RTC get time
rtc_getTime_cmd = [30]
# DHT Pro sensor temperature
dht_temp_cmd = [40]
# Grove LED Bar commands
# Initialise
ledBarInit_cmd = [50]
# Set orientation
ledBarOrient_cmd = [51]
# Set level
ledBarLevel_cmd = [52]
# Set single LED
ledBarSetOne_cmd = [53]
# Toggle single LED
ledBarToggleOne_cmd = [54]
# Set all LEDs
ledBarSet_cmd = [55]
# Get current state
ledBarGet_cmd = [56]
# Grove 4 Digit Display commands
# Initialise
fourDigitInit_cmd = [70]
# Set brightness, not visible until next cmd
fourDigitBrightness_cmd = [71]
# Set numeric value without leading zeros
fourDigitValue_cmd = [72]
# Set numeric value with leading zeros
fourDigitValueZeros_cmd = [73]
# Set individual digit
fourDigitIndividualDigit_cmd = [74]
# Set individual leds of a segment
fourDigitIndividualLeds_cmd = [75]
# Set left and right values with colon
fourDigitScore_cmd = [76]
# Analog read for n seconds
fourDigitAnalogRead_cmd = [77]
# Entire display on
fourDigitAllOn_cmd = [78]
# Entire display off
fourDigitAllOff_cmd = [79]
# Grove Chainable RGB LED commands
# Store color for later use
storeColor_cmd = [90]
# Initialise
chainableRgbLedInit_cmd = [91]
# Initialise and test with a simple color
chainableRgbLedTest_cmd = [92]
# Set one or more leds to the stored color by pattern
chainableRgbLedSetPattern_cmd = [93]
# set one or more leds to the stored color by modulo
chainableRgbLedSetModulo_cmd = [94]
# sets leds similar to a bar graph, reversible
chainableRgbLedSetLevel_cmd = [95]
# This allows us to be more specific about which commands contain unused bytes
unused = 0
# Function declarations of the various functions used for encoding and sending
# data from RPi to Arduino
# Write I2C block
def write_i2c_block(address, block):
    # Send a command block to the GrovePi firmware over I2C.
    # Returns the smbus result, or -1 if the transfer failed.
    try:
        return bus.write_i2c_block_data(address, 1, block)
    except IOError:
        print "IOError"
        return -1
# Read I2C byte
def read_i2c_byte(address):
    # Read a single byte back from the firmware; -1 on I2C failure.
    try:
        return bus.read_byte(address)
    except IOError:
        print "IOError"
        return -1
# Read I2C block
def read_i2c_block(address):
    # Read a block of bytes back from the firmware; -1 on I2C failure.
    try:
        return bus.read_i2c_block_data(address, 1)
    except IOError:
        print "IOError"
        return -1
# Arduino Digital Read
def digitalRead(pin):
    """Read a digital input pin; returns 0/1 (or -1 on I2C error)."""
    write_i2c_block(address, dRead_cmd + [pin, unused, unused])
    time.sleep(.1)
    return read_i2c_byte(address)
# Arduino Digital Write
def digitalWrite(pin, value):
    # Drive a digital output pin to 0 or 1; always returns 1.
    write_i2c_block(address, dWrite_cmd + [pin, value, unused])
    return 1
# Setting Up Pin mode on Arduino
def pinMode(pin, mode):
    # Configure a pin as "OUTPUT" or "INPUT".
    # NOTE(review): any other mode string is silently ignored but still
    # returns 1 -- confirm whether that is intentional.
    if mode == "OUTPUT":
        write_i2c_block(address, pMode_cmd + [pin, 1, unused])
    elif mode == "INPUT":
        write_i2c_block(address, pMode_cmd + [pin, 0, unused])
    return 1
# Read analog value from Pin
def analogRead(pin):
    """Read a 10-bit analog value (0-1023) from the given pin."""
    # Talks to the bus directly (unlike the other helpers), so an IOError
    # propagates to the caller rather than being swallowed.
    bus.write_i2c_block_data(address, 1, aRead_cmd + [pin, unused, unused])
    bus.read_byte(address)
    raw = bus.read_i2c_block_data(address, 1)
    return raw[1] * 256 + raw[2]
# Write PWM
def analogWrite(pin, value):
    # Write a PWM duty cycle (0-255) to the pin; always returns 1.
    write_i2c_block(address, aWrite_cmd + [pin, value, unused])
    return 1
# Read temp in Celsius from Grove Temperature Sensor
def temp(pin, model='1.0'):
    """Read degrees Celsius from a Grove temperature sensor.

    Each hardware revision uses a different thermistor, hence a different
    B-value constant in the beta-model approximation below.
    """
    if model == '1.2':
        b_value = 4250  # thermistor ??? (assuming NCP18WF104F03RC until SeeedStudio clarifies)
    elif model == '1.1':
        b_value = 4250  # thermistor NCP18WF104F03RC
    else:
        b_value = 3975  # sensor v1.0 uses thermistor TTC3A103*39H
    reading = analogRead(pin)
    resistance = float(1023 - reading) * 10000 / reading
    return float(1 / (math.log(resistance / 10000) / b_value + 1 / 298.15) - 273.15)
# Read value from Grove Ultrasonic
def ultrasonicRead(pin):
    """Read a distance value from the Grove ultrasonic ranger."""
    write_i2c_block(address, uRead_cmd + [pin, unused, unused])
    time.sleep(.2)  # delay before reading back the result
    read_i2c_byte(address)
    raw = read_i2c_block(address)
    return raw[1] * 256 + raw[2]
# Read the firmware version
def version():
    """Return the GrovePi firmware version as a 'x.y.z' string."""
    write_i2c_block(address, version_cmd + [unused, unused, unused])
    time.sleep(.1)
    read_i2c_byte(address)
    raw = read_i2c_block(address)
    return "%s.%s.%s" % (raw[1], raw[2], raw[3])
# Read Grove Accelerometer (+/- 1.5g) XYZ value
def acc_xyz():
    """Read the Grove accelerometer (+/- 1.5g); returns an (x, y, z) tuple.

    Raw values above 32 encode negative readings as -(value - 224); the
    original code repeated that decode inline for all three axes, so it is
    factored into a single loop here.
    """
    write_i2c_block(address, acc_xyz_cmd + [unused, unused, unused])
    time.sleep(.1)
    read_i2c_byte(address)
    number = read_i2c_block(address)
    axes = []
    for raw in number[1:4]:
        if raw > 32:
            raw = -(raw - 224)
        axes.append(raw)
    return tuple(axes)
# Read from Grove RTC
def rtc_getTime():
    """Read the current time from the Grove RTC; returns the raw data block."""
    write_i2c_block(address, rtc_getTime_cmd + [unused, unused, unused])
    time.sleep(.1)
    read_i2c_byte(address)
    return read_i2c_block(address)
# Read and return temperature and humidity from Grove DHT Pro
def _decode_wire_float(data):
    # Reassemble a big-endian IEEE-754 float from 4 bytes that arrive in
    # reverse order on the wire, rounded to 2 decimal places (Python 2:
    # uses str.decode('hex')).
    hex_string = ''
    for element in reversed(data):
        hex_val = hex(element)
        try:
            h_val = hex_val[2] + hex_val[3]
        except IndexError:
            # Single hex digit (value < 16): zero-pad to two characters.
            h_val = '0' + hex_val[2]
        hex_string += h_val
    return round(struct.unpack('!f', hex_string.decode('hex'))[0], 2)


def dht(pin, module_type):
    """Read [temperature_C, humidity] from a Grove DHT sensor, or -1 on error.

    The original inlined the byte->float conversion twice, guarded by a
    first-iteration flag ``f`` that was dead in the second loop (it was
    already 1); both conversions now share _decode_wire_float().
    """
    write_i2c_block(address, dht_temp_cmd + [pin, module_type, unused])
    # Delay necessary for proper reading fron DHT sensor
    time.sleep(.6)
    try:
        read_i2c_byte(address)
        number = read_i2c_block(address)
        if number == -1:
            return -1
    except (TypeError, IndexError):
        return -1
    # Temperature and humidity each arrive as 4 reversed IEEE-754 bytes.
    t = _decode_wire_float(number[1:5])
    hum = _decode_wire_float(number[5:9])
    return [t, hum]
# Grove LED Bar - initialise
# orientation: (0 = red to green, 1 = green to red)
def ledBar_init(pin, orientation):
    # Initialise the LED bar; orientation: 0 = red-to-green, 1 = green-to-red.
    write_i2c_block(address, ledBarInit_cmd + [pin, orientation, unused])
    return 1
# Grove LED Bar - set orientation
# orientation: (0 = red to green, 1 = green to red)
def ledBar_orientation(pin, orientation):
    # Change the bar's orientation: 0 = red-to-green, 1 = green-to-red.
    write_i2c_block(address, ledBarOrient_cmd + [pin, orientation, unused])
    return 1
# Grove LED Bar - set level
# level: (0-10)
def ledBar_setLevel(pin, level):
    # Light the bar up to the given level (0-10).
    write_i2c_block(address, ledBarLevel_cmd + [pin, level, unused])
    return 1
# Grove LED Bar - set single led
# led: which led (1-10)
# state: off or on (0-1)
def ledBar_setLed(pin, led, state):
    # Set a single LED (1-10) on (1) or off (0).
    write_i2c_block(address, ledBarSetOne_cmd + [pin, led, state])
    return 1
# Grove LED Bar - toggle single led
# led: which led (1-10)
def ledBar_toggleLed(pin, led):
write_i2c_b |
lakshmi-kannan/st2 | st2api/tests/unit/controllers/v1/test_packs.py | Python | apache-2.0 | 10,101 | 0.002079 | # Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import mock
from st2common.models.api.execution import ActionExecutionAPI
from st2common.models.db.pack import PackDB
from st2common.persistence.pack import Pack
from st2common.services import packs as pack_service
from st2api.controllers.v1.actionexecutions import ActionExecutionsControllerMixin
from tests import FunctionalTest
PACK_INDEX = {  # static fixture standing in for the remote pack index
    "test": {
        "version": "0.4.0",
        "name": "test",
        "repo_url": "https://github.com/StackStorm-Exchange/stackstorm-test",
        "author": "st2-dev",
        "keywords": ["some", "search", "another", "terms"],
        "email": "info@stackstorm.com",
        "description": "st2 pack to test package management pipeline"
    },
    "test2": {
        "version": "0.5.0",
        "name": "test2",
        "repo_url": "https://github.com/StackStorm-Exchange/stackstorm-test2",
        "author": "stanley",
        "keywords": ["some", "special", "terms"],
        "email": "info@stackstorm.com",
        "description": "another st2 pack to test package management pipeline"
    }
}
class PacksControllerTestCase(FunctionalTest):
    @classmethod
    def setUpClass(cls):
        # Seed the DB with two packs shared by every test in this class.
        super(PacksControllerTestCase, cls).setUpClass()
        cls.pack_db_1 = PackDB(name='pack1', description='foo', version='0.1.0', author='foo',
                               email='test@example.com', ref='pack1')
        cls.pack_db_2 = PackDB(name='pack2', description='foo', version='0.1.0', author='foo',
                               email='test@example.com', ref='pack2')
        Pack.add_or_update(cls.pack_db_1)
        Pack.add_or_update(cls.pack_db_2)
def test_get_all(self):
resp = self.app.get('/v1/packs')
self.assertEqual(resp.status_int, 200)
self.assertEqual(len(resp.json), 2, '/v1/actionalias did not return all aliases.')
    def test_get_one(self):
        """A pack must be retrievable both by database id and by ref."""
        # Get by id
        resp = self.app.get('/v1/packs/%s' % (self.pack_db_1.id))
        self.assertEqual(resp.status_int, 200)
        self.assertEqual(resp.json['name'], self.pack_db_1.name)
        # Get by ref (the lookup key is the ref, not the display name)
        resp = self.app.get('/v1/packs/%s' % (self.pack_db_1.ref))
        self.assertEqual(resp.status_int, 200)
        self.assertEqual(resp.json['ref'], self.pack_db_1.ref)
        self.assertEqual(resp.json['name'], self.pack_db_1.name)
    def test_get_one_doesnt_exist(self):
        # An unknown ref-or-id should yield a 404, not an empty document.
        resp = self.app.get('/v1/packs/doesntexistfoo', expect_errors=True)
        self.assertEqual(resp.status_int, 404)
    @mock.patch.object(ActionExecutionsControllerMixin, '_handle_schedule_execution')
    def test_install(self, _handle_schedule_execution):
        # Scheduling is mocked out; verify the endpoint accepts the request
        # (202) and echoes back the scheduled execution id.
        _handle_schedule_execution.return_value = ActionExecutionAPI(id='123')
        payload = {'packs': ['some']}
        resp = self.app.post_json('/v1/packs/install', payload)
        self.assertEqual(resp.status_int, 202)
        self.assertEqual(resp.json, {'execution_id': '123'})
    @mock.patch.object(ActionExecutionsControllerMixin, '_handle_schedule_execution')
    def test_install_with_force_parameter(self, _handle_schedule_execution):
        # Same as test_install but with the optional "force" flag set.
        _handle_schedule_execution.return_value = ActionExecutionAPI(id='123')
        payload = {'packs': ['some'], 'force': True}
        resp = self.app.post_json('/v1/packs/install', payload)
        self.assertEqual(resp.status_int, 202)
        self.assertEqual(resp.json, {'execution_id': '123'})
    @mock.patch.object(ActionExecutionsControllerMixin, '_handle_schedule_execution')
    def test_uninstall(self, _handle_schedule_execution):
        # Uninstall mirrors install: async 202 plus the execution id.
        _handle_schedule_execution.return_value = ActionExecutionAPI(id='123')
        payload = {'packs': ['some']}
        resp = self.app.post_json('/v1/packs/uninstall', payload)
        self.assertEqual(resp.status_int, 202)
        self.assertEqual(resp.json, {'execution_id': '123'})
    @mock.patch.object(pack_service, 'fetch_pack_index',
                       mock.MagicMock(return_value=(PACK_INDEX, {})))
    def test_search(self):
        """Index search must match name, keywords and author, case-insensitively."""
        resp = self.app.post_json('/v1/packs/index/search', {'query': 'test'})
        self.assertEqual(resp.status_int, 200)
        self.assertEqual(resp.json, [PACK_INDEX['test'], PACK_INDEX['test2']])
        resp = self.app.post_json('/v1/packs/index/search', {'query': 'stanley'})
        self.assertEqual(resp.status_int, 200)
        self.assertEqual(resp.json, [PACK_INDEX['test2']])
        resp = self.app.post_json('/v1/packs/index/search', {'query': 'special'})
        self.assertEqual(resp.status_int, 200)
        self.assertEqual(resp.json, [PACK_INDEX['test2']])
        # Search should be case insensitive by default
        resp = self.app.post_json('/v1/packs/index/search', {'query': 'TEST'})
        self.assertEqual(resp.status_int, 200)
        self.assertEqual(resp.json, [PACK_INDEX['test'], PACK_INDEX['test2']])
        resp = self.app.post_json('/v1/packs/index/search', {'query': 'SPECIAL'})
        self.assertEqual(resp.status_int, 200)
        self.assertEqual(resp.json, [PACK_INDEX['test2']])
        resp = self.app.post_json('/v1/packs/index/search', {'query': 'sPeCiAL'})
        self.assertEqual(resp.status_int, 200)
        self.assertEqual(resp.json, [PACK_INDEX['test2']])
        # Author matching, including partial ("-dev") substrings.
        resp = self.app.post_json('/v1/packs/index/search', {'query': 'st2-dev'})
        self.assertEqual(resp.status_int, 200)
        self.assertEqual(resp.json, [PACK_INDEX['test']])
        resp = self.app.post_json('/v1/packs/index/search', {'query': 'ST2-dev'})
        self.assertEqual(resp.status_int, 200)
        self.assertEqual(resp.json, [PACK_INDEX['test']])
        resp = self.app.post_json('/v1/packs/index/search', {'query': '-dev'})
        self.assertEqual(resp.status_int, 200)
        self.assertEqual(resp.json, [PACK_INDEX['test']])
    @mock.patch.object(pack_service, 'fetch_pack_index',
                       mock.MagicMock(return_value=(PACK_INDEX, {})))
    def test_show(self):
        # The "pack" key (as opposed to "query") returns a single exact entry.
        resp = self.app.post_json('/v1/packs/index/search', {'pack': 'test'})
        self.assertEqual(resp.status_int, 200)
        self.assertEqual(resp.json, PACK_INDEX['test'])
        resp = self.app.post_json('/v1/packs/index/search', {'pack': 'test2'})
        self.assertEqual(resp.status_int, 200)
        self.assertEqual(resp.json, PACK_INDEX['test2'])
def test_packs_register_endpoint(self):
# Register resources from all packs - make sure the count values are correctly added
# together
resp = self.app.post_json('/v1/packs/register')
self.assertEqual(resp.status_int, 200)
self.assertTrue('runners' in resp.json)
self.assertTrue('actions' in resp.json)
self.assertTrue('triggers' in resp.json)
self.assertTrue('sensors' in resp.json)
self.assertTrue('rules' in resp.json)
self.assertTrue('rule_types' in resp.json)
self.assertTrue('aliases' in resp.json)
        self.assertTrue('policy_types' in resp.json)
self.assertTrue('policies' in resp.json)
self.assertTrue('configs' in resp.json)
self.assertTrue(resp.json['actions'] >= 3)
self.assertTrue(resp.json['configs'] >= 3)
# Register resources from a specific pack
resp = self.app.post_json('/v1/packs/register', {'packs': ['dummy_pack_1']})
self.assertEqual(resp.status_int, 200)
self.assertTrue(resp.json['actions'] >= 1)
self.assertTrue(re |
lordappsec/ooni-probe | scripts/fabfile.py | Python | bsd-2-clause | 3,381 | 0.004734 | #-*- coding: utf-8 -*-
#
# :authors: Arturo Filastò, Isis Lovecruft
# :license: see included LICENSE file
import os
import sys
import yaml
import xmlrpclib
from StringIO import StringIO
from fabric.operations import get
from fabric.api import run, cd, sudo, env
api_auth = {}
# Set these values
api_auth['Username'] = "you@example.com"
api_auth['AuthString'] = "your_password"
slice_name = "your_slice_name"
### Do not change this
api_auth['AuthMethod'] = "password"
env.user = 'root'
def set_hosts(host_file):
    """Populate fabric's env.hosts from a newline-separated host list file.

    Fix: iterating a file yields lines *including* the trailing newline, so
    the original appended hosts like "example.com\n" (and blank lines became
    empty host entries). Strip each line and skip empties.
    """
    with open(host_file) as f:
        for line in f:
            host = line.strip()
            if host:
                env.hosts.append(host)
def search_node(nfilter="*.cert.org.cn"):
api_server = xmlrpclib.ServerProxy('https://www.planet-lab.org/PLCAPI/')
if api_server.AuthCheck(api_auth):
print "We are authenticated"
else:
print "We are not authenticated"
node_filter = {'hostname': nfilter}
return_fields = ['hostname', 'site_id']
all_nodes = api_server.GetNodes(api_auth, node_filter, return_fields)
print all_nodes
def add_node(nodeid):
node_id = int( | nodeid)
api_server = xmlrpclib.ServerProxy('https://www.planet-lab.org/PLCAPI/')
node_filter = {'node_id': node_id}
return_fields = ['hostname', 'site_id']
nodes = api_server.GetNodes(api_auth, node_filter, return_fields)
print 'Adding nodes %s' % nodes
api_s | erver.AddNode(api_auth, node_id, slice_name)
def deployooniprobe(distro="debian"):
"""
This is used to deploy ooni-probe on debian based systems.
"""
run("git clone https://git.torproject.org/ooni-probe.git ooni-probe")
cd("ooni-probe")
if distro == "debian":
sudo("apt-get install git-core python python-pip python-dev")
else:
print "The selected distro is not supported"
print "The following commands may fail"
run("virtualenv env")
run("source env/bin/activate")
run("pip install https://hg.secdev.org/scapy/archive/tip.zip")
run("pip install -r requirements.txt")
def generate_bouncer_file(install_directory='/data/oonib/', bouncer_file="bouncer.yaml"):
    """Build/refresh a local bouncer.yaml from a remote oonib deployment.

    Fetches the remote oonib.conf and the collector hidden-service hostname,
    derives the test-helper addresses for the current host, and merges the
    collector entry into the existing local bouncer file.
    """
    output = StringIO()
    # Pull the remote config into an in-memory buffer and parse it.
    get(os.path.join(install_directory, 'oonib.conf'), output)
    output.seek(0)
    oonib_configuration = yaml.safe_load(output)
    # NOTE(review): truncate(0) does not rewind the position after
    # safe_load consumed the buffer; a seek(0) first would be safer.
    output.truncate(0)
    get(os.path.join(oonib_configuration['main']['tor_datadir'], 'collector', 'hostname'),
        output)
    output.seek(0)
    collector_hidden_service = output.readlines()[0].strip()
    address = env.host
    test_helpers = {
        'dns': address + ':' + str(oonib_configuration['helpers']['dns']['tcp_port']),
        'ssl': 'https://' + address,
        'traceroute': address,
    }
    # Only one of the two HTTP-ish helpers can own port 80.
    if oonib_configuration['helpers']['tcp-echo']['port'] == 80:
        test_helpers['tcp-echo'] = address
    else:
        test_helpers['http-return-json-headers'] = 'http://' + address
    # NOTE(review): bouncer_data is built but never used below -- confirm
    # whether it can be removed.
    bouncer_data = {
        'collector':
            {
                'httpo://'+collector_hidden_service: {'test-helper': test_helpers}
            }
    }
    # Merge this collector's entry into the existing local bouncer file.
    with open(bouncer_file) as f:
        old_bouncer_data = yaml.safe_load(f)
    with open(bouncer_file, 'w+') as f:
        old_bouncer_data['collector']['httpo://'+collector_hidden_service] = {}
        old_bouncer_data['collector']['httpo://'+collector_hidden_service]['test-helper'] = test_helpers
        yaml.dump(old_bouncer_data, f)
|
pyshop/pyjobs | src/adverts/migrations/0012_auto_20151214_1910.py | Python | mit | 1,060 | 0.001887 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.d | b import models, migrations
from django.conf import settings
class Migration(migrations.Migration):
dependencies = [
('adverts', '0011_auto_20151210_1116'),
]
operations = [
migrations.CreateModel(
name='UserActivationKey',
fields=[
('id', models.AutoField(verbose_name='ID', auto_created=True, serialize=False, primary_key=True)),
('activation_key', models.CharField(blank=True, max_length=40)), |
],
),
migrations.RemoveField(
model_name='userprofile',
name='user',
),
migrations.RemoveField(
model_name='user',
name='activation_key',
),
migrations.DeleteModel(
name='UserProfile',
),
migrations.AddField(
model_name='useractivationkey',
name='user',
field=models.ForeignKey(to=settings.AUTH_USER_MODEL),
),
]
|
pebble/flotilla | src/flotilla/agent/db.py | Python | mit | 4,650 | 0 | import logging
import json
import time
from collections import defaultdict
from flotilla.model import FlotillaServiceRevision, FlotillaUnit, \
GLOBAL_ASSIGNMENT, GLOBAL_ASSIGNMENT_SHARDS
from Crypto.Cipher import AES
logger = logging.getLogger('flotilla')
class FlotillaAgentDynamo(object):
    """Database interaction for worker/agent component.

    Required table permissions:
    status
        -PutItem
    assignments:
        - BatchGetItem
    revisions:
        - BatchGetItem
    units:
        - BatchGetItem
    """

    def __init__(self, instance_id, service_name, status_table,
                 assignments_table, revisions_table, units_table, kms):
        self._id = instance_id
        # Global assignments are spread over a fixed number of shard rows;
        # the shard this instance reads is a stable function of its id.
        global_shard = hash(instance_id) % GLOBAL_ASSIGNMENT_SHARDS
        self._global_id = '%s_%d' % (GLOBAL_ASSIGNMENT, global_shard)
        self._service = service_name
        self._status = status_table
        self._assignments = assignments_table
        self._revisions = revisions_table
        self._units = units_table
        self._kms = kms

    def store_status(self, unit_status):
        """Store unit status.

        :param unit_status Unit statuses.
        """
        logger.debug('Storing status as %s...', self._id)
        # Fixed garbled tokens in the source ("di | ct", "Store | d").
        data = dict(unit_status)
        data['service'] = self._service
        data['instance_id'] = self._id
        data['status_time'] = time.time()
        self._status.put_item(data=data, overwrite=True)
        logger.info('Stored status of %s units as %s.', len(unit_status),
                    self._id)

    def get_assignments(self):
        """Return this instance's assigned revision hashes (sorted)."""
        assignments = self._assignments.batch_get([
            {'instance_id': self._id}, {'instance_id': self._global_id}])
        assigned_revisions = [assignment['assignment'] for assignment in
                              assignments]
        return sorted(assigned_revisions)

    def get_units(self, assigned_revisions):
        """
        Get currently assigned FlotillaUnits.
        :param assigned_revisions: Assigned revisions
        :return: Revisions.
        """
        # Fetch every revision and index which revisions each unit belongs to:
        revisions = {}
        unit_revisions = defaultdict(list)
        revision_keys = [{'rev_hash': assigned_revision}
                         for assigned_revision in set(assigned_revisions)]
        for revision_item in self._revisions.batch_get(revision_keys):
            rev_hash = revision_item['rev_hash']
            revision = FlotillaServiceRevision(label=revision_item['label'])
            revisions[rev_hash] = revision
            for unit in revision_item['units']:
                unit_revisions[unit].append(rev_hash)

        # Fetch every unit:
        units = []
        unit_keys = [{'unit_hash': unit_hash}
                     for unit_hash in sorted(unit_revisions.keys())]
        logger.debug('Fetching %d units for %d/%d revisions.', len(unit_keys),
                     len(revisions), len(assigned_revisions))
        for unit_item in self._units.batch_get(unit_keys):
            env_key = unit_item.get('environment_key')
            if env_key:
                # Environment uses KMS envelope encryption: decrypt the data
                # key, then AES-CBC-decrypt the JSON payload.
                decrypted_key = self._kms.decrypt(env_key.decode('base64'))
                iv = unit_item['environment_iv'].decode('base64')
                aes = AES.new(decrypted_key['Plaintext'], AES.MODE_CBC, iv)
                ciphertext = unit_item['environment_data'].decode('base64')
                plaintext = aes.decrypt(ciphertext)
                unit_environment = json.loads(plaintext)
            else:
                unit_environment = unit_item['environment']
            unit_file = unit_item['unit_file']
            unit = FlotillaUnit(unit_item['name'], unit_file, unit_environment)
            unit_hash = unit.unit_hash
            if unit_hash != unit_item['unit_hash']:
                logger.warn('Unit hash %s expected %s', unit_hash,
                            unit_item['unit_hash'])
                unit_hash = unit_item['unit_hash']
            for revision in unit_revisions[unit_hash]:
                # BUG FIX: tag each unit copy with the revision it belongs to
                # ("revision", the loop variable) -- the original passed the
                # stale "rev_hash" left over from the fetch loop above, which
                # was always the last revision fetched.
                rev_unit = FlotillaUnit(unit_item['name'], unit_file,
                                        unit_environment, revision)
                units.append(rev_unit)
                revisions[revision].units.append(rev_unit)

        # Verify each revision matches expected hash:
        for expected_hash, revision in revisions.items():
            revision_hash = revision.revision_hash
            if revision_hash != expected_hash:
                # FIXME: enforce?
                logger.warn('Revision hash %s expected %s', revision_hash,
                            expected_hash)
        return units
|
probcomp/bayeslite | src/backends/nig_normal.py | Python | apache-2.0 | 19,474 | 0.001643 | # -*- coding: utf-8 -*-
# Copyright (c) 2010-2016, MIT Probabilistic Computing Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""A model that posts that all columns are independently Gaussian with
unknown parameters.
The parameters are taken from the normal and inverse-gamma conjuate
prior.
This module implements the :class:`bayeslite.BayesDB_Backend`
interface for the NIG-Normal model.
"""
import math
import random
import bayeslite.backend
import bayeslite.core as core
from bayeslite.backend import bayesdb_backend_version
from bayeslite.exception import BQLError
from bayeslite.math_util import logmeanexp
from bayeslite.sqlite3_util import sqlite3_quote_name
from bayeslite.util import cursor_value
nig_normal_schema_1 = '''
INSERT INTO bayesdb_backend (name, version) VALUES ('nig_normal', 1);
CREATE TABLE bayesdb_nig_normal_column (
population_id INTEGER NOT NULL REFERENCES bayesdb_population(id),
generator_id INTEGER NOT NULL REFERENCES bayesdb_generator(id),
colno INTEGER NOT NULL,
count INTEGER NOT NULL,
sum REAL NOT NULL,
sumsq REAL NOT NULL,
PRIMARY KEY(population_id, generator_id, colno),
FOREIGN KEY(population_id, colno)
REFERENCES bayesdb_variable(population_id, colno)
);
CREATE TABLE bayesdb_nig_normal_model (
population_id INTEGER NOT NULL REFERENCES bayesdb_population(id),
generator_id INTEGER NOT NULL REFERENCES bayesdb_generator(id),
colno INTEGER NOT NULL,
modelno INTEGER NOT NULL,
mu REAL NOT NULL,
sigma REAL NOT NULL,
PRIMARY KEY(population_id, generator_id, colno, modelno),
FOREIGN KEY(generator_id, modelno)
REFERENCES bayesdb_generator_model(generator_id, modelno),
FOREIGN KEY(population_id, generator_id, colno)
REFERENCES bayesdb_nig_normal_column(population_id, generator_id, colno)
);
'''
nig_normal_schema_2 = '''
UPDATE bayesdb_backend SET version = 2 WHERE name = 'nig_normal';
CREATE TABLE bayesdb_nig_normal_deviation (
population_id INTEGER NOT NULL REFERENCES bayesdb_population(id),
generator_id INTEGER NOT NULL REFERENCES bayesdb_generator(id),
deviation_colno INTEGER NOT NULL,
observed_colno INTEGER NOT NULL,
PRIMARY KEY(population_id, generator_id, deviation_colno),
FOREIGN KEY(population_id, observed_colno)
REFERENCES bayesdb_variable(population_id, colno)
);
'''
class NIGNormalBackend(bayeslite.backend.BayesDB_Backend):
"""Normal-Inverse-Gamma-Normal backend for BayesDB.
The backend is named ``nig_normal`` in BQL::
CREATE GENERATOR t_nig FOR t USING nig_normal(..)
Internally, the NIGNormal backend add SQL tables to the
database with names that begin with ``bayesdb_nig_normal_``.
"""
    def __init__(self, hypers=(0, 1, 1, 1), seed=0):
        # Normal-Inverse-Gamma hyperparameters -- presumably (m, V, a, b);
        # TODO confirm ordering against the model code -- plus a dedicated
        # PRNG so results are reproducible per seed.
        self.hypers = hypers
        self.prng = random.Random(seed)
def name(self): return 'nig_normal'
    def register(self, bdb):
        """Install or migrate this backend's SQL schema into *bdb*."""
        with bdb.savepoint():
            version = bayesdb_backend_version(bdb, self.name())
            # Apply migrations in sequence: (not installed) -> 1 -> 2.
            if version is None:
                bdb.sql_execute(nig_normal_schema_1)
                version = 1
            if version == 1:
                bdb.sql_execute(nig_normal_schema_2)
                version = 2
            if version != 2:
                raise BQLError(bdb, 'NIG-Normal already installed'
                    ' with unknown schema version: %d' % (version,))
    def create_generator(self, bdb, generator_id, schema, **kwargs):
        """Record sufficient statistics for every modeled variable and
        create any latent 'deviation' variables requested in the schema.

        Raises BQLError if a modeled variable is not numerical or if a
        schema clause is malformed.
        """
        # XXX Do something with the schema.
        insert_column_sql = '''
            INSERT INTO bayesdb_nig_normal_column
                (population_id, generator_id, colno, count, sum, sumsq)
                VALUES (:population_id, :generator_id, :colno,
                    :count, :sum, :sumsq)
        '''
        population_id = core.bayesdb_generator_population(bdb, generator_id)
        table = core.bayesdb_population_table(bdb, population_id)
        # One row of sufficient statistics (count, sum, sum of squares)
        # per modeled variable; only numerical columns are supported.
        for colno in core.bayesdb_variable_numbers(bdb, population_id, None):
            column_name = core.bayesdb_variable_name(
                bdb, population_id, generator_id, colno)
            stattype = core.bayesdb_variable_stattype(
                bdb, population_id, generator_id, colno)
            if not stattype == 'numerical':
                raise BQLError(bdb, 'NIG-Normal only supports'
                    ' numerical columns, but %s is %s'
                    % (repr(column_name), repr(stattype)))
            (count, xsum, sumsq) = data_suff_stats(bdb, table, column_name)
            bdb.sql_execute(insert_column_sql, {
                'population_id': population_id,
                'generator_id': generator_id,
                'colno': colno,
                'count': count,
                'sum': xsum,
                'sumsq': sumsq,
            })
        # XXX Make the schema a little more flexible.
        if schema == [[]]:
            # Empty schema: no latent deviation variables requested.
            return
        for clause in schema:
            # Each clause must look like: [<name>, 'deviation', [<observed>]].
            if not (len(clause) == 3 and \
                    isinstance(clause[0], str) and \
                    clause[1] == 'deviation' and \
                    isinstance(clause[2], list) and \
                    len(clause[2]) == 1 and \
                    isinstance(clause[2][0], str)):
                raise BQLError(bdb, 'Invalid nig_normal clause: %r' %
                    (clause,))
            dev_var = clause[0]
            obs_var = clause[2][0]
            if not core.bayesdb_has_variable(bdb, population_id, None,
                    obs_var):
                raise BQLError(bdb, 'No such variable: %r' % (obs_var,))
            obs_colno = core.bayesdb_variable_number(bdb, population_id, None,
                obs_var)
            # Register the latent variable, then link it to its observed column.
            dev_colno = core.bayesdb_add_latent(bdb, population_id,
                generator_id, dev_var, 'numerical')
            bdb.sql_execute('''
                INSERT INTO bayesdb_nig_normal_deviation
                    (population_id, generator_id, deviation_colno,
                        observed_colno)
                    VALUES (?, ?, ?, ?)
            ''', (population_id, generator_id, dev_colno, obs_colno))
def drop_generator(self, bdb, generator_id):
with bdb.savepoint():
self.drop_models(bdb, generator_id)
delete_columns_sql = '''
DELETE FROM bayesdb_nig_normal_column
WHERE generator_id = ?
'''
bdb.sql_execute(delete_columns_sql, (generator_id,))
delete_deviations_sql = '''
DELETE FROM bayesdb_nig_normal_deviation
WHERE generator_id = ?
'''
bdb.sql_execute(delete_deviations_sql, (generator_id,))
def initialize_models(self, bdb, generator_id, modelnos):
population_id = core.bayesdb_generator_population(bdb, generator_id)
insert_sample_sql = '''
INSERT INTO bayesdb_nig_normal_model
(population_id, generator_id, colno, modelno, mu, sigma)
VALUES (:population_id, :generator_id, :colno, :modelno,
:mu, :sigma)
'''
self._set_models(bdb, population_id, generator_id, modelnos,
insert_sample_sql)
def drop_models(self, bdb, generator_id, modelnos=None):
with bdb.savepoint():
if modelnos is None:
delete_models_sql = '''
DELETE FROM bayesdb_nig_normal_model
WHERE generator_id = ?
'''
bdb.sql_execute(delete_models_sql, (generator_id,))
|
siddhism/GCompris-qt | tools/l10n-fetch-po-files.py | Python | gpl-3.0 | 1,862 | 0.003222 | import os
import re
import subprocess
# Copied from Trojita
"""Fetch the .po files from KDE's SVN for GCompris
Run me from GCompris's top-level directory.
"""
SVN_PATH = "svn://anonsvn.kde.org/home/kde/trunk/l10n-kf5/"
SOURCE_PO_PATH = "/messages/kdereview/gcompris_qt.po"
OUT | PUT_PO_PATH = "./po/"
OUTPUT_PO_PATTERN = "gcompris_%s.po"
fixer = re.compile(r'^#~\| ', re.MULTILINE)
re_empty_msgid = re.compile('^msgid ""$', re.MULTILINE)
re_empty_line = re.compile('^$', re.MULTILINE)
re_has_qt_contexts = re.compile('X-Qt-Contexts: true\\n') |
if not os.path.exists(OUTPUT_PO_PATH):
os.mkdir(OUTPUT_PO_PATH)
all_languages = subprocess.check_output(['svn', 'cat', SVN_PATH + 'subdirs'],
stderr=subprocess.STDOUT)
all_languages = [x.strip() for x in all_languages.split("\n") if len(x)]
all_languages.remove("x-test")
for lang in all_languages:
try:
raw_data = subprocess.check_output(['svn', 'cat', SVN_PATH + lang + SOURCE_PO_PATH],
stderr=subprocess.PIPE)
(transformed, subs) = fixer.subn('# ~| ', raw_data)
pos1 = re_empty_msgid.search(transformed).start()
pos2 = re_empty_line.search(transformed).start()
if re_has_qt_contexts.search(transformed, pos1, pos2) is None:
transformed = transformed[:pos2] + \
'"X-Qt-Contexts: true\\n"\n' + \
transformed[pos2:]
subs = subs + 1
if (subs > 0):
print "Fetched %s (and performed %d cleanups)" % (lang, subs)
else:
print "Fetched %s" % lang
file(OUTPUT_PO_PATH + OUTPUT_PO_PATTERN % lang, "wb").write(transformed)
except subprocess.CalledProcessError:
print "No data for %s" % lang
# Inform qmake about the updated file list
#os.utime("CMakeLists.txt", None)
|
AdriaGS/MTP-Group-C | Single Mode/Semi-Final/deviceTX_wC2.py | Python | gpl-2.0 | 8,154 | 0.035565 | try:
import RPi.GPIO as GPIO
from lib_nrf24 import NRF24
from math import *
import time
import spidev
import sys
import os.path
import numpy
import pickle
import sqlite3
import mat4py as m4p
import os
def compress(uncompressed):
"""Compress a string to a list of output symbols."""
# Build the dictionary.
dict_size = 256
dictionary = {chr(i): i for i in range(dict_size)}
#dictionary = dict((chr(i), i) for i in xrange(dict_size))
# in Python 3: dictionary = {chr(i): i for i in range(dict_size)}
w = ""
result = []
for c in uncompressed:
wc = w + c
if wc in dictionary:
w = wc
else:
result.append(dictionary[w])
# Add wc to the dictionary.
dictionary[wc] = dict_size
dict_size += 1
w = c
# Output the code for w.
if w:
result.append(dictionary[w])
return result
def printSummary(file1, file2):
"""
printSummary() prints out the number of bytes in the original file and in
the result file.
@params: two files that are to be checked.
@return: n/a.
"""
# Checks if the files exist in the current directory.
if (not os.path.isfile(file1)) or (not os.path.isfile(file2)):
printError(0)
# Finds out how many bytes in each file.
f1_bytes = os.path.getsize(file1)
f2_bytes = os.path.getsize(file2)
sys.stderr.write(str(file1) + ': ' + str(f1_bytes) + ' bytes\n')
sys.stderr.write(str(file2) + ': ' + str(f2_bytes) + ' bytes\n')
def main():
GPIO.setmode(GPIO.BCM)
GPIO.setup(23, GPIO.OUT, initial=GPIO.LOW)
GPIO.setup(22, GPIO.OUT, initial=GPIO.LOW)
print("Transmitter")
pipes = [[0xe7, 0xe7, 0xe7, 0xe7, 0xe7], [0xc2, 0xc2, 0xc2, 0xc2, 0xc2]]
payloadSize = 32
channel_TX = 0x40
channel_RX = 0x45
#Initializa the radio transceivers with the CE ping connected to the GPIO22 and GPIO23
radio_Tx = NRF24(GPIO, spidev.SpiDev())
radio_Rx = NRF24(GPIO, spidev.SpiDev())
radio_Tx.begin(0, 22)
radio_Rx.begin(1, 24)
#We set the Payload Size to the limit which is 32 bytes
radio_Tx.setPayloadSize(payloadSize)
radio_Rx.setPayloadSize(payloadSize)
#We choose the channels to be used for one and the other transceiver
radio_Tx.setChannel(channel_TX)
radio_Rx.setChannel(channel_RX)
#We set the Transmission Rate
radio_Tx.setDataRate(NRF24.BR_250KBPS)
radio_Rx.setDataRate(NRF24.BR_250KBPS)
#Configuration of the power level to be used by the transceiver
radio_Tx.setPALevel(NRF24.PA_MIN)
radio_Rx.setPALevel(NRF24.PA_MIN)
#We disable the Auto Acknowledgement
radio_Tx.setAutoAck(False)
radio_Rx.setAutoAck(False)
radio_Tx.enableDynamicPayloads()
radio_Rx.enableDynamicPayloads()
#Open the writing and reading pipe
radio_Tx.openWritingPipe(pipes[1])
radio_Rx.openReadingPipe(0, pipes[0])
#We print the configuration details of both transceivers
print("Transmitter Details #################################################################################")
radio_Tx.printDetails()
print("*---------------------------------------------------------------------------------------------------*")
print("Receiver Details ####################################################################################")
radio_Rx.printDetails()
print("*---------------------------------------------------------------------------------------------------*")
###############################################################################################################################
###############################################################################################################################
###############################################################################################################################
#Read file to transmit
#inFile = open("SampleTextFile1Mb.txt", "rb")
inFile = open("ElQuijote.txt", "rb")
data2Tx = inFile.read()
inFile.close()
#flag variables
original_flag_data = 'A'
flag = ""
flag_n = 0
#packet realted variables
overhead = 1
dataSize = payloadSize - overhead
dataControlSize = payloadSize - overhead
#Data Packets
packets = []
numberofPackets = 0
#ACK related variables
ack = []
handshake = []
ack_received = 0
handshakeAck_received = 0
#Time variables
time_ack = 1
start_c = time.time()
#Compression of the data to transmit into data2Tx_compressed
data2Tx_compressed = compress(data2Tx)
n=len(bin(max(data2Tx_compressed)))-2
#We create the string with the packets needed to decompress the file transmitted
controlList_extended = []
controlList = []
for val in data2Tx_compressed:
division = int(val/256)
controlList.append(division)
if(n > 16):
for val in controlList:
division = int(val/256)
controlList_extended.append(division)
data2Send = []
for iterator in range(0, len(controlList)):
data2Send.append(data2Tx_compressed[iterator])
data2Send.append(controlList[iterator])
if(n > 16):
data2Send.append(controlList_extended[iterator])
final_c = time.time()
print("Compression time: " + str(final_c-start_c))
#Now we conform all the data packets in a list
for i in range (0, len(data2Send), dataSize):
if((i+dataSize) < len(data2Send)):
packets.append(data2Send[i:i+dataSize])
else:
packets.append(data2Send[i:])
numberofPackets += 1
#Start time
start = time.time()
radio_Rx.startListening()
radio_Tx.write(str(numberofPackets) + "," + str(n))
timeout = time.time() + time_ack
str_Handshake = ""
#While we don't receive the handshake ack we keep trying
while not (handshakeAck_received):
if radio_Rx.available(0):
radio_Rx.read(handshake, radio_Rx.getDynamicPayloadSize())
print("Something received")
for c in range(0, len(handshake)):
str_Handshake = str_Handshake + chr(handshake[c])
#If the received ACK does not match the expected one we retransmit, else we set the received handshake ack to 1
if(list(str_Handshake) != list("ACK")):
radio_Tx.write(str(numberofPackets) + "," + str(n))
timeout = time.time() + time_ack
print("Handshake Message Lost")
str_Handshake = ""
else:
print("Handshake done")
handshakeAck_received = 1
#If an established time passes and | we have not received anything we retransmit the handshake packet
if((time.time() + 0.2 | ) > timeout):
print("No Handshake ACK received resending message")
radio_Tx.write(str(numberofPackets) + "," + str(n))
timeout = time.time() + time_ack
#We iterate over every packet to be sent
dec_ready = 0
for message in packets:
flag = chr(ord(original_flag_data) + flag_n)
message2Send = list(flag) + message
radio_Tx.write(message2Send)
time.sleep(1)
if(dec_ready == 200):
time.sleep(0.3)
dec_ready = 0
timeout = time.time() + time_ack
radio_Rx.startListening()
str_ack = ""
#While we don't receive a correct ack for the transmitted packet we keep trying for the same packet
while not (ack_received):
if radio_Rx.available(0):
radio_Rx.read(ack, radio_Rx.getDynamicPayloadSize())
for c in range(0, len(ack)):
str_ack = str_ack + chr(ack[c])
print(str_ack)
#If the received ACK does not match the expected one we retransmit, else we set the received data ack to 1
if(list(str_ack) != (list("ACK") + list(flag))):
radio_Tx.write(list(flag) + list(message))
timeout = time.time() + time_ack
#print("Data ACK received but not the expected one --> resending message")
str_ack = ""
else:
ack_received = 1
#If an established time passes and we have not received anything we retransmit the data packet
if((time.time() + 0.01) > timeout):
print("No Data ACK received resending message")
radio_Tx.write(message2Send)
timeout = time.time() + time_ack
dec_ready = 0
ack_received = 0
flag_n = (flag_n + 1) % 10
final = time.time()
totalTime = final - start
print(totalTime)
GPIO.output(22, 0)
GPIO.output(23, 0)
if __name__ == '__main__':
main()
except KeyboardInterrupt:
GPIO.output(22,0)
GPIO.output(23,0)
GPIO.output(24,0)
GPIO.cleanup()
|
giacomov/3ML | threeML/plugins/DispersionSpectrumLike.py | Python | bsd-3-clause | 8,596 | 0.002792 | import copy
import pandas as pd
from threeML.plugins.SpectrumLike import SpectrumLike
from threeML.utils.OGIP.response import InstrumentResponse
from threeML.utils.spectrum.binned_spectrum import (
BinnedSpectrumWithDispersion,
ChannelSet,
)
__instrument_name = "General binned spectral data with energy dispersion"
class DispersionSpectrumLike(SpectrumLike):
    def __init__(
        self,
        name,
        observation,
        background=None,
        background_exposure=None,
        verbose=True,
        tstart=None,
        tstop=None,
    ):
        """
        A plugin for generic spectral data with energy dispersion, accepts an observed binned spectrum,
        and a background binned spectrum or plugin with the background data.

        In the case of a binned background spectrum, the background model is profiled
        out and the appropriate profile-likelihood is used to fit the total spectrum. In this
        case, caution must be used when there are zero background counts in bins as the
        profiled background parameters (one per channel) will then have zero information from which to
        constrain the background. It is recommended to bin the spectrum such that there is one background count
        per channel.

        If either an SpectrumLike or XYLike instance is provided as background, it is assumed that this is the
        background data and the likelihood model from this plugin is used to simultaneously fit the background
        and source.

        :param name: the plugin name
        :param observation: the observed spectrum; must be a BinnedSpectrumWithDispersion
            carrying a non-None response
        :param background: the background spectrum or a plugin from which the background will be modeled
        :param background_exposure: (optional) adjust the background exposure of the modeled background data comes from and
        XYLike plugin
        :param verbose: turn on/off verbose logging
        :param tstart: (optional) start time of the observation
        :param tstop: (optional) stop time of the observation
        """
        assert isinstance(
            observation, BinnedSpectrumWithDispersion
        ), "observed spectrum is not an instance of BinnedSpectrumWithDispersion"

        assert (
            observation.response is not None
        ), "the observed spectrum does not have a response"

        # assign the response to the plugins
        self._rsp = observation.response  # type: InstrumentResponse

        super(DispersionSpectrumLike, self).__init__(
            name=name,
            observation=observation,
            background=background,
            background_exposure=background_exposure,
            verbose=verbose,
            tstart=tstart,
            tstop=tstop,
        )
def set_model(self, likelihoodModel):
"""
Set the model to be used in the joint minimization.
"""
# Store likelihood model
self._like_model = likelihoodModel
# We assume there are no extended sources, since we cannot handle them here
assert self._like_model.get_number_of_extended_sources() == 0, (
"OGIP-like plugins do not support " "extended sources"
)
# Get the differential flux function, and the integral function, with no dispersion,
# we simply integrate the model over the bins
differential_flux, integral = self._get_diff_flux_and_integral(self._like_model)
self._rsp.set_function(integral)
    def _evaluate_model(self):
        """
        Evaluate the full model over all channels by convolving the
        integrated source flux with the instrument response.

        :return: per-channel model expectation from the response convolution
        """

        return self._rsp.convolve()
def get_simulated_dataset(self, new_name=None, **kwargs):
"""
Returns another DispersionSpectrumLike instance where data have been obtained by randomizing the current expectation from the
model, as well as from the background (depending on the respective noise models)
:return: a DispersionSpectrumLike simulated instance
"""
# pass the response thru to the constructor
return super(DispersionSpectrumLike, self).get_simulated_dataset(
new_name=new_name, **kwargs
)
def get_pha_files(self):
info = {}
# we want to pass copies so that
# the user doesn't grab the instance
# and try to modify things. protection
info["pha"] = copy.copy(self._ob | served_spectrum)
if self._background_spectrum is not None:
| info["bak"] = copy.copy(self._background_spectrum)
info["rsp"] = copy.copy(self._rsp)
return info
    def display_rsp(self):
        """
        Display the currently loaded full response matrix, i.e., RMF and ARF convolved

        :return: None (delegates plotting to the response object)
        """

        self._rsp.plot_matrix()
    @property
    def response(self):
        """The InstrumentResponse taken from the observation at construction."""
        return self._rsp
def _output(self):
# type: () -> pd.Series
super_out = super(DispersionSpectrumLike, self)._output() # type: pd.Series
the_df = pd.Series({"response": self._rsp.rsp_filename})
return super_out.append(the_df)
def write_pha(self, filename, overwrite=False, force_rsp_write=False):
"""
Writes the observation, background and (optional) rsp to PHAII fits files
:param filename: base file name to write out
:param overwrite: if you would like to force overwriting of the files
:param force_rsp_write: force the writing of an rsp even if not required
"""
# we need to pass up the variables to an OGIPLike
# so that we have the proper variable name
# a local import here because OGIPLike is dependent on this
from threeML.plugins.OGIPLike import OGIPLike
ogiplike = OGIPLike.from_general_dispersion_spectrum(self)
ogiplike.write_pha(
file_name=filename, overwrite=overwrite, force_rsp_write=force_rsp_write
)
@staticmethod
def _build_fake_observation(
fake_data, channel_set, source_errors, source_sys_errors, is_poisson, **kwargs
):
"""
This is the fake observation builder for SpectrumLike which builds data
for a binned spectrum without dispersion. It must be overridden in child classes.
:param fake_data: series of values... they are ignored later
:param channel_set: a channel set
:param source_errors:
:param source_sys_errors:
:param is_poisson:
:return:
"""
assert (
"response" in kwargs
), "A response was not provided. Cannor build synthetic observation"
response = kwargs.pop("response")
observation = BinnedSpectrumWithDispersion(
fake_data,
exposure=1.0,
response=response,
count_errors=source_errors,
sys_errors=source_sys_errors,
quality=None,
scale_factor=1.0,
is_poisson=is_poisson,
mission="fake_mission",
instrument="fake_instrument",
tstart=0.0,
tstop=1.0,
)
return observation
@classmethod
def from_function(
cls,
name,
source_function,
response,
source_errors=None,
source_sys_errors=None,
background_function=None,
background_errors=None,
background_sys_errors=None,
):
"""
Construct a simulated spectrum from a given source function and (optional) background function. If source and/or background errors are not supplied, the likelihood is assumed to be Poisson.
:param name: simulated data set name
:param source_function: astromodels function
:param response: 3ML Instrument response
:param source_errors: (optional) gaussian source errors
:param source_sys_errors: (optional) systematic source errors
:param background_function: (optional) astromodels background function
:param background_errors: (optional) gaussian background errors
:param background_sys_errors: (optional) background systematic errors
:return: simulated DispersionSpectrumLike plugin
"""
channel_set = ChannelSet.from_instrument_response(response)
energy_min, energy_max = channel_set.bin_ |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.