repo_name stringlengths 5 100 | path stringlengths 4 231 | language stringclasses 1 value | license stringclasses 15 values | size int64 6 947k | score float64 0 0.34 | prefix stringlengths 0 8.16k | middle stringlengths 3 512 | suffix stringlengths 0 8.17k |
|---|---|---|---|---|---|---|---|---|
garyjohnson/ci_screen_2 | ci_screen/service/ci_server_poller.py | Python | mit | 2,307 | 0.002167 | import logging
import time
import threading
try:
import ConfigParser as config
except:
import configparser as config
from pydispatch import dispatcher
import requests
import ci_screen.service.ci_server_loader as ci_ | loader
logger = logging.getLogger(__name__)
class CIServerPoller(object):
def __init__(self):
self._stop = threading.Event()
self._update = threading.Event()
self._poll_rate = self.get_poll_rate()
se | lf.polling_thread = None
self.ci_servers = ci_loader.get_ci_servers()
def __del__(self):
self.stop_polling()
def start_polling_async(self):
self._stop.clear()
self._update.clear()
self.polling_thread = threading.Thread(target=self.poll_for_changes)
self.polling_thread.daemon = True
self.polling_thread.start()
def stop_polling(self):
self._stop.set()
self.polling_thread = None
def poll_for_changes(self):
while not self._stop.isSet():
errors = {}
responses = {}
for ci_server in self.ci_servers:
name = ci_server['name']
url = ci_server['url']
username = ci_server.get('username')
token = ci_server.get('token')
auth = None
if username is not None and token is not None:
auth = requests.auth.HTTPBasicAuth(username, token)
try:
response = requests.get('{}/cc.xml'.format(url), auth=auth)
if response.status_code == 200:
responses[name] = response
else:
raise Exception('ci server {} returned {}: {}'.format(url, response, response.text))
except Exception as ex:
logger.warning(ex)
errors[name] = ex
dispatcher.send(signal="CI_UPDATE", sender=self, responses=responses, errors=errors)
time.sleep(self._poll_rate)
def get_poll_rate(self):
config_parser = config.SafeConfigParser(allow_no_value=False)
with open('ci_screen.cfg') as config_file:
config_parser.readfp(config_file)
return int(config_parser.get('general', 'poll_rate_seconds'))
|
Azure/azure-sdk-for-python | sdk/network/azure-mgmt-network/azure/mgmt/network/v2019_08_01/operations/_peer_express_route_circuit_connections_operations.py | Python | mit | 9,496 | 0.004844 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------- | -----------------------------
from typing import TYPE_CHECKING
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpRequest, HttpResponse
from azure.mgmt.core.exceptions import ARMErrorFormat
from .. import mode | ls as _models
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
class PeerExpressRouteCircuitConnectionsOperations(object):
"""PeerExpressRouteCircuitConnectionsOperations operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.network.v2019_08_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
def get(
self,
resource_group_name, # type: str
circuit_name, # type: str
peering_name, # type: str
connection_name, # type: str
**kwargs # type: Any
):
# type: (...) -> "_models.PeerExpressRouteCircuitConnection"
"""Gets the specified Peer Express Route Circuit Connection from the specified express route
circuit.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param circuit_name: The name of the express route circuit.
:type circuit_name: str
:param peering_name: The name of the peering.
:type peering_name: str
:param connection_name: The name of the peer express route circuit connection.
:type connection_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: PeerExpressRouteCircuitConnection, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2019_08_01.models.PeerExpressRouteCircuitConnection
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.PeerExpressRouteCircuitConnection"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-08-01"
accept = "application/json"
# Construct URL
url = self.get.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'circuitName': self._serialize.url("circuit_name", circuit_name, 'str'),
'peeringName': self._serialize.url("peering_name", peering_name, 'str'),
'connectionName': self._serialize.url("connection_name", connection_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('PeerExpressRouteCircuitConnection', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCircuits/{circuitName}/peerings/{peeringName}/peerConnections/{connectionName}'} # type: ignore
def list(
self,
resource_group_name, # type: str
circuit_name, # type: str
peering_name, # type: str
**kwargs # type: Any
):
# type: (...) -> Iterable["_models.PeerExpressRouteCircuitConnectionListResult"]
"""Gets all global reach peer connections associated with a private peering in an express route
circuit.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param circuit_name: The name of the circuit.
:type circuit_name: str
:param peering_name: The name of the peering.
:type peering_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either PeerExpressRouteCircuitConnectionListResult or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.network.v2019_08_01.models.PeerExpressRouteCircuitConnectionListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.PeerExpressRouteCircuitConnectionListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-08-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'circuitName': self._serialize.url("circuit_name", circuit_name, 'str'),
'peeringName': self._serialize.url("peering_name", peering_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
|
google-research/football | gfootball/scenarios/tests/11_vs_11_hard_deterministic.py | Python | apache-2.0 | 2,272 | 0.013204 | # coding=utf-8
# Copyright 2019 Google LLC
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from . import *
def build_scenario(builder):
builder.config().game_duration = 3000
builder.config().right_team_difficulty = 0.95
builder.config().deterministic = True
if builder.EpisodeNumber() % 2 == 0:
first_team = Team.e_Left
second_team = Te | am.e_Right
else:
first_team = Team.e_Right
sec | ond_team = Team.e_Left
builder.SetTeam(first_team)
builder.AddPlayer(-1.000000, 0.000000, e_PlayerRole_GK)
builder.AddPlayer(0.000000, 0.020000, e_PlayerRole_RM)
builder.AddPlayer(0.000000, -0.020000, e_PlayerRole_CF)
builder.AddPlayer(-0.422000, -0.19576, e_PlayerRole_LB)
builder.AddPlayer(-0.500000, -0.06356, e_PlayerRole_CB)
builder.AddPlayer(-0.500000, 0.063559, e_PlayerRole_CB)
builder.AddPlayer(-0.422000, 0.195760, e_PlayerRole_RB)
builder.AddPlayer(-0.184212, -0.10568, e_PlayerRole_CM)
builder.AddPlayer(-0.267574, 0.000000, e_PlayerRole_CM)
builder.AddPlayer(-0.184212, 0.105680, e_PlayerRole_CM)
builder.AddPlayer(-0.010000, -0.21610, e_PlayerRole_LM)
builder.SetTeam(second_team)
builder.AddPlayer(-1.000000, 0.000000, e_PlayerRole_GK)
builder.AddPlayer(-0.050000, 0.000000, e_PlayerRole_RM)
builder.AddPlayer(-0.010000, 0.216102, e_PlayerRole_CF)
builder.AddPlayer(-0.422000, -0.19576, e_PlayerRole_LB)
builder.AddPlayer(-0.500000, -0.06356, e_PlayerRole_CB)
builder.AddPlayer(-0.500000, 0.063559, e_PlayerRole_CB)
builder.AddPlayer(-0.422000, 0.195760, e_PlayerRole_RB)
builder.AddPlayer(-0.184212, -0.10568, e_PlayerRole_CM)
builder.AddPlayer(-0.267574, 0.000000, e_PlayerRole_CM)
builder.AddPlayer(-0.184212, 0.105680, e_PlayerRole_CM)
builder.AddPlayer(-0.010000, -0.21610, e_PlayerRole_LM)
|
gion86/awlsim | awlsimhw_dummy/main.py | Python | gpl-2.0 | 1,933 | 0.021728 | # -*- coding: utf-8 -*-
#
# AWL simulator - Dummy hardware interface
#
# Copyright 2013-2014 Michael Buesch <m@bues.ch>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, | Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
from __future__ import division, | absolute_import, print_function, unicode_literals
from awlsim.common.compat import *
from awlsim.core.hardware import *
from awlsim.core.operators import AwlOperator
from awlsim.core.datatypes import AwlOffset
class HardwareInterface(AbstractHardwareInterface):
name = "dummy"
def __init__(self, sim, parameters={}):
AbstractHardwareInterface.__init__(self,
sim = sim,
parameters = parameters)
def doStartup(self):
pass # Do nothing
def doShutdown(self):
pass # Do nothing
def readInputs(self):
pass # Do nothing
def writeOutputs(self):
pass # Do nothing
def directReadInput(self, accessWidth, accessOffset):
if accessOffset < self.inputAddressBase:
return None
# Just read the current value from the CPU and return it.
return self.sim.cpu.fetch(AwlOperator(AwlOperator.MEM_E,
accessWidth,
AwlOffset(accessOffset)))
def directWriteOutput(self, accessWidth, accessOffset, data):
if accessOffset < self.outputAddressBase:
return False
# Just pretend we wrote it somewhere.
return True
|
prometheanfire/cloud-init | cloudinit/config/cc_keys_to_console.py | Python | gpl-3.0 | 2,179 | 0 | # vi: ts=4 expandtab
#
# Copyright (C) 2011 Canonical Ltd.
# Copyright (C) 2012 Hewlett-Packard Development Company, L.P.
#
# Author: Scott Moser <scott.moser@canonical.com>
# Author: Juerg Haefliger <juerg.haefliger@hp.com>
#
# This program is free software: you can redistr | ibute it and/or modify
# it under the terms of the GNU General Public License version 3, as
# published by the Free Software Foundation.
#
# | This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import os
from cloudinit.settings import PER_INSTANCE
from cloudinit import util
frequency = PER_INSTANCE
# This is a tool that cloud init provides
HELPER_TOOL_TPL = '%s/cloud-init/write-ssh-key-fingerprints'
def _get_helper_tool_path(distro):
try:
base_lib = distro.usr_lib_exec
except AttributeError:
base_lib = '/usr/lib'
return HELPER_TOOL_TPL % base_lib
def handle(name, cfg, cloud, log, _args):
helper_path = _get_helper_tool_path(cloud.distro)
if not os.path.exists(helper_path):
log.warn(("Unable to activate module %s,"
" helper tool not found at %s"), name, helper_path)
return
fp_blacklist = util.get_cfg_option_list(cfg,
"ssh_fp_console_blacklist", [])
key_blacklist = util.get_cfg_option_list(cfg,
"ssh_key_console_blacklist",
["ssh-dss"])
try:
cmd = [helper_path]
cmd.append(','.join(fp_blacklist))
cmd.append(','.join(key_blacklist))
(stdout, _stderr) = util.subp(cmd)
util.multi_log("%s\n" % (stdout.strip()),
stderr=False, console=True)
except Exception:
log.warn("Writing keys to the system console failed!")
raise
|
m4ll0k/Infoga | setup.py | Python | gpl-3.0 | 494 | 0.022267 | #! | /usr/bin/env python
# -*- coding:utf-8 -*-
#
# @name : Infoga - Email OSINT
# @url : http://github.com/m4ll0k
# @author : Momo Outaadi (m4ll0k)
from setuptools import setup
setup(
name='infoga',
version='0.1.5',
description='Email OSINT',
url= | 'https://github.com/m4ll0k',
author = 'Momo (m4ll0k) Outaadi',
author_email='m4ll0k@protonmail.com',
license='GPLv3',
install_requires = ['colorama','requests','urllib3'],
console =['infoga.py'],
) |
collective/ECSpooler | lib/util/errorcodes.py | Python | gpl-2.0 | 154 | 0.006494 | # -*- co | ding: utf-8 -*-
# $Id: $
ERROR_AUTH_FAILED = "Authorization failed"
NO_SUCH_BACKEND = "No such backend"
REDIRECTION_FAILED = "R | edirection failed" |
ProfessorX/Config | .PyCharm30/system/python_stubs/-1247971765/PyQt4/QtCore/QSequentialAnimationGroup.py | Python | gpl-2.0 | 2,169 | 0.011065 | # encoding: utf-8
# module PyQt4.QtCore
# from /usr/lib/python3/dist-packages/PyQt4/QtCore.cpython-34m-x86_64-linux-gnu.so
# by generator 1.135
# no doc
# imports
import sip as __sip
from .QAnimationGroup import QAnimationGroup
class QSequentialAnimationGroup(QAnimationGroup):
""" QSequentialAnimationGroup(QObject parent=None) """
def addPause(self, p_int): # real signature unknown; restored from __doc__
""" QSequentialAnimationGroup.addPause(int) -> QPauseAnimation """
return QPauseAnimation
def currentAnimation(self): # real signature unknown; restored from __doc__
""" QSequentialAnimationGroup.currentAnimation() -> QAbstractAnimation """
return QAbstractAnimati | on
def currentAnimationChanged(self, *args, **kwargs): # real signature unknown
""" QSequentialAnimationGroup.cu | rrentAnimationChanged[QAbstractAnimation] [signal] """
pass
def duration(self): # real signature unknown; restored from __doc__
""" QSequentialAnimationGroup.duration() -> int """
return 0
def event(self, QEvent): # real signature unknown; restored from __doc__
""" QSequentialAnimationGroup.event(QEvent) -> bool """
return False
def insertPause(self, p_int, p_int_1): # real signature unknown; restored from __doc__
""" QSequentialAnimationGroup.insertPause(int, int) -> QPauseAnimation """
return QPauseAnimation
def updateCurrentTime(self, p_int): # real signature unknown; restored from __doc__
""" QSequentialAnimationGroup.updateCurrentTime(int) """
pass
def updateDirection(self, QAbstractAnimation_Direction): # real signature unknown; restored from __doc__
""" QSequentialAnimationGroup.updateDirection(QAbstractAnimation.Direction) """
pass
def updateState(self, QAbstractAnimation_State, QAbstractAnimation_State_1): # real signature unknown; restored from __doc__
""" QSequentialAnimationGroup.updateState(QAbstractAnimation.State, QAbstractAnimation.State) """
pass
def __init__(self, QObject_parent=None): # real signature unknown; restored from __doc__
pass
|
kaihuang201/opinion | appopinion/CommentGenerator.py | Python | gpl-2.0 | 1,938 | 0.013416 |
"""
CommentGenerator.py
Author: Zefu Lu (zefulu2)
Description: This Module generates comments and add it to the database
Creation: 2014-11-4
"""
#===============================================================================
# import references
#===============================================================================
import sys
reload(sys)
# from DatabaseWrapper import DatabaseWrapper
from datetime import datetime
from dateutil.parser import parse
from pprint import pprint as pp
from random import randrange
import time
import os
import django
sys.path.append(os.path.dirname(os.path.dirname(os.path.realpath(__file__))))
os.environ['DJANGO_SETTINGS_MODULE']='opinion.settings'
from django.contrib.auth.models import User
from a | ppopinion.models import *
users = User.objects.all()
django.setup()
class CommentGenerator(object):
'''Constructor'''
def __init__(self, topic_id):
self.topic_id = topic_id;
#initialize database wrapper
#self.db = DatabaseWrapper(db_config_file)
def generate(self):
counter= 1
while(True):
item_dict = {}
item_dict['content'] = "Comment " + str(randrange(1000)) + " @ "+dat | etime.utcnow().strftime('%Y-%m-%d %H:%M:%S')
item_dict['date'] = datetime.utcnow().strftime('%Y-%m-%d %H:%M:%S')
item_dict['parent_id'] = self.topic_id
# self.db.insertToTable('newapp_comment', item_dict)
topic = Topic.objects.get(pk=item_dict['parent_id'])
comment = Comment(
content=item_dict['content'],
parent=topic,
date=item_dict['date'],
)
comment.save()
pp(item_dict)
time.sleep(2)
counter+=1
def main():
generator = CommentGenerator(5)
generator.generate()
if __name__ == '__main__':
main()
|
jpoullet2000/cgs-apps | installForTest.py | Python | apache-2.0 | 4,453 | 0.005614 | #!/usr/bin/python
__author__ = 'CGS'
import os, shutil, sys, distutils.core, subprocess
# Some configuration needed for this file
apps_directory = ""
apps = {"variants": "apps/variants"}
PRODUCTION = False
# TODO: better management of errors
# Some basic checks
if os.getuid() != 0:
sys.exit("This program requires super user privileges.")
if len(sys.argv) <= 1:
sys.exit("Please, give the name of the app you want to install. Choose among the followings: " +
str(apps.keys()))
if sys.argv[0] != "installCGSapps.py" and "/" in sys.argv[0]:
# If the script was not launch in the current directory, we have to make some modifications
tmp = sys.argv[0].split("/")
script_name = tmp.pop()
app_directory_prefix = sys.argv[0].replace("/"+script_name,"/")
else:
app_directory_prefix = ""
# We take the folder where hue is installed
try:
hue_directory = subprocess.Popen("whereis hue", stdin=False, shell=True, stdout=subprocess.PIPE)
hue_directory = str(hue_directory.communicate()[0 | ]).split(" ")[2].strip()
except:
hue_directory = "/usr/lib/hue"
if not os.path.exists(hue_directory) and "HUE_DIRECTORY" in os.environ:
hue_directory = os.environ["HUE_DIRECTORY"]
if os.path.exists(hue_directory) and not os.path.exists(hue_directory+"/myapps"):
try:
os.makedirs(hue_directory+"/myapps")
except:
sys.exit("Impossible to create the folder 'myapps' in '"+hue_directory+"'.")
apps_directory = hue_directory + "/myapps"
# Some basic checks first
if no | t os.path.exists(hue_directory):
sys.exit("This installation file did not find the hue directory, please create a HUE_DIRECTORY environment"
"variable.")
# We install each application
aborted = 0
for i in xrange(1, len(sys.argv)):
app_name = sys.argv[i]
if not app_name in apps:
sys.exit("Invalid app name. Choose among the followings: "+str(apps.keys()))
if not os.path.exists(app_directory_prefix+apps[app_name]):
sys.exit("It seems the source of the app '"+app_name+"' is missing from the uncompressed zip.")
app_directory = apps_directory+"/"+app_name
"""
# We try to delete the eventual old folder
if os.path.exists(app_directory):
if PRODUCTION == True:
reinstall = raw_input("It seems the '"+app_name+"' already exists. Do you want to reinstall it [Y/n]?")
else:
reinstall = "Y"
if reinstall != "Y" and reinstall != "y":
print("Installation of '"+app_name+"' aborted.")
aborted += 1
continue
else:
try:
shutil.rmtree(app_directory)
except Exception as e:
print(e.message)
sys.exit("Impossible to delete the folder "+app_directory+". Check the access rights.")
# We create the app
# TODO: we do not catch correctly the errors of 'subprocess'
try:
print("Creating the app '"+app_name+"'...")
app_install = subprocess.Popen("cd " + apps_directory + " && " + hue_directory +
"/build/env/bin/hue create_desktop_app " + app_name,
stdin=False, shell=True, stdout=subprocess.PIPE)
app_install.communicate()
app_install = subprocess.Popen("cd " + apps_directory + " && python " + hue_directory +
"/tools/app_reg/app_reg.py --install " + app_name,
stdin=False, shell=True, stdout=subprocess.PIPE)
app_install.communicate()
except Exception as e:
print(e.message)
sys.exit("Error while creating the app...")
"""
# We copy the content of the application to the new directory
app_src = app_directory_prefix+apps[app_name]
try:
print("Copying source code to app folder...")
distutils.dir_util.copy_tree(app_src, app_directory)
except:
sys.exit("Impossible to copy data from '"+app_src+"' to '"+app_directory+"'.")
# We restart hue
try:
app_install = subprocess.Popen("service hue restart", stdin=False, shell=True, stdout=subprocess.PIPE)
app_install.communicate()
except Exception as e:
print(e.message)
sys.exit("Error while restarting hue.")
# The happy end
if aborted == 0:
print("Installation successful.")
elif aborted != len(sys.argv) - 1:
print("Installation of the 'non-aborted' apps successful.")
|
odoousers2014/LibrERP | account_due_list/reports/parser.py | Python | agpl-3.0 | 1,214 | 0.007414 | # -*- coding: utf-8 -*-
############################################################ | ##################
#
# Modulo realizzato da Andrea Cometa (info@andreacometa.it)
# Compatible with OpenERP release 6.1.X
# Copyright (C) 2012 Andrea Cometa. All Rights Reserved.
# Email: info@andreacometa.it
# Web site: http://www.andreacometa.it
#
##############################################################################
import time
from report import report_sxw
import inspect, os
from datetime import datetime
class account_due_list_webkit(report_sxw.rml | _parse):
def __init__(self, cr, uid, name, context):
super(account_due_list_webkit, self).__init__(cr, uid, name, context=context)
file_path = os.path.dirname(inspect.getfile(inspect.currentframe()))
self.localcontext.update({
'datetime': datetime,
'time': time,
'cr':cr,
'uid': uid,
'file_path':file_path,
})
report_sxw.report_sxw('report.account_due_list.scadenzario',
'account.move.line',
'account_due_list_extended/reports/scadenzario.mako',
parser=account_due_list_webkit)
|
MartinHjelmare/home-assistant | homeassistant/components/toon/binary_sensor.py | Python | apache-2.0 | 4,548 | 0 | """Support for Toon binary sensors."""
from datetime import timedelta
import logging
from typing import Any
from homeassistant.components.binary_sensor import BinarySensorDevice
from homeassistant.config_entries import ConfigEntry
from homeassistant.helpers.typing import HomeAssistantType
from . import (ToonEntity, ToonDisplayDeviceEntity, ToonBoilerDeviceEntity,
ToonBoilerModuleDeviceEntity)
from .const import DATA_TOON_CLIENT, DOMAIN
_LOGGER = logging.getLogger(__name__)
MIN_TIME_BETWEEN_UPDATES = timedelta(seconds=5)
SCAN_INTERVAL = timedelta(seconds=300)
async def async_setup_entry(hass: HomeAssistantType, entry: ConfigEntry,
async_add_entities) -> None:
"""Set up a Toon binary sensor based on a config entry."""
toon = hass.data[DATA_TOON_CLIENT][entry.entry_id]
sensors = [
ToonBoilerModuleBinarySensor(toon, 'thermostat_info',
'boiler_connected', None,
'Boiler Module Connection',
'mdi:check-network-outline',
'connectivity'),
ToonDisplayBinarySensor(toon, 'thermostat_info', 'active_state', 4,
"Toon Holiday Mode", 'mdi:airport', None),
ToonDisplayBinarySensor(toon, 'thermostat_info', 'next_program', None,
"Toon Program", 'mdi:calendar-clock', None),
]
if toon.thermostat_info.have_ot_boiler:
sensors.extend([
ToonBoilerBinarySensor(toon, 'thermostat_info',
'ot_communication_error', '0',
"OpenTherm Connection",
'mdi:check-network-outline',
'connectivity'),
ToonBoilerBinarySensor(toon, 'thermostat_info', 'error_found', 255,
"Boiler Status", 'mdi:alert', 'problem',
inverted=True),
ToonBoilerBinarySensor(toon, 'thermostat_info', 'burner_info',
None, "Boiler Burner", 'mdi:fire', None),
ToonBoilerBinarySensor(toon, 'thermostat_info', 'burner_info', '2',
"Hot Tap Water", 'mdi:water-pump', None),
ToonBoilerBinarySensor(toon, 'thermostat_info', 'burner_info', '3',
"Boiler Preheating", 'mdi:fire', None),
| ])
async_add_entities(sensors)
class ToonBinarySensor(ToonEntity, BinarySensorDevice):
"""Defines an Toon binary sensor."""
def __init__(self, toon, section: str, measurement: str, on_value: Any,
| name: str, icon: str, device_class: str,
inverted: bool = False) -> None:
"""Initialize the Toon sensor."""
self._state = inverted
self._device_class = device_class
self.section = section
self.measurement = measurement
self.on_value = on_value
self.inverted = inverted
super().__init__(toon, name, icon)
@property
def unique_id(self) -> str:
"""Return the unique ID for this binary sensor."""
return '_'.join([DOMAIN, self.toon.agreement.id, 'binary_sensor',
self.section, self.measurement, str(self.on_value)])
@property
def device_class(self) -> str:
"""Return the device class."""
return self._device_class
@property
def is_on(self) -> bool:
"""Return the status of the binary sensor."""
if self.on_value is not None:
value = self._state == self.on_value
elif self._state is None:
value = False
else:
value = bool(max(0, int(self._state)))
if self.inverted:
return not value
return value
def update(self) -> None:
"""Get the latest data from the binary sensor."""
section = getattr(self.toon, self.section)
self._state = getattr(section, self.measurement)
class ToonBoilerBinarySensor(ToonBinarySensor, ToonBoilerDeviceEntity):
"""Defines a Boiler binary sensor."""
pass
class ToonDisplayBinarySensor(ToonBinarySensor, ToonDisplayDeviceEntity):
"""Defines a Toon Display binary sensor."""
pass
class ToonBoilerModuleBinarySensor(ToonBinarySensor,
ToonBoilerModuleDeviceEntity):
"""Defines a Boiler module binary sensor."""
pass
|
eduNEXT/edx-platform | lms/djangoapps/commerce/tests/test_views.py | Python | agpl-3.0 | 370 | 0 | """ Tests for commerce views. """
from common.djangoapps.student.tests.factories import UserFactory
class UserMixin:
""" Mixin for tests involving users. """
def s | etUp(self):
super().setUp()
self.user = UserFactory()
def _login(self):
""" Log into LMS. """
self.client.logi | n(username=self.user.username, password='test')
|
satyrius/cmsplugin-feedback | cmsplugin_feedback/signals.py | Python | mit | 1,157 | 0.000864 | import django.dispatch
from django.contrib.sites.models import get_current_site
from django.core.mail import mail_managers
from django.core.urlresolvers import reverse
from django.template import loader, Context
from . import settings
form_submited = django.dispatch.Signal()
def notify | _managers(sender, message, request, *args, **kwargs):
if settings.NOTIFY_MANAGERS:
mail_managers(
subject=settings.NOTIFY_SUBJECT,
message=render_email(message, request),
fail_silently=True)
form_submited.connect(notify_managers)
def get_admin_url(instance, request):
meta = instance._meta
model = hasattr('meta', 'model_name') and \
meta.model_name or meta.module_name
url_pattern = 'admin:{app}_{model}_change'.format(
app | =meta.app_label, model=model)
s = get_current_site(request)
return 'http://' + s.domain + reverse(url_pattern, args=[instance.pk])
def render_email(message, request):
t = loader.get_template('cms/plugins/feedback-email.html')
c = Context({
'message': message,
'url': get_admin_url(message, request),
})
return t.render(c)
|
nightlights/py_vn | src/cable.py | Python | gpl-3.0 | 415 | 0 | __author__ = "Liam R | igby"
__license__ = "GPLv3"
__version__ = "0.1 alpha"
__maintainer__ = "Liam Rigby"
__email__ = "lrig521@aucklanduni.ac.nz"
__status__ = "Development"
class FastEthernetCable:
def __init__(self):
self.max_length = 300
class GigabitEthernetCable:
def __init__(self):
self.max_length = 300
class ConsoleCable:
def __init__(self):
| self.max_length = 300
|
podcastquotes/podcastquotes | django_project/quotes_app/management/commands/rank_quotes.py | Python | agpl-3.0 | 387 | 0.007752 | from django.core.management.base import BaseCommand, CommandError
from quotes_app.tasks import | rank_all
import logging
logger = logging.getLogger(__name__)
class Command(BaseCommand):
help = 'Runs reranking algorithms on the Quotes.'
def handle(self, *args, **options):
logger.info('Running {0} management task.'.format(__name__ | ))
rank_all()
|
tomashaber/raiden | raiden/tests/unit/test_channel.py | Python | mit | 32,853 | 0.000213 | # -*- coding: utf-8 -*-
# pylint: disable=too-many-locals,too-many-statements
from __future__ import division
import pytest
from ethereum import slogging
from raiden.channel import (
BalanceProof,
Channel,
ChannelEndState,
ChannelExternalState,
)
from raiden.exceptions import (
InsufficientBalance,
)
from raiden.messages import (
EMPTY_MERKLE_ROOT,
DirectTransfer,
Lock,
LockedTransfer,
Secret,
MediatedTransfer,
)
from raiden.mtree import Merkletree
from raiden.utils import sha3
from raiden.tests.utils.messages import make_mediated_transfer
from raiden.tests.utils.transfer import assert_synched_channels, channel
from raiden.tests.utils.factories import make_address, make_privkey_address
log = slogging.getLogger(__name__) # pylint: disable=invalid-name
class NettingChannelMock(object):
    """Minimal stand-in for a netting-channel contract proxy.

    Reports a fixed address, an ``opened`` block of 1 and a ``closed``
    block of 0 (i.e. the channel is open and was never closed).
    """
    # pylint: disable=no-self-use

    def __init__(self):
        self.address = 'channeladdresschanne'

    def opened(self):
        """Block number at which the mock channel was opened."""
        return 1

    def closed(self):
        """Block number at which the channel was closed; 0 means still open."""
        return 0
def make_external_state():
    """Build a ChannelExternalState wired to a NettingChannelMock.

    The first argument is the hashlock-registration callback; it appends
    its arguments to a local list. Note the list is never returned, so
    registrations made through this helper are effectively discarded.
    """
    channel_for_hashlock = list()
    netting_channel = NettingChannelMock()
    external_state = ChannelExternalState(
        lambda *args: channel_for_hashlock.append(args),
        netting_channel,
    )
    return external_state
def test_end_state():
token_address = make_address()
privkey1, address1 = make_privkey_address()
address2 = make_address()
channel_address = make_address()
balance1 = 70
balance2 = 110
lock_secret = sha3('test_end_state')
lock_amount = 30
lock_expiration = 10
lock_hashlock = sha3(lock_secret)
state1 = ChannelEndState(address1, balance1, BalanceProof(None))
state2 = ChannelEndState(address2, balance2, BalanceProof(None))
assert state1.contract_balance == balance1
assert state2.contract_balance == balance2
assert state1.balance(state2) == balance1
assert state2.balance(state1) == balance2
assert state1.distributable(state2) == balance1
assert state2.distributable(state1) == balance2
assert state1.locked() == 0
assert state2.locked() == 0
assert state1.balance_proof.is_pending(lock_hashlock) is False
assert state2.balance_proof.is_pending(lock_hashlock) is False
assert state1.balance_proof.merkleroot_for_unclaimed() == EMPTY_MERKLE_ROOT
assert state2.balance_proof.merkleroot_for_unclaimed() == EMPTY_MERKLE_ROOT
assert state1.nonce is None
assert state2.nonce is None
lock = Lock(
lock_amount,
lock_expiration,
lock_hashlock,
)
lock_hash = sha3(lock.as_bytes)
transferred_amount = 0
locksroot = state2.compute_merkleroot_with(lock)
locked_transfer = LockedTransfer(
1,
nonce=1,
token=token_address,
channel=channel_address,
transferred_amount=transferred_amount,
recipient=state2.address,
locksroot=locksroot,
lock=lock,
)
transfer_target = make_address()
transfer_initiator = make_address()
fee = 0
mediated_transfer = locked_transfer.to_mediatedtransfer(
transfer_target,
transfer_initiator,
fee,
)
mediated_transfer.sign(privkey1, address1)
state1.register_locked_transfer(mediated_transfer)
assert state1.contract_balance == balance1
assert state2.contract_balance == balance2
assert state1.balance(state2) == balance1
assert state2.balance(state1) == balance2
assert state1.distributable(state2) == balance1 - lock_amount
assert state2.distributable(state1) == balance2
assert state1.locked() == lock_amount
assert state2.locked() == 0
assert state1.balance_proof.is_pending(lock_hashlock) is True
assert state2.balance_proof.is_pending(lock_hashlock) is False
assert state1.balance_proof.merkleroot_for_unclaimed() == lock_hash
assert state2.balance_proof.merkleroot_for_unclaimed() == EMPTY_MERKLE_ROOT
assert state1.nonce is 1
assert state2.nonce is None
with pytest.raises(ValueError):
state1.update_contract_balance(balance1 - 10)
state1.update_contract_balance(balance1 + 10)
assert state1.contract_balance == balance1 + 10
assert state2.contract_balance == balance2
assert state1.balance(state2) == balance1 + 10
assert state2.balance(state1) == balance2
assert state1.distributable(state2) == balance1 - lock_amount + 10
assert state2.distributable(state1) == balance2
assert state1.locked() == lock_amount
assert state2.locked() == 0
assert state1.balance_proof.is_pending(lock_hashlock) is True
assert state2.balance_proof.is_pending(lock_hashlock) is False
assert state1.balance_proof.merkleroot_for_unclaimed() == lock_hash
assert state2.balance_proof.merkleroot_for_unclaimed() == EMPTY_MERKLE_ROOT
assert state1.nonce is 1
assert state2.nonce is None
# registering the secret should not change the locked amount
state1.register_secret(lock_secret)
assert state1.contract_balance == balance1 + 10
assert state2.contract_balance == balance2
assert state1.balance(state2) == balance1 + 10
assert state2.balance(state1) == balance2
assert state1.distributable(state2) == balance1 - lock_amount + 10
assert state2.distributable(state1) == balance2
assert state1.locked() == lock_amount
assert state2.locked() == 0
assert state1.balance_proof.is_pending(lock_hashlock) is False
assert state2.balance_proof.is_pending(lock_hashlock) is False
assert state1.balance_proof.merkleroot_for_unclaimed() == lock_hash
assert state2.balance_proof.merkleroot_for_unclaimed() == EMPTY_MERKLE_ROOT
assert state1.nonce is 1
assert state2.nonce is None
secret_message = Secret(
identifier=1,
nonce=2,
channel=channel_address,
transferred_amount=transferred_amount + lock_amount,
locksroot=EMPTY_MERKLE_ROOT,
secret=lock_secret,
)
secret_message.sign(privkey1, address1)
state1.register_secretmessage(secret_message)
assert state1.contract_balance == balance1 + 10
assert state2.contract_balance == balance2
assert state1.balance(state2) == balance1 + 10 - lock_amount
assert state2.balance(state1) == balance2 + lock_amount
assert state1.distributable(state2) == balance1 + 10 - lock_amount
assert state2.distributable(state1) == balance2 + lock_amount
assert state1.locked() == 0
assert state2.locked() == 0
assert state1.balance_proof.is_pending(lock_hashlock) is False
assert state2.balance_proof.is_pending(lock_hashlock) is False
assert state1.balance_proof.merkleroot_for_unclaimed() == EMPTY_MERKLE_ROOT
assert state2.balance_proof.merkleroot_for_unclaimed() == EMPTY_MERKLE_ROOT
assert state1.nonce is 2
assert state2.nonce is None
def test_sender_cannot_overspend():
token_address = make_address()
privkey1, address1 = make_privkey_address()
address2 = make_address()
balance1 = 70
balance2 = 110
reveal_timeout = 5
settle_timeout = 15
block_number = 10
our_state = ChannelEndState(address1, balance1, BalanceProof(None))
partner_state = ChannelEndState(ad | dress2, balance2, BalanceProof(None))
external_state = make_external_state()
test_channel = Channel(
our_state,
partner_state,
external_state,
token_address,
reveal_timeout,
| settle_timeout,
)
amount = balance1
expiration = block_number + settle_timeout
sent_mediated_transfer0 = test_channel.create_mediatedtransfer(
address1,
address2,
fee=0,
amount=amount,
identifier=1,
expiration=expiration,
hashlock=sha3('test_locked_amount_cannot_be_spent'),
)
sent_mediated_transfer0.sign(privkey1, address1)
test_channel.register_transfer(
block_number,
sent_mediated_transfer0,
)
lock2 = Lock(
amount=amount,
expiration=expiration,
hashlock=sha3('test_locked_amount_cannot_be_spent2'),
)
locksroot2 = Merkletree([
sha3(sent_mediated_tra |
X-DataInitiative/tick | tick/hawkes/inference/hawkes_sumgaussians.py | Python | bsd-3-clause | 15,015 | 0.001399 | # License: BSD 3 clause
import math
import numpy as np
from scipy.stats import norm
from tick.hawkes.inference.base import LearnerHawkesNoParam
from tick.hawkes.inference.build.hawkes_inference import (HawkesSumGaussians as
_HawkesSumGaussians)
from tick.solver.base.utils import relative_distance
class HawkesSumGaussians(LearnerHawkesNoParam):
"""A class that implements parametric inference for Hawkes processes
with parametrisation of the kernels as sum of Gaussian basis functions
and a mix of Lasso and group-lasso regularization
Hawkes processes are point processes defined by the intensity:
.. math::
\\forall i \\in [1 \\dots D], \\quad
\\lambda_i(t) = \\mu_i + \\sum_{j=1}^D
\\sum_{t_k^j < t} \\phi_{ij}(t - t_k^j)
where
* :math:`D` is the number of nodes
* :math:`\mu_i` are the baseline intensities
* :math:`\phi_{ij}` are the kernels
* :math:`t_k^j` are the timestamps of all events of node :math:`j`
and with an parametrisation of the kernels as sum of Gaussian basis
functions
.. math::
\phi_{ij}(t) = \sum_{m=1}^M \\alpha^{ij}_m f (t - t_m), \\quad
f(t) = (2 \\pi \\sigma^2)^{-1} \exp(- t^2 / (2 \\sigma^2))
In our implementation we denote:
* Integer :math:`D` by the attribute `n_nodes`
* Vector :math:`\mu \in \mathbb{R}^{D}` by the attribute
`baseline`
* Vector :math:`(t_m) \in \mathbb{R}^{M}` by the variable
`means_gaussians`
* Number :math:`\\sigma` by the variable `std_gaussian`
* Tensor
:math:`A = (\\alpha^{ij}_m)_{ijm} \in \mathbb{R}^{D \\times D \\times M}`
by the attribute `amplitudes`
Parameters
----------
max_mean_gaussian : `float`
The mean of the last Gaussian basis function. This can be considered
a proxy of the kernel support.
n_gaussians : `int`
The number of Gaussian basis functions used to approximate each kernel.
step_size : `float`
The step-size used in the optimization for the EM algorithm.
C : `float`, default=1e3
Level of penalization
lasso_grouplasso_ratio : `float`, default=0.5
Ratio of Lasso-Nuclear regularization mixing parameter with
0 <= ratio <= 1.
* For ratio = 0 this is Group-Lasso regularization
* For ratio = 1 this is lasso (L1) regularization
* For 0 < ratio < 1, the regularization is a linear combination
of Lasso and Group-Lasso.
max_iter : `int`, default=50
Maximum number of iterations of the solving algorithm
tol : `float`, default=1e-5
The tolerance of the solving algorithm (iterations stop when the
stopping criterion is below it). If not reached it does ``max_iter``
iterations
n_threads : `int`, default=1
Number of threads used for parallel computation.
verbose : `bool`, default=False
If `True`, we verbose things
* if `int <= 0`: the number of physical cores available on the CPU
* otherwise the desired number of threads
print_every : `int`, default=10
Print history information when ``n_iter`` (iteration number) is
a multiple of ``print_every | ``
record_every : `int`, default=10
Record history information when ``n_iter`` (iteration number) is
a multiple of ``record_every``
Other Parameters
----------------
approx : `int`, default=0 (read-only)
Level of approximation used for computing exponential functions
* if 0: no approximation
* if 1: a fast approximated exponential function is us | ed
em_max_iter : `int`, default=30
Maximum number of loop for inner em algorithm.
em_tol : `float`, default=None
Tolerance of loop for inner em algorithm. If relative difference of
baseline and adjacency goes bellow this tolerance, em inner loop
will stop.
If None, it will be set given a heuristic which look at last
Attributes
----------
n_nodes : `int`
Number of nodes / components in the Hawkes model
baseline : `np.array`, shape=(n_nodes,)
Inferred baseline of each component's intensity
amplitudes : `np.ndarray`, shape=(n_nodes, n_nodes, n_gaussians)
Inferred adjacency matrix
means_gaussians : `np.array`, shape=(n_gaussians,)
The means of the Gaussian basis functions.
std_gaussian : `float`
The standard deviation of each Gaussian basis function.
References
----------
Xu, Farajtabar, and Zha (2016, June) in ICML,
`Learning Granger Causality for Hawkes Processes`_.
.. _Learning Granger Causality for Hawkes Processes: http://jmlr.org/proceedings/papers/v48/xuc16.pdf
"""
_attrinfos = {
"_learner": {
"writable": False
},
"_model": {
"writable": False
},
"n_gaussians": {
"cpp_setter": "set_n_gaussians"
},
"em_max_iter": {
"cpp_setter": "set_em_max_iters"
},
"max_mean_gaussian": {
"cpp_setter": "set_max_mean_gaussian"
},
"step_size": {
"cpp_setter": "set_step_size"
},
"baseline": {
"writable": False
},
"amplitudes": {
"writable": False
},
"approx": {
"writable": False
}
}
def __init__(self, max_mean_gaussian, n_gaussians=5, step_size=1e-7, C=1e3,
lasso_grouplasso_ratio=0.5, max_iter=50, tol=1e-5,
n_threads=1, verbose=False, print_every=10, record_every=10,
approx=0, em_max_iter=30, em_tol=None):
LearnerHawkesNoParam.__init__(
self, verbose=verbose, max_iter=max_iter, print_every=print_every,
tol=tol, n_threads=n_threads, record_every=record_every)
self.baseline = None
self.amplitudes = None
self.n_gaussians = n_gaussians
self.max_mean_gaussian = max_mean_gaussian
self.step_size = step_size
strength_lasso = lasso_grouplasso_ratio / C
strength_grouplasso = (1. - lasso_grouplasso_ratio) / C
self.em_max_iter = em_max_iter
self.em_tol = em_tol
self._learner = _HawkesSumGaussians(
n_gaussians, max_mean_gaussian, step_size, strength_lasso,
strength_grouplasso, em_max_iter, n_threads, approx)
self.verbose = verbose
self.history.print_order += ["rel_baseline", "rel_amplitudes"]
def fit(self, events, end_times=None, baseline_start=None,
amplitudes_start=None):
"""Fit the model according to the given training data.
Parameters
----------
events : `list` of `list` of `np.ndarray`
List of Hawkes processes realizations.
Each realization of the Hawkes process is a list of n_node for
each component of the Hawkes. Namely `events[i][j]` contains a
one-dimensional `numpy.array` of the events' timestamps of
component j of realization i.
If only one realization is given, it will be wrapped into a list
end_times : `np.ndarray` or `float`, default = None
List of end time of all hawkes processes that will be given to the
model. If None, it will be set to each realization's latest time.
If only one realization is provided, then a float can be given.
baseline_start : `None` or `np.ndarray`, shape=(n_nodes)
Set initial value of baseline parameter
If `None` starts with uniform 1 values
amplitudes_start : `None` or `np.ndarray`, shape=(n_nodes, n_nodes, n_gaussians)
Set initial value of amplitudes parameter
If `None` starts with random values uniformly sampled between 0.5
and 0.9`
"""
LearnerHawkesNoParam.fit(self, events, end_times=end_times)
self.solve(baseline_start=baseline_start,
amplitudes_start=amplitudes_start)
return self
|
robjwells/adventofcode-solutions | 2017/python/2017-01.py | Python | mit | 3,488 | 0 | #!/usr/bin/env python3
import pytest
def circ_pairs(sequence, distance=1):
    """Pair every element with the one *distance* positions later, wrapping.

    The sequence is treated as circular, so the final pairs reach around
    to the front again, e.g. [4, 5, 1] -> [(4, 5), (5, 1), (1, 4)].
    """
    rotated = sequence[distance:] + sequence[:distance]
    return zip(sequence, rotated)
def int_to_sequence(integer):
    """Split *integer* into the list of its decimal digits.

    For example 1111 -> [1, 1, 1, 1] and 451 -> [4, 5, 1].
    """
    return list(map(int, str(integer)))
def matching_pairs(sequence):
    """Yield only those pairs whose two members are equal.

    For example [(1, 2), (2, 3), (3, 1), (1, 1)] -> [(1, 1)] and
    [(4, 5), (5, 1), (1, 4)] -> []. Returns a lazy iterable.
    """
    return (pair for pair in sequence if pair[0] == pair[1])
def sum_matching_digits(pairs):
    """Sum the first element of each pair.

    Matching digits are therefore single-counted, not double-counted:
    [(1, 1)] -> 1 and [(1, 1), (2, 2)] -> 3.
    """
    total = 0
    for first, _second in pairs:
        total += first
    return total
def total_nearby_digits_in_sequence(circular_number, digit_distance):
    """Sum every digit that equals the digit *digit_distance* away.

    The digit sequence of *circular_number* is treated as circular, so
    the comparison wraps from the rear back to the front.
    """
    digits = int_to_sequence(circular_number)
    paired = circ_pairs(digits, distance=digit_distance)
    return sum_matching_digits(matching_pairs(paired))
def total_matching_neighbours(circular_number):
    """Sum all digits that match the next digit in the (circular) sequence.

    Fixes a corrupted splice marker that broke the parameter name in the
    original definition line.
    """
    return total_nearby_digits_in_sequence(circular_number, digit_distance=1)
def total_half_distant(circular_number):
    """Sum all digits that equal the digit halfway around the sequence."""
    half = len(str(circular_number)) // 2
    return total_nearby_digits_in_sequence(circular_number,
                                           digit_distance=half)
@pytest.mark.parametrize('number,digit_distance,pairs', [
    # First row restored: a splice marker had corrupted "(5, 1)".
    (451, 1, [(4, 5), (5, 1), (1, 4)]),
    (1122, 1, [(1, 1), (1, 2), (2, 2), (2, 1)]),
    (1122, 2, [(1, 2), (1, 2), (2, 1), (2, 1)]),
    (1212, 1, [(1, 2), (2, 1), (1, 2), (2, 1)]),
    (1212, 2, [(1, 1), (2, 2), (1, 1), (2, 2)]),
    (123123, 1, [(1, 2), (2, 3), (3, 1), (1, 2), (2, 3), (3, 1)]),
    (123123, 3, [(1, 1), (2, 2), (3, 3), (1, 1), (2, 2), (3, 3)]),
])
def test_circ_pairs(number, digit_distance, pairs):
    """circ_pairs pairs digits in number with digit specified distance away"""
    sequence = int_to_sequence(number)
    assert list(circ_pairs(sequence, distance=digit_distance)) == pairs
# Cases exercise single-counting of matching neighbours (e.g. 1122 -> 3).
@pytest.mark.parametrize('circ_number,total', [
    (1122, 3),
    (1111, 4),
    (1234, 0),
    (91212129, 9),
])
def test_next(circ_number, total):
    """Test total_matching_neighbours against known input and output"""
    assert total_matching_neighbours(circ_number) == total
# Cases compare each digit to the one half the sequence length away.
@pytest.mark.parametrize('circ_number,total', [
    (1212, 6),
    (1221, 0),
    (123425, 4),
    (123123, 12),
    (12131415, 4),
])
def test_half_distant(circ_number, total):
    """Test total_half_distant against known input and output"""
    assert total_half_distant(circ_number) == total
def main(puzzle_input):
    """Print the answers to both parts of the puzzle for *puzzle_input*."""
    print('Part one:', total_matching_neighbours(puzzle_input))
    print('Part two:', total_half_distant(puzzle_input))
if __name__ == '__main__':
    # The puzzle input file contains a single integer.
    with open('../input/2017-01.txt') as f:
        puzzle_input = int(f.read())
    main(puzzle_input)
|
wkschwartz/django | tests/auth_tests/settings.py | Python | bsd-3-clause | 587 | 0 | import os
# Middleware stack needed for session-backed authentication in the tests.
# (Removed a stray splice marker that trailed the second entry.)
AUTH_MIDDLEWARE = [
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
]
# Template configuration for the auth tests; templates live next to this
# settings module. (Restored the 'messages' context processor name, which
# had been corrupted by a splice marker.)
AUTH_TEMPLATES = [{
    'BACKEND': 'django.template.backends.django.DjangoTemplates',
    'DIRS': [os.path.join(os.path.dirname(__file__), 'templates')],
    'APP_DIRS': True,
    'OPTIONS': {
        'context_processors': [
            'django.template.context_processors.request',
            'django.contrib.auth.context_processors.auth',
            'django.contrib.messages.context_processors.messages',
        ],
    },
}]
|
noba3/KoTos | addons/plugin.video.vox-now.de/resources/lib/kodion/utils/function_cache.py | Python | gpl-2.0 | 3,125 | 0.00096 | from functools import partial
import hashlib
import datetime
from storage import Storage
class FunctionCache(Storage):
    """Persistent cache for function results, keyed by function + arguments.

    Results are stored through the ``Storage`` base class together with
    the time they were produced, and are recomputed once they are older
    than the caller-supplied time-to-live.

    Fixes over the original: removed splice-marker corruption, removed
    stray ``pass`` statements, moved the misplaced docstring of ``get``
    to the top of the method, and corrected its parameter documentation.
    """

    # Convenience time-to-live constants, in seconds.
    ONE_MINUTE = 60
    ONE_HOUR = 60 * ONE_MINUTE
    ONE_DAY = 24 * ONE_HOUR
    ONE_WEEK = 7 * ONE_DAY
    ONE_MONTH = 4 * ONE_WEEK

    def __init__(self, filename, max_file_size_kb=-1):
        Storage.__init__(self, filename, max_file_size_kb=max_file_size_kb)
        self._enabled = True

    def clear(self):
        """Remove all cached entries."""
        self._clear()

    def enabled(self):
        """Enable caching.

        NOTE(review): despite the query-like name this is a mutator; the
        name is kept for interface compatibility with existing callers.
        """
        self._enabled = True

    def disable(self):
        """Disable caching, e.g. for tests."""
        self._enabled = False

    def _create_id_from_func(self, partial_func):
        """Create a cache id (md5 hex digest) for the given partial function.

        The id covers the function's module, name, positional arguments
        and keyword arguments, so distinct call signatures get distinct
        cache slots.
        """
        m = hashlib.md5()
        m.update(partial_func.func.__module__)
        m.update(partial_func.func.__name__)
        m.update(str(partial_func.args))
        m.update(str(partial_func.keywords))
        return m.hexdigest()

    def _get_cached_data(self, partial_func):
        """Return (stored_entry_or_None, cache_id) for the partial function."""
        cache_id = self._create_id_from_func(partial_func)
        return self._get(cache_id), cache_id

    def get_cached_only(self, func, *args, **keywords):
        """Return the cached result of func(*args, **keywords), or None.

        Never invokes ``func`` itself unless caching is disabled, in
        which case the call is forwarded directly.
        """
        partial_func = partial(func, *args, **keywords)
        # If caching is disabled, call the function directly.
        if not self._enabled:
            return partial_func()
        data, _cache_id = self._get_cached_data(partial_func)
        if data is not None:
            return data[0]
        return None

    def get(self, seconds, func, *args, **keywords):
        """Return func(*args, **keywords), cached for ``seconds`` seconds.

        The function is only invoked when there is no cached value yet or
        the cached value is older than the requested time-to-live; the
        fresh result is then stored for subsequent calls.
        """
        def _seconds_difference(_first, _last):
            # timedelta.total_seconds() only exists from Python 2.7 on,
            # so compute the value manually for 2.6 compatibility.
            _delta = _last - _first
            return 24*60*60*_delta.days + _delta.seconds + _delta.microseconds/1000000.

        partial_func = partial(func, *args, **keywords)
        # If caching is disabled, call the function directly.
        if not self._enabled:
            return partial_func()

        cached_data = None
        cached_time = None
        data, cache_id = self._get_cached_data(partial_func)
        if data is not None:
            cached_data = data[0]
            cached_time = data[1]

        now = datetime.datetime.now()
        diff_seconds = 0.0
        if cached_time is not None:
            diff_seconds = _seconds_difference(cached_time, now)

        if cached_data is None or diff_seconds > seconds:
            cached_data = partial_func()
            self._set(cache_id, cached_data)
        return cached_data
|
wolverineav/horizon-bsn | horizon_bsn/bsndashboard/networktemplate/panel.py | Python | apache-2.0 | 817 | 0.001224 | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.utils.translation import ugettext_lazy as _

import horizon

# Restored: the original import line was corrupted by a splice marker
# ("horizon_ | bsn"), breaking the module path.
from horizon_bsn.bsndashboard import dashboard
class Networktemplate(horizon.Panel):
    """Dashboard panel exposing the Network Templates view."""
    # Translated panel title shown in the UI.
    name = _("Network Templates")
    # URL slug under which the panel is registered.
    slug = "networktemplate"
dashboard.Bsndashboard.register(Networktemplate)
|
wiad/librenms | discovery-wrapper.py | Python | gpl-3.0 | 13,314 | 0.004431 | #! /usr/bin/env python2
"""
discovery-wrapper A small tool which wraps around discovery and tries to
guide the discovery process with a more modern approach with a
Queue and workers.
Based on the original version of poller-wrapper.py by Job Snijders
Author: Neil Lathwood <neil@librenms.org>
Date: Sep 2016
Usage: This program accepts one command line argument: the number of threads
that should run simultaneously. If no argument is given it will assume
a default of 1 thread.
Ubuntu Linux: apt-get install python-mysqldb
FreeBSD: cd /usr/ports/*/py-MySQLdb && make install clean
License: This program is free software: you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation, either version 3 of the License, or (at your
option) any later version.
This program is distributed in the hope that it will be u | sefu | l, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
Public License for more details.
You should have received a copy of the GNU General Public License along
with this program. If not, see http://www.gnu.org/licenses/.
LICENSE.txt contains a copy of the full GPLv3 licensing conditions.
"""
try:
import json
import os
import Queue
import subprocess
import sys
import threading
import time
except:
print "ERROR: missing one or more of the following python modules:"
print "threading, Queue, sys, subprocess, time, os, json"
sys.exit(2)
try:
import MySQLdb
except:
print "ERROR: missing the mysql python module:"
print "On ubuntu: apt-get install python-mysqldb"
print "On FreeBSD: cd /usr/ports/*/py-MySQLdb && make install clean"
sys.exit(2)
"""
Fetch configuration details from the config_to_json.php script
"""
install_dir = os.path.dirname(os.path.realpath(__file__))
config_file = install_dir + '/config.php'
def get_config_data():
config_cmd = ['/usr/bin/env', 'php', '%s/config_to_json.php' % install_dir]
try:
proc = subprocess.Popen(config_cmd, stdout=subprocess.PIPE, stdin=subprocess.PIPE)
except:
print "ERROR: Could not execute: %s" % config_cmd
sys.exit(2)
return proc.communicate()[0]
try:
with open(config_file) as f:
pass
except IOError as e:
print "ERROR: Oh dear... %s does not seem readable" % config_file
sys.exit(2)
try:
config = json.loads(get_config_data())
except:
print "ERROR: Could not load or parse configuration, are PATHs correct?"
sys.exit(2)
discovery_path = config['install_dir'] + '/discovery.php'
db_username = config['db_user']
db_password = config['db_pass']
db_port = int(config['db_port'])
if config['db_socket']:
db_server = config['db_host']
db_socket = config['db_socket']
else:
db_server = config['db_host']
db_socket = None
db_dbname = config['db_name']
def db_open():
try:
if db_socket:
db = MySQLdb.connect(host=db_server, unix_socket=db_socket, user=db_username, passwd=db_password, db=db_dbname)
else:
db = MySQLdb.connect(host=db_server, port=db_port, user=db_username, passwd=db_password, db=db_dbname)
return db
except:
print "ERROR: Could not connect to MySQL database!"
sys.exit(2)
# (c) 2015, GPLv3, Daniel Preussker <f0o@devilcode.org> <<<EOC1
if 'distributed_poller_group' in config:
discovery_group = str(config['distributed_poller_group'])
else:
discovery_group = False
def memc_alive():
try:
global memc
key = str(uuid.uuid4())
memc.set('discovery.ping.' + key, key, 60)
if memc.get('discovery.ping.' + key) == key:
memc.delete('discovery.ping.' + key)
return True
else:
return False
except:
return False
def memc_touch(key, time):
try:
global memc
val = memc.get(key)
memc.set(key, val, time)
except:
pass
if ('distributed_poller' in config and
'distributed_poller_memcached_host' in config and
'distributed_poller_memcached_port' in config and
config['distributed_poller']):
try:
import memcache
import uuid
memc = memcache.Client([config['distributed_poller_memcached_host'] + ':' +
str(config['distributed_poller_memcached_port'])])
if str(memc.get("discovery.master")) == config['distributed_poller_name']:
print "This system is already joined as the discovery master."
sys.exit(2)
if memc_alive():
if memc.get("discovery.master") is None:
print "Registered as Master"
memc.set("discovery.master", config['distributed_poller_name'], 30)
memc.set("discovery.nodes", 0, 3600)
IsNode = False
else:
print "Registered as Node joining Master %s" % memc.get("discovery.master")
IsNode = True
memc.incr("discovery.nodes")
distdisco = True
else:
print "Could not connect to memcached, disabling distributed discovery."
distdisco = False
IsNode = False
except SystemExit:
raise
except ImportError:
print "ERROR: missing memcache python module:"
print "On deb systems: apt-get install python-memcache"
print "On other systems: easy_install python-memcached"
print "Disabling distributed discovery."
distdisco = False
else:
distdisco = False
# EOC1
s_time = time.time()
real_duration = 0
per_device_duration = {}
discovered_devices = 0
"""
Take the amount of threads we want to run in parallel from the commandline
if None are given or the argument was garbage, fall back to default of 16
"""
try:
amount_of_workers = int(sys.argv[1])
if amount_of_workers == 0:
print "ERROR: 0 threads is not a valid value"
sys.exit(2)
except:
amount_of_workers = 1
devices_list = []
"""
This query specificly orders the results depending on the last_discovered_timetaken variable
Because this way, we put the devices likely to be slow, in the top of the queue
thus greatening our chances of completing _all_ the work in exactly the time it takes to
discover the slowest device! cool stuff he
"""
# (c) 2015, GPLv3, Daniel Preussker <f0o@devilcode.org> <<<EOC2
if discovery_group is not False:
query = "select device_id from devices where poller_group IN(" + discovery_group + ") and disabled = 0 order by last_polled_timetaken desc"
else:
query = "select device_id from devices where disabled = 0 order by last_polled_timetaken desc"
# EOC2
db = db_open()
cursor = db.cursor()
cursor.execute(query)
devices = cursor.fetchall()
for row in devices:
devices_list.append(int(row[0]))
# (c) 2015, GPLv3, Daniel Preussker <f0o@devilcode.org> <<<EOC3
if distdisco and not IsNode:
query = "select max(device_id),min(device_id) from devices"
cursor.execute(query)
devices = cursor.fetchall()
maxlocks = devices[0][0]
minlocks = devices[0][1]
# EOC3
db.close()
"""
A seperate queue and a single worker for printing information to the screen prevents
the good old joke:
Some people, when confronted with a problem, think,
"I know, I'll use threads," and then they two they hav erpoblesms.
"""
def printworker():
nodeso = 0
while True:
# (c) 2015, GPLv3, Daniel Preussker <f0o@devilcode.org> <<<EOC4
global IsNode
global distdisco
if distdisco:
if not IsNode:
memc_touch('discovery.master', 30)
nodes = memc.get('discovery.nodes')
if nodes is None and not memc_alive():
print "WARNING: Lost Memcached. Taking over all devices. Nodes will |
GoogleCloudPlatform/datacatalog-connectors-hive | google-datacatalog-hive-connector/src/google/datacatalog_connectors/hive/scrape/metadata_sync_event_scraper.py | Python | apache-2.0 | 5,015 | 0 | #!/usr/bin/python
#
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from google.datacatalog_connectors.hive import entities
from google.datacatalog_connectors.hive.entities import sync_event
class MetadataSyncEventScraper:
    """Translates Hive metastore sync-event messages into metadata entities.

    Each supported ``SyncEvent`` message type is mapped to a dict with a
    ``databases`` key holding the affected ``entities.Database`` objects
    (with nested tables, storages and columns where the event carries
    them). Fixes two splice-marker corruptions from the original
    (a broken ``return`` statement and a broken method name).
    """

    @classmethod
    def get_database_metadata(cls, message):
        """Build metadata entities for *message* based on its event type.

        Args:
            message: dict parsed from a sync-event payload; must contain
                an 'event' key naming a ``SyncEvent`` member.

        Returns:
            dict with a 'databases' list describing the affected metadata.

        Raises:
            Exception: when the event type is missing or unsupported.
        """
        event = message.get('event')
        if not event:
            raise Exception('Message does not contain a event type')

        # Accept only event names declared in the SyncEvent enum.
        if event not in [member.name for member in sync_event.SyncEvent]:
            raise Exception('Unsupported event type: {}'.format(event))

        if event == sync_event.SyncEvent.CREATE_TABLE.name:
            return cls.__build_metadata_entities_for_create_table_event(
                message)
        if event == sync_event.SyncEvent.ALTER_TABLE.name:
            return cls.__build_metadata_entities_for_update_table_event(
                message)
        if event == sync_event.SyncEvent.CREATE_DATABASE.name:
            return cls.__build_metadata_entities_for_create_database_event(
                message)
        if event == sync_event.SyncEvent.DROP_TABLE.name:
            return cls.__build_metadata_entities_for_drop_table_event(message)
        if event == sync_event.SyncEvent.DROP_DATABASE.name:
            return cls.__build_metadata_entities_for_drop_database_event(
                message)

    @classmethod
    def __build_metadata_entities_for_create_table_event(cls, message):
        # A freshly created table carries no parameters yet.
        database, table = cls.__build_common_metadata_fields(message['table'])
        table.table_params = []
        database.tables = [table]
        return {'databases': [database]}

    @classmethod
    def __build_metadata_entities_for_create_database_event(cls, message):
        database, _ = cls.__build_database_fields(message['database'])
        database.tables = []
        return {'databases': [database]}

    @classmethod
    def __build_metadata_entities_for_drop_table_event(cls, message):
        database, table = cls.__build_common_metadata_fields(message['table'])
        table.table_params = []
        database.tables = [table]
        return {'databases': [database]}

    @classmethod
    def __build_metadata_entities_for_drop_database_event(cls, message):
        database, _ = cls.__build_database_fields(message['database'])
        database.tables = []
        return {'databases': [database]}

    @classmethod
    def __build_metadata_entities_for_update_table_event(cls, message):
        # ALTER TABLE messages carry the table's new state in 'newTable'.
        new_table = message['newTable']
        database, table = cls.__build_common_metadata_fields(new_table)

        # Track the modification timestamp as a table parameter.
        parameters_message = new_table['parameters']
        table_param = entities.TableParams()
        table_param.id = 1
        table_param.param_key = 'last_modified_time'
        table_param.param_value = parameters_message['last_modified_time']
        table.table_params = [table_param]

        database.tables = [table]
        return {'databases': [database]}

    @classmethod
    def __build_common_metadata_fields(cls, table_message):
        """Build a Database plus its Table (storage and columns) entity."""
        database = entities.Database()
        database.id = None
        database.name = table_message['dbName']

        table = entities.Table()
        table.id = None
        table.name = table_message['tableName']
        table.type = 'table'
        table.create_time = table_message['createTime']
        table.database_id = None
        table.sd_id = None

        storage_message = table_message['sd']
        table_storage = entities.TableStorage()
        table_storage.sd_id = None
        table_storage.location = storage_message['location']
        table_storage.cd_id = None

        columns = []
        for col_message in storage_message['cols']:
            column = entities.Column()
            column.id = None
            column.name = col_message['name']
            column.type = col_message['type']
            column.comment = col_message['comment']
            columns.append(column)
        table_storage.columns = columns

        table.table_storages = [table_storage]
        return database, table

    @classmethod
    def __build_database_fields(cls, database_message):
        """Build a Database entity (without tables) from a database message."""
        database = entities.Database()
        database.id = None
        database.name = database_message['name']
        database.uri = database_message['locationUri']
        return database, None
|
Cat5TV/nems-scripts | temper.py | Python | gpl-3.0 | 14,754 | 0.007794 | #!/usr/bin/env python3
# temper.py -*-python-*-
# Copyright 2018 by Pham Urwen (urwen@mail.ru)
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to
# deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
# Standard python3 modules
import argparse
import binascii
import json
import os
import re
import select
import struct
import sys
# Non-standard modules
try:
import serial
except ImportError:
print('Cannot import "serial". Please sudo apt-get install python3-serial')
sys.exit(1)
class USBList(object):
'''Get a list of all of the USB devices on a system, along with their
associated hidraw or serial (tty) devices.
'''
SYSPATH = '/sys/bus/usb/devices'
def _readfile(self, path):
'''Read data from 'path' and return it as a string. Return the empty string
if the file does not exist, cannot be read, or has an error.
'''
try:
with open(path, 'r') as fp:
return fp.read().strip()
except:
return ''
def _find_devices(self, dirname):
'''Scan a directory hierarchy for names that start with "tty" or "hidraw".
Return these names in a set.
'''
devices = set()
for entry in os.scandir(dirname):
if entry.is_dir() and not entry.is_symlink():
devices |= self._find_devices(os.path.join(dirname, entry.name))
if re.search('tty.*[0-9]', entry.name):
devices.add(entry.name)
if re.search('hidraw[0-9]', entry.name):
devices.add(entry.name)
return devices
def _get_usb_device(self, dirname):
'''Examine the files in 'dirname', looking for files with well-known
names expected to be in the /sys hierarchy under Linux for USB devices.
Return a dictionary of the information gathered. If no information is found
(i.e., because the directory is not for a USB device) return None.
'''
info = dict()
vendorid = self._readfile(os.path.join(dirname, 'idVendor'))
if vendorid == '':
return None
info['vendorid'] = int(vendorid, 16)
productid = self._readfile(os.path.join(dirname, 'idProduct'))
info['productid'] = int(productid, 16)
info['manufacturer'] = self._readfile(os.path.join(dirname,
'manufacturer'))
info['product'] = self._readfile(os.path.join(dirname, 'product'))
info['busnum'] = int(self._readfile(os.path.join(dirname, 'busnum')))
info['devnum'] = int(self._readfile(os.path.join(dirname, 'devnum')))
info['devices'] = sorted(self._find_devices(dirname))
return info
def get_usb_devices(self):
'''Scan a well-known Linux hierarchy in /sys and try to find all of the
USB devices on a system. Return these as a dictionary indexed by the path.
'''
info = dict()
for entry in os.scandir(Temper.SYSPATH):
if entry.is_dir():
path = os.path.join(Temper.SYSPATH, entry.name)
device = self._get_usb_device(path)
if device is not None:
info[path] = device
return info
class USBRead(object):
'''Read temperature and/or humidity information from a specified USB device.
'''
def __init__(self, device, verbose=False):
self.device = device
self.verbose = verbose
def _parse_bytes(self, name, offset, divisor, bytes, info):
'''Data is returned from several devices in a similar format. In the first
8 bytes, the internal sensors are returned in bytes 2 and 3 (temperature)
and in bytes 4 and 5 (humidity). In the second 8 bytes, external sensor
information is returned. If there are only external sensors, then only 8
bytes are returned, and the caller is expected to use the correct 'name'.
The caller is also expected to detect the firmware version and provide the
appropriate divisor, which is usually 100 or 256.
There is no return value. Instead 'info[name]' is update directly, if a
value is found.
'''
try:
if bytes[offset] == 0x4e and bytes[offset+1] == 0x20:
return
except:
return
try:
info[name] = struct.unpack_from('>h', bytes, offset)[0] / divisor
except:
return
def _read_hidraw(self, device):
'''Using the Linux hidraw device, send the special commands and receive the
raw data. Then call '_parse_bytes' based on the firmware version to provide
temperature and humidity information.
A dictionary of temperature and humidity info is returned.
'''
path = os.path.join('/dev', device)
fd = os.open(path, os.O_RDWR)
| # Get firmware identifier
os.write(fd, struct.pack('8B', 0x01, 0x86, 0xff, 0x01, 0, 0, 0, 0))
firmware = b''
while True:
r, _, _ = select.select([fd], [], [], 0.1)
if fd not in r:
break
data = os.read(fd, 8)
firmware += data
if firmware == b'':
os.close(fd)
return { 'error' : 'Cannot read firmware identifier from device' }
if self.verbose:
print('Fi | rmware value: %s' % binascii.b2a_hex(firmware))
# Get temperature/humidity
os.write(fd, struct.pack('8B', 0x01, 0x80, 0x33, 0x01, 0, 0, 0, 0))
bytes = b''
while True:
r, _, _ = select.select([fd], [], [], 0.1)
if fd not in r:
break
data = os.read(fd, 8)
bytes += data
os.close(fd)
if self.verbose:
print('Data value: %s' % binascii.hexlify(bytes))
info = dict()
info['firmware'] = str(firmware, 'latin-1').strip()
info['hex_firmware'] = str(binascii.b2a_hex(firmware), 'latin-1')
info['hex_data'] = str(binascii.b2a_hex(bytes), 'latin-1')
if info['firmware'][:10] == 'TEMPerF1.4':
info['firmware'] = info['firmware'][:10]
self._parse_bytes('internal temperature', 2, 256.0, bytes, info)
return info
if info['firmware'][:15] == 'TEMPerGold_V3.1':
info['firmware'] = info['firmware'][:15]
self._parse_bytes('internal temperature', 2, 100.0, bytes, info)
return info
if info['firmware'][:12] in [ 'TEMPerX_V3.1', 'TEMPerX_V3.3' ]:
info['firmware'] = info['firmware'][:12]
self._parse_bytes('internal temperature', 2, 100.0, bytes, info)
self._parse_bytes('internal humidity', 4, 100.0, bytes, info)
self._parse_bytes('external temperature', 10, 100.0, bytes, info)
self._parse_bytes('external humidity', 12, 100.0, bytes, info)
return info
info['error'] = 'Unknown firmware %s: %s' % (info['firmware'],
binascii.hexlify(bytes))
return info
def _read_serial(self, device):
'''Using the Linux serial device, send the special commands and receive the
text data, which is parsed directly in this method.
A dictionary of device info (like that returned by USBList) combined with
temperature and humidity info is returned.
'''
path = os.path.join('/dev', device)
s = serial.Serial(path, 9600)
s.bytesize = serial.EIGHTBITS
s.parity = serial.PARITY_NONE
s.stopbits = serial.STOPBITS_ONE
s.timeout = 1
s.xonoff = False
s.rtscts = False
s.dsrdtr = False
s.writeTimeout = 0
# Send the "Version" command and save the reply.
s.write(b'Version')
firmware = str(s.readline(), 'latin-1').strip()
# Send the "Rea |
cblop/tropic | instal-linux/instal/firstprinciples/fluentchange/TestInitiateNormsPerms.py | Python | epl-1.0 | 1,549 | 0.005165 | from instal.firstprinciples.TestEngine import InstalSingleShotTestRunner, InstalTestCase
from instal.instalexceptions import InstalParserError
class InitiateNormsPerms(InstalTestCase):
def test_initiate_perms_institutional(self):
runner = InstalSingleShotTestRunner(input_files=["fluentchange/perminitiates.ial"], bridge_file=None,
| domain_files=[
"fluentchange/fluentchange.idc"],
fact_files=["fluentchange/fluentinitiates_permissions.iaf"])
in_a_true_condition = [
{"holdsat": ["holdsat(perm(ex_a(foo)),fluentchange)"]}]
self.assertEqual(runner.run_test(query_file="fluentchange/one_in_a.iaq", ver | bose=self.verbose,
conditions=in_a_true_condition), 0,
"A permission initiated by an institutional event")
def test_initiate_perms_exogenous(self):
runner = InstalSingleShotTestRunner(input_files=["fluentchange/perminitiates_exogenous.ial"], bridge_file=None,
domain_files=[
"fluentchange/fluentchange.idc"],
fact_files=["fluentchange/fluentinitiates_permissions.iaf"])
with self.assertRaises(InstalParserError):
runner.run_test(query_file="fluentchange/one_in_a.iaq",
verbose=self.verbose, conditions=[])
|
lbjworld/article-search | article-manager/site/employee/views.py | Python | mit | 526 | 0.003802 | # codin | g: utf-8
from rest_framework import generics
from rest_framework import filters
from employee.models import Operator
from employee.serializers import EmployeeDetailSerializer
class IsOwnerFilterBackend(filters.BaseFilterBackend | ):
def filter_queryset(self, request, queryset, view):
return queryset.filter(user=request.user)
class EmployeeDetail(generics.RetrieveAPIView):
queryset = Operator.objects.all()
serializer_class = EmployeeDetailSerializer
filter_backends = (IsOwnerFilterBackend,)
|
Frikanalen/frikanalen | packages/utils/prom-check-video-stream/snapshot.py | Python | lgpl-3.0 | 674 | 0.004451 | import asyncio
import aiohttp
class Snapshot:
filename = 'fk_test.ts'
async def update(self):
async with aiohttp.ClientSession() as session:
async with session.get('http://192.168.3.1:9094/frikanalen.ts') as resp:
with open(self.filename, 'wb') as fd:
while fd.tell() <= 10000000:
chunk = aw | ait resp.content.read(1024)
if not chunk:
break
fd.write(chunk)
if __name__ == '__main__':
loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)
result = loop.run_until_complete(Snap | shot.update())
|
burnpanck/traits | traits/tests/test_class_traits.py | Python | bsd-3-clause | 1,363 | 0 | """
Unit tests for the `HasTraits.class_traits` class function.
"""
from __future__ import absolute_import
from traits import _py2to3
from traits.testing.unittest_tools import unittest
from ..api import HasTraits, Int, List, Str
class A(HasTraits):
x = Int
name = Str(marked=True)
class B(A):
pass
class C(B):
lst = List(marked=False)
y = Int(marked=True)
class TestClassTraits(unittest.TestCase):
def test_all_class_traits(self):
expected = ['x', 'name', 'trait_added', 'trait_modified']
_py2to3.assertCountEqual(self, A.class_traits(), expected)
# Check that derived classes report the correct traits.
_py2to3.assertCountEqual(self, B.class_traits(), expected)
expected.extend(('lst', 'y'))
_py2to3.assertCountEqual(self, C.class_traits(), expected)
def test_class_traits_with_metadata(self):
# Retri | eve all traits that have the `marked` metadata
# attrib | ute set to True.
traits = C.class_traits(marked=True)
_py2to3.assertCountEqual(self, traits.keys(), ('y', 'name'))
# Retrieve all traits that have a `marked` metadata attribute,
# regardless of its value.
marked_traits = C.class_traits(marked=lambda attr: attr is not None)
_py2to3.assertCountEqual(self, marked_traits, ('y', 'name', 'lst'))
|
stackforge/blazar | blazar/tests/plugins/oshosts/test_physical_host_plugin.py | Python | apache-2.0 | 103,122 | 0 | # Copyright (c) 2013 Bull.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import datetime
from unittest import mock
import ddt
from novaclient import client as nova_client
from novaclient import exceptions as nova_exceptions
from oslo_config import cfg
from oslo_config import fixture as conf_fixture
import testtools
from blazar import context
from blazar.db import api as db_api
from blazar.db import exceptions as db_exceptions
from blazar.db import utils as db_utils
from blazar.manager import exceptions as manager_exceptions
from blazar.manager import service
from blazar.plugins import oshosts as plugin
from blazar.plugins.oshosts import host_plugin
from blazar import tests
from blazar.utils.openstack import base
from blazar.utils.openstack import nova
from blazar.utils.openstack import placement
from blazar.utils import trusts
CONF = cfg.CONF
class AggregateFake(object):
def __init__(self, i, name, hosts):
self.id = i
self.name = name
self.hosts = hosts
class PhysicalHostPluginSetupOnlyTestCase(tests.TestCase):
def setUp(self):
super(PhysicalHostPluginSetupOnlyTestCase, self).setUp()
self.cfg = self.useFixture(conf_fixture.Config(CONF))
self.cfg.config(os_admin_username='fake-user')
self.cfg.config(os_admin_password='fake-passwd')
self.cfg.config(os_admin_user_domain_name='fake-user-domain')
self.cfg.config(os_admin_project_name='fake-pj-name')
self.cfg.config(os_admin_project_domain_name='fake-pj-domain')
self.context = context
self.patch(self.context, 'BlazarContext')
self.patch(base, 'url_for').return_value = 'http://foo.bar'
self.host_plugin = host_plugin
self.fake_phys_plugin = self.host_plugin.PhysicalHostPlugin()
self.nova = nova
self.rp_create = self.patch(self.nova.ReservationPool, 'create')
self.db_api = db_api
self.db_host_extra_capability_get_all_per_host = (
self.patch(self.db_api, 'host_extra_capability_get_all_per_host'))
def test_configuration(self):
self.assertEqual("fake-user", self.fake_phys_plugin.username)
self.assertEqual("fake-passwd", self.fake_phys_plugin.password)
self.assertEqual("fake-user-domain",
self.fake_phys_plugin.user_domain_name)
self.assertEqual("fake-pj-name", self.fake_phys_plugin.project_name)
self.assertEqual("fake-pj-domain",
self.fake_phys_plugin.project_domain_name)
def test__get_extra_capabilities_with_values(self):
self.db_host_extra_capability_get_all_per_host.return_value = [
{'id': 1,
'capability_name': 'foo',
'capability_value': 'bar',
'other': 'value',
'computehost_id': 1
},
{'id': 2,
'capability_name': 'buzz',
'capability_value': 'word',
'computehost_id': 1
}]
res = self.fake_phys_plugin._get_extra_capabilities(1)
self.assertEqual({'foo': 'bar', 'buzz': 'word'}, res)
def test__get_extra_capabilities_with_no_capabilities(self):
self.db_host_extra_capability_get_all_per_host.return_value = []
res = self.fake_phys_plugin._get_extra_capabilities(1)
self.assertEqual({}, res)
@ddt.ddt
class PhysicalHostPluginTestCase(tests.TestCase):
def setUp(self):
super(PhysicalHostPluginTestCase, self).setUp()
self.cfg = cfg
self.context = context
self.patch(self.context, 'BlazarContext')
self.nova_client = nova_client
self.nova_client = self.patch(self.nova_client, 'Client').return_value
self.service = service
self.manager = self.service.ManagerService()
self.fake_host_id = '1'
self.fake_host = {
'id': self.fake_host_id,
'hypervisor_hostname': 'hypvsr1',
'service_name': 'compute1',
'vcpus': 4,
'cpu_info': 'foo',
'hypervisor_type': 'xen',
'hypervisor_version': 1,
'memory_mb': 8192,
'local_gb': 10,
'trust_id': 'exxee111qwwwwe',
}
self.patch(base, 'url_for').return_value = 'http://foo.bar'
self.host_plugin = host_plugin
self.fake_phys_plugin = self.host_plugin.PhysicalHostPlugin()
self.db_api = db_api
self.db_utils = db_utils
self.db_host_get = self.patch(self.db_api, 'host_get')
self.db_host_get.return_value = self.fake_host
self.db_host_list = self.patch(self.db_api, 'host_list')
self.db_host_create = self.patch(self.db_api, 'host_create')
self.db_host_update = self.patch(self.db_api, 'host_update')
self.db_host_destroy = self.patch(self.db_api, 'host_destroy')
self.db_host_extra_capability_get_all_per_host = self.patch(
self.db | _api, 'host_extra_capability_get_all_per_host')
self.db_host_extra_capability_get_all_per_name = self.patch(
self.db_api, 'host_extra_capability_get_all_per_name')
self.db_host_extra_capability_create = self.patch(
| self.db_api, 'host_extra_capability_create')
self.db_host_extra_capability_update = self.patch(
self.db_api, 'host_extra_capability_update')
self.nova = nova
self.rp_create = self.patch(self.nova.ReservationPool, 'create')
self.patch(self.nova.ReservationPool, 'get_aggregate_from_name_or_id')
self.add_compute_host = self.patch(self.nova.ReservationPool,
'add_computehost')
self.remove_compute_host = self.patch(self.nova.ReservationPool,
'remove_computehost')
self.get_host_details = self.patch(self.nova.NovaInventory,
'get_host_details')
self.get_host_details.return_value = self.fake_host
self.get_servers_per_host = self.patch(
self.nova.NovaInventory, 'get_servers_per_host')
self.get_servers_per_host.return_value = None
self.get_extra_capabilities = self.patch(
self.fake_phys_plugin, '_get_extra_capabilities')
self.get_extra_capabilities.return_value = {
'foo': 'bar',
'buzz': 'word',
}
self.placement = placement
self.prov_create = self.patch(self.placement.BlazarPlacementClient,
'create_reservation_provider')
self.prov_create.return_value = {
"generation": 0,
"name": "blazar_foo",
"uuid": "7d2590ae-fb85-4080-9306-058b4c915e3f",
"parent_provider_uuid": "542df8ed-9be2-49b9-b4db-6d3183ff8ec8",
"root_provider_uuid": "542df8ed-9be2-49b9-b4db-6d3183ff8ec8"
}
self.prov_delete = self.patch(self.placement.BlazarPlacementClient,
'delete_reservation_provider')
self.fake_phys_plugin.setup(None)
self.trusts = trusts
self.trust_ctx = self.patch(self.trusts, 'create_ctx_from_trust')
self.trust_create = self.patch(self.trusts, 'create_trust')
self.ServerManager = nova.ServerManager
def test_get_host(self):
host = self.fake_phys_plugin.get_computehost(self.fake_host_id)
self.db_host_get.assert_called_once_with('1')
expected = self.fake_host.copy()
expected.update({'foo': 'bar', 'buzz': 'word'})
self.assertEqual(expected, host)
def test_get_host_without_extracapabilities(self):
self.get_e |
onto/sonata | sonata/library.py | Python | gpl-3.0 | 66,200 | 0.000785 | import os
import re
import gettext
import locale
import threading # libsearchfilter_toggle starts thread libsearchfilter_loop
import operator
import gtk
import gobject
import pango
import ui
import misc
import formatting
import mpdhelper as mpdh
from consts import consts
import breadcrumbs
def library_set_data(album=None, artist=None, genre=None, year=None,
path=None):
if album is not None:
album = unicode(album)
if artist is not None:
artist = unicode(artist)
if genre is not None:
genre = unicode(genre)
if year is not None:
year = unicode(year)
if path is not None:
path = unicode(path)
return (album, artist, genre, year, path)
def library_get_data(data, *args):
name_to_index = {'album': 0, 'artist': 1, 'genre': 2, 'year': 3, 'path': 4}
# Data retrieved from the gtktreeview model is not in
# unicode anymore, so convert it.
retlist = [unicode(data[name_to_index[arg]]) if data[name_to_index[arg]] \
else None for arg in args]
if len(retlist) == 1:
return retlist[0]
else:
return retlist
class Library(object):
def __init__(self, config, mpd, artwork, TAB_LIBRARY, album_filename,
settings_save, filtering_entry_make_red,
filtering_entry_revert_color, filter_key_pressed,
on_add_item, connected, on_library_button_press, new_tab,
get_multicd_album_root_dir):
self.artwork = artwork
self.config = config
self.mpd = mpd
self.librarymenu = None # cyclic dependency, set later
self.album_filename = album_filename
self.settings_save = settings_save
self.filtering_entry_make_red = filtering_entry_make_red
self.filtering_entry_revert_color = filtering_entry_revert_color
self.filter_key_pressed = filter_key_pressed
self.on_add_item = on_add_item
self.connected = connected
self.on_library_button_press = on_library_button_press
self.get_multicd_album_root_dir = get_multicd_album_root_dir
self.NOTAG = _("Untagged")
self.VAstr = _("Various Artists")
self.search_terms = [_('Artist'), _('Title'), _('Album'), _('Genre'),
_('Filename'), _('Everything')]
self.search_terms_mpd = ['artist', 'title', 'album', 'genre', 'file',
'any']
self.libfilterbox_cmd_buf = None
self.libfilterbox_cond = None
self.libfilterbox_source = None
self.prevlibtodo_base = None
self.prevlibtodo_base_results = None
self.prevlibtodo = None
self.save_timeout = None
self.libsearch_last_tooltip = None
self.lib_view_filesystem_cache = None
self.lib_view_artist_cache = None
self.lib_view_genre_cache = None
self.lib_view_album_cache = None
self.lib_list_genres = None
self.lib_list_artists = None
self.lib_list_albums = None
self.lib_list_years = None
self.view_caches_reset()
self.libraryvbox = gtk.VBox()
self.library = ui.treeview()
self.library_selection = self.library.get_selection()
self.breadcrumbs = breadcrumbs.CrumbBox()
self.breadcrumbs.props.spacing = 2
expanderwindow2 = ui.scrollwindow(add=self.library)
self.searchbox = gtk.HBox()
self.searchcombo = ui.combo(items=self.search_terms)
self.searchcombo.set_tooltip_text(_("Search terms"))
self.searchtext = ui.entry()
self.searchtext.set_tooltip_text(_("Search library"))
self.searchbutton = ui.button(img=ui.image(stock=gtk.STO | CK_CANCEL),
h=self.searchcombo.size_request()[1])
self.searchbutton.set_no_show_all(True)
self.searchbutton.hide()
self.searchbutton.set_tooltip_text(_("End Search"))
self.libraryview = ui.button(relief=gtk.RELIEF_NONE)
sel | f.libraryview.set_tooltip_text(_("Library browsing view"))
# disabled as breadcrumbs replace this:
# self.searchbox.pack_start(self.libraryview, False, False, 1)
# self.searchbox.pack_start(gtk.VSeparator(), False, False, 2)
self.searchbox.pack_start(ui.label(_("Search:")), False, False, 3)
self.searchbox.pack_start(self.searchtext, True, True, 2)
self.searchbox.pack_start(self.searchcombo, False, False, 2)
self.searchbox.pack_start(self.searchbutton, False, False, 2)
self.libraryvbox.pack_start(self.breadcrumbs, False, False, 2)
self.libraryvbox.pack_start(expanderwindow2, True, True)
self.libraryvbox.pack_start(self.searchbox, False, False, 2)
self.tab = new_tab(self.libraryvbox, gtk.STOCK_HARDDISK, TAB_LIBRARY,
self.library)
# Assign some pixbufs for use in self.library
self.openpb2 = self.library.render_icon(gtk.STOCK_OPEN,
gtk.ICON_SIZE_LARGE_TOOLBAR)
self.harddiskpb2 = self.library.render_icon(gtk.STOCK_HARDDISK,
gtk.ICON_SIZE_LARGE_TOOLBAR)
self.openpb = self.library.render_icon(gtk.STOCK_OPEN,
gtk.ICON_SIZE_MENU)
self.harddiskpb = self.library.render_icon(gtk.STOCK_HARDDISK,
gtk.ICON_SIZE_MENU)
self.albumpb = gtk.gdk.pixbuf_new_from_file_at_size(
album_filename, consts.LIB_COVER_SIZE, consts.LIB_COVER_SIZE)
self.genrepb = self.library.render_icon('gtk-orientation-portrait',
gtk.ICON_SIZE_LARGE_TOOLBAR)
self.artistpb = self.library.render_icon('artist',
gtk.ICON_SIZE_LARGE_TOOLBAR)
self.sonatapb = self.library.render_icon('sonata', gtk.ICON_SIZE_MENU)
# list of the library views: (id, name, icon name, label)
self.VIEWS = [
(consts.VIEW_FILESYSTEM, 'filesystem',
gtk.STOCK_HARDDISK, _("Filesystem")),
(consts.VIEW_ALBUM, 'album',
'album', _("Albums")),
(consts.VIEW_ARTIST, 'artist',
'artist', _("Artists")),
(consts.VIEW_GENRE, 'genre',
gtk.STOCK_ORIENTATION_PORTRAIT, _("Genres")),
]
self.library_view_assign_image()
self.library.connect('row_activated', self.on_library_row_activated)
self.library.connect('button_press_event',
self.on_library_button_press)
self.library.connect('key-press-event', self.on_library_key_press)
self.library.connect('query-tooltip', self.on_library_query_tooltip)
expanderwindow2.connect('scroll-event', self.on_library_scrolled)
self.libraryview.connect('clicked', self.library_view_popup)
self.searchtext.connect('key-press-event',
self.libsearchfilter_key_pressed)
self.searchtext.connect('activate', self.libsearchfilter_on_enter)
self.searchbutton.connect('clicked', self.on_search_end)
self.libfilter_changed_handler = self.searchtext.connect(
'changed', self.libsearchfilter_feed_loop)
searchcombo_changed_handler = self.searchcombo.connect(
'changed', self.on_library_search_combo_change)
# Initialize library data and widget
self.libraryposition = {}
self.libraryselectedpath = {}
self.searchcombo.handler_block(searchcombo_changed_handler)
self.searchcombo.set_active(self.config.last_search_num)
self.searchcombo.handler_unblock(searchcombo_changed_handler)
self.librarydata = gtk.ListStore(gtk.gdk.Pixbuf, gobject.TYPE_PYOBJECT,
str)
self.library.set_model(self.librarydata)
self.library.set_search_column(2)
self.librarycell = gtk.CellRendererText()
self.librarycell.set_property("ellipsize", pango.ELLIPSIZE_END)
self.libraryimg = gtk.CellRe |
MonoCloud/pyoanda | setup.py | Python | mit | 2,560 | 0 | from setuptools import setup, find_packages
from codecs import open
from os import path
import pyoanda
here = path.abspath(path.dirname(__file__))
# Get the long description from the relevant file
with open(path.join(here, 'DESCRIPTION.rst'), encoding='utf-8') as f:
long_description = f.read()
setup(
name='PyOanda',
# Versions should comply with PEP440. For a discussion on single-sourcing
# the version across setup.py and the project code, see
# https://packaging.python.org/en/latest/single_source_version.html
version=pyoanda.__version__,
description=long_description,
long_description=long_description,
url='https://github.com/toloco/pyoanda',
author='Tolo Palmer',
author_email='tolopalmer@gmail.com',
license='MIT',
# See https://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
# How mature is this project? Common values are
# '3 - Alpha',
# '4 - Beta',
'5 - Production/Stable',
# 'Development Status :: 3 - Alpha',
# Indicate who your project is intended for
'Intended Audience :: Developers',
'Topic :: Office/Business :: Financial :: Investment',
# Pick your license as you wish (should match "license" above)
'License :: OSI Approved :: MIT License',
# Specify the Python versions you support here. In particular, ensure
# that you indicate whether you support Python 2, Python 3 or both.
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.1',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
],
# What does your project relate to?
keywords='oanda, wrapper',
# You can just specify the packages manually here if your project is
# simple. Or you can use find_packages().
packages=find | _packages(exclude=['contrib', 'docs', 'tests*' | ]),
# List run-time dependencies here. These will be installed by pip when
# your project is installed. For an analysis of "install_requires" vs pip's
# requirements files see:
# https://packaging.python.org/en/latest/requirements.html
install_requires=['requests'],
tests_require=['nose', 'coveralls', 'requests-mock'],
test_suite='nose.collector',
)
|
DadanielZ/incubator-eagle | eagle-external/hadoop_jmx_collector/lib/kafka-python/kafka/common.py | Python | apache-2.0 | 4,849 | 0.002269 | import inspect
import sys
from collections import namedtuple
###############
# Structs #
###############
# Request payloads
ProduceRequest = namedtuple("ProduceRequest",
["topic", "partition", "messages"])
FetchRequest = namedtuple("FetchRequest",
["topic", "partition", "offset", "max_bytes"])
OffsetRequest = namedtuple("OffsetRequest",
["topic", "partition", "time", "max_offsets"])
OffsetCommitRequest = namedtuple("OffsetCommitRequest",
["topic", "partition", "offset", "metadata"])
MetadataRequest = namedtuple("MetadataRequest",
["topics"])
OffsetFetchRequest = namedtuple("OffsetFetchRequest", ["topic", "partition"])
MetadataResponse = namedtuple("MetadataResponse",
["brokers", "topics"])
# Response payloads
ProduceResponse = namedtuple("ProduceResponse",
["topic", "partition", "error", "offset"])
FetchResponse = namedtuple("FetchResponse", ["topic", "partition", "error",
"highwaterMark", "messages"])
OffsetResponse = namedtuple("OffsetResponse",
["topic", "partition", "error", "offsets"])
OffsetCommitResponse = namedtuple("OffsetCommitResponse",
["topic", "partition", "error"])
OffsetFetchResponse = namedtuple("OffsetFetchResponse",
["topic", "partition", "offset",
"metadata", "error"])
# Other useful structs
BrokerMetadata = namedtuple("BrokerMetadata",
["nodeId", "host", "port"])
TopicMetadata = namedtuple("TopicMetadata",
["topic", "error", "partitions"])
PartitionMetadata = namedtuple("PartitionMetadata",
["topic", "partition", "leader", "replicas", "isr", "error"])
OffsetAndMessage = namedtuple("OffsetAndMessage",
["offset", "message"])
Message = namedtuple("Message",
["magic", "attributes", "key", "value"])
TopicAndPartition = namedtuple("TopicAndPartition",
["topic", "partition"])
KafkaMessage = namedtuple("KafkaMessage",
["topic", "partition", "offset", "key", "value"])
#################
# Exceptions #
#################
class KafkaError(RuntimeError):
pass
class BrokerResponseError(KafkaError):
pass
class UnknownError(BrokerResponseError):
errno = -1
message = 'UNKNOWN'
class OffsetOutOfRangeError(BrokerResponseError):
errno = 1
message = 'OFFSET_OUT_OF_RANGE'
class InvalidMessageError(BrokerResponseError):
errno = 2
message = 'INVALID_MESSAGE'
class UnknownTopicOrPartitionError(BrokerResponseError):
errno = 3
message = 'UNKNOWN_TOPIC_OR_PARTITON'
class InvalidFetchRequestError(BrokerResponseError):
errno = 4
message = 'INVALID_FETCH_SIZE'
class LeaderNotAvailableError(BrokerResponseError):
errno = 5
message = 'LEADER_NOT_AVAILABLE'
class NotLeaderForPartitionError(BrokerResponseError):
errno = 6
message = 'NOT_LEADER_FOR_PARTITION'
class RequestTimedOutError(BrokerResponseError):
errno = 7
message = 'REQUEST_TIMED_OUT'
class BrokerNotAvailableError(BrokerResponseError):
errno = 8
message = 'BROKER_NOT_AVAILABLE'
class ReplicaNotAvailableError(BrokerResponseError):
errno = 9
message = 'REPLICA_NOT_AVAILABLE'
class MessageSizeTooLargeError(BrokerResponseError):
errno = 10
message = 'MESSAGE_SIZE_TOO_LARGE'
class StaleControllerEpochError(BrokerResponseError):
errno = 11
message = 'STALE_CONTROLLER_EPOCH'
class OffsetMetadataTooLargeError(BrokerResponseError):
errno = 12
message = 'OFFSET_METADATA_TOO_LARGE'
class StaleLeaderEpochCodeError(BrokerResponseError):
errno = 13
message = 'STALE_LEADER_EPOCH_CODE'
class KafkaUnavailableError(KafkaError):
pass
class KafkaTimeoutError(KafkaError):
pass
class FailedPayloadsError(KafkaError):
pass
class ConnectionError(KafkaError):
pass
class BufferUnderflowError(KafkaError):
pass
class ChecksumError(KafkaError):
pass
class ConsumerFetchSizeTooSmall(KafkaError):
pass
class ConsumerNoMoreData(KafkaError):
pass
class ConsumerTimeout(KafkaError):
pass
class | ProtocolError(KafkaError):
pass
class UnsupportedCodecError(KafkaError):
pass
class KafkaConfigurationError(KafkaError):
pass
def _iter_broker_errors():
for name, obj in inspect.getmembers(sys.modules[__name__]):
if inspect.isclass(obj) and issubclass(obj, BrokerResponseError) and obj != BrokerRespo | nseError:
yield obj
kafka_errors = dict([(x.errno, x) for x in _iter_broker_errors()])
def check_error(response):
if response.error:
error_class = kafka_errors.get(response.error, UnknownError)
raise error_class(response)
|
Alberto-Beralix/Beralix | i386-squashfs-root/usr/lib/python2.7/dist-packages/PIL/ImageChops.py | Python | gpl-3.0 | 44 | 0.022727 | ../../../../share | /pyshared/PIL/ImageChops.p | y |
nacc/autotest | client/tools/tapfd_helper.py | Python | gpl-2.0 | 1,866 | 0.001608 | #!/usr/bin/python
import sys, os, re, logging
import common
from autotest.client.shared import logging_manager
from autotest.client.virt import virt_utils
def destroy_tap(tapfd_list):
f | or tapfd in tapfd_list:
try:
os.close(tapfd)
# File descriptor is already closed
except OSError:
pass
if __name__ | == "__main__":
logging_manager.configure_logging(virt_utils.VirtLoggingConfig(),
verbose=True)
if len(sys.argv) <= 2:
logging.info("Usage: %s bridge_name qemu_command_line", sys.argv[0])
sys.exit(255)
brname = sys.argv[1]
cmd_line = ' '.join(sys.argv[2:])
if re.findall("-netdev\s", cmd_line):
# so we get the new qemu cli with netdev parameter.
tap_list_re = r"tap,id=(.*?),"
tap_replace_re = r"(tap,id=%s.*?,fd=)\d+"
else:
# the old cli contain "-net" parameter.
tap_list_re = r"tap,vlan=(\d+),"
tap_replace_re = r"(tap,vlan=%s,fd=)\d+"
tap_list = re.findall(tap_list_re, cmd_line)
if not tap_list:
print "Could not find tap device."
sys.exit(1)
tapfd_list = []
for tap in tap_list:
try:
ifname = "tap-%s" % tap
tapfd = virt_utils.open_tap("/dev/net/tun", ifname)
virt_utils.add_to_bridge(ifname, brname)
virt_utils.bring_up_ifname(ifname)
pattern = tap_replace_re % tap
cmd_line = re.sub(pattern, "\g<1>%s " % tapfd, cmd_line)
tapfd_list.append(tapfd)
except Exception, e:
destroy_tap(tapfd_list)
print "Error: %s" % e
sys.exit(2)
try:
# Run qemu command.
logging.info("TAP fd open to %s: %s", brname, tapfd_list)
os.system(cmd_line)
finally:
destroy_tap(tapfd_list)
|
garbas/mozilla-releng-services | src/shipit_taskcluster/tests/test_tc_utils.py | Python | mpl-2.0 | 912 | 0.002193 | # -*- coding: utf-8 -*-
import pytest
from unittest.mock import patch
import os
import json
from shipit_taskcluster.taskcluster_utils import get_queue_group_state
def mocked_listtaskgroup(task_group_id):
filename = os.path.join(os.path.dirname(__file__), 'testdata', task_group_id)
with open(filename, 'r') as f:
return json.loads(f.read())
@patch('shipit_taskcluster.taskcluster_utils.TC_QUEUE.listTaskGroup', new=mocked_listtaskgroup)
@pytest.mark.parametrize('task_group_id, result', (
(
'allcompletedid',
'completed',
),
(
'somependingid' | ,
'running',
),
(
'somefailedid',
'failed',
),
(
'someexceptionid',
'failed',
),
(
'badformatid',
'exception'
),
))
def test_get_queue_group_state(task_group_id, result):
assert get_queue_group_state(task_ | group_id) == result
|
heryandi/mnp | mnp/test/test_command.py | Python | mit | 6,569 | 0.00548 | import StringIO
import sys
import unittest
from mock import patch
from mnp.command import *
# For tests needing subprocess
class SubprocessTest(unittest.TestCase):
def setUp(self):
patcher1 = patch("subprocess.check_call")
self.addCleanup(patcher1.stop)
self.mockSubprocess_check_call = patcher1.start()
class DownloadTest(SubprocessTest):
def test_download_one(self):
download(["package1"], "indexUrl1")
self.mockSubprocess_check_call.assert_called_once_with(["pip", "install", "package1", "--extra-index-url", "indexUrl1"])
def test_download_multiple(self):
download(["package1", "package2", "package3"], "indexUrl1")
self.mockSubprocess_check_call.assert_called_once_with(["pip", "install", "package1", "package2", "package3", "--extra-index-url", "indexUrl1"])
def test_download_indexUrl2(self):
download(["package1"], "indexUrl2")
self.mockSubprocess_check_call.assert_called_once_with(["pip", "install", "package1", "--extra-index-url", "indexUrl2"])
def test_download_additional_args(self):
download(["package1"], "indexUrl1", ["-v", "-v", "-v"])
self.mockSubprocess_check_call.assert_called_once_with(["pip", "install", "package1", "--extra-index-url", "indexUrl1", "-v", "-v", "-v"])
class NormalUploadTest(SubprocessTest):
def test_normal_upload_simple(self):
normal_upload("default")
self.mockSubprocess_check_call.assert_called_once_with(["python", "setup.py", "register", "-r", "default", "sdist", "upload", "-r", "default"])
def test_normal_upload_additional_args(self):
normal_upload("default", ["anything", "everything"])
self.mockSubprocess_check_call.assert_called_once_with(["python", "setup.py", "register", "-r", "default", "sdist", "upload", "-r", "default", "anything", "everything"])
class GitHubUploadTest(SubprocessTest):
def test_github_upload_simple(self):
github_upload("default")
self.mockSubprocess_check_call.assert_called_once_with(["python", "setup.py", "github_register", "-r", "default", "sdist", "github_upload", "-r", "default"])
def test_github_upload_additional_args(self):
github_upload("default", ["anything", "everything"])
self.mockSubprocess_check_call.assert_called_once_with(["python", "setup.py", "github_register", "-r", "default", "sdist", "github_upload", "-r", "default", "anything", "everything"])
# For tests needing xmlrpclib
class XmlRpcTest(unittest.TestCase):
def setUp(self):
# print method for debugging
self.originalStdout = sys.stdout
self._print = lambda x: self.originalStdout.write(str(x) + "\n")
patcher1 = patch("xmlrpclib.ServerProxy")
self.addCleanup(patcher1.stop)
self.mockServerProxy = patcher1.start()
patcher2 = patch("sys.stdout", new_callable = StringIO.StringIO)
self.addCleanup(patcher2.stop)
self.stringIO = patcher2.start()
class ListPackageTest(XmlRpcTest):
def setUp(self):
super(ListPackageTest, self).setUp()
self.list_packages_return_value = ["package1", "package2", "package3"]
self.mockServerProxy.return_value.list_packages.return_value = self.list_packages_return_value
def test_list(self):
list_packages("indexUrl1")
self.mockServerProxy.assert_called_once_with("indexUrl1")
self.mockServerProxy.return_value.list_packages.assert_called_once_with()
self.assertEquals(self.stringIO.getvalue(), "\n".join(self.list_packages_return_value) + "\n")
class SearchPackageTest(XmlRpcTest):
def setUp(self):
super(SearchPackageTest, self).setUp()
self.search_return_value = [
{"name": "package1", "version": "1.0.0", "summary": "summary1"},
{"name": "package1", "version": "1.1.0", "summary": "summary1"},
{"name": "package2", "version": "1.0.0", "summary": "summary2"},
]
self.mockServerProxy.return_value.search.return_value = self.search_return_value
def test_search(self):
search("indexUrl1", "query1")
self.mockServerProxy.assert_called_once_with("indexUrl1")
self.mockServerProxy.return_value.search.assert_called_once_with({"name": "query1", "summary": "q | uery1"}, "or")
newest_packages = [
{"name": "package1", "version": "1.1.0", "summary": "summary1"},
{"name": "package2", "version": "1.0.0", "summary": "summary2"},
]
expected_string = "\n".join("- ".join([p["name"].ljust(20), p["summary"]]) for p in newest_packages)
self.assertEquals(self.stringIO.getvalue(), expected_string + "\n")
c | lass DocsTest(XmlRpcTest):
def setUp(self):
super(DocsTest, self).setUp()
self.docs_return_value = "Some String"
self.mockServerProxy.return_value.docs.return_value = self.docs_return_value
def test_docs(self):
docs("indexUrl1", "package1")
self.mockServerProxy.assert_called_once_with("indexUrl1")
self.mockServerProxy.return_value.docs.assert_called_once_with("package1")
self.assertEquals(self.stringIO.getvalue(), self.docs_return_value + "\n")
class InfoTest(XmlRpcTest):
def setUp(self):
super(InfoTest, self).setUp()
self.info_return_value = {
"name": "name",
"version": "version",
"summary": "summary",
"author": "author",
"home_page": "home_page",
"project_page": "project_page"
}
self.mockServerProxy.return_value.info.return_value = self.info_return_value
def test_info(self):
info("indexUrl1", "package1", "version1")
self.mockServerProxy.assert_called_once_with("indexUrl1")
self.mockServerProxy.return_value.info.assert_called_once_with("package1", "version1")
expected_string = "\n".join(["Name: " + self.info_return_value["name"],
"Version: " + self.info_return_value["version"],
"Summary: " + self.info_return_value["summary"],
"Author: " + self.info_return_value["author"],
"Home page: " + self.info_return_value["home_page"],
"Project page: " + self.info_return_value["project_page"]])
self.assertEquals(self.stringIO.getvalue(), expected_string + "\n")
def test_info_no_version(self):
info("indexUrl1", "package1")
self.mockServerProxy.assert_called_once_with("indexUrl1")
self.mockServerProxy.return_value.info.assert_called_once_with("package1", -1)
|
Apostol3/race_env_editor | map.py | Python | mit | 1,833 | 0.001091 | import json
import math
__author__ = 'apostol3'
class Map:
def __init__(self, w, h):
self.max_time = 120
self.size = (w, h)
self.walls = []
self.headline = []
self.cars = []
self.finish = []
self.objects = []
self.car_size = (1.8/2, 4.6/2)
def start_new_wall(self):
self.walls.append([])
def append_wall_point(self, x, y):
if x > self.size[0] or y > self.size[1]:
self.start_new_wall()
return
self.walls[-1].append((x, y))
def append_headline_point(self, x, y):
if x > self.size[0] or y > self.size[1]:
return
self.headline.append((x, y))
|
def create_car(self, x, y):
self.cars.append((x, y, 3 * math.pi / 2))
def append_finish_ | point(self, x, y):
if x > self.size[0] or y > self.size[1]:
self.finish.clear()
if len(self.finish) < 2:
self.finish.append((x, y))
else:
self.finish = [(x, y)]
@staticmethod
def open_from_file(file):
f = open(file, 'r')
doc = json.load(f)
f.close()
size = doc['size']
map = Map(*size)
map.max_time = doc['max_time']
map.walls = doc['walls']
map.finish = doc['finish']
map.headline = doc['headline']
map.cars = doc['cars']
return map
def save_to_file(self, file):
filename = open(file, 'w')
doc = {'size': self.size, 'max_time': self.max_time, 'finish': self.finish,
'walls': self.walls, 'headline': self.headline, 'cars': self.cars}
if len(doc['walls']) != 0 and len(doc['walls'][-1]) == 0:
doc['walls'].pop()
out_inf = json.dumps(doc, indent=4)
filename.write(out_inf)
filename.close()
|
jacobbieker/Insights | insights/google/GCalendar2SQLite.py | Python | gpl-2.0 | 4,019 | 0.001244 | '''
Copyright (C) 2015 Jacob Bieker, jacob@bieker.us, www.jacobbieker.com
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License along
with this program; if not, write to the Free Software Foundation, Inc.,
51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
'''
__author__ = 'Jacob Bieker'
import os
from glob import glob
from insights.config.databaseSetup import Calendars
from insights.io import config
# Have to do this because when the command is called from the import in any subfolder it cannot find the dbconfig
if __name__ != "__main__":
configuration_files = config.import_yaml_files(".", ["constants"])
constants = configuration_files[0]
else:
configuration_files = config.import_yaml_files("..", ["cons | tants"])
constants = configuration_files[0]
def process_calendar(calendar) | :
with open(calendar, "r", encoding="latin-1") as source:
# Dictionary of values to insert later
event_data = []
which_calendar = os.path.basename(calendar)
which_calendar = which_calendar.replace(".ics", "")
print(which_calendar)
for line in source:
components_temp = line.split(":")
components = []
for component in components_temp:
component = component.replace("\n", "")
components.append(component)
type_of_data = components[0].split(";")
if type_of_data[0] == "BEGIN" and components[1] == "VEVENT":
event_data.append({'which_calendar': which_calendar})
event_data.append({'is_task': False})
# Start of new record
print("Begin new event")
elif type_of_data[0] == "DTSTART":
event_data.append({'start_date': components[1]})
elif type_of_data[0] == "DTEND":
event_data.append({'end_date': components[1]})
elif type_of_data[0] == "DESCRIPTION":
event_data.append({'description': components[1]})
elif type_of_data[0] == "LOCATION":
event_data.append({'location': components[1]})
elif type_of_data[0] == "SUMMARY":
event_data.append({'name': components[1]})
elif type_of_data[0] == "END" and components[1] == "VEVENT":
all_event_data = {}
for item in event_data:
all_event_data.update(item)
# Insert Contact into database
Calendars.insert(is_task=all_event_data.get('is_task'),
name=all_event_data.get('name'),
start_date=all_event_data.get('start_date'),
end_date=all_event_data.get('end_date'),
location=all_event_data.get('location'),
description=all_event_data.get('description'),
type=all_event_data.get('which_calendar'),
).execute()
print("Inserted Event")
# Reset data
del event_data[:]
#pool = Pool(processes=2)
calendars_list = []
rootdir = os.path.join(constants.get("dataDir"), "Takeout", "Calendar")
calendars = [y for x in os.walk(rootdir) for y in glob(os.path.join(x[0], '*.ics'))]
print("Starting Google Calendar Parsing")
for calendar in calendars:
calendars_list.append(calendar)
process_calendar(calendar)
#pool.imap_unordered(process_calendar, calendars_list)
|
martbhell/wasthereannhlgamelastnight | src/lib/pyasn1/codec/cer/encoder.py | Python | mit | 9,409 | 0.001275 | #
# This file is part of pyasn1 software.
#
# Copyright (c) 2005-2019, Ilya Etingof <etingof@gmail.com>
# License: http://snmplabs.com/pyasn1/license.html
#
from pyasn1 import error
from pyasn1.codec.ber import encoder
from pyasn1.compat.octets import str2octs, null
from pyasn1.type import univ
from pyasn1.type import useful
__all__ = ['encode']
class BooleanEncoder(encoder.IntegerEncoder):
def encodeValue(self, value, asn1Spec, encodeFun, **options):
if value == 0:
substrate = (0,)
else:
substrate = (255,)
return substrate, False, False
class RealEncoder(encoder.RealEncoder):
def _chooseEncBase(self, value):
m, b, e = value
return self._dropFloatingPoint(m, b, e)
# specialized GeneralStringEncoder here
class TimeEncoderMixIn(object):
Z_CHAR = ord('Z')
PLUS_CHAR = ord('+')
MINUS_CHAR = ord('-')
COMMA_CHAR = ord(',')
DOT_CHAR = ord('.')
ZERO_CHAR = ord('0')
MIN_LENGTH = 12
MAX_LENGTH = 19
def encodeValue(self, value, asn1Spec, encodeFun, **options):
# CER encoding constraints:
# - minutes are mandatory, seconds are optional
# - sub-seconds must NOT be zero / no meaningless zeros
# - no hanging fraction dot
# - time in UTC (Z)
# - only dot is allowed for fractions
if asn1Spec is not None:
| value | = asn1Spec.clone(value)
numbers = value.asNumbers()
if self.PLUS_CHAR in numbers or self.MINUS_CHAR in numbers:
raise error.PyAsn1Error('Must be UTC time: %r' % value)
if numbers[-1] != self.Z_CHAR:
raise error.PyAsn1Error('Missing "Z" time zone specifier: %r' % value)
if self.COMMA_CHAR in numbers:
raise error.PyAsn1Error('Comma in fractions disallowed: %r' % value)
if self.DOT_CHAR in numbers:
isModified = False
numbers = list(numbers)
searchIndex = min(numbers.index(self.DOT_CHAR) + 4, len(numbers) - 1)
while numbers[searchIndex] != self.DOT_CHAR:
if numbers[searchIndex] == self.ZERO_CHAR:
del numbers[searchIndex]
isModified = True
searchIndex -= 1
searchIndex += 1
if searchIndex < len(numbers):
if numbers[searchIndex] == self.Z_CHAR:
# drop hanging comma
del numbers[searchIndex - 1]
isModified = True
if isModified:
value = value.clone(numbers)
if not self.MIN_LENGTH < len(numbers) < self.MAX_LENGTH:
raise error.PyAsn1Error('Length constraint violated: %r' % value)
options.update(maxChunkSize=1000)
return encoder.OctetStringEncoder.encodeValue(
self, value, asn1Spec, encodeFun, **options
)
class GeneralizedTimeEncoder(TimeEncoderMixIn, encoder.OctetStringEncoder):
MIN_LENGTH = 12
MAX_LENGTH = 20
class UTCTimeEncoder(TimeEncoderMixIn, encoder.OctetStringEncoder):
MIN_LENGTH = 10
MAX_LENGTH = 14
class SetOfEncoder(encoder.SequenceOfEncoder):
def encodeValue(self, value, asn1Spec, encodeFun, **options):
chunks = self._encodeComponents(
value, asn1Spec, encodeFun, **options)
# sort by serialised and padded components
if len(chunks) > 1:
zero = str2octs('\x00')
maxLen = max(map(len, chunks))
paddedChunks = [
(x.ljust(maxLen, zero), x) for x in chunks
]
paddedChunks.sort(key=lambda x: x[0])
chunks = [x[1] for x in paddedChunks]
return null.join(chunks), True, True
class SequenceOfEncoder(encoder.SequenceOfEncoder):
def encodeValue(self, value, asn1Spec, encodeFun, **options):
if options.get('ifNotEmpty', False) and not len(value):
return null, True, True
chunks = self._encodeComponents(
value, asn1Spec, encodeFun, **options)
return null.join(chunks), True, True
class SetEncoder(encoder.SequenceEncoder):
@staticmethod
def _componentSortKey(componentAndType):
"""Sort SET components by tag
Sort regardless of the Choice value (static sort)
"""
component, asn1Spec = componentAndType
if asn1Spec is None:
asn1Spec = component
if asn1Spec.typeId == univ.Choice.typeId and not asn1Spec.tagSet:
if asn1Spec.tagSet:
return asn1Spec.tagSet
else:
return asn1Spec.componentType.minTagSet
else:
return asn1Spec.tagSet
def encodeValue(self, value, asn1Spec, encodeFun, **options):
substrate = null
comps = []
compsMap = {}
if asn1Spec is None:
# instance of ASN.1 schema
inconsistency = value.isInconsistent
if inconsistency:
raise inconsistency
namedTypes = value.componentType
for idx, component in enumerate(value.values()):
if namedTypes:
namedType = namedTypes[idx]
if namedType.isOptional and not component.isValue:
continue
if namedType.isDefaulted and component == namedType.asn1Object:
continue
compsMap[id(component)] = namedType
else:
compsMap[id(component)] = None
comps.append((component, asn1Spec))
else:
# bare Python value + ASN.1 schema
for idx, namedType in enumerate(asn1Spec.componentType.namedTypes):
try:
component = value[namedType.name]
except KeyError:
raise error.PyAsn1Error('Component name "%s" not found in %r' % (namedType.name, value))
if namedType.isOptional and namedType.name not in value:
continue
if namedType.isDefaulted and component == namedType.asn1Object:
continue
compsMap[id(component)] = namedType
comps.append((component, asn1Spec[idx]))
for comp, compType in sorted(comps, key=self._componentSortKey):
namedType = compsMap[id(comp)]
if namedType:
options.update(ifNotEmpty=namedType.isOptional)
chunk = encodeFun(comp, compType, **options)
# wrap open type blob if needed
if namedType and namedType.openType:
wrapType = namedType.asn1Object
if wrapType.tagSet and not wrapType.isSameTypeWith(comp):
chunk = encodeFun(chunk, wrapType, **options)
substrate += chunk
return substrate, True, True
class SequenceEncoder(encoder.SequenceEncoder):
omitEmptyOptionals = True
tagMap = encoder.tagMap.copy()
tagMap.update({
univ.Boolean.tagSet: BooleanEncoder(),
univ.Real.tagSet: RealEncoder(),
useful.GeneralizedTime.tagSet: GeneralizedTimeEncoder(),
useful.UTCTime.tagSet: UTCTimeEncoder(),
# Sequence & Set have same tags as SequenceOf & SetOf
univ.SetOf.tagSet: SetOfEncoder(),
univ.Sequence.typeId: SequenceEncoder()
})
typeMap = encoder.typeMap.copy()
typeMap.update({
univ.Boolean.typeId: BooleanEncoder(),
univ.Real.typeId: RealEncoder(),
useful.GeneralizedTime.typeId: GeneralizedTimeEncoder(),
useful.UTCTime.typeId: UTCTimeEncoder(),
# Sequence & Set have same tags as SequenceOf & SetOf
univ.Set.typeId: SetEncoder(),
univ.SetOf.typeId: SetOfEncoder(),
univ.Sequence.typeId: SequenceEncoder(),
univ.SequenceOf.typeId: SequenceOfEncoder()
})
class Encoder(encoder.Encoder):
fixedDefLengthMode = False
fixedChunkSize = 1000
#: Turns ASN.1 object into CER octet stream.
#:
#: Takes any ASN.1 object (e.g. :py:class:`~pyasn1.type.base.PyAsn1Item` derivative)
#: walks all its components recursively and produces a CER octet stream.
#:
#: Parameters
#: ----------
|
kyle8998/Practice-Coding-Questions | CTCI/Chapter3/3.3-Stack_Of_Plates.py | Python | unlicense | 1,838 | 0.005985 | # CTCI 3.1
# Three in One
import unittest
class MultiStack():
def __init__(self, capacity):
self.capacity = capacity
self.stacks = []
def push(self, item):
if len(self.stacks) and (len(self.stacks[-1]) < self.capacity):
self.stacks[-1].append(item)
else:
self.stacks.append([item])
def pop(self):
while len(self.stacks) and (len(self.stacks[-1]) == 0):
self.stacks.pop()
if len(self.stacks) == 0:
return None
item = self.stacks[-1].pop()
if len(self.stacks[-1]) == 0:
self.stacks.pop()
return item
def pop_at(self, stack_number):
if (stack_number < 0) or (len(self.stacks) <= stack_number):
return None
if len(self.stacks[stack_number]) == 0:
return None
return self.stacks[stack_number].pop()
#-------------------------------------------------------------------------------
# CTCI Solution
#-------------------------------------------------------------------------------
#Testing
class Test(unittest.TestCase):
def test_multi_stack(self):
stack = MultiStack(3)
stack.push(11)
stack.push( | 22)
stack.push(33)
stack.push(44)
stack.push(55)
stack.push(66)
stack.push(77)
stack.push(88)
self.assertEqual(stack.pop(), 88)
self.assertEqual(stack.pop_at(1), 66)
self.assertEqual(stack.pop_at(0), 33)
self.assertEqual(stack.pop_at(1), 55)
se | lf.assertEqual(stack.pop_at(1), 44)
self.assertEqual(stack.pop_at(1), None)
stack.push(99)
self.assertEqual(stack.pop(), 99)
self.assertEqual(stack.pop(), 77)
self.assertEqual(stack.pop(), 22)
self.assertEqual(stack.pop(), 11)
self.assertEqual(stack.pop(), None)
if __name__ == "__main__":
unittest.main()
|
Symmetry-Innovations-Pty-Ltd/Python-2.7-for-QNX6.5.0-x86 | usr/pkg/lib/python2.7/distutils/unixccompiler.py | Python | mit | 14,430 | 0.00194 | """distutils.unixccompiler
Contains the UnixCCompiler class, a subclass of CCompiler that handles
the "typical" Unix-style command-line C compiler:
* macros defined with -Dname[=value]
* macros undefined with -Uname
* include search directories specified with -Idir
* libraries specified with -lllib
* library search directories specified with -Ldir
* compile handled by 'cc' (or similar) executable with -c option:
compiles .c to .o
* link static library handled by 'ar' command (possibly with 'ranlib')
* link shared library handled by 'cc -shared'
"""
__revision__ = "$Id$"
import os, sys, re
from types import StringType, NoneType
from distutils import sysconfig
from distutils.dep_util import newer
from distutils.ccompiler import \
CCompiler, gen_preprocess_options, gen_lib_options
from distutils.errors import \
DistutilsExecError, CompileError, LibError, LinkError
from distutils import log
# XXX Things not currently handled:
# * optimization/debug/warning flags; we just use whatever's in Python's
# Makefile and live with it. Is this adequate? If not, we might
# have to have a bunch of subclasses GNUCCompiler, SGICCompiler,
# SunCCompiler, and I suspect down that road lies madness.
# * even if we don't know a warning flag from an optimization flag,
# we need some way for outsiders to feed preprocessor/compiler/linker
# flags in to us -- eg. a sysadmin might want to mandate certain flags
# via a site config file, or a user might want to set something for
# compiling this module distribution only via the setup.py command
# line, whatever. As long as these options come from something on the
# current system, they can be as system-dependent as they like, and we
# should just happily stuff them into the preprocessor/compiler/linker
# options and carry on.
def _darwin_compiler_fixup(compiler_so, cc_args):
"""
This function will strip '-isysroot PATH' and '-arch ARCH' from the
compile flags if the user has specified one them in extra_compile_flags.
This is needed because '-arch ARCH' adds another architecture to the
build, without a way to remove an architecture. Furthermore GCC will
barf if multiple '-isysroot' arguments are present.
"""
stripArch = stripSysroot = 0
compiler_so = list(compiler_so)
kernel_version = os.uname()[2] # 8.4.3
major_version = int(kernel_version.split('.')[0])
if major_version < 8:
# OSX before 10.4.0, these don't support -arch and -isysroot at
# all.
stripArch = stripSysroot = True
else:
stripArch = '-arch' in cc_args
stripSysroot = '-isysroot' in cc_args
if stripArch or 'ARCHFLAGS' in os.environ:
while 1:
try:
index = compiler_so.index('-arch')
# Strip this argument and the next one:
del compiler_so[index:index+2]
except ValueError:
break
if 'ARCHFLAGS' in os.environ and not stripArch:
# User specified different -arch flags in the environ,
# see also distutils.sysconfig
compiler_so = compiler_so + os.environ['ARCHFLAGS'].split()
if stripSysroot:
try:
index = compiler_so.index('-isysroot')
# Strip this argument and the next one:
del compiler_so[index:index+2]
except ValueError:
pass
# Check if the SDK that is used during compilation actually exists,
# the universal build requires the usage of a universal SDK and not all
# users have that installed by default.
sysroot = None
if '-isysroot' in cc_args:
idx = cc_args.index('-isysroot')
sysroot = cc_args[idx+1]
elif '-isysroot' in compiler_so:
idx = compiler_so.index('-isysroot')
sysroot = compiler_so[idx+1]
if sysroot and not os.path.isdir(sysroot):
log.warn("Compiling with an SDK that doesn't seem to exist: %s",
sysroot)
log.warn("Please check your Xcode installation")
return compiler_so
class UnixCCompiler(CCompiler):
compiler_type = 'unix'
# These are used by CCompiler in two places: the constructor sets
# instance attributes 'preprocessor', 'compiler', etc. from them, and
# 'set_executable()' allows any of these to be set. The defaults here
# are pretty generic; they will probably have to be set by an outsider
# (eg. using information discovered by the sysconfig about building
# Python extensions).
executables = {'preprocessor' : None,
'compiler' : ["cc"],
'compiler_so' : ["cc"],
'compiler_cxx' : ["cc"],
'linker_so' : ["cc", "-shared"],
'linker_exe' : ["cc"],
'archiver' : ["ar", "-cr"],
'ranlib' : None,
}
if sys.platform[:6] == "darwin":
executables['ranlib'] = ["ranlib"]
# Needed for the filename generation methods provided by the base
# class, CCompiler. NB. whoever instantiates/uses a particular
# UnixCCompiler instance should set 'shared_lib_ext' -- we set a
# reasonable common default here, but it's not necessarily used on all
# Unices!
src_extensions = [".c",".C",".cc",".cxx",".cpp",".m"]
obj_extension = ".o"
static_lib_extension = ".a"
shared_lib_extension = ".so"
dylib_lib_extension = ".dylib"
st | atic_lib_format = shared_lib_format = dylib_lib_format = "lib%s%s"
if sys.platform == "cygwin":
exe_extension = ".exe"
def preprocess(self, source,
output_file=None, macros=None, include_dirs=None,
extra_preargs=None, extra_postargs=None):
ignore, macros, include_dirs = \
self._fix_compile_args(None, macros, include_dirs)
pp_opts = gen_preprocess_options(macros, include_dirs)
pp_args = self.preprocessor | + pp_opts
if output_file:
pp_args.extend(['-o', output_file])
if extra_preargs:
pp_args[:0] = extra_preargs
if extra_postargs:
pp_args.extend(extra_postargs)
pp_args.append(source)
# We need to preprocess: either we're being forced to, or we're
# generating output to stdout, or there's a target output file and
# the source file is newer than the target (or the target doesn't
# exist).
if self.force or output_file is None or newer(source, output_file):
if output_file:
self.mkpath(os.path.dirname(output_file))
try:
self.spawn(pp_args)
except DistutilsExecError, msg:
raise CompileError, msg
def _compile(self, obj, src, ext, cc_args, extra_postargs, pp_opts):
compiler_so = self.compiler_so
if sys.platform == 'darwin':
compiler_so = _darwin_compiler_fixup(compiler_so, cc_args + extra_postargs)
try:
self.spawn(compiler_so + cc_args + [src, '-o', obj] +
extra_postargs)
except DistutilsExecError, msg:
raise CompileError, msg
def create_static_lib(self, objects, output_libname,
output_dir=None, debug=0, target_lang=None):
objects, output_dir = self._fix_object_args(objects, output_dir)
output_filename = \
self.library_filename(output_libname, output_dir=output_dir)
if self._need_link(objects, output_filename):
self.mkpath(os.path.dirname(output_filename))
self.spawn(self.archiver +
[output_filename] +
objects + self.objects)
# Not many Unices required ranlib anymore -- SunOS 4.x is, I
# think the only major Unix that does. Maybe we need some
# platform intelligence here to skip ranlib if it's not
# needed -- or maybe Python's configure script took care of
# it for us, hence the check for leading colon.
if self.ranlib:
try:
|
mountainstorm/MobileDevice | crashmover.py | Python | mit | 4,328 | 0.028195 | #!/usr/bin/python
# coding: utf-8
# Copyright (c) 2013 Mountainstorm
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from MobileDevice import *
from amdevice import *
from plistservice import *
import os
import time
class CrashMover(object):
u'''Moves crash logs from their various scattered locations into the afc
crash log directory'''
def __init__(self, amdevice):
self.s = amdevice.start_service(u'com.apple.crashreportmover')
if self.s is None:
raise RuntimeError(u'Unable to launch: com.apple.crashreportmover')
def disconnect(self):
os.close(self.s)
def move_crashlogs(self, extensions=None):
u'''Moves all crash logs into the afc crash log directory
Arguments:
extensions -- if present a list of crash file extensions to move
XXX not currently working
'''
# XXX should we wait just in case?
time.sleep(2)
buf = os.read(self.s, 1)
while True:
buf += os.read(self.s, 1)
if buf == 'ping':
break # done!
def register_argparse_crashmover(cmdargs):
import argparse
import sys
impo | rt afccrashlogdirectory
import posixpath
import stat
def cmd_crashmove(args, dev):
cm = CrashMover(dev)
cm.move_crashlogs()
cm.disconnect()
def get_logs(afc, path, dest):
dirlist = []
for name in afc.listdir(path):
info = afc.lstat(posixpath.join(path, name))
if info.st_ifmt == stat.S_I | FDIR:
dirlist.append((
posixpath.join(path, name),
os.path.join(dest, name)
))
try:
os.mkdir(os.path.join(dest, name))
except OSError:
pass # it already exists
elif info.st_ifmt == stat.S_IFLNK:
pass # XXX handle symlinks e.g. LatestCrash*
else:
s = afc.open(posixpath.join(path, name), u'r')
d = open(os.path.join(dest, name), u'w+')
d.write(s.readall())
d.close()
s.close()
for names in dirlist:
get_logs(afc, names[0], names[1])
def del_logs(afc, path):
dirlist = []
for name in afc.listdir(path):
info = afc.lstat(posixpath.join(path, name))
if info.st_ifmt == stat.S_IFDIR:
dirlist.append(posixpath.join(path, name))
else:
try:
afc.remove(posixpath.join(path, name))
except:
print('unable to remove file: %s' % name)
for name in dirlist:
del_logs(afc, name)
afc.remove(name)
def cmd_crashget(args, dev):
# move the crashes
cm = CrashMover(dev)
cm.move_crashlogs()
cm.disconnect()
# retrieve the crashes
afc = afccrashlogdirectory.AFCCrashLogDirectory(dev)
get_logs(afc, u'/', args.dest.decode(u'utf-8'))
# optionally, delete the crashes
if args.delete_logs:
del_logs(afc, u'/')
afc.disconnect()
# cmd_crashmove command
crashparser = cmdargs.add_parser(
u'crash',
help=u'manipulates crash logs'
)
crashcmd = crashparser.add_subparsers()
crashmovecmd = crashcmd.add_parser(
u'move',
help=u'moves crash logs into the afc directory'
)
crashmovecmd.set_defaults(func=cmd_crashmove)
# get the crash logs
crashgetcmd = crashcmd.add_parser(
u'get',
help=u'retrieves crash logs from the device'
)
crashgetcmd.add_argument(
u'-d',
dest=u'delete_logs',
action=u'store_true',
help=u'if specified, delete the crash logs after retrieval'
)
crashgetcmd.add_argument(
u'dest',
help=u'destination directory; files are appended into it'
)
crashgetcmd.set_defaults(func=cmd_crashget)
|
phe-bioinformatics/emm-typing-tool | modules/phe_exceptions.py | Python | gpl-3.0 | 2,133 | 0.007501 | '''
Base module for all of the exceptions classes used internally.
Created on 10 Dec 2013
@author: alex
'''
class PheException(Exception):
    '''
    Top-level base class for PHE exceptions.

    Carries a ``phe_return_code`` -- the process exit status to propagate
    to the caller (default 255) -- and a ``cause`` describing the root of
    the failure.  Subclasses are expected to pass their own specific
    return code; PheException itself is not meant to be raised directly.
    '''
    def __init__(self, msg, cause, phe_return_code=255):
        '''
        Constructor.

        @param msg: Human-readable message for the exception.
        @param cause: Root cause of the failure (e.g. a wrapped exception).
        @param phe_return_code: Exit status to propagate (default 255).
        '''
        # BUG FIX: was ``super(Exception, self).__init__(msg)``, which asks
        # for the class *after* Exception in the MRO (i.e. BaseException).
        # It happened to work, but the conventional, safe form names the
        # current class so cooperative inheritance resolves correctly.
        super(PheException, self).__init__(msg)
        self._phe_return_code = phe_return_code
        self._cause = cause
    @property
    def phe_return_code(self):
        '''
        Read-only attribute that holds the return status that should be exited with.
        '''
        return self._phe_return_code
    @property
    def cause(self):
        '''
        Read-only attribute that indicates the root cause of the exception raised.
        '''
        return self._cause
class PheExternalError(PheException):
    '''
    Exception class designed to be raised when an external command/process
    fails. Instead of falling over quietly, this exception can be raised. The
    exception includes the message to be put into the logs and the cause of
    the exception. In this case, the cause should generally be
    subprocess.CalledProcessError.  The particulars of the failed command can
    be found inside the cause.
    If the catcher of this exception chooses to exit, 'phe_return_code'
    (fixed at 55 for this class) should be used as the exit status.
    '''
    def __init__(self, msg, cause):
        '''
        Constructor for the PheExternalError
        @param msg: Message to be displayed with the exception.
        @type msg: str.
        @param cause: Cause of this exception, usually subprocess.CalledProcessError.
        @type cause: class.
        '''
        # 55 is the fixed exit status reserved for external-tool failures.
        super(PheExternalError, self).__init__(msg, cause, 55)
|
workflo/dxf2gcode | python_examples/NURBS_fitting_by_Biarc_curves.py | Python | gpl-3.0 | 69,179 | 0.021943 | #!/usr/bin/python
# -*- coding: cp1252 -*-
#
#NURBS_fittin_by_Biarc_curves
#Programmer: Christian Kohlöffel
#E-mail: n/A
#
#Copyright 2008 Christian Kohlöffel
#
#Distributed under the terms of the GPL (GNU Public License)
#
#dxf2gcode is free software; you can redistribute it and/or modify
#it under the terms of the GNU General Public License as published by
#the Free Software Foundation; either version 2 of the License, or
#(at your option) any later version.
#
#This program is distributed in the hope that it will be useful,
#but WITHOUT ANY WARRANTY; without even the implied warranty of
#MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
#GNU General Public License for more details.
#
#You should have received a copy of the GNU General Public License
#along with this program; if not, write to the Free Software
#Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#import matplotlib
#matplotlib see: http://matplotlib.sourceforge.net/ and http://www.scipy.org/Cookbook/Matplotlib/
#numpy see: http://numpy.scipy.org/ and http://sourceforge.net/projects/numpy/
#matplotlib.use('TkAgg')
from matplotlib.numerix import arange, sin, pi
from matplotlib.axes import Subplot
from matplotlib.backends.backend_tkagg import FigureCanvasTkAgg, NavigationToolbar2TkAgg
from matplotlib.figure import Figure
from Tkconstants import TOP, BOTH, BOTTOM, LEFT, RIGHT,GROOVE
from Tkinter import Tk, Button, Frame
from math import sqrt, sin, cos, atan2, radians, degrees, pi, floor, ceil
import sys
class NURBSClass:
    """2D NURBS curve defined by degree, knot vector, weights and control
    points.  Control points are lifted into homogeneous coordinates and
    evaluation is delegated to BSplineClass.  (Original German comments
    translated to English.)
    """
    def __init__(self,degree=0,Knots=[],Weights=None,CPoints=None):
        # NOTE(review): mutable default ``Knots=[]`` is shared across calls;
        # callers appear to always pass it explicitly -- confirm.
        self.degree=degree #Spline degree
        self.Knots=Knots #Knot vector
        self.CPoints=CPoints #Control points of the spline [2D]
        self.Weights=Weights #Weights of the individual points
        #Initialise derived quantities
        self.HCPts=[] #Homogeneous point vectors [3D]
        #Convert the points into homogeneous points
        self.CPts_2_HCPts()
        #Create the B-spline helper used to evaluate the homogeneous points
        self.BSpline=BSplineClass(degree=self.degree,\
                                  Knots=self.Knots,\
                                  CPts=self.HCPts)
    def check_NURBSParameters(self,tol):
        """Validate the knot vector and control points; records knots with a
        tangent discontinuity in ``self.knt_m_change`` and parameter ranges
        to skip in ``self.ignor``.  Raises ValueError on malformed input."""
        #Check the knot vector
        #Group repeated knots (multiplicity above degree+1 => error)
        knt_nr=1
        knt_vec=[[self.Knots[0]]]
        self.knt_m_change=[]
        while knt_nr < len(self.Knots):
            if self.Knots[knt_nr]==knt_vec[-1][-1]:
                knt_vec[-1].append(self.Knots[knt_nr])
            else:
                knt_vec.append([self.Knots[knt_nr]])
            knt_nr+=1
        for knt_spts in knt_vec:
            if (len(knt_spts)>self.degree+1):
                raise ValueError, "Same Knots Nr. bigger then degree+1"
            #For full-multiplicity interior knots, compare the tangent just
            #before and at the knot to detect a slope discontinuity
            elif ((len(knt_spts)>self.degree)and(knt_spts[-1]>0.0)and(knt_spts[-1]<1.0)):
                temp, tangent0=self.NURBS_evaluate(n=1,u=knt_spts[0]-1e-12)
                temp, tangent1=self.NURBS_evaluate(n=1,u=knt_spts[0])
                if abs(tangent0-tangent1)>1e-6:
                    self.knt_m_change.append(knt_spts[0])
        #Check the control points
        #Group control points that coincide within tol (too many repeats
        #means the corresponding parameter range cannot be evaluated)
        ctlpt_nr=0
        ctlpt_vec=[[ctlpt_nr]]
        while ctlpt_nr < len(self.CPoints)-1:
            ctlpt_nr+=1
            if self.CPoints[ctlpt_nr].isintol(self.CPoints[ctlpt_vec[-1][-1]],tol):
                ctlpt_vec[-1].append(ctlpt_nr)
            else:
                ctlpt_vec.append([ctlpt_nr])
        self.ignor=[]
        for same_ctlpt in ctlpt_vec:
            if (len(same_ctlpt)>self.degree):
                self.ignor.append([self.Knots[same_ctlpt[0]+self.degree/2],\
                                   self.Knots[same_ctlpt[-1]+self.degree/2]])
        #raise ValueError, "Same Controlpoints Nr. bigger then degree+1"
        #print("Same Controlpoints Nr. bigger then degree+2")
        for ignor in self.ignor:
            print("Ignoring u's between u: %s and u: %s" %(ignor[0],ignor[1]))
        if len(self.knt_m_change):
            print("Non steady Angles between Knots: %s" %self.knt_m_change)
    #Compute a number of evenly distributed points, optionally with the
    #first derivative (returned as tangent angles)
    def calc_curve(self,n=0, cpts_nr=20):
        #Initial values for step and u
        u=0; Points=[]; tang=[]
        step=self.Knots[-1]/(cpts_nr-1)
        while u<=1.0:
            Pt,tangent=self.NURBS_evaluate(n=n,u=u)
            Points.append(Pt)
            #For the first derivative the tangent angle is collected
            if n>=1:
                tang.append(tangent)
            u+=step
        if n>=1:
            return Points, tang
        else:
            return Points
    #Evaluate one point of the NURBS and (for n>0) the tangent angle
    def NURBS_evaluate(self,n=0,u=0):
        #Compute the corrected u's
        #cor_u=self.correct_u(u)
        #Evaluate the homogeneous point up to the n-th derivative
        HPt=self.BSpline.bspline_ders_evaluate(n=n,u=u)
        #Transform the point back into Cartesian coordinates
        Point=self.HPt_2_Pt(HPt[0])
        #If n>0, build the first derivative as a direction vector
        dPt=[]
        tangent=None
        if n>0:
            # w(u)*A'(u)-w'(u)*A(u)
            #dPt=---------------------
            #       w(u)^2
            for j in range(len(HPt[0])-1):
                dPt.append((HPt[0][-1]*HPt[1][j]-HPt[1][-1]*HPt[0][j])/
                           pow(HPt[0][-1],2))
            #Angle of the derivative vector
            tangent=atan2(dPt[1],dPt[0])
            return Point, tangent
        else:
            return Point
    #Convert the NURBS control points and weights into homogeneous vectors
    def CPts_2_HCPts(self):
        for P_nr in range(len(self.CPoints)):
            HCPtVec=[self.CPoints[P_nr].x*self.Weights[P_nr],\
                     self.CPoints[P_nr].y*self.Weights[P_nr],\
                     self.Weights[P_nr]]
            self.HCPts.append(HCPtVec[:])
    #Convert a homogeneous point vector back into a Cartesian point
    def HPt_2_Pt(self,HPt):
        return PointClass(x=HPt[0]/HPt[-1],y=HPt[1]/HPt[-1])
class BSplineClass:
def __init__(self,degree=0,Knots=[],CPts=[]):
self.degree=degree
self.Knots=Knots
self.CPts=CPts
self.Knots_len=len(self.Knots)
self.CPt_len=len(self.CPts[0])
self.CPts_len=len(self.CPts)
#Eingangsprüfung, ober KnotenAnzahl usw. passt
if self.Knots_len< self.degree+1:
raise ValueError, "degree greater than number of control points."
if self.Knots_len != (self.CPts_len + self.degree+1):
print ("shall be: %s" %(self.CPts_len + self.degree+1))
print ("is: %s" %self.Knots_len)
raise ValueError, "Knot/Control Point/degree number error."
#Berechnen von eine Anzahl gleichmässig verteilter Punkte bis zur n-ten Ableitung
def calc_curve(self,n=0,cpts_nr=20):
#Anfangswerte für Step und u
u=0
step=float(self.Knots[-1])/(cpts_nr-1)
Points=[]
#Wenn die erste Ableitung oder höher errechnet wird die ersten
#Ableitung in dem tan als Winkel in rad gespeichert
tang=[]
while u<=self.Knots[-1]:
CK=self.bspline_ders_evaluate(n=n,u=u)
#Den Punkt in einem Punkt List abspeichern
Points.append(PointClass(x=CK[0][0],y=CK[0][1]))
#Für die erste Ableitung wird den Winkel der tangente errechnet
if n>=1:
|
joebos/django-allauth | allauth/socialaccount/providers/oauth/views.py | Python | mit | 3,898 | 0.001539 | from __future__ import absolute_import
from django.core.urlresolvers import reverse
from django.http import HttpResponseRedirect
from allauth.socialaccount.helpers import render_authentication_error, send_social_connection_error_email
from allauth.socialaccount.providers.oauth.client import (OAuthClient,
OAuthError)
from alla | uth.socialaccount.helpers import complete_social_login
from allauth.socialaccount import providers
from allauth.socialaccount.models import SocialToken, SocialLogin
from ..base import AuthAction
class OAuthAdapter(object):
    """Base adapter for OAuth 1 providers.

    Subclasses are expected to set ``provider_id`` plus the request/access
    token URLs (referenced by OAuthView._get_client) and implement
    ``complete_login``.
    """
    def complete_login(self, request, app):
        """
        Returns a SocialLogin instance
        """
        raise NotImplementedError
    def get_provider(self):
        # Look up the registered provider matching this adapter.
        return providers.registry.by_id(self.provider_id)
class OAuthView(object):
    """Shared plumbing for the OAuth login/callback views below."""
    @classmethod
    def adapter_view(cls, adapter):
        """Build a Django view function bound to *adapter* (an OAuthAdapter
        subclass); the view instantiates both per-request."""
        def view(request, *args, **kwargs):
            self = cls()
            self.request = request
            self.adapter = adapter()
            return self.dispatch(request, *args, **kwargs)
        return view
    def _get_client(self, request, callback_url):
        """Construct an OAuthClient from the provider's app credentials,
        the adapter's token URLs and the optional scope parameter."""
        provider = self.adapter.get_provider()
        app = provider.get_app(request)
        scope = ' '.join(provider.get_scope())
        parameters = {}
        if scope:
            parameters['scope'] = scope
        client = OAuthClient(request, app.client_id, app.secret,
                             self.adapter.request_token_url,
                             self.adapter.access_token_url,
                             callback_url,
                             parameters=parameters, provider=provider)
        return client
class OAuthLoginView(OAuthView):
    """Start the OAuth 1 dance: obtain a request token and redirect the
    user to the provider's authorization page."""
    def dispatch(self, request):
        callback_url = reverse(self.adapter.provider_id + "_callback")
        SocialLogin.stash_state(request)
        action = request.GET.get('action', AuthAction.AUTHENTICATE)
        provider = self.adapter.get_provider()
        auth_url = provider.get_auth_url(request, action) or self.adapter.authorize_url
        client = self._get_client(request, callback_url)
        try:
            return client.get_redirect(auth_url)
        except OAuthError as e:
            return render_authentication_error(request, {'error': e.message})
        except Exception as ex:
            # BUG FIX: this branch previously fell through returning None,
            # which Django rejects (a view must return an HttpResponse).
            # Notify admins, then show the standard error page.
            send_social_connection_error_email(request, {'error': ex.message})
            return render_authentication_error(request, {'error': ex.message})
class OAuthCallbackView(OAuthView):
    def dispatch(self, request):
        """
        View to handle final steps of OAuth based authentication where the user
        gets redirected back to from the service provider
        """
        login_done_url = reverse(self.adapter.provider_id + "_callback")
        client = self._get_client(request, login_done_url)
        if not client.is_valid():
            if 'denied' in request.GET:
                # The user explicitly refused access at the provider.
                return HttpResponseRedirect(reverse('socialaccount_login_cancelled'))
            extra_context = dict(oauth_client=client)
            return render_authentication_error(request, extra_context)
        app = self.adapter.get_provider().get_app(request)
        try:
            # Exchange the request token for an access token and complete
            # the social login with it.
            access_token = client.get_access_token()
            token = SocialToken(app=app,
                                token=access_token['oauth_token'],
                                token_secret=access_token['oauth_token_secret'])
            login = self.adapter.complete_login(request, app, token)
            token.account = login.account
            login.token = token
            login.state = SocialLogin.unstash_state(request)
            return complete_social_login(request, login)
        except OAuthError as e:
            return render_authentication_error(request, {'error': e.message})
        except Exception as ex:
            # BUG FIX: previously fell off the end returning None, which is
            # not a valid Django view response.  Report the failure, then
            # show the standard error page.
            send_social_connection_error_email(request, {'error': ex.message})
            return render_authentication_error(request, {'error': ex.message})
CaymanUnterborn/burnman | burnman/minerals/HP_2011_fluids.py | Python | gpl-2.0 | 5,104 | 0.001763 | # This file is part of BurnMan - a thermoelastic and thermodynamic toolkit for the Earth and Planetary Sciences
# Copyright (C) 2012 - 2017 by the BurnMan team, released under the GNU
# GPL v2 or later.
"""
HP_2011_fluids
^^^^^^^^
| Fluids from Holland and Powell 2011 and references therein.
CORK parameters:
CHO gases from Holland and Powell, 1991. ["CO2",304.2,0.0738],["CH4",190.6,0.0460],["H2",41.2,0.0211],["CO",132.9,0.0350]
H2O and S2 from Wikipedia, 2012/10/23. ["H2O",647.096,0.22060],["S2",1314.00,0.21000]
H2S from encyclopedia.airliquide.com, 2012/10/23. ["H2S",373.15,0.08937]
NB: Units for cork[i] | in Holland and Powell datasets are
a = kJ^2/kbar*K^(1/2)/mol^2 -> multiply by 1e-2
b = kJ/kbar/mol -> multiply by 1e-5
c = kJ/kbar^1.5/mol -> multiply by 1e-9
d = kJ/kbar^2/mol -> multiply by 1e-13
Individual terms are divided through by P, P, P^1.5, P^2, so
[0][j] -> multiply by 1e6
[1][j] -> multiply by 1e3
[2][j] -> multiply by 1e3
[3][j] -> multiply by 1e3
cork_P: kbar -> multiply by 1e8
"""
from __future__ import absolute_import
from ..mineral import Mineral
from ..processchemistry import dictionarize_formula, formula_mass
class CO2 (Mineral):
    """Carbon dioxide fluid, CORK equation of state (critical point
    T=304.2 K, P=0.0738 kbar; see module docstring for unit conversions)."""
    def __init__(self):
        formula = 'CO2'
        formula = dictionarize_formula(formula)
        self.params = {
            'name': 'carbon dioxide',
            'formula': formula,
            'equation_of_state': 'cork',
            'cork_params': [[5.45963e1, -8.63920e0], [9.18301e-1], [-3.30558e-2, 2.30524e-3], [6.93054e-4, -8.38293e-5]],
            'cork_T': 304.2,
            'cork_P': 0.0738e8,
            'H_0': -393.51e3,
            'S_0': 213.7,
            'Cp': [87.8, -2.644e-3, 706.4e3, -998.9],
            'n': sum(formula.values()),
            'molar_mass': formula_mass(formula)}
        Mineral.__init__(self)
class CH4 (Mineral):
    """Methane fluid, CORK equation of state (critical point T=190.6 K,
    P=0.0460 kbar)."""
    def __init__(self):
        formula = 'CH4'
        formula = dictionarize_formula(formula)
        self.params = {
            'name': 'methane',
            'formula': formula,
            'equation_of_state': 'cork',
            'cork_params': [[5.45963e1, -8.63920e0], [9.18301e-1], [-3.30558e-2, 2.30524e-3], [6.93054e-4, -8.38293e-5]],
            'cork_T': 190.6,
            'cork_P': 0.0460e8,
            'H_0': -74.81e3,
            'S_0': 186.26,
            'Cp': [150.1, 0.002063, 3427700., -2650.4],
            'n': sum(formula.values()),
            'molar_mass': formula_mass(formula)}
        Mineral.__init__(self)
class O2 (Mineral):
    """Oxygen fluid, CORK equation of state.

    NOTE(review): cork_T is 0 and cork_P is 1 bar here, unlike the real
    critical constants used for the other gases -- presumably deliberate
    (reference-state treatment), but worth confirming against the source
    dataset.
    """
    def __init__(self):
        formula = 'O2'
        formula = dictionarize_formula(formula)
        self.params = {
            'name': 'oxygen',
            'formula': formula,
            'equation_of_state': 'cork',
            'cork_params': [[5.45963e1, -8.63920e0], [9.18301e-1], [-3.30558e-2, 2.30524e-3], [6.93054e-4, -8.38293e-5]],
            'cork_T': 0.,
            'cork_P': 1.0e5,
            'H_0': 0.,
            'S_0': 205.2,
            'Cp': [48.3, -0.000691, 499200., -420.7],
            'n': sum(formula.values()),
            'molar_mass': formula_mass(formula)}
        Mineral.__init__(self)
class H2 (Mineral):
    """Hydrogen fluid, CORK equation of state (critical point T=41.2 K,
    P=0.0211 kbar)."""
    def __init__(self):
        formula = 'H2'
        formula = dictionarize_formula(formula)
        self.params = {
            'name': 'hydrogen',
            'formula': formula,
            'equation_of_state': 'cork',
            'cork_params': [[5.45963e1, -8.63920e0], [9.18301e-1], [-3.30558e-2, 2.30524e-3], [6.93054e-4, -8.38293e-5]],
            'cork_T': 41.2,
            'cork_P': 0.0211e8,
            'H_0': 0.,
            'S_0': 130.7,
            'Cp': [23.3, 0.004627, 0.0, 76.3],
            'n': sum(formula.values()),
            'molar_mass': formula_mass(formula)}
        Mineral.__init__(self)
class S2 (Mineral):
    """Disulfur fluid, CORK equation of state (critical point T=1314 K,
    P=0.21 kbar)."""
    def __init__(self):
        formula = 'S2'
        formula = dictionarize_formula(formula)
        self.params = {
            'name': 'sulfur',
            'formula': formula,
            'equation_of_state': 'cork',
            'cork_params': [[5.45963e1, -8.63920e0], [9.18301e-1], [-3.30558e-2, 2.30524e-3], [6.93054e-4, -8.38293e-5]],
            'cork_T': 1314.00,
            'cork_P': 0.21000e8,
            'H_0': 128.54e3,
            'S_0': 231.0,
            'Cp': [37.1, 0.002398, -161000.0, -65.0],
            'n': sum(formula.values()),
            'molar_mass': formula_mass(formula)}
        Mineral.__init__(self)
class H2S (Mineral):
    """Hydrogen sulfide fluid, CORK equation of state (critical point
    T=373.15 K, P=0.08937 kbar).

    NOTE(review): H_0 and S_0 below are byte-identical to the S2 values
    above -- this looks like a copy-paste; verify against the Holland &
    Powell dataset before relying on H2S thermodynamics.
    """
    def __init__(self):
        formula = 'H2S'
        formula = dictionarize_formula(formula)
        self.params = {
            'name': 'hydrogen sulfide',
            'formula': formula,
            'equation_of_state': 'cork',
            'cork_params': [[5.45963e1, -8.63920e0], [9.18301e-1], [-3.30558e-2, 2.30524e-3], [6.93054e-4, -8.38293e-5]],
            'cork_T': 373.15,
            'cork_P': 0.08937e8,
            'H_0': 128.54e3,
            'S_0': 231.0,
            'Cp': [47.4, 0.010240, 615900., -397.8],
            'n': sum(formula.values()),
            'molar_mass': formula_mass(formula)}
        Mineral.__init__(self)
|
awacha/cct | attic/gui/toolframes/accounting.py | Python | bsd-3-clause | 3,520 | 0.003409 | import logging
from ..core.functions import update_comboboxtext_choices
from ..core.toolframe import ToolFrame
from ...core.services.accounting import Accounting, PrivilegeLevel
logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO)
class AccountingFrame(ToolFrame):
    """GTK tool frame showing the current operator, project and privilege
    level, kept in sync with the instrument's 'accounting' service via
    signal connections."""
    def __init__(self, *args, **kwargs):
        # Signal-connection handles on the accounting service; torn down in cleanup().
        self._acctconn = []
        # Re-entrancy guards: suppress widget callbacks while we ourselves
        # update the corresponding widgets programmatically.
        self._projectid_changed_disable = None
        self._updating_privilegeselector = False
        super().__init__(*args, **kwargs)
    def init_gui(self, *args, **kwargs):
        """Populate widgets from the accounting service and subscribe to
        its change signals."""
        self.on_user_changed(self.instrument.services['accounting'],
                             self.instrument.services['accounting'].get_user())
        self.on_accounting_privlevel_changed(self.instrument.services['accounting'],
                                             self.instrument.services['accounting'].get_privilegelevel())
        self._acctconn = [self.instrument.services['accounting'].connect('project-changed', self.on_project_changed),
                          self.instrument.services['accounting'].connect('privlevel-changed',
                                                                         self.on_accounting_privlevel_changed),
                          self.instrument.services['accounting'].connect('user-changed', self.on_user_changed)]
        self.on_project_changed(self.instrument.services['accounting'])
    def cleanup(self):
        # Disconnect all accounting-service signal handlers before tearing down.
        for c in self._acctconn:
            self.instrument.services['accounting'].disconnect(c)
        self._acctconn = []
        return super().cleanup()
    def on_projectid_changed(self, comboboxtext):
        """Widget callback: switch the active project unless the change was
        triggered programmatically."""
        if self._projectid_changed_disable:
            return
        pid = comboboxtext.get_active_text()
        if self.instrument.services['accounting'].get_project().projectid != pid:
            self.instrument.services['accounting'].select_project(pid)
    def on_project_changed(self, accountingservice: Accounting):
        """Service callback: refresh the project selector and labels."""
        pidsel = self.builder.get_object('projectid_selector')
        self._projectid_changed_disable = True
        try:
            proj = accountingservice.get_project()
            update_comboboxtext_choices(pidsel, sorted(self.instrument.services['accounting'].get_projectids()),
                                        set_to=proj.projectid)
            self.builder.get_object('proposer_label').set_text(
                proj.proposer)
            self.builder.get_object('projectname_label').set_text(
                proj.projectname)
        finally:
            self._projectid_changed_disable = False
    def on_privileges_changed(self, selector):
        """Widget callback: push the selected privilege level to the service."""
        if not self._updating_privilegeselector:
            self.instrument.services['accounting'].set_privilegelevel(selector.get_active_text())
        return False
    def on_user_changed(self, accountingservice: Accounting, user):
        """Service callback: show the new operator's user name."""
        self.builder.get_object('operatorname_label').set_text(
            user.username)
    def on_accounting_privlevel_changed(self, accountingservice: Accounting, privlevel: PrivilegeLevel):
        """Service callback: rebuild the privilege selector with the levels
        accessible to the current user."""
        logger.debug('Updating privileges selector. Current privilege level: {}'.format(privlevel))
        self._updating_privilegeselector = True
        try:
            update_comboboxtext_choices(
                self.builder.get_object('privileges_selector'),
                accountingservice.get_accessible_privlevels_str(accountingservice.current_user.privlevel),
                set_to=privlevel.name)
        finally:
            self._updating_privilegeselector = False
|
softinus/IMDB_DataMiner | imdb.com/Parse_err.py | Python | apache-2.0 | 3,935 | 0.012706 | # written by python 2.*
import urllib2
import string
import os
import re
import sys
from datetime import datetime
from bs4 import BeautifulSoup
import logging
import ssl
import Config
from AllMovies_imdb import upload_data, add_empty_data, get_connections, get_business_of_movie, get_companycredits_of_movie, get_fullcredits_of_movie, get_releaseinfo_of_movie, get_techinfo_of_movie, get_detail_of_movie, register_toServer, init_page
logging.basicConfig(filename=Config.LOG_PATH,level=logging.DEBUG)
ssl._create_default_https_context = ssl._create_unverified_context
os.environ["PARSE_API_ROOT"] = Config.PARSE_SERVER_URI
from parse_rest.datatypes import Function, Object, GeoPoint, File
from parse_rest.connection import register
from parse_rest.query import QueryResourceDoesNotExist
from parse_rest.connection import ParseBatcher
from parse_rest.core import ResourceRequestBadRequest, ParseError
register(Config.APPLICATION_ID, Config.REST_API_KEY, master_key=Config.MASTER_KEY)
reload(sys)
sys.setdefaultencoding('utf-8')
# for saving server progress to backend
class Parsing(Object):
    # Parse-server table for overall scraping progress (schema defined server-side).
    pass
class Parsing_err(Object):
    # Parse-server table of failed pages to retry; rows carry page_num,
    # entity_num, err_url and status (see check_errsheets/get_err_movies).
    pass
class Parsing_err2(Object):
    # Parse-server table recording failures that occur while retrying an
    # entry from Parsing_err (second-level error log).
    pass
def check_errsheets(currServer):
    """Return the first Parsing_err row still pending (status 'pend',
    'pending' or empty), or None when nothing is left to retry.

    NOTE(review): ``currServer`` is unused; only the first 1000 rows are
    examined per call.
    """
    errsheets = Parsing_err.Query.all().limit(1000)
    todo = None
    for sheet in errsheets:
        if (sheet.status == "pend") or (sheet.status == "pending") or (sheet.status == ""):
            todo= sheet
            break
    if todo is None:
        print "There's nothing to do anymore."
        return None
    else:
        print 'Found the err tasks.'
        return todo
def get_err_movies(session, task):
    """Re-scrape the IMDB list page recorded in *task* (a Parsing_err row),
    starting from the entity index where the previous run failed, and append
    one pipe-delimited line per movie to an output file.

    Returns the path of the output file.  Failures are logged to a
    Parsing_err2 row linked back to *task*.
    """
    #count var
    ERR_PAGE= task.page_num
    ERR_IDX= task.entity_num
    ITEMS_PER_PAGE= 50.0
    count_total= 0
    filePath= "Output_Err_page_"+str(ERR_PAGE)+"_idx_"+str(ERR_IDX)+".txt"
    # spliter between tasks
    text_file = open(filePath, "a")
    text_file.write("\n")
    text_file.close()
    # Mark the sheet as claimed before starting so other workers skip it.
    task.status = "working"
    print "Now assigned to the server and begin the task."
    task.save()
    url= task.err_url
    print url
    try:
        idx= 0
        page = urllib2.urlopen(url)
        soup = BeautifulSoup(page, "lxml")
        task.last_page= url
        task.save()
        items = soup.findAll("span", { "class":"lister-item-header" })
        print "item count in this page : "+ str(len(items))
        for idx,item in enumerate(items):
            print "page : "+str(ERR_PAGE)+", idx : "+str(idx) + " out of " + str(ITEMS_PER_PAGE) + ", ERR_IDX was " + str(ERR_IDX)
            # Skip entities already handled before the previous failure.
            if idx < ERR_IDX:
                print "skip"
                continue
            item_href= item.span.findNext('span').a['href']
            item_name= item.span.findNext('span').text
            count_total += 1
            task.done_count = count_total
            task.save()
            arrData= get_detail_of_movie(item_href, item_name)
            # Append the record as pipe-delimited fields, one line per movie.
            text_file = open(filePath, "a")
            for data in arrData:
                text_file.write(str(data) + "|")
            text_file.write('\n')
            text_file.close()
    except Exception, e:
        # Record where we failed so a later run can resume from this index.
        logging.exception(e)
        print e
        currError = Parsing_err2(page_num=task.page_num, entity_num=idx, err_url=url)
        currError.save()
        relation= currError.relation("Parsing_err")
        relation.add(task)
    return filePath
#Here's the main
# Worker loop: register with the backend, claim one pending error sheet at
# a time, re-scrape its page, then upload (or close out) the result.
while True:
    session= register_toServer() # 1. try to register
    todo = check_errsheets(session) # 2. obtain the todo list
    if todo is None: # 3. if no longer things to do, exit
        print "Done."
        break
    else:
        fileuri= get_err_movies(session, todo) # 4. if it has things to do, do work.
        if fileuri == "":
            print "There's no file."
            todo.status = "done"
            todo.save()
        else:
            upload_data(session, todo, fileuri)
hellsgate1001/thatforum_django | thatforum/models.py | Python | mit | 3,446 | 0 | from django.conf import settings
from django.core.urlresolvers import reverse
from django.db import models
from django.template.defaultfilters import slugify
from mptt.models import MPTTModel, TreeForeignKey
class ForumCategory(MPTTModel):
    """Tree of forum categories (MPTT).  Top-level nodes (parent=None) act
    as section headers; child nodes hold the actual threads."""
    parent = TreeForeignKey(
        'self', blank=True, null=True, related_name='children'
    )
    name = models.CharField(max_length=255)
    slug = models.SlugField(max_length=255)
    description = models.CharField(max_length=255, blank=True)
    # Manual ordering hint within the parent category.
    order = models.PositiveIntegerField(blank=True, null=True)
    def __unicode__(self):
        return self.name
    @property
    def last_post(self):
        """Most recent post across this category's threads, or None for
        top-level categories.
        NOTE(review): iterates every thread (N+1 queries) and assumes each
        thread has at least one post -- a post-less thread would raise
        AttributeError on ``.created``.  Confirm before relying on it."""
        if self.parent is None:
            return None
        response = None
        for thread in self.forumthread_set.all():
            if response is None:
                response = thread.last_post
            else:
                if thread.last_post.created > response.created:
                    response = thread.last_post
        return response
    @property
    def post_count(self):
        """Total posts in all threads of this category (one COUNT query
        per thread)."""
        count = 0
        for thread in self.forumthread_set.all():
            count += thread.forumpost_set.count()
        return count
    class Meta:
        verbose_name_plural = 'Forum categories'
class ForumThread(models.Model):
    """A discussion thread inside a ForumCategory.  The opening post lives
    in ForumPost with is_thread_starter=True."""
    category = models.ForeignKey(ForumCategory)
    title = models.CharField(max_length=255)
    slug = models.SlugField(max_length=255)
    author = models.ForeignKey(settings.AUTH_USER_MODEL)
    created = models.DateTimeField(auto_now_add=True)
    def __unicode__(self):
        return self.title
    def get_absolute_url(self):
        return reverse('thread_home', kwargs={'slug': self.slug})
    @property
    def last_post(self):
        # Newest post in the thread, or None if there are no posts yet.
        return self.forumpost_set.order_by('-created').first()
    @property
    def num_replies(self):
        # Replies exclude the thread-starter post itself.
        return self.forumpost_set.filter(is_thread_starter=False).count()
    @property
    def thread_starter(self):
        # NOTE(review): raises DoesNotExist/MultipleObjectsReturned if the
        # starter is missing or duplicated -- callers appear to rely on
        # exactly one starter per thread.
        return self.forumpost_set.get(thread=self, is_thread_starter=True)
    def save(self, *args, **kwargs):
        # Auto-generate the slug from the title on first save only.
        if self.slug == '':
            self.slug = slugify(self.title)
        return super(ForumThread, self).save(*args, **kwargs)
class ForumPost(models.Model):
    """A single post in a thread; optionally a reply to another post, and
    flagged when it is the thread's opening post."""
    thread = models.ForeignKey(ForumThread)
    post = models.TextField()
    author = models.ForeignKey(settings.AUTH_USER_MODEL)
    created = models.DateTimeField(auto_now_add=True)
    updated = models.DateTimeField(auto_now=True)
    reply_to = models.ForeignKey('self', blank=True, null=True)
    is_thread_starter = models.BooleanField(default=False)
    def __unicode__(self):
        return '%(thread)s - %(pk)s' % {
            'thread': self.thread.title,
            'pk': self.pk
        }
    def get_breadcrumb(self):
        """Return (label, url) pairs from the root category down to this
        post's thread, walking the category tree upwards."""
        breadcrumb = [
            (
                self.thread.title,
                reverse(
                    'thread_home',
                    kwargs={'slug': self.thread.slug}
                )
            ),
        ]
        category = self.thread.category
        while True:
            breadcrumb_item = (
                category.name,
                reverse(
                    'category_home',
                    kwargs={'slug': category.slug}
                ),
            )
            # Prepend so ancestors end up before descendants.
            breadcrumb.insert(0, breadcrumb_item)
            if category.parent is None:
                break
            category = category.parent
        return breadcrumb
|
ppizarror/Hero-of-Antair | data/images/pil/FitsStubImagePlugin.py | Python | gpl-2.0 | 1,644 | 0.002433 | #
# T | he Python Imaging Library
# $Id$
#
# FITS stub adapter
#
# Copyright (c) 1998-2003 by Fredrik Lundh
#
# See the README file for information on usage and redistribution.
#
import Image, ImageFile
_handler = None
##
# Install application-specific FITS image handler.
#
# @param handler Handler object.
def register_handler(handler):
    """Install the application-specific FITS handler used by the stub
    plugin's load/save paths.  *handler* must provide ``open``/``save``."""
    global _handler
    _handler = handler
# --------------------------------------------------------------------
# Image adapter
def _accept(prefix):
return prefix[:6] == "SIMPLE"
class FITSStubImageFile(ImageFile.StubImageFile):
    """Stub image file for FITS: identifies the format and defers actual
    decoding to the handler installed via register_handler()."""
    format = "FITS"
    format_description = "FITS"
    def _open(self):
        offset = self.fp.tell()
        if not _accept(self.fp.read(6)):
            raise SyntaxError("Not a FITS file")
        # FIXME: add more sanity checks here; mandatory header items
        # include SIMPLE, BITPIX, NAXIS, etc.
        self.fp.seek(offset)
        # make something up -- placeholder mode/size until a real handler
        # (if any) opens the file.
        self.mode = "F"
        self.size = 1, 1
        loader = self._load()
        if loader:
            loader.open(self)
    def _load(self):
        # The installed handler (or None) drives the actual pixel loading.
        return _handler
def _save(im, fp, filename):
    """Delegate saving to the installed FITS handler.

    Raises IOError when no handler exposing a ``save`` method has been
    registered via register_handler().
    """
    # BUG FIX: the original tested ``hasattr("_handler", "save")`` -- it
    # probed the *string* "_handler" (which never has a "save" attribute),
    # so every save attempt raised IOError even with a handler installed.
    if _handler is None or not hasattr(_handler, "save"):
        raise IOError("FITS save handler not installed")
    _handler.save(im, fp, filename)
# --------------------------------------------------------------------
# Registry
# Register the stub with PIL's format registry: open/save hooks plus both
# common FITS filename extensions.
Image.register_open(FITSStubImageFile.format, FITSStubImageFile, _accept)
Image.register_save(FITSStubImageFile.format, _save)
Image.register_extension(FITSStubImageFile.format, ".fit")
Image.register_extension(FITSStubImageFile.format, ".fits")
Pantkowsky/electricitymap | feeder/parsers/ENTSOE.py | Python | gpl-3.0 | 15,636 | 0.004605 | from bs4 import BeautifulSoup
from collections import defaultdict
import arrow, os, re, requests
ENTSOE_ENDPOINT = 'https://transparency.entsoe.eu/api'
ENTSOE_PARAMETER_DESC = {
'B01': 'Biomass',
'B02': 'Fossil Brown coal/Lignite',
'B03': 'Fossil Coal-derived gas',
'B04': 'Fossil Gas',
'B05': 'Fossil Hard coal',
'B06': 'Fossil Oil',
'B07': 'Fossil Oil shale',
'B08': 'Fossil Peat',
'B09': 'Geothermal',
'B10': 'Hydro Pumped Storage',
'B11': 'Hydro Run-of-river and poundage',
'B12': 'Hydro Water Reservoir',
'B13': 'Marine',
'B14': 'Nuclear',
'B15': 'Other renewable',
'B16': 'Solar',
'B17': 'Waste',
'B18': 'Wind Offshore',
'B19': 'Wind Onshore',
'B20': 'Other',
}
ENTSOE_PARAMETER_BY_DESC = {v: k for k, v in ENTSOE_PARAMETER_DESC.iteritems()}
# Define all ENTSOE country_code <-> domain mapping
ENTSOE_DOMAIN_MAPPINGS = {
'AL': '10YAL-KESH-----5',
'AT': '10YAT-APG------L',
'BA': '10YBA-JPCC-----D',
'BE': '10YBE----------2',
'BG': '10YCA-BULGARIA-R',
'BY': '10Y1001A1001A51S',
'CH': '10YCH-SWISSGRIDZ',
'CZ': '10YCZ-CEPS-----N',
'DE': '10Y1001A1001A83F',
'DK': '10Y1001A1001A65H',
'EE': '10Y1001A1001A39I',
'ES': '10YES-REE------0',
'FI': '10YFI-1--------U',
'FR': '10YFR-RTE------C',
'GB': '10YGB----------A',
'GB-NIR': '10Y1001A1001A016',
'GR': '10YGR-HTSO-----Y',
'HR': '10YHR-HEP------M',
'HU': '10YHU-MAVIR----U',
'IE': '10YIE-1001A00010',
'IT': '10YIT-GRTN-----B',
'LT': '10YLT-1001A0008Q',
'LU': '10YLU-CEGEDEL-NQ',
'LV': '10YLV-1001A00074',
# 'MD': 'MD',
'ME': '10YCS-CG-TSO---S',
'MK': '10YMK-MEPSO----8',
'MT': '10Y1001A1001A93C',
'NL': '10YNL----------L',
'NO': '10YNO-0--------C',
'PL': '10YPL-AREA-----S',
'PT': '10YPT-REN------W',
'RO': '10YRO-TEL------P',
'RS': '10YCS-SERBIATSOV',
'RU': '10Y1001A1001A49F',
'SE': '10YSE-1--------K',
'SI': '10YSI-ELES-----O',
'SK': '10YSK-SEPS-----K',
'TR': '10YTR-TEIAS----W',
'UA': '10Y1001A1001A869'
}
def query_ENTSOE(session, params):
    """Issue a GET against the ENTSOE transparency API with *params*,
    adding a +/-24h time window around now and the security token.

    Mutates *params* in place.  Raises if ENTSOE_TOKEN is not set in the
    environment; returns the raw requests Response.
    """
    now = arrow.utcnow()
    params['periodStart'] = now.replace(hours=-24).format('YYYYMMDDHH00')
    params['periodEnd'] = now.replace(hours=+24).format('YYYYMMDDHH00')
    if not 'ENTSOE_TOKEN' in os.environ:
        raise Exception('No ENTSOE_TOKEN found! Please add it into secrets.env!')
    params['securityToken'] = os.environ['ENTSOE_TOKEN']
    return session.get(ENTSOE_ENDPOINT, params=params)
def query_consumption(domain, session):
    """Fetch total load (ENTSOE document A65) for a bidding zone.

    Returns the raw XML text, None when ENTSOE reports no matching data,
    or raises with the ENTSOE-supplied error text otherwise.
    """
    params = {
        'documentType': 'A65',
        'processType': 'A16',
        'outBiddingZone_Domain': domain,
    }
    response = query_ENTSOE(session, params)
    if response.ok: return response.text
    else:
        # Grab the error if possible
        soup = BeautifulSoup(response.text, 'html.parser')
        error_text = soup.find_all('text')[0].contents[0]
        if 'No matching data found' in error_text: return
        raise Exception('Failed to get consumption. Reason: %s' % error_text)
def query_production(psr_type, in_domain, session):
    """Fetch actual generation per production type (ENTSOE document A75)
    for one ``psr_type`` in bidding zone ``in_domain``.

    Returns the raw XML text on success and None on any failure.  Unlike
    the sibling query_* helpers this one never raises: many zones simply
    have no data for a given production type.
    """
    params = {
        'psrType': psr_type,
        'documentType': 'A75',
        'processType': 'A16',
        'in_Domain': in_domain,
    }
    response = query_ENTSOE(session, params)
    if response.ok:
        return response.text
    # NOTE: the original error-reporting code that used to follow here was
    # unreachable (it sat below an unconditional ``return``) and has been
    # removed; behaviour is unchanged.
    return None
def query_exchange(in_domain, out_domain, session):
    """Fetch cross-border physical flows (ENTSOE document A11) from
    *out_domain* into *in_domain*.

    Returns the raw XML text, None when ENTSOE reports no matching data,
    or raises with the ENTSOE-supplied error text otherwise.
    """
    params = {
        'documentType': 'A11',
        'in_Domain': in_domain,
        'out_Domain': out_domain,
    }
    response = query_ENTSOE(session, params)
    if response.ok: return response.text
    else:
        # Grab the error if possible
        soup = BeautifulSoup(response.text, 'html.parser')
        error_text = soup.find_all('text')[0].contents[0]
        if 'No matching data found' in error_text: return
        raise Exception('Failed to get exchange. Reason: %s' % error_text)
def query_price(domain, session):
    """Fetch day-ahead prices (ENTSOE document A44) for a bidding zone.

    Returns the raw XML text, None when ENTSOE reports no matching data,
    or raises with the ENTSOE-supplied error text otherwise.
    """
    params = {
        'documentType': 'A44',
        'in_Domain': domain,
        'out_Domain': domain,
    }
    response = query_ENTSOE(session, params)
    if response.ok: return response.text
    else:
        # Grab the error if possible
        soup = BeautifulSoup(response.text, 'html.parser')
        error_text = soup.find_all('text')[0].contents[0]
        if 'No matching data found' in error_text: return
        raise Exception('Failed to get price. Reason: %s' % error_text)
def datetime_from_position(start, position, resolution):
    """Translate an ENTSOE curve point into a timestamp.

    *start* is the period start (an arrow-like object), *position* the
    1-based-ish point index from the XML, and *resolution* an ISO-8601
    duration such as ``PT15M``.  Only minute resolutions are supported;
    anything else raises NotImplementedError.
    """
    match = re.search(r'PT(\d+)([M])', resolution)
    if match:
        step = int(match.group(1))
        unit = match.group(2)
        if unit == 'M':
            return start.replace(minutes=position * step)
    raise NotImplementedError('Could not recognise resolution %s' % resolution)
def parse_consumption(xml_text):
    """Parse an A65 (load) XML document into parallel lists of MW values
    and timestamps.  Returns None when *xml_text* is falsy."""
    if not xml_text: return None
    soup = BeautifulSoup(xml_text, 'html.parser')
    # Get all points
    quantities = []
    datetimes = []
    for timeseries in soup.find_all('timeseries'):
        resolution = timeseries.find_all('resolution')[0].contents[0]
        datetime_start = arrow.get(timeseries.find_all('start')[0].contents[0])
        for entry in timeseries.find_all('point'):
            quantities.append(float(entry.find_all('quantity')[0].contents[0]))
            position = int(entry.find_all('position')[0].contents[0])
            datetimes.append(datetime_from_position(datetime_start, position, resolution))
    return quantities, datetimes
def parse_production(xml_text):
    """Parse an A75 (generation per type) XML document.

    Each timeseries either carries production (inBiddingZone_Domain
    present) or storage consumption; values for the same timestamp are
    merged into parallel ``productions``/``storages`` lists.  Returns
    (productions, storages, datetimes), or None when *xml_text* is falsy.
    """
    if not xml_text: return None
    soup = BeautifulSoup(xml_text, 'html.parser')
    # Get all points
    productions = []
    storages = []
    datetimes = []
    for timeseries in soup.find_all('timeseries'):
        resolution = timeseries.find_all('resolution')[0].contents[0]
        datetime_start = arrow.get(timeseries.find_all('start')[0].contents[0])
        # Presence of inBiddingZone_Domain marks a production series;
        # its absence marks a (pumped-)storage consumption series.
        is_production = len(timeseries.find_all('inBiddingZone_Domain.mRID'.lower())) > 0
        for entry in timeseries.find_all('point'):
            quantity = float(entry.find_all('quantity')[0].contents[0])
            position = int(entry.find_all('position')[0].contents[0])
            datetime = datetime_from_position(datetime_start, position, resolution)
            try:
                # Merge into an existing timestamp slot if one exists.
                i = datetimes.index(datetime)
                if is_production:
                    productions[i] = quantity
                else:
                    storages[i] = quantity
            except ValueError: # Not in list
                datetimes.append(datetime)
                productions.append(quantity if is_production else 0)
                storages.append(quantity if not is_production else 0)
    return productions, storages, datetimes
def parse_exchange(xml_text, is_import, quantities=None, datetimes=None):
if not xml_text: return None
if not quantities: quantities = []
if not datetimes: datetimes = []
soup = BeautifulSoup(xml_text, 'html.parser')
# Get all points
for timeseries in soup.find_all('timeseries'):
resolution = timeseries.find_all('resolution')[0].contents[0]
datetime_start = arrow.get(timeseries.find_all('start')[0].contents[0])
for entry in timeseries.find_all('point'):
quantity = float(entry.find_all('quantity')[0].contents[0])
if not is_import: quantity *= -1
position = int(entry.find_all('position')[0].contents[0])
datetime = datetime_from_position(datetime_start, position, resolution)
# Find out whether or not we should update the net production
try:
i = datetimes.index(datetime)
quantities[i] += quantity
except ValueError: # Not in list
quantities.append(quantity)
datetimes.append(datetime)
return |
JuliaSprenger/elephant | elephant/statistics.py | Python | bsd-3-clause | 44,328 | 0 | # -*- coding: utf-8 -*-
"""
Statistical measures of spike trains (e.g., Fano factor) and functions to
estimate firing rates.
Tutorial
--------
:doc:`View tutorial <../tutorials/statistics>`
Run tutorial interactively:
.. image:: https://mybinder.org/badge.svg
:target: https://mybinder.org/v2/gh/NeuralEnsemble/elephant/master
?filepath=doc/tutorials/statistics.ipynb
.. current_module elephant.statistics
Functions overview
------------------
Rate estimation
~~~~~~~~~~~~~~~
.. autosummary::
:toctree: toctree/statistics/
mean_firing_rate
instantaneous_rate
time_histogram
optimal_kernel_bandwidth
Spike interval statistics
~~~~~~~~~~~~~~~~~~~~~~~~~
.. autosummary::
:toctree: toctree/statistics/
isi
cv
cv2
lv
lvr
Statistics across spike trains
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
.. autosummary::
:toctree: toctree/statistics/
fanofactor
complexity_pdf
:copyright: Copyright 2014-2020 by the Elephant team, see `doc/authors.rst`.
:license: Modified BSD, see LICENSE.txt for details.
"""
from __future__ import division, print_function
# do not import unicode_literals
# (quantities rescale does not work with unicodes)
import warnings
import neo
import numpy as np
import math
import quantities as pq
import scipy.signal
import scipy.stats
from neo.core import SpikeTrain
import elephant.kernels as kernels
from elephant.conversion import BinnedSpikeTrain
from elephant.utils import deprecated_alias, get_common_start_stop_times, \
check_neo_consistency
from elephant.utils import is_time_quantity
__all__ = [
"isi",
"mean_firing_rate",
"fanofactor",
"cv",
"cv2",
"lv",
"lvr",
"instantaneous_rate",
"time_histogram",
"complexity_pdf",
"fftkernel",
"optimal_kernel_bandwidth"
]
cv = scipy.stats.variation
def isi(spiketrain, axis=-1):
    """
    Return an array containing the inter-spike intervals of the spike train.

    Accepts a `neo.SpikeTrain`, a `pq.Quantity` array, a `np.ndarray`, or a
    list of spike times. A `neo.SpikeTrain` or `pq.Quantity` input yields a
    `pq.Quantity` result (same units as the input); anything else yields a
    plain `np.ndarray`.

    Parameters
    ----------
    spiketrain : neo.SpikeTrain or pq.Quantity or array-like
        The spike times.
    axis : int, optional
        The axis along which the difference is taken.
        Default: the last axis.

    Returns
    -------
    intervals : np.ndarray or pq.Quantity
        The inter-spike intervals of the `spiketrain`.

    Warns
    -----
    UserWarning
        If any interval is negative (i.e. the input is not sorted).
    """
    if isinstance(spiketrain, neo.SpikeTrain):
        diffs = np.diff(spiketrain.magnitude, axis=axis)
        # np.diff already produced a fresh array, so no extra copy is needed.
        diffs = pq.Quantity(diffs, units=spiketrain.units, copy=False)
    else:
        diffs = np.diff(spiketrain, axis=axis)
    if (diffs < 0).any():
        warnings.warn("ISI evaluated to negative values. "
                      "Please sort the input array.")
    return diffs
def mean_firing_rate(spiketrain, t_start=None, t_stop=None, axis=None):
    """
    Return the firing rate of the spike train.
    The firing rate is calculated as the number of spikes in the spike train
    in the range `[t_start, t_stop]` divided by the time interval
    `t_stop - t_start`. See the description below for cases when `t_start` or
    `t_stop` is None.
    Accepts a `neo.SpikeTrain`, a `pq.Quantity` array, or a plain
    `np.ndarray`. If either a `neo.SpikeTrain` or `pq.Quantity` array is
    provided, the return value will be a `pq.Quantity` array, otherwise a
    plain `np.ndarray`. The units of the `pq.Quantity` array will be the
    inverse of the `spiketrain`.
    Parameters
    ----------
    spiketrain : neo.SpikeTrain or pq.Quantity or np.ndarray
        The spike times.
    t_start : float or pq.Quantity, optional
        The start time to use for the interval.
        If None, retrieved from the `t_start` attribute of `spiketrain`. If
        that is not present, default to 0. All spiketrain's spike times below
        this value are ignored.
        Default: None.
    t_stop : float or pq.Quantity, optional
        The stop time to use for the time points.
        If not specified, retrieved from the `t_stop` attribute of
        `spiketrain`. If that is not present, default to the maximum value of
        `spiketrain`. All spiketrain's spike times above this value are
        ignored.
        Default: None.
    axis : int, optional
        The axis over which to do the calculation; has no effect when the
        input is a neo.SpikeTrain, because a neo.SpikeTrain is always a 1-d
        vector. If None, do the calculation over the flattened array.
        Default: None.
    Returns
    -------
    float or pq.Quantity or np.ndarray
        The firing rate of the `spiketrain`
    Raises
    ------
    TypeError
        If the input spiketrain is a `np.ndarray` but `t_start` or `t_stop` is
        `pq.Quantity`.
        If the input spiketrain is a `neo.SpikeTrain` or `pq.Quantity` but
        `t_start` or `t_stop` is not `pq.Quantity`.
    ValueError
        If the input spiketrain is empty.
    """
    # Fast path: a SpikeTrain with default bounds needs no spike filtering --
    # every spike is inside [t_start, t_stop] by construction.
    if isinstance(spiketrain, neo.SpikeTrain) and t_start is None \
            and t_stop is None and axis is None:
        # a faster approach for a typical use case
        n_spikes = len(spiketrain)
        time_interval = spiketrain.t_stop - spiketrain.t_start
        time_interval = time_interval.rescale(spiketrain.units)
        rate = n_spikes / time_interval
        return rate
    if isinstance(spiketrain, pq.Quantity):
        # Quantity or neo.SpikeTrain
        if not is_time_quantity(t_start, allow_none=True):
            raise TypeError("'t_start' must be a Quantity or None")
        if not is_time_quantity(t_stop, allow_none=True):
            raise TypeError("'t_stop' must be a Quantity or None")
        units = spiketrain.units
        if t_start is None:
            # Plain Quantity arrays have no t_start attribute; fall back to 0.
            t_start = getattr(spiketrain, 't_start', 0 * units)
        t_start = t_start.rescale(units).magnitude
        if t_stop is None:
            t_stop = getattr(spiketrain, 't_stop',
                             np.max(spiketrain, axis=axis))
        t_stop = t_stop.rescale(units).magnitude
        # calculate as a numpy array: recurse with bare magnitudes, then
        # reattach the inverse units to the result.
        rates = mean_firing_rate(spiketrain.magnitude, t_start=t_start,
                                 t_stop=t_stop, axis=axis)
        rates = pq.Quantity(rates, units=1. / units)
        return rates
    elif isinstance(spiketrain, (np.ndarray, list, tuple)):
        if isinstance(t_start, pq.Quantity) or isinstance(t_stop, pq.Quantity):
            raise TypeError("'t_start' and 't_stop' cannot be quantities if "
                            "'spiketrain' is not a Quantity.")
        spiketrain = np.asarray(spiketrain)
        if len(spiketrain) == 0:
            raise ValueError("Empty input spiketrain.")
        if t_start is None:
            t_start = 0
        if t_stop is None:
            t_stop = np.max(spiketrain, axis=axis)
        time_interval = t_stop - t_start
        # Expand t_stop so the comparison below broadcasts along `axis`.
        if axis and isinstance(t_stop, np.ndarray):
            t_stop = np.expand_dims(t_stop, axis)
        # Count spikes inside [t_start, t_stop] and divide by the interval.
        rates = np.sum((spiketrain >= t_start) & (spiketrain <= t_stop),
                       axis=axis) / time_interval
        return rates
    else:
        raise TypeError("Invalid input spiketrain type: '{}'. Allowed: "
                        "neo.SpikeTrain, Quantity, ndarray".
                        format(type(spiketrain)))
def fanofactor(spiketrains, warn_tolerance=0.1 * pq.ms):
r"""
Evaluates the empirical Fano factor F of the spike counts of
a list of `neo.SpikeTrain` objects.
Given the vector v containing the observed spike counts (one per
spike train) in the time window [t0, t1], F is defined as:
.. math::
F := \frac{var(v)}{mean(v)}
The Fano factor is typically computed for spike trains representing the
activity of |
mongodb/mongo-python-driver | test/test_transactions.py | Python | apache-2.0 | 20,801 | 0.00149 | # Copyright 2018-present MongoDB, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Execute Transactions Spec tests."""
import os
import sys
from io import BytesIO
sys.path[0:0] = [""]
from test import client_context, unittest
from test.utils import (
OvertCommandListener,
TestCreator,
rs_client,
single_client,
wait_until,
)
from test.utils_spec_runner import SpecRunner
from gridfs import GridFS, GridFSBucket
from pymongo import WriteConcern, client_session
from pymongo.client_session import TransactionOptions
from pymongo.errors import (
CollectionInvalid,
ConfigurationError,
ConnectionFailure,
InvalidOperation,
OperationFailure,
)
from pymongo.operations import IndexModel, InsertOne
from pymongo.read_concern import ReadConcern
from pymongo.read_preferences import ReadPreference
# Location of JSON test specifications.
TEST_PATH = os.path.join(os.path.dirname(os.path.realpath(__file__)), "transactions", "legacy")
_TXN_TESTS_DEBUG = os.environ.get("TRANSACTION_TESTS_DEBUG")
# Max number of operations to perform after a transaction to prove unpinning
# occurs. Chosen so that there's a low false positive rate. With 2 mongoses,
# 50 attempts yields a one in a quadrillion chance of a false positive
# (1/(0.5^50)).
UNPIN_TEST_MAX_ATTEMPTS = 50
class TransactionsBase(SpecRunner):
    """Shared setup/teardown for the transactions spec-runner tests."""
    @classmethod
    def setUpClass(cls):
        super(TransactionsBase, cls).setUpClass()
        if client_context.supports_transactions():
            # Keep one direct client per mongos so tests can target each one.
            for address in client_context.mongoses:
                cls.mongos_clients.append(single_client("%s:%s" % address))
    @classmethod
    def tearDownClass(cls):
        for client in cls.mongos_clients:
            client.close()
        super(TransactionsBase, cls).tearDownClass()
    def maybe_skip_scenario(self, test):
        super(TransactionsBase, self).maybe_skip_scenario(test)
        # Secondary-read scenarios need an actual secondary when connected
        # directly to a replica set (mongos handles routing itself).
        if (
            "secondary" in self.id()
            and not client_context.is_mongos
            and not client_context.has_secondaries
        ):
            raise unittest.SkipTest("No secondaries")
class TestTransactions(TransactionsBase):
RUN_ON_SERVERLESS = True
    @client_context.require_transactions
    def test_transaction_options_validation(self):
        """TransactionOptions accepts valid options and rejects bad types."""
        default_options = TransactionOptions()
        # With no arguments every option is None (server defaults apply).
        self.assertIsNone(default_options.read_concern)
        self.assertIsNone(default_options.write_concern)
        self.assertIsNone(default_options.read_preference)
        self.assertIsNone(default_options.max_commit_time_ms)
        # No error when valid options are provided.
        TransactionOptions(
            read_concern=ReadConcern(),
            write_concern=WriteConcern(),
            read_preference=ReadPreference.PRIMARY,
            max_commit_time_ms=10000,
        )
        with self.assertRaisesRegex(TypeError, "read_concern must be "):
            TransactionOptions(read_concern={})  # type: ignore
        with self.assertRaisesRegex(TypeError, "write_concern must be "):
            TransactionOptions(write_concern={})  # type: ignore
        # Unacknowledged writes are never allowed inside a transaction.
        with self.assertRaisesRegex(
            ConfigurationError, "transactions do not support unacknowledged write concern"
        ):
            TransactionOptions(write_concern=WriteConcern(w=0))
        with self.assertRaisesRegex(TypeError, "is not valid for read_preference"):
            TransactionOptions(read_preference={})  # type: ignore
        with self.assertRaisesRegex(TypeError, "max_commit_time_ms must be an integer or None"):
            TransactionOptions(max_commit_time_ms="10000")  # type: ignore
    @client_context.require_transactions
    def test_transaction_write_concern_override(self):
        """Test txn overrides Client/Database/Collection write_concern."""
        client = rs_client(w=0)
        self.addCleanup(client.close)
        db = client.test
        coll = db.test
        coll.insert_one({})
        with client.start_session() as s:
            with s.start_transaction(write_concern=WriteConcern(w=1)):
                # Every write helper must be acknowledged: the transaction's
                # w=1 write concern overrides the client's w=0.
                self.assertTrue(coll.insert_one({}, session=s).acknowledged)
                self.assertTrue(coll.insert_many([{}, {}], session=s).acknowledged)
                self.assertTrue(coll.bulk_write([InsertOne({})], session=s).acknowledged)
                self.assertTrue(coll.replace_one({}, {}, session=s).acknowledged)
                self.assertTrue(coll.update_one({}, {"$set": {"a": 1}}, session=s).acknowledged)
                self.assertTrue(coll.update_many({}, {"$set": {"a": 1}}, session=s).acknowledged)
                self.assertTrue(coll.delete_one({}, session=s).acknowledged)
                self.assertTrue(coll.delete_many({}, session=s).acknowledged)
                coll.find_one_and_delete({}, session=s)
                coll.find_one_and_replace({}, {}, session=s)
                coll.find_one_and_update({}, {"$set": {"a": 1}}, session=s)
        # DDL-style operations are not permitted inside a transaction;
        # each entry is (callable, args, kwargs).
        unsupported_txn_writes: list = [
            (client.drop_database, [db.name], {}),
            (db.drop_collection, ["collection"], {}),
            (coll.drop, [], {}),
            (coll.rename, ["collection2"], {}),
            # Drop collection2 between tests of "rename", above.
            (coll.database.drop_collection, ["collection2"], {}),
            (coll.create_indexes, [[IndexModel("a")]], {}),
            (coll.create_index, ["a"], {}),
            (coll.drop_index, ["a_1"], {}),
            (coll.drop_indexes, [], {}),
            (coll.aggregate, [[{"$out": "aggout"}]], {}),
        ]
        # Creating a collection in a transaction requires MongoDB 4.4+.
        if client_context.version < (4, 3, 4):
            unsupported_txn_writes.extend(
                [
                    (db.create_collection, ["collection"], {}),
                ]
            )
        # Each unsupported operation must fail server-side inside a txn.
        for op in unsupported_txn_writes:
            op, args, kwargs = op
            with client.start_session() as s:
                kwargs["session"] = s
                s.start_transaction(write_concern=WriteConcern(w=1))
                with self.assertRaises(OperationFailure):
                    op(*args, **kwargs)
                s.abort_transaction()
    @client_context.require_transactions
    @client_context.require_multiple_mongoses
    def test_unpin_for_next_transaction(self):
        """A session unpins from its mongos after a transaction completes."""
        # Increase localThresholdMS and wait until both nodes are discovered
        # to avoid false positives.
        client = rs_client(client_context.mongos_seeds(), localThresholdMS=1000)
        wait_until(lambda: len(client.nodes) > 1, "discover both mongoses")
        coll = client.test.test
        # Create the collection.
        coll.insert_one({})
        self.addCleanup(client.close)
        with client.start_session() as s:
            # Session is pinned to Mongos.
            with s.start_transaction():
                coll.insert_one({}, session=s)
            addresses = set()
            # See UNPIN_TEST_MAX_ATTEMPTS for the false-positive analysis.
            for _ in range(UNPIN_TEST_MAX_ATTEMPTS):
                with s.start_transaction():
                    cursor = coll.find({}, session=s)
                    self.assertTrue(next(cursor))
                    addresses.add(cursor.address)
                # Break early if we can.
                if len(addresses) > 1:
                    break
            # Two distinct mongos addresses prove the session was unpinned
            # between transactions.
            self.assertGreater(len(addresses), 1)
@client_context.require_transactions
@client_context.require_multiple_mongoses
def test_unpin_for_non_transaction_operation(self):
# Increase localThresholdMS and wait until both nodes are discovered
# to avoid false positives.
client = rs_client(client_context.mongos_seeds(), localThresholdMS=1000)
wait_until( |
chubbymaggie/simuvex | simuvex/procedures/libc___so___6/__isoc99_sscanf.py | Python | bsd-2-clause | 684 | 0.00731 | import simuvex
from simuvex.s_format import FormatParser
from simuvex.s_type import SimTypeInt, SimTypeString
import logging
l = logging.getLogger("simuvex.procedures.libc.system")
class __isoc99_sscanf(FormatParser):
    """SimProcedure modelling C99 ``sscanf(str, fmt, ...)``."""
    #pylint:disable=arguments-differ
    def run(self, scan, fmt):
        #pylint:disable=attribute-defined-outside-init
        # Both fixed arguments (source string and format string) are
        # char pointers; variadic output pointers are handled by interpret().
        self.argument_types = {0: self.ty_ptr(SimTypeString()),
                               1: self.ty_ptr(SimTypeString())}
        self.return_type = SimTypeInt(self.state.arch.bits, True)
        # Parse the format string found at argument index 1.
        fmt_str = self._parse(1)
        # Scan arg 0 from memory; variadic arguments start at index 2.
        # interpret() returns (string, item_count); sscanf returns the count.
        _, items = fmt_str.interpret(self.arg(0), 2, self.arg, region=self.state.memory)
        return items
gepd/uPiotMicroPythonTool | tools/sampy_manager.py | Python | mit | 9,119 | 0 | # This file is part of the uPiot project, https://github.com/gepd/upiot/
#
# MIT License
#
# Copyright (c) 2017 GEPD
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom | the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCL | UDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import sublime
from os import path, mkdir
from ..tools import check_sidebar_folder, make_folder as mkfolder
from ..tools import message, serial, errors, status_color
from ..tools.sampy import Sampy
from ..tools.ampy import files
txt = None
port = None
def start_sampy(quiet=False):
    """
    Opens the sampy connection in the selected port. If already is a serial
    connection running it will look into it and close it.
    Arguments:
        quiet {bool} -- when True only open the message console; do not
                        stop a running serial task or create a connection.
    Returns:
        Sampy -- Sampy object, or None when no port is selected, quiet is
        True, or the board could not be opened for a reason other than an
        access error (callers catch the resulting AttributeError).
    """
    global txt
    global port
    port = serial.selected_port()
    # close the current connection in open port
    if(port in serial.in_use and not quiet):
        run_serial = serial.serial_dict[port]
        run_serial.stop_task()
    # message printer
    txt = message.open(port)
    txt.set_focus()
    if(port and not quiet):
        try:
            return Sampy(port, data_consumer=txt.print)
        except files.PyboardError as e:
            # BUGFIX: the ampy module is imported as `files`; the previous
            # `file.PyboardError` referenced the (builtin) name `file` and
            # blew up while evaluating the except clause, so this handler
            # never ran.
            from sys import exit
            if('failed to access' in str(e)):
                txt.print(errors.serialError_noaccess)
                status_color.remove()
                exit(0)
            # Any other board error falls through and returns None.
def finished_action():
    """Run after every sampy command: re-establish the serial connection
    on the last used port and bring the console window back up."""
    global port
    serial.establish_connection(port)
    # Re-open the console output window.
    sublime.active_window().run_command('upiot_console_write')
def run_file(filepath):
    """Execute the given script on the selected MicroPython device,
    streaming its output to the console window.

    Arguments:
        filepath {str} -- file path
    """
    global txt
    sampy = start_sampy()
    script_name = path.basename(filepath)
    banner = '\n\n>> Run {0}\n\n"ctrl+shift+c" to stop the script.\n---'
    txt.print(banner.format(script_name))
    try:
        sampy.run(filepath)
        txt.print("\n[done]")
    except AttributeError as e:
        # start_sampy() returned None (no usable port).
        txt.print("\n\nOpening console...\nRun the command again.")
    finished_action()
def list_files():
    """Print the listing of files and folders on the selected device."""
    sampy = start_sampy()
    txt.print('\n\n>> sampy ls\n')
    try:
        for entry in sampy.ls():
            txt.print('\n' + entry)
    except AttributeError as e:
        # start_sampy() returned None (no usable port).
        txt.print("\n\nOpening console...\nRun the command again.")
    finished_action()
def get_file(filename):
    """Get file from device
    Gets the given file from the selected device and prints its content
    to the console window.
    Arguments:
        filename {str} -- name of the file
    """
    sampy = start_sampy()
    txt.print('\n\n>> get {0}'.format(filename))
    try:
        output = sampy.get(filename)
    except RuntimeError as e:
        output = str(e)
    except AttributeError as e:
        # start_sampy() returned None (no usable port). Every sibling
        # command handles this case; previously get_file crashed instead.
        output = 'Opening console...\nRun the command again.'
    # Normalise CRLF/CR line endings coming from the board.
    output = output.replace('\r\n', '\n').replace('\r', '\n')
    txt.print('\n\n' + output)
    finished_action()
def get_files(destination):
    """Get files from devices
    Gets all the files in the device and stored in the selected destination
    path. Afterwards offers to add the destination folder to the current
    Sublime Text project.
    Arguments:
        destination {str} -- local folder to store the retrieved files in
    """
    error = False
    sampy = start_sampy()
    destination = path.normpath(destination)
    mkfolder(destination)
    txt.print('\n\n>> Storing in {0}\n'.format(destination))
    try:
        for filename in sampy.ls():
            txt.print('\nRetrieving ' + filename + ' ...')
            filepath = path.normpath(path.join(destination, filename))
            # Entries ending in '/' are directories: mirror them locally.
            if(filename.endswith('/')):
                if(not path.exists(filepath)):
                    mkdir(filepath)
            else:
                with open(filepath, 'wb') as file:
                    sampy.get(filename, file)
        output = '[done]'
    except AttributeError as e:
        # start_sampy() returned None (no usable port).
        output = 'Opening console...\nRun the command again.'
        error = True
    txt.print('\n\n' + output)
    finished_action()
    if(error):
        return
    # Nothing to do if the folder is already part of the project.
    if(check_sidebar_folder(destination)):
        return
    caption = "files retrieved, would you like to " \
        "add the folder to your current proyect?"
    answer = sublime.yes_no_cancel_dialog(caption, "Add", "Append")
    if(answer == sublime.DIALOG_CANCEL):
        return
    # Yes -> replace project folders; No ("Append") -> append to them.
    if(answer == sublime.DIALOG_YES):
        append = False
    elif(answer == sublime.DIALOG_NO):
        append = True
    options = {'folder': destination, 'append': append}
    sublime.active_window().run_command('upiot_add_project', options)
def put_file(filepath):
    """Put given file in device
    Puts the given in the selected device
    Arguments:
        filepath {str} -- path of the file to put
    """
    sampy = start_sampy()
    try:
        file = path.basename(filepath)
        txt.print('\n\n>> put {0}'.format(file))
        try:
            sampy.put(path.normpath(filepath))
            output = '[done]'
        except FileNotFoundError as e:
            # Local file does not exist.
            output = str(e)
        except files.PyboardError as e:
            # Board rejected the transfer; report and stop here.
            txt.print('\n\nError putting the file.\nReason: ' + str(e))
            return finished_action()
        except AttributeError as e:
            # start_sampy() returned None (no usable port).
            output = 'Opening console...\nRun the command again.'
        txt.print('\n\n' + output)
    except TypeError as e:
        # e.g. a non-string filepath reached path functions.
        txt.print("\n\n" + str(e))
    finished_action()
def remove_file(filepath):
    """Delete the given file from the selected device.

    Arguments:
        filepath {str} -- file to remove
    """
    sampy = start_sampy()
    target = path.basename(filepath)
    txt.print('\n\n>> rm {0}'.format(target))
    try:
        sampy.rm(filepath)
        result = '[done]'
    except RuntimeError as e:
        result = str(e)
    except AttributeError as e:
        # start_sampy() returned None (no usable port).
        result = 'Opening console...\nRun the command again.'
    txt.print('\n\n' + result)
    finished_action()
def make_folder(folder_name):
    """Create a folder with the given name on the selected device.

    Arguments:
        folder_name {str} -- folder name
    """
    sampy = start_sampy()
    txt.print('\n\n>> mkdir {0}'.format(folder_name))
    try:
        sampy.mkdir(folder_name)
        result = '[done]'
    except files.DirectoryExistsError as e:
        result = str(e)
    except AttributeError as e:
        # start_sampy() returned None (no usable port).
        result = 'Opening console...\nRun the command again.'
    txt.print('\n\n' + result)
    finished_action()
def remove_folder(folder_name):
    """Delete the given folder from the selected device.

    Arguments:
        folder_name {str} -- folder to remove
    """
    sampy = start_sampy()
    txt.print('\n\n>> rmdir {0}'.format(folder_name))
    try:
        sampy.rmdir(folder_name)
        result = '[done]'
    except RuntimeError as e:
        result = str(e)
    except AttributeError as e:
        # start_sampy() returned None (no usable port).
        result = 'Opening console...\nRun the command again.'
    txt.print('\n\n' + result)
    finished_action()
def help():
"""Show commands help
Displays the sampy command usage
"""
start_sampy(quiet=True)
_help = """\n\nUsage: sampy COMMAND [ARGS]...
sampy - Sublime Text |
acrsilva/animated-zZz-machine | bundle_final_app/source/panelScatter.py | Python | lgpl-3.0 | 11,134 | 0.009263 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import sys
from PyQt4.uic import loadUiType
from pyqtgraph.Qt import QtCore, QtGui
#from matplotlib.figure import Figure
from matplotlib import pyplot as plt
import numpy as np
from matplotlib.backends.backend_qt4agg import (
FigureCanvasQTAgg as FigureCanvas,
NavigationToolbar2QT as NavigationToolbar)
import selEpisodio
import matplotlib.dates as md
from sklearn import preprocessing
import colores
import lectorFichero as lf
DEBUG = 0
class PanelScatter():
    def __init__(self, selep, layout, cbSueno, cbSedentario, cbLigera, cbModerada, cbIzq, cbDer, btnPrev, btnNext, label):
        """Wire the scatter panel to its Qt widgets.

        selep: episode selector (selEpisodio) providing the filtered episodes.
        The remaining arguments are widgets owned by the parent window:
        a layout for the plots, four activity-filter checkboxes, two episode
        combo boxes (left/right), prev/next buttons and a status label.
        """
        self.layoutMatplot1 = layout
        self.cbSueno = cbSueno
        self.cbSedentario = cbSedentario
        self.cbLigera = cbLigera
        self.cbModerada = cbModerada
        self.cbx_izq = cbIzq
        self.cbx_der = cbDer
        self.btnPrev = btnPrev
        self.btnNext = btnNext
        self.label = label
        self.selep = selep
        # Populate the combo boxes and draw the initial plots.
        self.configureComboBox()
        self.updateView()
        # Connect signals to their handlers (handlers are defined elsewhere
        # in this class, outside the visible excerpt).
        self.cbSueno.clicked.connect(self.filtrarSueno)
        self.cbSedentario.clicked.connect(self.filtrarSedentario)
        self.cbLigera.clicked.connect(self.filtrarLigera)
        self.cbModerada.clicked.connect(self.filtrarModerada)
        self.btnPrev.clicked.connect(self.retroceder)
        self.btnNext.clicked.connect(self.avanzar)
        self.cbx_izq.activated[str].connect(self.cbx_izqListener)
        self.cbx_der.activated[str].connect(self.cbx_derListener)
        # All activity filters start enabled.
        self.filSueno = True
        self.filSedentario = True
        self.filLigero =True
        self.filModerado = True
def configureComboBox(self):
print "Configurando combobox"
self.cbx_izq.clear()
self.cbx_der.clear()
for i in self.selep.epFiltro:
self.cbx_izq.addItem(i.nombre)
self.cbx_der.addItem(i.nombre)
if(len(self.selep.epFiltro) > 1):
self.cbx_der.setCurrentIndex(1)
else:
self.cbx_der.setCurrentIndex(0)
self.cbx_izq.setCurrentIndex(0)
def openFile(self):
self.selep = self.loadData()
self.configureComboBox()
self.limpiarLayout()
self.updateView()
    def loadData(self):
        """Ask the user for a data file (or use ../data.csv in DEBUG mode)
        and build an episode selector from its contents."""
        if(DEBUG): fname = '../data.csv'
        else: fname = QtGui.QFileDialog.getOpenFileName(self, 'Open file')
        # NOTE(review): `self` is passed as the dialog parent but this class
        # is not a QWidget -- confirm this behaves as intended.
        print "Abriendo fichero ", fname
        csv = lf.LectorFichero(fname).getDatos()
        selep = selEpisodio.selEpisodio(csv)
        return selep
    # ep 0 -> left plot
    # ep 1 -> right plot
    def getTime(self, a, b, ep):
        """Map a picked scatter point back to its timestamp.

        a: temperature value of the picked point.
        b: heat-flux value of the picked point.
        ep: which plot was clicked (0 = left combo box, 1 = right).
        Returns the timestamp of the first flux sample equal to *b*, or
        None implicitly when no match is found.
        """
        if(ep == 0):
            cbxId = self.cbx_izq.currentIndex()
        else:
            cbxId = self.cbx_der.currentIndex()
        print "get time", cbxId
        # NOTE(review): the temperature and flux scans are independent --
        # after matching *a* in temp, the flux list is scanned from the
        # start, so repeated values may resolve to the wrong instant.
        for i in self.selep.epFiltro[cbxId].temp:
            if(a == i):
                ind = 0
                for k in self.selep.epFiltro[cbxId].flujo:
                    if(b == k):
                        print "encontrado"
                        return self.selep.epFiltro[cbxId].tiempo[ind]
                    else:
                        ind += 1
def onpick(self, event, ep):
thisline = event.artist
xdata, ydata = thisline.get_data()
ind = event.ind
print xdata[ind[0]], ydata[ind[0]]
self.label.setText('Instante ' + str(self.getTime(xdata[ind[0]], ydata[ind[0]], ep)))
    def creaFiguras(self, ep):
        """Build the two figures for one episode.

        ep: episode object exposing tiempo, temp, flujo, tipo, ini, fin.
        Returns (time-series figure, scatter figure).
        """
        # Time series figure
        fig0 = plt.figure(tight_layout=True)
        # Normalise (NOTE(review): with copy=True the scaled arrays are
        # discarded -- the plots below use the raw values; confirm intent)
        preprocessing.scale(ep.temp, copy=True)
        preprocessing.scale(ep.flujo, copy=True)
        # Temperature curve (left axis)
        ax1 = fig0.add_subplot(111)
        ax1.plot(ep.tiempo, ep.temp, '-', color=colores.temperatura)
        ax1.set_ylim([self.selep.csv.cotas.temp_min,self.selep.csv.cotas.temp_max])
        #ax1.set_xlabel('Tiempo (m)')
        ax1.set_ylabel('Temperatura (ºC)', color=colores.temperatura)
        for tl in ax1.get_yticklabels():
            tl.set_color(colores.temperatura)
        fig0.autofmt_xdate()
        xfmt = md.DateFormatter('%H:%M')
        ax1.xaxis.set_major_formatter(xfmt)
        start, end = ax1.get_xlim()
        #ax1.xaxis.set_ticks(np.arange(start, end, 30))
        ax1.grid(True)
        # Heat-flux curve (right axis, shared x)
        ax2 = ax1.twinx()
        ax2.plot(ep.tiempo, ep.flujo, '-', color=colores.flujo)
        ax2.set_ylim([self.selep.csv.cotas.flujo_min, self.selep.csv.cotas.flujo_max])
        ax2.set_ylabel('Flujo térmico', color=colores.flujo)
        for tl in ax2.get_yticklabels():
            tl.set_color(colores.flujo)
        # Scatterplot
        # For sleep episodes, shade vertical spans with the sleep
        # classification and colour each point by its sleep state.
        if(ep.tipo == selEpisodio.tipoSueno):
            profundo = self.selep.getProfundo(ep.ini, ep.fin)
            despierto = self.selep.getDespierto(ep.ini, ep.fin)
            for i in profundo:
                ax1.axvspan(i[0], i[1], facecolor=colores.suenoProfundo, alpha=0.3, edgecolor=colores.suenoProfundo)
            for i in despierto:
                ax1.axvspan(i[0], i[1], facecolor=colores.despierto, alpha=0.5, edgecolor=colores.despierto)
            fig1 = plt.figure(tight_layout=True)
            ax1f1 = fig1.add_subplot(111)
            k = 0
            for i in range(ep.ini, ep.fin):
                t = self.selep.getColorSueno(i)
                ax1f1.plot(ep.temp[k], ep.flujo[k], 'o', picker=5, color=t)
                k+=1
            ax1f1.set_xlabel('Temperatura (ºC)', color=colores.temperatura)
            ax1f1.set_ylabel('Flujo térmico', color=colores.flujo)
        else:
            # Non-sleep episodes: a single-colour scatter of temp vs flux.
            fig1 = plt.figure(tight_layout=True)
            ax1f1 = fig1.add_subplot(111)
            line, = ax1f1.plot(ep.temp, ep.flujo, 'o', picker=5, color = "b")
            ax1f1.set_xlabel('Temperatura (ºC)', color=colores.temperatura)
            ax1f1.set_ylabel('Flujo térmico', color=colores.flujo)
            #ax1f1.set_xlim([self.selep.csv.cotas.temp_min, self.selep.csv.cotas.temp_max])
            #ax1f1.set_ylim([self.selep.csv.cotas.flujo_min, self.selep.csv.cotas.flujo_max])
        return fig0, fig1
    def crearWidget(self, ep, derecho):
        """Build the info + figures column for one episode.

        ep: episode to visualise.
        derecho: 0 = left panel, 1 = right panel (forwarded to the pick
        handler so it can resolve timestamps on the correct side).
        Returns the populated QGridLayout.
        """
        fig10, fig11 = self.creaFiguras(ep)
        canvas1 = FigureCanvas(fig10)
        canvas2 = FigureCanvas(fig11)
        vbox = QtGui.QGridLayout()
        # Summary labels above the two plots.
        vbox.addWidget(QtGui.QLabel("<b>Episodio:</b> " + ep.nombre))
        vbox.addWidget(QtGui.QLabel("<b>Inicio:</b> " + str(ep.tiempo[0])))
        vbox.addWidget(QtGui.QLabel("<b>Final:</b> " + str(ep.tiempo[-1])))
        vbox.addWidget(QtGui.QLabel("<b>Duración:</b> %s min" % (ep.tiempo[-1] - ep.tiempo[0])))
        vbox.addWidget(QtGui.QLabel("<b>Coeficiente de correlación:</b> " + str(ep.correlacion)[:5]))
        vbox.addWidget(QtGui.QLabel("<b>Calorías consumidas:</b> " + str(ep.numCalorias)[:6] + " (" + str(ep.numCalorias/self.selep.totalCal*100)[:4] + "%)"))
        vbox.addWidget(canvas1)
        vbox.addWidget(canvas2)
        # Route pick events on the scatter canvas to the panel handler.
        canvas2.mpl_connect('pick_event', lambda event: self.onpick(event, derecho))
        return vbox
#Inserta elementos en el layout con los nuevos episodios
def updateView(self):
if(len(self.selep.epFiltro) > 0):
self.vbox = self.crearWidget(self.selep.epFiltro[self.cbx_izq.currentIndex()], 0)
self.layoutMatplot1.addLayout(self.vbox)
if(len(self.selep.epFiltro) > 1):
self.vbox2 = self.crearWidget(self.selep.epFiltro[self.cbx_der.currentIndex()], 1)
self.layoutMatplot1.addLayout(self.vbox2)
#Elimina el contenido del layout actual
def limpiarLayout(self):
plt.close('all') #Cerrar todos las gráficas dibujadas para vaciar memoria
for cnt in reversed(range(self.vbox.c |
Matt-Deacalion/django | tests/schema/fields.py | Python | bsd-3-clause | 2,822 | 0.002835 | from django.db import models
from django.db.models.fields.related import (
RECURSIVE_RELATIONSHIP_CONSTANT, ManyToManyDescriptor,
ManyToManyField, ManyToManyRel, RelatedField,
create_many_to_many_intermediary_model,
)
from django.utils.functional import curry
class CustomManyToManyField(RelatedField):
    """
    Ticket #24104 - Need to have a custom ManyToManyField,
    which is not an inheritor of ManyToManyField.
    """
    many_to_many = True
    def __init__(self, to, db_constraint=True, swappable=True, **kwargs):
        # Accept either a model class or a lazy string reference.
        try:
            to._meta
        except AttributeError:
            to = str(to)
        kwargs['rel'] = ManyToManyRel(
            self, to,
            related_name=kwargs.pop('related_name', None),
            related_query_name=kwargs.pop('related_query_name', None),
            limit_choices_to=kwargs.pop('limit_choices_to', None),
            symmetrical=kwargs.pop('symmetrical', to == RECURSIVE_RELATIONSHIP_CONSTANT),
            through=kwargs.pop('through', None),
            through_fields=kwargs.pop('through_fields', None),
            db_constraint=db_constraint,
        )
        self.swappable = swappable
        self.db_table = kwargs.pop('db_table', None)
        if kwargs['rel'].through is not None:
            assert self.db_table is None, "Cannot specify a db_table if an intermediary model is used."
        super(CustomManyToManyField, self).__init__(**kwargs)
    def contribute_to_class(self, cls, name, **kwargs):
        # Symmetrical self-referential M2Ms get a hidden related name.
        if self.remote_field.symmetrical and (
                self.remote_field.model == "self" or self.remote_field.model == cls._meta.object_name):
            self.remote_field.related_name = "%s_rel_+" % name
        super(CustomManyToManyField, self).contribute_to_class(cls, name, **kwargs)
        # Auto-create the intermediary (through) model when none was given.
        if not self.remote_field.through and not cls._meta.abstract and not cls._meta.swapped:
            self.remote_field.through = create_many_to_many_intermediary_model(self, cls)
        setattr(cls, self.name, ManyToManyDescriptor(self.remote_field))
        self.m2m_db_table = curry(self._get_m2m_db_table, cls._meta)
    def get_internal_type(self):
        return 'ManyToManyField'
    # Copy those methods from ManyToManyField because they don't call super() internally
    contribute_to_related_class = ManyToManyField.__dict__['contribute_to_related_class']
    _get_m2m_attr = ManyToManyField.__dict__['_get_m2m_attr']
    _get_m2m_reverse_attr = ManyToManyField.__dict__['_get_m2m_reverse_attr']
    _get_m2m_db_table = ManyToManyField.__dict__['_get_m2m_db_table']
class InheritedManyToManyField(ManyToManyField):
    # A plain subclass of ManyToManyField (contrast with
    # CustomManyToManyField above, which re-implements it).
    pass
class MediumBlobField(models.BinaryField):
    """
    A MySQL BinaryField that uses a different blob size.
    """
    def db_type(self, connection):
        # MEDIUMBLOB stores up to 16 MB, vs 64 KB for MySQL's default BLOB.
        return 'MEDIUMBLOB'
|
toway/towaymeetups | mba/views/admin/meetup_types.py | Python | gpl-3.0 | 3,010 | 0.008174 | #!/usr/bin/python
# coding: utf-8
from datetime import datetime
import deform
import colander
import jinja2
from deform import ValidationFailure
from deform.widget import CheckedPasswordWidget
from pyramid.view import view_config
from pyramid.httpexceptions import HTTPForbidden
from pyramid.httpexceptions import HTTPFound
from pyramid.security import remember
from pyramid.renderers import render_to_response
from pyramid.encode import urlencode
from formencode.validators import Email
from pyramid.request import Response
from kotti import get_settings
from kotti.security import get_principals
from kotti import DBSession
from kotti.security import get_user
from mba import _
#from mba.utils.decorators import wrap_user
from mba.utils import wrap_user
from mba.views.infomation import InfoAddForm, InfoEditForm
from mba.resources import MeetupType
__author__ = 'sunset'
__date__ = '20140904'
__description__ = u'活动类别管理'
from js.jquery import jquery
@view_config(route_name='admin_meetup_types', renderer='admin/meetup_types.jinja2',permission='view')
@view_config(route_name='admin_meetup_types', renderer='json',permission='view',xhr=True)
def view_meetup_types(context, request):
    """Admin page for managing meetup types.

    A normal GET renders the jinja2 listing template; an XHR POST carrying a
    ``method`` field (``add-mt`` / ``del-mt`` / ``mdf-mt``) adds, deletes or
    renames a MeetupType and answers an empty JSON object -- the outcome is
    reported to the user through session flash messages.
    """
    jquery.need()
    user = get_user(request)
    if not user:
        # Anonymous visitors are redirected to the login page.
        return HTTPFound(location="/login?came_from=%s" % request.url)
    err_msg = u""
    if 'method' in request.POST:
        # mt stands for meetup-type
        try:
            method = request.POST['method'] # add-mt, del-mt, mdf-mt
            if method == 'add-mt':
                new_type_title = request.POST['mt-title']
                DBSession.add( MeetupType(title=new_type_title))
                request.session.flash((u"成功添加:'%s'" % new_type_title), 'success')
            else:
                # del-mt / mdf-mt both operate on an existing row looked up by id.
                mt_id = int(request.POST['mt-id'])
                to_op_mt = DBSession.query(MeetupType).filter_by(id=mt_id).first()
                mt_title = request.POST['mt-title']
                if not to_op_mt:
                    raise Exception(u"错误的参数")
                if method == 'del-mt':
                    DBSession.delete(to_op_mt)
                    request.session.flash(_(u"成功删除'%s'" % mt_title), 'success')
                elif method == 'mdf-mt':
                    to_op_mt.title = mt_title
                    request.session.flash(_(u"修改成功!"), 'success')
                else:
                    err_msg = u"错误的方法"
                    request.session.flash(_(u"错误的参数"))
        except Exception,ex:
            err_msg = "%s" % ex
            request.session.flash(_(u"错误:'%s'" % err_msg), 'danger')
        finally:
            # NOTE(review): the ``return`` inside ``finally`` makes every POST
            # -- success or failure -- answer ``{}``, and it also swallows any
            # exception raised by the handlers above. Apparently deliberate,
            # since errors are communicated via flash messages, but fragile.
            return {}
    queried = DBSession.query(MeetupType)
    count = DBSession.query(MeetupType).count()
    return wrap_user(request, {'meetup_types': queried,'count': count } )
def includeme(config):
    """Pyramid inclusion hook: register the admin meetup-types route and
    scan this module so the @view_config declarations above are picked up."""
    config.add_route('admin_meetup_types','/admin/meetups/types')
    config.scan(__name__)
|
sapfo/ancientselection | ancientselection/ancientselection.py | Python | gpl-3.0 | 12,851 | 0.028947 | #!/usr/bin/env python
import sys,os,random,errno
import numpy as np
import argparse
from argparse import RawTextHelpFormatter
# ---------------- Arguments
parser = argparse.ArgumentParser(description='''Take as input time se | rial data (one locus). The output can be either mles or | likelihood surface over a grid or the mles'
An example of a run: ./ancientselection.py -i TestData_sel0_Jeff.py --dirout Out --codedir ../ancientselection/ --run run1 --exhaust''',formatter_class=RawTextHelpFormatter)
#./ancientselection.py -i TestData_sel0_Jeff.py --dirout Out --codedir ../ancientselection/ --run run1 --exhaust cube
#parser = argparse.ArgumentParser(description='''Take as input time serial data. The output can be either a likelihood values for a grid or the mles'
#An example of a run: ./main_optimize_nelder.py TestData_Jeff.py 400 Oct4run1''',formatter_class=argparse.ArgumentDefaultsHelpFormatter)
#parser.add_argument('--mikefile', help='mikeinfile',type=argparse.FileType('r'),required=True)
#parser.add_argument('--toselect', help='list of indivs to select from mikefile, one per line',type=str,required=True)
#parser.add_argument('--toorder', help='list of pops to reorder indivs from mikefile, one per line',type=str,required=True)
#parser.add_argument('--mikenew', help='mike formatted output file',type=argparse.FileType('wa'),required=True)
helpdatafile = '''datafile is a python script that contains
the data in the following format
For the data itself, here is an example:
M_ = [10,10,10] #for 10 chrom at 3 time points
I_ = [3,3,3] #for 3 derived alleles at each time point
T_ = [-100,-50,0] #the time points in generations
dominance_ = 0.5 #if codominance
where;
M_: python list with the total number of chromosomes
I_: python list wiyth the number of derived alleles
T_: python list with the sampling times generations
dominance_: float with the dominance coefficient for the data (usually 0, 0.5 or 1)
For the parameters:
e.g.
Upper_bounds_ = [0,10,1000] #(t0_up,gamma_up,Ne_up)
Lower_bounds_ = [-150,-10,500] #(t0_low,gamma_low,Ne_low)
fixed_params_ = [None,None,1000] #for t0 and gamma to be free while the pop size is set to 1000.
where:
Upper_bounds_ = python list (t0_up,gamma_up,Ne_up) upper bounds for
t0, gamma and Ne (in this order!!)
Lower_bounds_ = python list (t0_low,gamma_low,Ne_low) lower bounds for
t0, gamma and Ne (in this order!!)
fixed_params = python list indicating which parameters should be fixed
(same order: t0,gamma,Ne). The value
is set to None if the parameters is not to be fixed or to the value it
should be fixed at. The fixed values should be compatible with the bounds.
'''
parser.add_argument('--version','-v', action='version', version='%(prog)s 0.0')
parser.add_argument('--datafile','-i', help=helpdatafile,type=str,required=True)
parser.add_argument('--run','-r', help='''added string to the project name,
only used to label output files (by default it is the datafile name minus '.py' extension)''',type=str,required=False,default='')
parser.add_argument('--dirout', help='directory out (if does not exist, will be created) --default Out',type=str,required=False, default='Out')
parser.add_argument('--codedir', help='directory where the code lives (to be added to your path)',type=str,required=True,default="../bin")
parser.add_argument('--gridsize', help='size of the grid (H) -- default 400',type=int,required=False,default=400)
parser.add_argument('--gridtype', help='type of grid, either of (default,symmetric,uniform,expo) --default default',type=str,required=False,default='default')
parser.add_argument('--expmethod1', help='''exponential method 1, used always if gamma small enough,\neither of (alglin,pade,prec)
alglin: in detail in the paper
pade: implemented in scipy
prec: arbitrary precision, the grid has to be the default grid: !!not ready yet!!!
--default alglin''',type=str,required=False,default='alglin')
parser.add_argument('--expmethod2', help='''exponential method 2 (see above)
used for large abs(gamma),\neither of (pade,prec)
prec: not implemented yet
-- default pade''',type=str,required=False,default='pade')
parser.add_argument('--exhaust', help='''computes the likelihood on a grid:
either cube or predefinite running time
(usage --exhaust cube or --exhaust time)
Note: if --exhaust not specified will try
to find the maximum likelihood using a
nelder-mead algorithm -- default cube)''',required=False,default=False)
parser.add_argument('--T0dim', help='number of evaluations for the age (default 5), only in use if --exhaust cube',type=int,required=False,default=5)
parser.add_argument('--Gammadim', help='number of evaluations for gamma (default 5), only in use if --exhaust cube',type=int,required=False,default=5)
parser.add_argument('--NEdim', help='number of evaluations for Ne (default 5), only in use if --exhaust cube',type=int,required=False,default=5)
parser.add_argument('--runningtime', help='''only in use if --cube time,
you can specify how long you want it to run.
The number of points per paramaters will be the same (if not fixed) -- default 300''',type=int,required=False,default=5*60)
parser.add_argument('--nonconditional', help='''likelihood either conditional (default) on allele segragating at the last sampling time
-- default is to condition, i.e. without --nonconditional flag''',action='store_true',required=False,default=False)
parser.add_argument('--verbose', help='increase standard out',action='store_true',required=False,default=False)
parser.add_argument('--debug', help='debug, lots of standard out',action='store_true',required=False,default=False)
args = parser.parse_args()
# Unpack all command-line arguments into module-level names.
verbose = args.verbose
debug = args.debug
datafile = args.datafile
run = args.run
project = datafile.split('.py')[0]+'_'+run
dirout = args.dirout
codedir = args.codedir #(to add to the path)
H = args.gridsize
gridtype = args.gridtype
expmethod1 = args.expmethod1
expmethod2 = args.expmethod2
exhaust = args.exhaust
runningtime = args.runningtime
T0dim = args.T0dim
Gammadim = args.Gammadim
NEdim = args.NEdim
nonconditional = args.nonconditional
# Parse the datafile: executes the user-supplied python script, which must
# define M_, I_, T_, dominance_, Upper_bounds_, Lower_bounds_, fixed_params_
# (they are checked right below).
# NOTE(review): execfile runs arbitrary code -- only use trusted data files.
execfile(datafile)
#append path etc.
sys.path.append(codedir)
import inference ## ll function
import optimize ## nelder mead and exhaustive
import funcs ##for the domain definition
def mkdir_p(path):
try:
os.makedirs(path)
except OSError as exc: # Python >2.5
if exc.errno == errno.EEXIST and os.path.isdir(path):
pass
else: raise
if not os.path.isdir(dirout):
    if verbose:
        print "your outputdir does not exist, created now"
    mkdir_p(dirout)
#"check" the datafile: merely touch the required names so a missing one
# raises NameError and we can exit with a helpful message.
try:
    data=(I_,M_,T_)
    dominance_
except:
    print "Something is missing in your datafile!"
    print "Check that you have all variables\nI_,M_,T_,dominance_\nUpper_bounds_,Lower_bounds_,fixed_params_ defined in your datafile"
    sys.exit()
#check the mode: --exhaust accepts only 'cube' or 'time' (False means optimize)
if exhaust and exhaust not in ['cube','time']:
    print "--exhaust can only be followed by cube or time"
    sys.exit()
# print output
if verbose:
print "Datafile:\t",datafile
print "project name:\t",project
print "Data:"
print "M:\t",M_
print "I:\t",I_
print "T:\t",T_
print "dominance:\t",dominance_
print "Parameters:"
print "Upper Bounds:\t",Upper_bounds_
print "Lower Bounds:\t",Lower_bounds_
print "Fixed params:\t",fixed_params_
print "Grid size (H):\t",H
print "Grid type:\t",gridtype
print "expmethod1: ",expmethod1
print "expmethod2: ",expmethod2
if nonconditional == True:
print "Will use the unconditional process (Q matrix)..."
else:
print "Will use the conditional matrix (q matrix)..."
if exhaust:
print "exhaustive mode"
else:
print "optimization mode (nelder-mead)"
if debug: print "Debugging ..."
#further checks: same size of M, I, T
if len(set([len(M_),len(T_),len(I_)]))!=1:
print "M_,I_,T_: all have to be the same length!!"
print "ckeck: "
print M_,I_,T_
sy |
GbalsaC/bitnamiP | venv/lib/python2.7/site-packages/social/backends/shopify.py | Python | agpl-3.0 | 3,362 | 0 | """
Shopify OAuth2 backend, docs at:
http://psa.matiasaguirre.net/docs/backends/shopify.html
"""
import imp
import six
from social.utils import handle_http_errors
from social.backends.oauth import BaseOAuth2
from social.exceptions import AuthFailed, AuthCanceled
class ShopifyOAuth2(BaseOAuth2):
    """Shopify OAuth2 authentication backend.
    Authenticates a Shopify shop owner via Shopify's OAuth2 flow; the shop
    domain (``shop``) doubles as the unique user id (``ID_KEY``).
    """
    name = 'shopify'
    ID_KEY = 'shop'
    # (response key, storage alias) pairs persisted into social-auth extra_data.
    EXTRA_DATA = [
        ('shop', 'shop'),
        ('website', 'website'),
        ('expires', 'expires')
    ]
    @property
    def shopifyAPI(self):
        """Lazily import the third-party ``shopify`` module and cache it."""
        if not hasattr(self, '_shopify_api'):
            fp, pathname, description = imp.find_module('shopify')
            self._shopify_api = imp.load_module('shopify', fp, pathname,
                                                description)
        return self._shopify_api
    def get_user_details(self, response):
        """Use the shopify store name (shop domain minus the
        ``.myshopify.com`` suffix) as the username."""
        return {
            'username': six.text_type(response.get('shop', '')).replace(
                '.myshopify.com', ''
            )
        }
    def extra_data(self, user, uid, response, details=None, *args, **kwargs):
        """Return access_token and extra defined names to store in
        extra_data field"""
        data = super(ShopifyOAuth2, self).extra_data(user, uid, response,
                                                     details, *args, **kwargs)
        session = self.shopifyAPI.Session(self.data.get('shop').strip())
        # Exchange the temporary OAuth params for the permanent token and
        # store that instead.
        # NOTE(review): ``data['access_token'].dicts[1]`` assumes the token is
        # a layered MergeDict-like object -- confirm against the installed
        # shopify library version.
        token = session.request_token(data['access_token'].dicts[1])
        data['access_token'] = token
        return dict(data)
    def auth_url(self):
        """Build the shop-specific authorization URL to redirect the user to."""
        key, secret = self.get_key_and_secret()
        self.shopifyAPI.Session.setup(api_key=key, secret=secret)
        scope = self.get_scope()
        state = self.state_token()
        # Remember the state token so the completion step can validate it.
        self.strategy.session_set(self.name + '_state', state)
        redirect_uri = self.get_redirect_uri(state)
        session = self.shopifyAPI.Session(self.data.get('shop').strip())
        return session.create_permission_url(
            scope=scope,
            redirect_uri=redirect_uri
        )
    @handle_http_errors
    def auth_complete(self, *args, **kwargs):
        """Completes login process, must return user instance"""
        self.process_error(self.data)
        access_token = None
        key, secret = self.get_key_and_secret()
        try:
            shop_url = self.data.get('shop')
            self.shopifyAPI.Session.setup(api_key=key, secret=secret)
            shopify_session = self.shopifyAPI.Session(shop_url, self.data)
            access_token = shopify_session.token
        except self.shopifyAPI.ValidationException:
            # The shopify library rejected the callback parameters.
            raise AuthCanceled(self)
        else:
            if not access_token:
                raise AuthFailed(self, 'Authentication Failed')
        return self.do_auth(access_token, shop_url, shopify_session.url,
                            *args, **kwargs)
    def do_auth(self, access_token, shop_url, website, *args, **kwargs):
        """Hand the validated credentials over to the strategy's authenticate()."""
        kwargs.update({
            'backend': self,
            'response': {
                'shop': shop_url,
                'website': 'http://{0}'.format(website),
                'access_token': access_token
            }
        })
        return self.strategy.authenticate(*args, **kwargs)
|
adamfast/isthatanearthquake | isthatanearthquake/settings.py | Python | bsd-3-clause | 7,204 | 0.002082 | import os
import socket
# Django settings for isthatanearthquake project.
DEBUG = True
TEMPLATE_DEBUG = DEBUG
ADMINS = (
# ('Your Name', 'your_email@example.com'),
)
MANAGERS = ADMINS
if socket.gethostname() == 'adam-air' or socket.gethostname() == 'TWC101037.local':
DATABASES = {
'default': {
'ENGINE': 'django.contrib.gis.db.backends.postgis', # Add 'postgresql_psycopg2', 'postgresql', 'mysql', 'sqlite3' or 'oracle'.
'NAME': 'isthatanearthquake', # Or path to database file if using sqlite3.
'USER': '', # Not used with sqlite3.
'PASSWORD': '', # Not used with sqlite3.
'HOST': '', # Set to empty string for localhost. Not used with sqlite3.
'PORT': '', # Set to empty string for default. Not used with sqlite3.
}
}
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
'LOCATION': 'unique-snowflake'
},
}
elif socket.gethostname() == 'adam-air2':
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2', # Add 'postgresql_psycopg2', 'postgresql', 'mysql', 'sqlite3' or 'oracle'.
'NAME': 'isthatanearthquake', # Or path to database file if using sqlite3.
'USER': '', # Not used with sqlite3.
'PASSWORD': '', # Not used with sqlite3.
'HOST': '', # Set to empty string for localhost. Not used with sqlite3.
'PORT': '', # Set to empty string for default. Not used with sqlite3.
}
}
else:
from bundle_config import config
if "postgres" in config:
if "DATABASES" in locals():
DATABASES['default'] = {
'ENGINE': 'django.contrib.gis.db.backends.postgis',
# 'ENGINE': "django.db.backends.postgresql_psycopg2",
'NAME': config['postgres']['database'],
'USER': config['postgres']['username'],
'PASSWORD': config['postgres']['password'],
'HOST': config[' | postgres']['host'],
}
else:
DATABASE_ENGINE = "postgresql_psycopg2"
DATABASE_NAME = config['postgres']['data | base']
DATABASE_USER = config['postgres']['username']
DATABASE_PASSWORD = config['postgres']['password']
DATABASE_HOST = config['postgres']['host']
if "redis" in config:
CACHES = {
'default': {
'BACKEND': 'redis_cache.RedisCache',
'LOCATION': '%s:%d' % (config['redis']['host'], int(config['redis']['port'])),
'OPTIONS': {
'DB': 1,
'PASSWORD': config['redis']['password'],
'PARSER_CLASS': 'redis.connection.HiredisParser'
},
},
}
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# On Unix systems, a value of None will cause Django to use the same
# timezone as the operating system.
# If running in a Windows environment this must be set to the same as your
# system time zone.
TIME_ZONE = 'America/Chicago'
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'en-us'
SITE_ID = 1
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True
# If you set this to False, Django will not format dates, numbers and
# calendars according to the current locale
USE_L10N = True
# Absolute filesystem path to the directory that will hold user-uploaded files.
# Example: "/home/media/media.lawrence.com/media/"
MEDIA_ROOT = ''
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash.
# Examples: "http://media.lawrence.com/media/", "http://example.com/media/"
MEDIA_URL = ''
# Absolute path to the directory static files should be collected to.
# Don't put anything in this directory yourself; store your static files
# in apps' "static/" subdirectories and in STATICFILES_DIRS.
# Example: "/home/media/media.lawrence.com/static/"
STATIC_ROOT = ''
# URL prefix for static files.
# Example: "http://media.lawrence.com/static/"
STATIC_URL = '/static/'
# URL prefix for admin static files -- CSS, JavaScript and images.
# Make sure to use a trailing slash.
# Examples: "http://foo.com/static/admin/", "/static/admin/".
ADMIN_MEDIA_PREFIX = '/static/admin/'
# Additional locations of static files
STATICFILES_DIRS = (
# Put strings here, like "/home/html/static" or "C:/www/django/static".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
)
PROJECT_ROOT = os.path.abspath(os.path.dirname(__file__))
# List of finder classes that know how to find static files in
# various locations.
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
# 'django.contrib.staticfiles.finders.DefaultStorageFinder',
)
# Make this unique, and don't share it with anybody.
SECRET_KEY = 'm0t#tbp@@in7@0k!+&b*7@0xx7w5s&e9mp1g5b2t%-1br%o^@7'
# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
# 'django.template.loaders.eggs.Loader',
)
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
)
ROOT_URLCONF = 'isthatanearthquake.urls'
TEMPLATE_DIRS = (
os.path.join(PROJECT_ROOT, "../templates"),
)
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.gis',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'django.contrib.staticfiles',
'django.contrib.admin',
'django.contrib.admindocs',
'quakes',
)
# A sample logging configuration. The only tangible logging
# performed by this configuration is to send an email to
# the site admins on every HTTP 500 error.
# See http://docs.djangoproject.com/en/dev/topics/logging for
# more details on how to customize your logging configuration.
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'handlers': {
'mail_admins': {
'level': 'ERROR',
'class': 'django.utils.log.AdminEmailHandler'
}
},
'loggers': {
'django.request': {
'handlers': ['mail_admins'],
'level': 'ERROR',
'propagate': True,
},
}
}
|
sanacl/GrimoireELK | grimoire/elk/gmane.py | Python | gpl-3.0 | 995 | 0.001005 | #!/usr/bin/python3
# | -*- coding: utf-8 -*-
#
#
# Copyright (C) 2015 Bitergia
#
# This program is free software; you can redistribute it and/or modify
# i | t under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
# Authors:
# Alvaro del Castillo San Felix <acs@bitergia.com>
#
from grimoire.elk.mbox import MBoxEnrich
class GmaneEnrich(MBoxEnrich):
    """Enricher for Gmane mailing-list archives.
    Adds no behaviour of its own: it only exposes the MBox enrichment
    logic under the Gmane backend name.
    """
    pass # GmaneEnrich is the same as MBoxEnrich with a different name
|
millen1m/flask-restplus-server-example | tests/conftest.py | Python | mit | 2,625 | 0.002286 | # encoding: utf-8
# pylint: disable=redefined-outer-name,missing-docstring
import pytest
from tests import utils
from app import create_app
# pytest.yield_fixture is deprecated and removed in pytest >= 6;
# plain pytest.fixture has supported yield fixtures since pytest 3.
@pytest.fixture(scope='session')
def flask_app():
    """Session-scoped Flask app in 'testing' config.
    Creates all tables up front and drops them when the session ends.
    """
    app = create_app(flask_config_name='testing')
    from app.extensions import db
    with app.app_context():
        db.create_all()
        yield app
        db.drop_all()
# yield_fixture -> fixture: deprecated alias, removed in pytest >= 6.
@pytest.fixture()
def db(flask_app):
    # pylint: disable=unused-argument,invalid-name
    """Per-test database handle; uncommitted changes are rolled back
    after each test."""
    from app.extensions import db as db_instance
    yield db_instance
    db_instance.session.rollback()
@pytest.fixture(scope='session')
def flask_app_client(flask_app):
    """Session-scoped test client using the project's AutoAuthFlaskClient
    and JSONResponse helpers."""
    flask_app.test_client_class = utils.AutoAuthFlaskClient
    flask_app.response_class = utils.JSONResponse
    return flask_app.test_client()
# yield_fixture -> fixture: deprecated alias, removed in pytest >= 6.
@pytest.fixture(scope='session')
def regular_user(flask_app):
    # pylint: disable=invalid-name,unused-argument
    """Session-scoped regular user persisted to the DB, removed on teardown."""
    from app.extensions import db
    regular_user_instance = utils.generate_user_instance(
        username='regular_user'
    )
    db.session.add(regular_user_instance)
    db.session.commit()
    yield regular_user_instance
    db.session.delete(regular_user_instance)
    db.session.commit()
# yield_fixture -> fixture: deprecated alias, removed in pytest >= 6.
@pytest.fixture(scope='session')
def readonly_user(flask_app):
    # pylint: disable=invalid-name,unused-argument
    """Session-scoped read-only user (not a regular user), removed on teardown."""
    from app.extensions import db
    readonly_user_instance = utils.generate_user_instance(
        username='readonly_user',
        is_regular_user=False
    )
    db.session.add(readonly_user_instance)
    db.session.commit()
    yield readonly_user_instance
    db.session.delete(readonly_user_instance)
    db.session.commit()
# yield_fixture -> fixture: deprecated alias, removed in pytest >= 6.
@pytest.fixture(scope='session')
def admin_user(flask_app):
    # pylint: disable=invalid-name,unused-argument
    """Session-scoped admin user persisted to the DB, removed on teardown."""
    from app.extensions import db
    admin_user_instance = utils.generate_user_instance(
        username='admin_user',
        is_admin=True
    )
    db.session.add(admin_user_instance)
    db.session.commit()
    yield admin_user_instance
    db.session.delete(admin_user_instance)
    db.session.commit()
# yield_fixture -> fixture: deprecated alias, removed in pytest >= 6.
@pytest.fixture(scope='session')
def internal_user(flask_app):
    # pylint: disable=invalid-name,unused-argument
    """Session-scoped internal (non-regular, non-admin, active) user,
    removed on teardown."""
    from app.extensions import db
    internal_user_instance = utils.generate_user_instance(
        username='internal_user',
        is_regular_user=False,
        is_admin=False,
        is_active=True,
        is_internal=True
    )
    db.session.add(internal_user_instance)
    db.session.commit()
    yield internal_user_instance
    db.session.delete(internal_user_instance)
    db.session.commit()
|
numericube/twistranet | twistranet/notifier/models.py | Python | agpl-3.0 | 2,203 | 0.005901 | """
This is the content used as a notification.
"""
import pickle
from django.db import models
from django.utils.translation import ugettext as _
from twistranet.twistapp.models import Twistable, Content
from twistranet.twistapp.lib import permissions
class Notification(Content):
    """
    ACCOUNT did WHAT [on ACCOUNT/CONTENT].
    This is an internal system message, available to people following either the first or second mentioned account.
    It's meant to be posted by SystemAccount only.
    Author is usually SystemAccount.
    Publisher is usually the community (or account) this content belongs to.
    """
    # Parameters as a dict, pickled into a text column (see get/set below).
    _encoded_parameters = models.TextField()
    # Other type configuration stuff: notifications have no creation templates.
    type_text_template_creation = None
    type_html_template_creation = None
    # View / permissions overriding support
    permission_templates = permissions.ephemeral_templates
    type_summary_view = "content/summary.notification.part.html"
    type_detail_view = None
    def get_parameters(self,):
        """
        Unpickle parameters stored in _encoded_parameters (empty dict if unset).
        """
        if self._encoded_parameters:
            p = self._encoded_parameters
            if isinstance(p, unicode):
                # pickle.loads expects a byte string (Python 2); the default
                # protocol-0 pickle written below is ASCII-only, so this
                # encode is lossless.
                p = p.encode('ascii')
            return pickle.loads(p)
        else:
            return {}
    def set_parameters(self, d):
        """
        Pickle parameters into the _encoded_parameters text column.
        """
        if not isinstance(d, dict):
            raise TypeError("parameters must be a dict of picklable values")
        self._encoded_parameters = pickle.dumps(d)
    parameters = property(get_parameters, set_parameters)
    @property
    def message(self,):
        """
        Render the human-readable message for this notification.
        We do that by de-referencing parameters and then mixing them into
        the description template.
        XXX HEAVILY CACHE THIS !!!
        """
        n_dict = {}
        try:
            # Each parameter value is a Twistable id; replace it with the
            # referenced object's HTML link before formatting.
            for k,v in self.parameters.items():
                n_dict[k] = Twistable.objects.get(id = v).html_link
        except Twistable.DoesNotExist:
            # A referenced object vanished: the notification can't be rendered.
            return None
        return _(self.description) % n_dict
    class Meta:
        app_label = 'twistapp'
|
bsipocz/astropy | astropy/table/__init__.py | Python | bsd-3-clause | 2,572 | 0.004277 | # Licensed under a 3-clause BSD style license - see LICENSE.rst
from astropy import config as _config
class Conf(_config.ConfigNamespace):
    """
    Configuration parameters for `astropy.table`.
    """
    # Template used to auto-name otherwise-unnamed columns ('col0', 'col1', ...).
    auto_colname = _config.ConfigItem(
        'col{0}',
        'The template that determines the name of a column if it cannot be '
        'determined. Uses new-style (format method) string formatting.',
        aliases=['astropy.table.column.auto_colname'])
    default_notebook_table_class = _config.ConfigItem(
        'table-striped table-bordered table-condensed',
        'The table class to be used in Jupyter notebooks when displaying '
        'tables (and not overridden). See <http://getbootstrap.com/css/#tables '
        'for a list of useful bootstrap classes.')
    replace_warnings = _config.ConfigItem(
        ['slice'],
        'List of conditions for issuing a warning when replacing a table '
        "column using setitem, e.g. t['a'] = value. Allowed options are "
        "'always', 'slice', 'refcount', 'attributes'.",
        'list',
    )
    replace_inplace = _config.ConfigItem(
        False,
        'Always use in-place update of a table column when using setitem, '
        "e.g. t['a'] = value. This overrides the default behavior of "
        "replacing the column entirely with the new value when possible. "
        "This configuration option will be deprecated and then removed in "
        "subsequent major releases."
    )
# Module-level singleton through which the settings above are accessed.
conf = Conf()
from .column import Column, MaskedColumn, StringTruncateWarning, ColumnInfo
from .groups import TableGroups, ColumnGroups
from .table import (Table, QTable, Tab | leColumns, Row, TableFormatter,
NdarrayMixin, TableReplaceWarning)
from .operations import join, setdiff, hstack, cstack, vstack, unique, TableMergeError
from .bst import BST, FastBST, FastRBT
from .sorted_array import SortedArray
from .soco import SCEngine
from .serialize import SerializedColumn, represent_mixins_as_columns
# Finally | import the formats for the read and write method but delay building
# the documentation until all are loaded. (#5275)
from astropy.io import registry
with registry.delay_doc_updates(Table):
# Import routines that connect readers/writers to astropy.table
from .jsviewer import JSViewer
from astropy.io.ascii import connect
from astropy.io.fits import connect
from astropy.io.misc import connect
from astropy.io.votable import connect
from astropy.io.misc.asdf import connect
from astropy.io.misc.pandas import connect
|
ric2b/Vivaldi-browser | chromium/third_party/blink/tools/blinkpy/web_tests/port/base.py | Python | bsd-3-clause | 99,778 | 0.001032 | # Copyright (C) 2010 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the Google name nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) | ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Abstract base class for Port classes.
The Port classes encapsulate Port-specific (platform-specific) behavior
in the web test infrastructure.
" | ""
import time
import collections
import json
import logging
import optparse
import os
import re
import sys
import tempfile
from collections import defaultdict
import six
from six.moves import zip_longest
from blinkpy.common import exit_codes
from blinkpy.common import find_files
from blinkpy.common import read_checksum_from_png
from blinkpy.common import path_finder
from blinkpy.common.memoized import memoized
from blinkpy.common.system.executive import ScriptError
from blinkpy.common.system.path import abspath_to_uri
from blinkpy.w3c.wpt_manifest import WPTManifest, MANIFEST_NAME
from blinkpy.web_tests.layout_package.bot_test_expectations import BotTestExpectationsFactory
from blinkpy.web_tests.models.test_configuration import TestConfiguration
from blinkpy.web_tests.models.test_run_results import TestRunException
from blinkpy.web_tests.models.typ_types import TestExpectations, ResultType
from blinkpy.web_tests.port import driver
from blinkpy.web_tests.port import server_process
from blinkpy.web_tests.port.factory import PortFactory
from blinkpy.web_tests.servers import apache_http
from blinkpy.web_tests.servers import pywebsocket
from blinkpy.web_tests.servers import wptserve
_log = logging.getLogger(__name__)
# Path relative to the build directory.
CONTENT_SHELL_FONTS_DIR = "test_fonts"
FONT_FILES = [
[[CONTENT_SHELL_FONTS_DIR], 'Ahem.ttf', None],
[[CONTENT_SHELL_FONTS_DIR], 'Arimo-Bold.ttf', None],
[[CONTENT_SHELL_FONTS_DIR], 'Arimo-BoldItalic.ttf', None],
[[CONTENT_SHELL_FONTS_DIR], 'Arimo-Italic.ttf', None],
[[CONTENT_SHELL_FONTS_DIR], 'Arimo-Regular.ttf', None],
[[CONTENT_SHELL_FONTS_DIR], 'Cousine-Bold.ttf', None],
[[CONTENT_SHELL_FONTS_DIR], 'Cousine-BoldItalic.ttf', None],
[[CONTENT_SHELL_FONTS_DIR], 'Cousine-Italic.ttf', None],
[[CONTENT_SHELL_FONTS_DIR], 'Cousine-Regular.ttf', None],
[[CONTENT_SHELL_FONTS_DIR], 'DejaVuSans.ttf', None],
[[CONTENT_SHELL_FONTS_DIR], 'GardinerModBug.ttf', None],
[[CONTENT_SHELL_FONTS_DIR], 'GardinerModCat.ttf', None],
[[CONTENT_SHELL_FONTS_DIR], 'Garuda.ttf', None],
[[CONTENT_SHELL_FONTS_DIR], 'Gelasio-Bold.ttf', None],
[[CONTENT_SHELL_FONTS_DIR], 'Gelasio-BoldItalic.ttf', None],
[[CONTENT_SHELL_FONTS_DIR], 'Gelasio-Italic.ttf', None],
[[CONTENT_SHELL_FONTS_DIR], 'Gelasio-Regular.ttf', None],
[[CONTENT_SHELL_FONTS_DIR], 'Lohit-Devanagari.ttf', None],
[[CONTENT_SHELL_FONTS_DIR], 'Lohit-Gurmukhi.ttf', None],
[[CONTENT_SHELL_FONTS_DIR], 'Lohit-Tamil.ttf', None],
[[CONTENT_SHELL_FONTS_DIR], 'MuktiNarrow.ttf', None],
[[CONTENT_SHELL_FONTS_DIR], 'NotoColorEmoji.ttf', None],
[[CONTENT_SHELL_FONTS_DIR], 'NotoSansCJKjp-Regular.otf', None],
[[CONTENT_SHELL_FONTS_DIR], 'NotoSansKhmer-Regular.ttf', None],
[[CONTENT_SHELL_FONTS_DIR], 'NotoSansSymbols2-Regular.ttf', None],
[[CONTENT_SHELL_FONTS_DIR], 'NotoSansTibetan-Regular.ttf', None],
[[CONTENT_SHELL_FONTS_DIR], 'Tinos-Bold.ttf', None],
[[CONTENT_SHELL_FONTS_DIR], 'Tinos-BoldItalic.ttf', None],
[[CONTENT_SHELL_FONTS_DIR], 'Tinos-Italic.ttf', None],
[[CONTENT_SHELL_FONTS_DIR], 'Tinos-Regular.ttf', None],
]
# This is the fingerprint of wpt's certificate found in
# blinkpy/third_party/wpt/certs. The following line is updated by
# update_cert.py.
WPT_FINGERPRINT = 'Nxvaj3+bY3oVrTc+Jp7m3E3sB1n3lXtnMDCyBsqEXiY='
# One for 127.0.0.1.sxg.pem
SXG_FINGERPRINT = '55qC1nKu2A88ESbFmk5sTPQS/ScG+8DD7P+2bgFA9iM='
# And one for external/wpt/signed-exchange/resources/127.0.0.1.sxg.pem
SXG_WPT_FINGERPRINT = '0Rt4mT6SJXojEMHTnKnlJ/hBKMBcI4kteBlhR1eTTdk='
# A convervative rule for names that are valid for file or directory names.
VALID_FILE_NAME_REGEX = re.compile(r'^[\w\-=]+$')
# This sub directory will be inside the results directory and it will
# contain all the disc artifacts created by web tests
ARTIFACTS_SUB_DIR = 'layout-test-results'
class Port(object):
"""Abstract class for Port-specific hooks for the web_test package."""
# Subclasses override this. This should indicate the basic implementation
# part of the port name, e.g., 'mac', 'win', 'gtk'; there is one unique
# value per class.
# FIXME: Rename this to avoid confusion with the "full port name".
port_name = None
# Test paths use forward slash as separator on all platforms.
TEST_PATH_SEPARATOR = '/'
ALL_BUILD_TYPES = ('debug', 'release')
CONTENT_SHELL_NAME = 'content_shell'
# Update the first line in third_party/blink/web_tests/TestExpectations and
# the documentation in docs/testing/web_test_expectations.md when this list
# changes.
ALL_SYSTEMS = (
('mac10.12', 'x86'),
('mac10.13', 'x86'),
('mac10.14', 'x86'),
('mac10.15', 'x86'),
('mac11', 'x86'),
('mac11-arm64', 'arm64'),
('win7', 'x86'),
('win10.20h2', 'x86'),
('trusty', 'x86_64'),
('fuchsia', 'x86_64'),
)
CONFIGURATION_SPECIFIER_MACROS = {
'mac': [
'mac10.12', 'mac10.13', 'mac10.14', 'mac10.15', 'mac11',
'mac11-arm64'
],
'win': ['win7', 'win10.20h2'],
'linux': ['trusty'],
'fuchsia': ['fuchsia'],
}
# List of ports open on the host that the tests will connect to. When tests
# run on a separate machine (Android and Fuchsia) these ports need to be
# forwarded back to the host.
# 8000, 8080 and 8443 are for http/https tests;
# 8880 is for websocket tests (see apache_http.py and pywebsocket.py).
# 8001, 8081, 8444, and 8445 are for http/https WPT;
# 9001 and 9444 are for websocket WPT (see wptserve.py).
SERVER_PORTS = [8000, 8001, 8080, 8081, 8443, 8444, 8445, 8880, 9001, 9444]
FALLBACK_PATHS = {}
SUPPORTED_VERSIONS = []
# URL to the build requirements page.
BUILD_REQUIREMENTS_URL = ''
# The suffixes of baseline files (not extensions).
BASELINE_SUFFIX = '-expected'
BASELINE_MISMATCH_SUFFIX = '-expected-mismatch'
# All of the non-reftest baseline extensions we use.
BASELINE_EXTENSIONS = ('.wav', '.txt', '.png')
FLAG_EXPECTATIONS_PREFIX = 'FlagExpectations'
    # The following is used for concatenating WebDriver test names.
WEBDRIVER_SUBTEST_SEPARATOR = '>>'
    # The following is used for concatenating WebDriver test names in pytest format.
WEBDRIVER_SUBTEST_PYTEST_SEPARATOR = '::'
# The following two constants must match. When adding a new WPT root, also
# remember to add an ali |
kodi-addons/plugin.video.espn_3 | resources/lib/adobe_activate_api.py | Python | gpl-3.0 | 11,244 | 0.002046 | import urlparse
import urllib
import uuid
import hashlib
import hmac
import base64
import urllib2
import time
import json
import gzip
import os
import cookielib
from StringIO import StringIO
import requests
import xbmc
from globals import ADDON_PATH_PROFILE
# File (inside the add-on profile directory) where tokens/settings persist.
SETTINGS_FILE = 'adobe.json'
# Spoof the Apple TV client UA; the Adobe endpoints used below are the ones
# the appletv device type talks to.
UA_ATV = 'AppleCoreMedia/1.0.0.13Y234 (Apple TV; U; CPU OS 9_2 like Mac OS X; en_us)'
# Log prefix for all xbmc.log() calls in this module.
TAG = 'ESPN3-adobe-api: '
# Shared session so keep-alive and these default headers apply to all calls.
adobe_session = requests.Session()
adobe_session.headers.update({
    'Accept': 'application/json',
    'Accept-Encoding': 'gzip, deflate',
    'Accept-Language': 'en-us',
    'Content-Type': 'application/x-www-form-urlencoded; charset=UTF-8',
    'User-Agent': UA_ATV
})
class AuthorizationException(Exception):
    """Raised when the Adobe service refuses or has expired this device's
    authentication/authorization (e.g. HTTP-status 403/410 payloads)."""
    pass
def reset_settings():
    """Wipe all persisted Adobe state back to an empty settings file."""
    save_settings({})
def save_settings(settings):
    """Serialize *settings* to the add-on profile's adobe.json file."""
    target = os.path.join(ADDON_PATH_PROFILE, SETTINGS_FILE)
    with open(target, 'w') as handle:
        json.dump(settings, handle, sort_keys=False, indent=4)
def load_settings():
    """Read adobe.json from the profile dir, creating an empty one on first use."""
    path = os.path.join(ADDON_PATH_PROFILE, SETTINGS_FILE)
    if not os.path.isfile(path):
        # First run: persist an empty dict so the file always exists.
        save_settings({})
    with open(path, 'r') as handle:
        return json.load(handle)
def get_device_id():
    """Return the persistent UUID identifying this device, minting it once."""
    settings = load_settings()
    try:
        return settings['device_id']
    except KeyError:
        # First call ever: generate a uuid1 and remember it across runs.
        device_id = str(uuid.uuid1())
        settings['device_id'] = device_id
        save_settings(settings)
        return device_id
def is_expired(expiration):
    """Return True if the millisecond-epoch timestamp *expiration* has passed."""
    now_ms = time.time() * 1000
    return now_ms >= int(expiration)
def get_url_response(url, message, body=None, method=None):
    """Issue an API request carrying *message* as the Authorization header.

    ``method`` selects DELETE or POST; anything else (including ``None``)
    results in a GET.  ``body`` is accepted for interface compatibility but
    is never transmitted.  Returns the decoded JSON payload.
    """
    auth_header = {'Authorization': message}
    if method == 'DELETE':
        response = requests.delete(url, headers=auth_header)
    elif method == 'POST':
        response = adobe_session.post(url, headers=auth_header)
    else:
        response = adobe_session.get(url, headers=auth_header)
    return response.json()
def generate_message(method, path):
    """Build the Adobe API ``Authorization`` header value for one request.

    The header embeds the HTTP *method*, a fresh nonce, the request time in
    epoch milliseconds and the request *path*, then appends an HMAC-SHA1
    signature of that exact string plus the public key.

    NOTE(review): the signing key and public key are hard-coded application
    credentials for the ESPN requestor.  ``hmac.new`` is called with str
    arguments, which works on Python 2 only (Python 3 requires bytes).
    """
    nonce = str(uuid.uuid4())
    today = str(int(time.time() * 1000))
    key = 'gB8HYdEPyezeYbR1'
    message = method + ' requestor_id=ESPN, nonce=' + nonce + ', signature_method=HMAC-SHA1, request_time=' + today + ', request_uri=' + path
    signature = hmac.new(key, message, hashlib.sha1)
    signature = base64.b64encode(signature.digest())
    message = message + ', public_key=yKpsHYd8TOITdTMJHmkJOVmgbb2DykNK, signature=' + signature
    return message
def is_reg_code_valid():
    """Return True when a previously fetched registration code exists and is
    still unexpired in the persisted settings."""
    settings = load_settings()
    reg_code = settings.get('generateRegCode')
    if reg_code is None:
        xbmc.log(TAG + 'Unable to find reg code', xbmc.LOGDEBUG)
        return False
    expiration = reg_code['expires']
    if is_expired(expiration):
        xbmc.log(TAG + 'Reg code is expired at %s' % expiration, xbmc.LOGDEBUG)
        return False
    return True
# Gets called when the user wants to authorize this device, it returns a registration code to enter
# on the activation website page
# Sample : '{"id":"","code":"","requestor":"ESPN","generated":1463616806831,
# "expires":1463618606831,"info":{"deviceId":"","deviceType":"appletv","deviceUser":null,
# "appId":null,"appVersion":null,"registrationURL":null}}'
# (generateRegCode)
def get_regcode():
    """Request a new activation (registration) code from the Adobe reggie
    service.

    The short code is shown to the user, who enters it on the provider's
    activation web page.  The full response is persisted under the
    ``generateRegCode`` key so its expiry can be checked later.

    :returns: the registration code string.
    """
    params = urllib.urlencode(
        {'deviceId': get_device_id(),
         'deviceType': 'appletv',
         'ttl': '1800'})
    path = '/regcode'
    url = urlparse.urlunsplit(['https', 'api.auth.adobe.com',
                               'reggie/v1/ESPN' + path,
                               params, ''])
    message = generate_message('POST', path)
    resp = get_url_response(url, message, dict(), 'POST')
    settings = load_settings()
    settings['generateRegCode'] = resp
    save_settings(settings)
    return resp['code']
# Authenticates the user after they have been authenticated on the activation website (authenticateRegCode)
# Sample: '{"mvpd":"","requestor":"ESPN","userId":"","expires":"1466208969000"}'
def authenticate(regcode):
    """Exchange an activated registration code for an authentication token.

    Called after the user has entered *regcode* on the provider's activation
    site.  The response is persisted under ``authenticateRegCode``.

    NOTE(review): the response is only stored, never returned, and no error
    status is checked here -- confirm callers rely on that.
    """
    params = urllib.urlencode({'requestor': 'ESPN'})
    path = '/authenticate/' + regcode
    url = urlparse.urlunsplit(['https', 'api.auth.adobe.com',
                               'api/v1' + path,
                               params, ''])
    message = generate_message('GET', path)
    resp = get_url_response(url, message)
    settings = load_settings()
    settings['authenticateRegCode'] = resp
    save_settings(settings)
# Get authn token (re-auth device after it expires), getAuthnToken
def re_authenticate():
    """Refresh the device's authentication (authn) token after it expires.

    On success the new token replaces ``authenticateRegCode`` and any cached
    authorizations are dropped, since they were tied to the old token.

    :raises: AuthorizationException when the service reports status 410.
    """
    params = urllib.urlencode({'requestor': 'ESPN',
                               'deviceId': get_device_id()})
    path = '/tokens/authn'
    url = urlparse.urlunsplit(['https', 'api.auth.adobe.com',
                               'api/v1' + path,
                               params, ''])
    message = generate_message('GET', path)
    xbmc.log(TAG + 'Attempting to re-authenticate the device', xbmc.LOGDEBUG)
    resp = get_url_response(url, message)
    # NOTE(review): status is compared as the string '410' here but as the
    # int 403 in authorize(); confirm which form the API actually returns.
    if 'status' in resp and resp['status'] == '410':
        raise AuthorizationException()
    settings = load_settings()
    settings['authenticateRegCode'] = resp
    if 'authorize' in settings:
        # Authorizations granted under the old token are no longer valid.
        del settings['authorize']
    xbmc.log(TAG + 'Re-authenticated device', xbmc.LOGDEBUG)
    save_settings(settings)
def get_resource(channel, event_name, event_guid, event_parental_rating):
    """Assemble the media-RSS document Adobe expects as an authorization
    resource, embedding channel, event title, guid and parental rating."""
    return ''.join([
        '<rss version="2.0" xmlns:media="http://search.yahoo.com/mrss/"><channel><title><![CDATA[',
        channel,
        ']]></title><item><title><![CDATA[',
        event_name,
        ']]></title><guid><![CDATA[',
        event_guid,
        ']]></guid><media:rating scheme="urn:v-chip"><![CDATA[',
        event_parental_rating,
        ']]></media:rating></item></channel></rss>',
    ])
# Sample '{"resource":"resource","mvpd":"","requestor":"ESPN","expires":"1463621239000"}'
def authorize(resource):
    """Ask Adobe to authorize playback of *resource* for this device.

    The (successful) response is cached in the settings file under
    ``authorize``, keyed by the resource string, so repeat requests for the
    same resource short-circuit via is_authorized().

    :raises: AuthorizationException when the service returns status 403.
    """
    if is_authorized(resource):
        xbmc.log(TAG + 'already authorized', xbmc.LOGDEBUG)
        return
    params = urllib.urlencode({'requestor': 'ESPN',
                               'deviceId': get_device_id(),
                               'resource': resource})
    path = '/authorize'
    url = urlparse.urlunsplit(['https', 'api.auth.adobe.com',
                               'api/v1' + path,
                               params, ''])
    message = generate_message('GET', path)
    resp = get_url_response(url, message)
    settings = load_settings()
    if 'authorize' not in settings:
        settings['authorize'] = dict()
    xbmc.log(TAG + 'resource %s' % resource, xbmc.LOGDEBUG)
    # NOTE(review): status compared as int 403 here but as the string '410'
    # in re_authenticate(); confirm which form the API returns.
    if 'status' in resp and resp['status'] == 403:
        raise AuthorizationException()
    # Python 2 re-encoding: normalize the resource key to UTF-8 bytes
    # before using it as a JSON dict key.
    settings['authorize'][resource.decode('iso-8859-1').encode('utf-8')] = resp
    save_settings(settings)
def deauthorize():
    """Log the device out of the Adobe service and clear local tokens.

    The server-side logout is best-effort: if it fails we still remove the
    cached ``authorize`` and ``authenticateRegCode`` entries so the device
    behaves as logged-out locally.
    """
    params = urllib.urlencode({'deviceId': get_device_id()})
    path = '/logout'
    url = urlparse.urlunsplit(['https', 'api.auth.adobe.com',
                               'api/v1' + path,
                               params, ''])
    message = generate_message('DELETE', path)
    try:
        # Return value intentionally ignored; only success/failure matters.
        get_url_response(url, message, body=None, method='DELETE')
    except Exception:
        # Narrowed from a bare ``except`` so SystemExit/KeyboardInterrupt
        # still propagate; the logout itself remains best-effort.
        xbmc.log(TAG + 'De-authorize failed', xbmc.LOGDEBUG)
    settings = load_settings()
    if 'authorize' in settings:
        del settings['authorize']
    if 'authenticateRegCode' in settings:
        del settings['authenticateRegCode']
    save_settings(settings)
# getShortMediaToken
# Sample '{"mvpdId":"","expires":"1463618218000","serializedToken":"+++++++=","userId":"",
# "requestor":"ESPN","resource":" resource"}'
def get_short_media_token(resource):
if has_to_reauthenticate():
xbmc.log(TAG + 're-authenticating device', xbmc.LOGDEBUG)
re_authenticate()
params = urllib.urlencode({'requestor': 'ESPN',
'deviceId' : get_device_id(),
'resource' : resource})
path = '/mediatoken'
url = urlparse.urlunsplit(['https', 'api.auth.adobe.com',
'api/v1' + path,
params, |
angelinawawrzyniak/bomberman | game.py | Python | mit | 2,898 | 0.00207 | from artifacts.life import Life
from artifacts.points import Point
from artifacts.super_bomb_artifact import SuperBombArtifact
from bomb import Bomb
from brick import Brick
from context import Context
from game_over_error import GameOverError
from monster import Monster
def draw_scene(context, graphic_buffer):
    """Render one frame of the game into *graphic_buffer* and print it.

    Draw order (later layers overwrite earlier ones): board, bricks,
    monsters, user, bombs, portal, artifacts.  When the game is over a
    centered "GAME OVER" banner is overlaid, then the buffer and a status
    line are written to stdout.
    """
    context.board.draw(graphic_buffer, context)
    for brick in context.bricks:
        brick.draw(graphic_buffer, context)
    for monster in context.monsters:
        monster.draw(graphic_buffer, context)
    context.user.draw(graphic_buffer, context)
    for bomb in context.bombs:
        bomb.draw(graphic_buffer, context)
    if context.portal is not None:
        context.portal.draw(graphic_buffer, context)
    for artifact in context.artifacts:
        artifact.draw(graphic_buffer, context)
    if context.game_over:
        # Overlay "GAME OVER" letter-by-letter on the buffer's middle row.
        letters = list('GAME OVER')
        offset = int((len(graphic_buffer[0]) - len(letters)) / 2)
        for x in range(0, len(letters)):
            graphic_buffer[int(len(graphic_buffer) / 2)][x + offset] = letters[x]
    for row in graphic_buffer:
        print(' '.join(row))
    print('Level: {}, Lives: {}, Points: {}'.format(context.game_level, context.user.life,
                                                    context.user.points))
    for bomb in context.bombs:
        print('Bomb time: {}'.format(bomb.time))
def remove_elements(context):
    """Remove every object queued on ``context.dead_list`` from the
    collection that owns its type, then clear the queue."""
    # Each element type lives in a specific container; every matching
    # (type, container) pair is checked independently, exactly mirroring
    # the original chain of isinstance tests.
    containers = (
        (Bomb, context.bombs),
        (Brick, context.bricks),
        (Life, context.artifacts),
        (SuperBombArtifact, context.artifacts),
        (Point, context.artifacts),
        (Monster, context.monsters),
    )
    for element in context.dead_list:
        for element_type, container in containers:
            if isinstance(element, element_type):
                container.remove(element)
    context.dead_list = []
# --- main game loop -------------------------------------------------------
context = Context()
# Character buffer mirroring the board dimensions; draw_scene repaints it
# in place every frame.
graphic_buffer = [
    [' ' for index_x in range(len(context.board.fields[index_y]))] for index_y in range(len(context.board.fields))
]
while True:
    draw_scene(context, graphic_buffer)
    context.user.make_step(context)
    for artifact in context.artifacts:
        artifact.make_step(context)
    # Stepping onto the portal advances the level immediately; the
    # `continue` skips bomb ticks and dead-element cleanup for this frame.
    if context.portal is not None:
        if (context.user.y, context.user.x) == (context.portal.y, context.portal.x):
            context.level_up()
            continue
    try:
        for bomb in context.bombs:
            bomb.make_step(context)
    except GameOverError as error:  # NOTE(review): `error` is never used.
        # A bomb killed the user with no lives left: show the final frame
        # (with the GAME OVER banner) and exit the loop.
        context.game_over = True
        draw_scene(context, graphic_buffer)
        break
    remove_elements(context)
# TODO:
# super bomb - bigger range of bomb explosion
# monster - moving, decrease user life when monster is on user
# user - decrease user life when user is on monster
# unit tests
jwbernin/dns-sanity-checker | bindripper.py | Python | gpl-2.0 | 8,378 | 0.015039 | #!/usr/bin/python
import sys
import os
import pprint
import copy
class BINDRipper:
    """Recursive-descent parser for BIND ``named.conf``-style files.

    Parsing happens in three phases:

    1. ``uncommentAndStore`` strips ``#``, ``//`` and ``/* */`` comments and
       flattens the whole input into the single string ``cfgStr``;
    2. ``tokenize`` pads ``}`` and ``;`` with spaces and splits on
       whitespace into ``cfgTokens``;
    3. ``parse`` walks the token stream via ``consume``/``peek`` and builds
       a nested dict in ``self.config`` (top-level keys: 'acls', 'options',
       'logging', 'views', 'zones').

    NOTE(review): uses ``dict.has_key`` and ``print`` statements, so this
    module is Python 2 only.
    """
    def __init__(self):
        # Token stream produced by tokenize() plus a cursor into it.
        self.cfgTokens = []
        self.cfgStr = ""
        self.curToken = 0
        self.saveToken = 0  # NOTE(review): written here but never read.
        self.config = {}
        return None
    def consume(self):
        """Return the current token and advance the cursor."""
        _item = self.cfgTokens[self.curToken]
        self.curToken+=1
        return _item
    def peek(self):
        """Return the current token without advancing the cursor."""
        return self.cfgTokens[self.curToken]
    def tokensAvail(self):
        """True while unconsumed tokens remain."""
        return self.curToken<len(self.cfgTokens)
    def parseFile(self, filename):
        """Read, tokenize and parse *filename*.

        Missing, non-regular or unreadable files are silently ignored.
        """
        self.filename = filename
        if not os.path.exists(self.filename):
            return
        if not os.path.isfile(self.filename):
            return
        try:
            self.fp = open(self.filename)
        except:
            return
        self.uncommentAndStore(self.fp.read())
        self.tokenize()
        self.parse()
    def tokenize(self):
        """Split the flattened config string into tokens; '}' and ';' are
        forced to be stand-alone tokens."""
        # Yes, this is an ugly hack of a tokenizer. Feel free to fix.
        self.cfgStr = self.cfgStr.replace('}', ' } ')
        self.cfgStr = self.cfgStr.replace(';', ' ; ')
        self.cfgTokens = self.cfgStr.split()
    def uncommentAndStore(self, string):
        """Strip '#', '//' and '/* */' comments from *string* and append the
        remaining text to ``cfgStr``, space-separated.

        NOTE(review): comment markers inside quoted strings are not
        recognized, and a '/* ... */' that opens and closes on the same line
        leaves the parser stuck in the in-comment state.
        """
        inComment = 0
        for line in string.split('\n'):
            line = line.strip()
            if line.count('#')>0:
                self.cfgStr += line.split('#')[0].strip()+' '
                continue
            if line.count('//')>0:
                self.cfgStr += line.split('//')[0].strip()+' '
                continue
            if line.count('/*')>0:
                inComment = 1
                self.cfgStr += line.split('/*')[0].strip()+' '
                continue
            if inComment and line.count('*/')==0:
                continue
            if inComment and line.count('*/')>0:
                inComment = 0
                self.cfgStr += line.split('*/')[1].strip()+' '
                continue
            self.cfgStr += line + ' '
        return
    def ripAcl(self, aclname):
        """Consume one '{ ... };' acl body and return its entries as a list.

        Also makes sure ``self.config['acls']`` exists, which parse() relies
        on when it assigns the returned list.
        """
        aclItems = []
        if self.consume() != '{':
            raise Exception("Parsing error - is it the file or the parser?")
        token = self.consume()
        while token != '}':
            if token == ';':
                token = self.consume()
                continue
            else:
                aclItems.append(token)
            token = self.consume()
        if self.peek() == ';':
            self.consume()
        if not self.config.has_key('acls'):
            self.config['acls'] = {}
        return aclItems
    def ripList(self):
        """Consume a '{ item; item; ... };' block and return the items."""
        toRet = []
        token = self.consume()
        if token != '{':
            raise Exception("Parser fail - we should be in a list??")
        token = self.consume()
        while token != '}':
            if token == ';':
                token = self.consume()
                continue
            toRet.append(token)
            token = self.consume()
        token = self.consume()
        if token != ';':
            raise Exception("Parser fail - exiting list")
        return toRet
    def ripOptions(self):
        """Consume an options '{ key value; ... };' block into a dict; a
        value may itself be a '{...}' list."""
        options = {}
        if self.consume() != '{':
            raise Exception("Parsing error - options fail")
        token = self.consume()
        while token != '}':
            if token == ';':
                token = self.consume()
                continue
            key = token
            if self.peek() == '{':
                value = self.ripList()
            else:
                value = self.consume()
            options[key] = value
            token = self.consume()
        if self.consume() != ';':
            raise Exception("Parser fail - options close fail")
        return options
    def parseString(self, string):
        """Parse configuration text supplied directly instead of via a file."""
        self.uncommentAndStore(string)
        self.tokenize()
        self.parse()
    def parse(self):
        """Top-level dispatcher: walk the token stream and populate
        ``self.config``; 'include' files are parsed by a child BINDRipper
        and their config merged in."""
        while self.tokensAvail():
            token = self.consume()
            if token == 'acl':
                aclname = self.consume()
                # RHS runs first: ripAcl() creates config['acls'] if needed.
                self.config['acls'][aclname] = self.ripAcl(aclname)
            elif token == 'include':
                subParser = BINDRipper()
                filename = self.consume()
                if self.peek() == ';':
                    token = self.consume()
                # filename token still carries its quotes; strip them.
                subParser.parseFile(filename[1:-1])
                subConfig = subParser.getConfig()
                for key in subConfig.keys():
                    if self.config.has_key(key):
                        for otherkey in subConfig[key].keys():
                            self.config[key][otherkey] = copy.deepcopy(subConfig[key][otherkey])
                    else:
                        self.config[key] = copy.deepcopy(subConfig[key])
            elif token == 'options':
                self.config['options'] = self.ripOptions()
            elif token == 'logging':
                self.config['logging'] = self.ripLogging()
            elif token == 'view':
                if not self.config.has_key('views'):
                    self.config['views'] = {}
                viewName = self.consume()
                self.config['views'][viewName] = self.ripView()
            elif token == 'zone':
                if not self.config.has_key('zones'):
                    self.config['zones'] = {}
                zoneName = self.consume()
                self.config['zones'][zoneName[1:-1]] = self.ripZone(zoneName)
            else:
                print " ****** UNKNOWN TOKEN: >"+token+"<"
        return
    def ripView(self):
        """Consume a view '{ ... };' block: match-clients, allow-transfer
        and nested zones.

        NOTE(review): any other statement inside a view is not consumed,
        which would loop forever; confirm inputs only use these three.
        """
        thisView = {}
        if self.consume() != '{':
            raise Exception("Parse fail - ripping view entry abort")
        token = self.consume()
        while token != '}':
            if token == 'match-clients':
                thisView['matchclients'] = self.getMatches()
                token = self.consume()
            elif token == 'allow-transfer':
                thisView['allowtransfers'] = self.ripList()
                token = self.consume()
            elif token == 'zone':
                zoneName = self.consume()
                if not thisView.has_key('zones'):
                    thisView['zones'] = {}
                thisView['zones'][zoneName[1:-1]] = self.ripZone(zoneName)
                token = self.consume()
        token = self.consume()
        if token != ';':
            raise Exception("Parse fail - exit view")
        return thisView
    def getMatches(self):
        """Consume a match-clients '{ ... };' body, splitting entries into
        'positives' and '!'-prefixed 'negatives'."""
        toRet = {}
        toRet['positives'] = []
        toRet['negatives'] = []
        token = self.consume()
        if token != '{':
            raise Exception("Parser fail - match entry fail")
        token = self.consume()
        while token != '}':
            if token == ';':
                token = self.consume()
                continue
            if token == '!':
                toRet['negatives'].append(self.consume())
            else:
                toRet['positives'].append(token)
            token = self.consume()
        token = self.consume()
        if token != ';':
            raise Exception("Parser fail - exiting matches")
        return toRet
        pass  # NOTE(review): unreachable.
    def ripZone(self, zoneName):
        """Consume one zone declaration; the class token (e.g. 'in') plus
        'key value;' / 'key {...};' pairs.  Zones whose name contains
        'in-addr.arpa' are tagged direction=reverse, otherwise forward."""
        zone = {}
        if zoneName.count("in-addr.arpa")>0:
            zone['direction'] = 'reverse'
        else:
            zone['direction'] = 'forward'
        token = self.consume()
        zone['zoneClass'] = token
        token = self.consume()
        if token != '{':
            raise Exception("Parse fail - zone entry fail")
        token = self.consume()
        while token != '}':
            if token == ';':
                token = self.consume()
                continue
            key = token
            if self.peek() == '{':
                value = self.ripList()
            else:
                value = self.consume()
            zone[key] = value
            token = self.consume()
        if self.consume() != ';':
            raise Exception("Parse fail - end zone fail")
        return zone
    def ripLogChannel(self):
        """Consume a logging channel '{ key value; ... };' body into a dict."""
        toRet = {}
        token = self.consume()
        if token != '{':
            raise Exception("Parse fail - channel rip")
        token = self.consume()
        while token != '}':
            key = token
            value = self.consume()
            term = self.consume()
            if term != ';':
                raise Exception("Parse fail - oops in channel rip")
            toRet[key] = value
            token = self.consume()
        # token = '}'
        token = self.consume()
        # token = ';'
        return toRet;
    def ripLogging(self):
        """Consume the logging '{ ... };' block: named channels plus
        category -> destination-list mappings."""
        logging = {}
        logging['channels'] = {}
        logging['categories'] = {}
        token = self.consume()
        if token != '{':
            raise Exception("Parse fail - enter logging")
        token = self.consume()
        while token != '}':
            if token == 'channel':
                channelName = self.consume()
                logging['channels'][channelName] = self.ripLogChannel()
            elif token == 'category':
                catName = self.consume()
                logging['categories'][catName] = self.ripList()
            else:
                raise Exception("Parser fail - something not channel or category in logging")
            token = self.consume()
        token = self.consume()
        if token != ';':
            raise Exception("Parser fail - closing logging")
        return logging
    def printConfig(self):
        """Pretty-print the accumulated configuration dict."""
        pprint.pprint(self.config)
    def getConfig(self):
        """Return a deep copy of the configuration so callers cannot mutate
        the parser's internal state."""
        return copy.deepcopy(self.config)
def runTests():
# br = BINDRipper()
# br.parseFile('/home/it |
Snergster/virl-salt | openstack/nova/files/kilo/vif.py | Python | gpl-2.0 | 31,926 | 0.000282 | # Copyright (C) 2011 Midokura KK
# Copyright (C) 2011 Nicira, Inc
# Copyright 2011 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""VIF drivers for libvirt."""
import copy
import os
from oslo_concurrency import processutils
from oslo_config import cfg
from oslo_log import log as logging
from nova import exception
from nova.i18n import _
from nova.i18n import _LE
from nova.network import linux_net
from nova.network import model as network_model
from nova import utils
from nova.virt.libvirt import config as vconfig
from nova.virt.libvirt import designer
LOG = logging.getLogger(__name__)
# Registered under the [libvirt] group: when True (default), bridge-backed
# VIFs on KVM/QEMU guests default to the virtio model in get_base_config().
libvirt_vif_opts = [
    cfg.BoolOpt('use_virtio_for_bridges',
                default=True,
                help='Use virtio for bridge interfaces with KVM/QEMU'),
]
CONF = cfg.CONF
CONF.register_opts(libvirt_vif_opts, 'libvirt')
CONF.import_opt('use_ipv6', 'nova.netconf')
# Prefix for ethN-style guest device names.
DEV_PREFIX_ETH = 'eth'
def is_vif_model_valid_for_virt(virt_type, vif_model):
    """Return True when *vif_model* is usable with the given *virt_type*.

    A ``None`` model always validates (libvirt/the hypervisor will pick a
    default), even before the virt type is checked.

    :raises: exception.UnsupportedVirtType when an explicit model was
        requested for an unknown virtualization type.
    """
    # NIC models shared by every full-virt hypervisor type below.
    legacy_models = [network_model.VIF_MODEL_NE2K_PCI,
                     network_model.VIF_MODEL_PCNET,
                     network_model.VIF_MODEL_RTL8139,
                     network_model.VIF_MODEL_E1000]
    valid_models = {
        'qemu': [network_model.VIF_MODEL_VIRTIO,
                 network_model.VIF_MODEL_SPAPR_VLAN] + legacy_models,
        'kvm': [network_model.VIF_MODEL_VIRTIO,
                network_model.VIF_MODEL_SPAPR_VLAN] + legacy_models,
        'xen': [network_model.VIF_MODEL_NETFRONT] + legacy_models,
        'lxc': [],
        'uml': [],
    }
    if vif_model is None:
        return True
    try:
        models = valid_models[virt_type]
    except KeyError:
        raise exception.UnsupportedVirtType(virt=virt_type)
    return vif_model in models
class LibvirtGenericVIFDriver(object):
"""Generic VIF driver for libvirt networking."""
def _normalize_vif_type(self, vif_type):
return vif_type.replace('2.1q', '2q')
def get_vif_devname(self, vif):
if 'devname' in vif:
return vif['devname']
return ("nic" + vif['id'])[:network_model.NIC_NAME_LEN]
def get_vif_devname_with_prefix(self, vif, prefix):
devname = self.get_vif_devname(vif)
return prefix + devname[3:]
    def get_base_config(self, instance, vif, image_meta,
                        inst_type, virt_type):
        """Build the common LibvirtConfigGuestInterface for any VIF type.

        Chooses the guest NIC model from (in priority order): the image's
        ``hw_vif_model`` property, then virtio for KVM/QEMU when
        ``use_virtio_for_bridges`` is set, else leaves it to libvirt.
        Validates the chosen model against *virt_type* before applying it.
        """
        conf = vconfig.LibvirtConfigGuestInterface()
        # Default to letting libvirt / the hypervisor choose the model
        model = None
        driver = None
        # If the user has specified a 'vif_model' against the
        # image then honour that model
        if image_meta:
            vif_model = image_meta.get('properties',
                                       {}).get('hw_vif_model')
            if vif_model is not None:
                model = vif_model
            # NOTE(review): checksum offload is disabled unless the image
            # property hw_vif_disable_csum says otherwise -- the default
            # 'yes' means absence of the property disables csum. This looks
            # like a local (VIRL) customization; confirm it is intended.
            disable_csum = image_meta.get('properties',
                                          {}).get('hw_vif_disable_csum', 'yes')
            if disable_csum.lower() in ('yes', '1', 'true'):
                conf.disable_csum = True
        # Else if the virt type is KVM/QEMU, use virtio according
        # to the global config parameter
        if (model is None and
                virt_type in ('kvm', 'qemu') and
                CONF.libvirt.use_virtio_for_bridges):
            model = network_model.VIF_MODEL_VIRTIO
        # Workaround libvirt bug, where it mistakenly
        # enables vhost mode, even for non-KVM guests
        if (model == network_model.VIF_MODEL_VIRTIO and
                virt_type == "qemu"):
            driver = "qemu"
        if not is_vif_model_valid_for_virt(virt_type,
                                           model):
            raise exception.UnsupportedHardware(model=model,
                                                virt=virt_type)
        designer.set_vif_guest_frontend_config(
            conf, vif['address'], model, driver)
        return conf
def get_bridge_name(self, vif):
return vif['network']['bridge']
def get_ovs_interfaceid(self, vif):
return vif.get('ovs_interfaceid') or vif['id']
def get_br_name(self, iface_id):
return ("qbr" + iface_id)[:network_model.NIC_NAME_LEN]
def get_veth_pair_names(self, iface_id):
return (("qvb%s" % iface_id)[:network_model.NIC_NAME_LEN],
("qvo%s" % iface_id)[:network_model.NIC_NAME_LEN])
def get_firewall_required(self, vif):
if vif.is_neutron_filtering_enabled():
return False
if CONF.firewall_driver != "nova.virt.firewall.NoopFirewallDriver":
return True
return False
    def get_config_bridge(self, instance, vif, image_meta,
                          inst_type, virt_type):
        """Get VIF configurations for bridge type.

        Extends the base config with the Linux-bridge backend and, when
        Nova's firewall is in play, attaches a per-instance/per-MAC libvirt
        nwfilter name.
        """
        conf = self.get_base_config(instance, vif, image_meta,
                                    inst_type, virt_type)
        designer.set_vif_host_backend_bridge_config(
            conf, self.get_bridge_name(vif),
            self.get_vif_devname(vif))
        # nwfilter names embed the instance name and the colon-less MAC so
        # they stay unique per VIF.
        mac_id = vif['address'].replace(':', '')
        name = "nova-instance-" + instance.name + "-" + mac_id
        if self.get_firewall_required(vif):
            conf.filtername = name
        designer.set_vif_bandwidth_config(conf, inst_type)
        return conf
    def get_config_ovs_bridge(self, instance, vif, image_meta,
                              inst_type, virt_type):
        """Build the config for a VIF plugged straight into the OVS bridge
        (no intermediate Linux bridge / Nova firewall)."""
        conf = self.get_base_config(instance, vif, image_meta,
                                    inst_type, virt_type)
        designer.set_vif_host_backend_ovs_config(
            conf, self.get_bridge_name(vif),
            self.get_ovs_interfaceid(vif),
            self.get_vif_devname(vif))
        designer.set_vif_bandwidth_config(conf, inst_type)
        return conf
def get_config_ovs_hybrid(self, instance, vif, image_meta,
| inst_type, virt_type):
newvif = copy.deepcopy(vif)
newvif['network']['bridge'] = self.get_br_name(vif['id'])
return self.get_config_bridge(instance, newvif, image_meta,
inst_type, virt_type)
def get_config_ovs(self, instance, vif, image_meta,
inst_type, virt_type):
if self.get_firewall_required(vif) or vif.is_hybrid_plug_enabled():
return self.get_config_ovs_hybrid(instance, vif,
image_meta,
inst_type,
virt_type)
else:
return self.get_config_ovs_bridge(instance, vif,
image_meta,
inst_type,
virt_type)
def get_config_ivs_hybrid(self, instance, vif, image_meta,
inst_type, virt_type):
newvif = copy.deepcopy(vif)
newvif['network']['bridge'] = self.get_br_name(vif['id'])
return self.get_config_bridge(instance,
newvif,
image_meta,
inst_type,
virt_type)
def get_config_ivs_ |
alex-bauer/kelvin-power-challenge | src/viz/power_analysis.py | Python | mit | 708 | 0.012712 | """Visualizes top 5 power lines by variance"""
import sys
import matplotlib.pyplot as plt
from sklearn.preprocessing import MinMaxScaler
sys.path.append("../")
from utils.utils | import *
# Resampling bucket for all series; a falsy value disables resampling.
interval = '1D'
def d_resample(df):
    """Return *df* averaged into ``interval``-sized time buckets, or
    unchanged when resampling is disabled."""
    if not interval:
        return df
    return df.resample(interval).mean()
# Min-max scaling helper.  NOTE(review): defined but never used below.
scale=lambda x: MinMaxScaler().fit_transform(x)
# Daily-resampled power targets produced by the preprocessing pipeline.
target = d_resample(pd.read_pickle(config.data_folder + '/target.pkl'))
cols=config.target_cols
# Rank power lines by variability so the most dynamic five get plotted.
cols=sorted(cols, key=lambda col:target[col].std(), reverse=True)
fig, axs = plt.subplots(5, 1, sharex=True)
for ix,col in enumerate(cols[0:5]):
    axs[ix].plot(target.index, target[col].values, label=col)
    axs[ix].legend()
plt.show()
|
bacaldwell/ironic | ironic/tests/unit/drivers/modules/msftocs/test_power.py | Python | apache-2.0 | 7,843 | 0 | # Copyright 2015 Cloudbase Solutions Srl
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Test class for MSFT OCS PowerInterface
"""
import mock
from ironic.common import exception
from ironic.common import states
from ironic.conductor import task_manager
from ironic.drivers.modules.msftocs import common as msftocs_common
from ironic.drivers.modules.msftocs import msftocsclient
from ironic.tests.unit.conductor import mgr_utils
from ironic.tests.unit.db import base as db_base
from ironic.tests.unit.db import utils as db_utils
from ironic.tests.unit.objects import utils as obj_utils
# Canned msftocs driver_info fixture shared by every test case below.
INFO_DICT = db_utils.get_test_msftocs_info()
class MSFTOCSPowerTestCase(db_base.DbTestCase):
    """Unit tests for the MSFT OCS power interface via the fake_msftocs
    driver; the chassis-manager client is mocked throughout."""
    def setUp(self):
        """Create a test node backed by the fake msftocs driver."""
        super(MSFTOCSPowerTestCase, self).setUp()
        mgr_utils.mock_the_extension_manager(driver='fake_msftocs')
        self.info = INFO_DICT
        self.node = obj_utils.create_test_node(self.context,
                                               driver='fake_msftocs',
                                               driver_info=self.info)
    def test_get_properties(self):
        """The driver must expose msftocs' required properties."""
        expected = msftocs_common.REQUIRED_PROPERTIES
        with task_manager.acquire(self.context, self.node.uuid,
                                  shared=True) as task:
            self.assertEqual(expected, task.driver.get_properties())
    @mock.patch.object(msftocs_common, 'parse_driver_info', autospec=True)
    def test_validate(self, mock_drvinfo):
        """validate() delegates to parse_driver_info with the task's node."""
        with task_manager.acquire(self.context, self.node.uuid,
                                  shared=True) as task:
            task.driver.power.validate(task)
            mock_drvinfo.assert_called_once_with(task.node)
    @mock.patch.object(msftocs_common, 'parse_driver_info', autospec=True)
    def test_validate_fail(self, mock_drvinfo):
        """validate() propagates InvalidParameterValue from driver info parsing."""
        with task_manager.acquire(self.context, self.node.uuid,
                                  shared=True) as task:
            mock_drvinfo.side_effect = exception.InvalidParameterValue('x')
            self.assertRaises(exception.InvalidParameterValue,
                              task.driver.power.validate,
                              task)
    @mock.patch.object(msftocs_common, 'get_client_info', autospec=True)
    def test_get_power_state(self, mock_gci):
        """Blade POWER_STATUS_ON maps to ironic states.POWER_ON."""
        with task_manager.acquire(self.context, self.node.uuid,
                                  shared=True) as task:
            mock_c = mock.MagicMock(spec=msftocsclient.MSFTOCSClientApi)
            blade_id = task.node.driver_info['msftocs_blade_id']
            mock_gci.return_value = (mock_c, blade_id)
            mock_c.get_blade_state.return_value = msftocsclient.POWER_STATUS_ON
            self.assertEqual(states.POWER_ON,
                             task.driver.power.get_power_state(task))
            mock_gci.assert_called_once_with(task.node.driver_info)
            mock_c.get_blade_state.assert_called_once_with(blade_id)
    @mock.patch.object(msftocs_common, 'get_client_info', autospec=True)
    def test_set_power_state_on(self, mock_gci):
        """POWER_ON requests call set_blade_on for the node's blade."""
        with task_manager.acquire(self.context, self.node.uuid,
                                  shared=False) as task:
            mock_c = mock.MagicMock(spec=msftocsclient.MSFTOCSClientApi)
            blade_id = task.node.driver_info['msftocs_blade_id']
            mock_gci.return_value = (mock_c, blade_id)
            task.driver.power.set_power_state(task, states.POWER_ON)
            mock_gci.assert_called_once_with(task.node.driver_info)
            mock_c.set_blade_on.assert_called_once_with(blade_id)
    @mock.patch.object(msftocs_common, 'get_client_info', autospec=True)
    def test_set_power_state_off(self, mock_gci):
        """POWER_OFF requests call set_blade_off for the node's blade."""
        with task_manager.acquire(self.context, self.node.uuid,
                                  shared=False) as task:
            mock_c = mock.MagicMock(spec=msftocsclient.MSFTOCSClientApi)
            blade_id = task.node.driver_info['msftocs_blade_id']
            mock_gci.return_value = (mock_c, blade_id)
            task.driver.power.set_power_state(task, states.POWER_OFF)
            mock_gci.assert_called_once_with(task.node.driver_info)
            mock_c.set_blade_off.assert_called_once_with(blade_id)
    @mock.patch.object(msftocs_common, 'get_client_info', autospec=True)
    def test_set_power_state_blade_on_fail(self, mock_gci):
        """A client API failure surfaces as PowerStateFailure."""
        with task_manager.acquire(self.context, self.node.uuid,
                                  shared=False) as task:
            mock_c = mock.MagicMock(spec=msftocsclient.MSFTOCSClientApi)
            blade_id = task.node.driver_info['msftocs_blade_id']
            mock_gci.return_value = (mock_c, blade_id)
            ex = exception.MSFTOCSClientApiException('x')
            mock_c.set_blade_on.side_effect = ex
            pstate = states.POWER_ON
            self.assertRaises(exception.PowerStateFailure,
                              task.driver.power.set_power_state,
                              task, pstate)
            mock_gci.assert_called_once_with(task.node.driver_info)
            mock_c.set_blade_on.assert_called_once_with(blade_id)
    @mock.patch.object(msftocs_common, 'get_client_info', autospec=True)
    def test_set_power_state_invalid_parameter_fail(self, mock_gci):
        """Unsupported target states raise InvalidParameterValue."""
        with task_manager.acquire(self.context, self.node.uuid,
                                  shared=False) as task:
            mock_c = mock.MagicMock(spec=msftocsclient.MSFTOCSClientApi)
            blade_id = task.node.driver_info['msftocs_blade_id']
            mock_gci.return_value = (mock_c, blade_id)
            pstate = states.ERROR
            self.assertRaises(exception.InvalidParameterValue,
                              task.driver.power.set_power_state,
                              task, pstate)
            mock_gci.assert_called_once_with(task.node.driver_info)
    @mock.patch.object(msftocs_common, 'get_client_info', autospec=True)
    def test_reboot(self, mock_gci):
        """reboot() issues a blade power-cycle."""
        with task_manager.acquire(self.context, self.node.uuid,
                                  shared=False) as task:
            mock_c = mock.MagicMock(spec=msftocsclient.MSFTOCSClientApi)
            blade_id = task.node.driver_info['msftocs_blade_id']
            mock_gci.return_value = (mock_c, blade_id)
            task.driver.power.reboot(task)
            mock_gci.assert_called_once_with(task.node.driver_info)
            mock_c.set_blade_power_cycle.assert_called_once_with(blade_id)
    @mock.patch.object(msftocs_common, 'get_client_info', autospec=True)
    def test_reboot_fail(self, mock_gci):
        """A power-cycle failure surfaces as PowerStateFailure."""
        with task_manager.acquire(self.context, self.node.uuid,
                                  shared=False) as task:
            mock_c = mock.MagicMock(spec=msftocsclient.MSFTOCSClientApi)
            blade_id = task.node.driver_info['msftocs_blade_id']
            mock_gci.return_value = (mock_c, blade_id)
            ex = exception.MSFTOCSClientApiException('x')
            mock_c.set_blade_power_cycle.side_effect = ex
            self.assertRaises(exception.PowerStateFailure,
                              task.driver.power.reboot,
                              task)
            mock_gci.assert_called_once_with(task.node.driver_info)
            mock_c.set_blade_power_cycle.assert_called_once_with(blade_id)
|
akruis/pyheapdump | pyheapdump/__main__.py | Python | apache-2.0 | 3,105 | 0.004509 | #
# -*- coding: utf-8 -*-
#
# Copyright (c) 2016 by Anselm Kruis
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
=====================
Pyheapdump.__main__
=====================
Debug heap dumps.
.. warning::
This is alpha quality code.
.. autofunction:: main
"""
from __future__ import absolute_import, print_function, unicode_literals, division
import argparse
import sys
import os
from pyheapdump import debug_dump
def main(argv=None):
    """Debug a Python heap dump file.

    You can invoke this function using the following command::

        python -m pyheapdump [OPTIONS] pyheapdump

    Use the option '-h' to get help::

        python -m pyheapdump -h

    :param argv: command line arguments; defaults to ``sys.argv[1:]``.
    :returns: whatever :func:`pyheapdump.debug_dump` returns.
    """
    if argv is None:
        argv = sys.argv[1:]
    parser = argparse.ArgumentParser(description='debug a Python heap dump', prog=os.path.basename(sys.executable) + " -m pyheapdump")
    parser.add_argument('--debugger', '-d', choices=['auto', 'pdb', 'pydevd'], default="auto", help="select the debugger, default is 'auto'")
    parser.add_argument('--debugger-dir', help='pydevd only: path to the Python files of PyDev, usually <ECLIPSE_INSTALATION_DIR>/plugins/org.python.pydev_<VERSION>/pysrc/')
    parser.add_argument('--host', help='pydevd only: the user may specify another host, if the debug server is not in the same machine')
    parser.add_argument('--port', type=int, default=5678, help='pydevd only: specifies which port to use for communicating with the server. Default is port 5678')
    parser.add_argument('--stdout', choices=['server', 'console'], default='server', help='pydevd only: pass the stdout to the debug server so that it is printed in its console or to this process console')
    parser.add_argument('--stderr', choices=['server', 'console'], default='server', help='pydevd only: pass the stderr to the debug server so that it is printed in its console or to this process console')
    parser.add_argument('--debug-pyheapdump', action='store_true', help=argparse.SUPPRESS)
    parser.add_argument('dumpfile', type=argparse.FileType(mode='rb'), help="the heap dump file")
    namespace = parser.parse_args(argv)
    if namespace.debug_pyheapdump:
        # It is better to use remote debugging, because of the debugger specific code later on
        # Guard: --debugger-dir may be unset; appending None to sys.path
        # would break every subsequent import on Python 3.
        if namespace.debugger_dir:
            sys.path.append(namespace.debugger_dir)
        import pydevd  # @UnresolvedImport
        pydevd.settrace(stdoutToServer=True, stderrToServer=True, suspend=True, trace_only_current_thread=True)
    return debug_dump(dumpfile=namespace.dumpfile, debugger_options=vars(namespace))
# Entry point for ``python -m pyheapdump``; propagate main()'s result as
# the process exit status.
if __name__ == '__main__':
    sys.exit(main())
|
openstack/python-ironicclient | ironicclient/tests/unit/common/apiclient/test_exceptions.py | Python | apache-2.0 | 5,355 | 0 | # Copyright 2012 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from http import client as http_client
from oslotest import base as test_base
from ironicclient.common.apiclient import exceptions
class FakeResponse(object):
    """Minimal stand-in for an HTTP response object.

    Arbitrary attributes (``status_code``, ``headers``, ``text``, ...) are
    taken from keyword arguments; ``json()`` returns ``json_data``, which
    defaults to an empty dict at class level.
    """

    # Default body returned by json() when no json_data kwarg is supplied.
    json_data = {}

    def __init__(self, **kwargs):
        # Equivalent to setattr() per keyword for plain instance attributes.
        self.__dict__.update(kwargs)

    def json(self):
        return self.json_data
class ExceptionsArgsTest(test_base.BaseTestCase):
    """Tests for ``exceptions.from_response`` HTTP-error-to-exception mapping."""

    def assert_exception(self, ex_cls, method, url, status_code, json_data,
                         error_msg=None, error_details=None,
                         check_description=True):
        """Build an exception via from_response() and verify its attributes.

        When *check_description* is True the message/details are compared
        against *error_msg*/*error_details*, falling back to the standard
        OpenStack ``{"error": {"message": ..., "details": ...}}`` layout.
        """
        ex = exceptions.from_response(
            FakeResponse(status_code=status_code,
                         headers={"Content-Type": "application/json"},
                         json_data=json_data),
            method,
            url)
        self.assertIsInstance(ex, ex_cls)
        if check_description:
            expected_msg = error_msg or json_data["error"]["message"]
            expected_details = error_details or json_data["error"]["details"]
            self.assertEqual(expected_msg, ex.message)
            self.assertEqual(expected_details, ex.details)
        self.assertEqual(method, ex.method)
        self.assertEqual(url, ex.url)
        self.assertEqual(status_code, ex.http_status)

    def test_from_response_known(self):
        """A known status code maps to its dedicated exception class."""
        method = "GET"
        url = "/fake"
        status_code = http_client.BAD_REQUEST
        json_data = {"error": {"message": "fake message",
                               "details": "fake details"}}
        self.assert_exception(
            exceptions.BadRequest, method, url, status_code, json_data)

    def test_from_response_unknown(self):
        """Unknown status codes fall back to the generic error classes."""
        method = "POST"
        url = "/fake-unknown"
        status_code = 499
        json_data = {"error": {"message": "fake unknown message",
                               "details": "fake unknown details"}}
        self.assert_exception(
            exceptions.HTTPClientError, method, url, status_code, json_data)
        # Codes outside any known range degrade to the HttpError base class.
        status_code = 600
        self.assert_exception(
            exceptions.HttpError, method, url, status_code, json_data)

    def test_from_response_non_openstack(self):
        """Bodies without the standard "error" key still map by status code."""
        method = "POST"
        url = "/fake-unknown"
        status_code = http_client.BAD_REQUEST
        json_data = {"alien": 123}
        self.assert_exception(
            exceptions.BadRequest, method, url, status_code, json_data,
            check_description=False)

    def test_from_response_with_different_response_format(self):
        """Alternate (WSME- and Compute-style) error bodies are understood."""
        method = "GET"
        url = "/fake-wsme"
        status_code = http_client.BAD_REQUEST
        json_data1 = {"error_message": {"debuginfo": None,
                                        "faultcode": "Client",
                                        "faultstring": "fake message"}}
        message = str(
            json_data1["error_message"]["faultstring"])
        details = str(json_data1)
        self.assert_exception(
            exceptions.BadRequest, method, url, status_code, json_data1,
            message, details)
        json_data2 = {"badRequest": {"message": "fake message",
                                     "code": http_client.BAD_REQUEST}}
        message = str(json_data2["badRequest"]["message"])
        details = str(json_data2)
        self.assert_exception(
            exceptions.BadRequest, method, url, status_code, json_data2,
            message, details)

    def test_from_response_with_text_response_format(self):
        """text/html bodies are used verbatim as the exception details."""
        method = "GET"
        url = "/fake-wsme"
        status_code = http_client.BAD_REQUEST
        text_data1 = "error_message: fake message"
        ex = exceptions.from_response(
            FakeResponse(status_code=status_code,
                         headers={"Content-Type": "text/html"},
                         text=text_data1),
            method,
            url)
        self.assertIsInstance(ex, exceptions.BadRequest)
        self.assertEqual(text_data1, ex.details)
        self.assertEqual(method, ex.method)
        self.assertEqual(url, ex.url)
        self.assertEqual(status_code, ex.http_status)

    def test_from_response_with_text_response_format_with_no_body(self):
        """A missing body yields empty details rather than an error."""
        method = "GET"
        url = "/fake-wsme"
        status_code = http_client.UNAUTHORIZED
        ex = exceptions.from_response(
            FakeResponse(status_code=status_code,
                         headers={"Content-Type": "text/html"}),
            method,
            url)
        self.assertIsInstance(ex, exceptions.Unauthorized)
        self.assertEqual('', ex.details)
        self.assertEqual(method, ex.method)
        self.assertEqual(url, ex.url)
        self.assertEqual(status_code, ex.http_status)
|
google-research/google-research | symbolic_functionals/syfes/xc/gga_test.py | Python | apache-2.0 | 6,540 | 0.003058 | # coding=utf-8
# Copyright 2022 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for xc.gga."""
import tempfile
from absl import flags
from absl.testing import absltest
from absl.testing import parameterized
import jax
import numpy as np
from pyscf.dft import libxc
import pyscf.gto
from pyscf.lib import parameters
from symbolic_functionals.syfes.xc import gga
from symbolic_functionals.syfes.xc import utils
jax.config.update('jax_enable_x64', True)
class XCGGATest(parameterized.TestCase):
  """Checks GGA exchange-correlation functionals against libxc references."""

  def setUp(self):
    """Runs a small PBE/def2-SVPD calculation on water to get real densities."""
    super().setUp()
    parameters.TMPDIR = tempfile.mkdtemp(dir=flags.FLAGS.test_tmpdir)
    mol = pyscf.gto.M(
        atom='''O    0.   0.       0.
            H    0.   -0.757   0.587
            H    0.   0.757    0.587
            ''',
        basis='def2svpd',
        verbose=1)
    ks = pyscf.dft.RKS(mol)
    ks.xc = 'pbe,pbe'
    ks.kernel()
    ao = ks._numint.eval_ao(ks.mol, coords=ks.grids.coords, deriv=1)
    self.rho_and_derivs = ks._numint.eval_rho2(
        ks.mol, ao, mo_coeff=ks.mo_coeff, mo_occ=ks.mo_occ, xctype='GGA')
    self.rho, rhogradx, rhogrady, rhogradz = self.rho_and_derivs
    self.sigma = rhogradx ** 2 + rhogrady ** 2 + rhogradz ** 2
    # construct a spin polarized density to test spin polarized case
    zeta = 0.2
    self.rho_and_derivs_a = 0.5 * (1 + zeta) * self.rho_and_derivs
    self.rho_and_derivs_b = 0.5 * (1 - zeta) * self.rho_and_derivs
    self.rhoa, self.rhob = self.rho_and_derivs_a[0], self.rho_and_derivs_b[0]
    self.sigma_aa = (0.5 * (1 + zeta)) ** 2 * self.sigma
    self.sigma_ab = ((0.5 * (1 + zeta)) * (0.5 * (1 - zeta))) * self.sigma
    self.sigma_bb = (0.5 * (1 - zeta)) ** 2 * self.sigma

  @parameterized.parameters(
      ('gga_x_pbe', gga.e_x_pbe_unpolarized),
      ('gga_x_rpbe', gga.e_x_rpbe_unpolarized),
      ('gga_x_b88', gga.e_x_b88_unpolarized),
      ('gga_c_pbe', gga.e_c_pbe_unpolarized),
      ('hyb_gga_xc_b97',
       utils.function_sum(gga.e_x_b97_unpolarized, gga.e_c_b97_unpolarized)),
  )
  def test_gga_xc_unpolarized_against_libxc(self, xc_name, xc_fun):
    """Energy density and first derivatives match libxc (spin-unpolarized)."""
    eps_xc_ref, (vrho_ref, vsigma_ref, _, _), _, _ = libxc.eval_xc(
        xc_name, self.rho, spin=0, relativity=0, deriv=1)
    e_xc_ref = eps_xc_ref * self.rho
    # Derivatives are obtained by autodiff of the functional itself.
    e_xc, (vrho, vsigma) = jax.vmap(jax.value_and_grad(xc_fun, argnums=(0, 1)))(
        self.rho, self.sigma)
    np.testing.assert_allclose(e_xc, e_xc_ref, atol=1e-12)
    np.testing.assert_allclose(vrho, vrho_ref, atol=1e-6)
    np.testing.assert_allclose(vsigma, vsigma_ref, atol=1e-6)

  @parameterized.parameters(
      ('gga_x_pbe', gga.e_x_pbe_polarized),
      ('gga_x_rpbe', gga.e_x_rpbe_polarized),
      ('gga_x_b88', gga.e_x_b88_polarized),
      ('gga_c_pbe', gga.e_c_pbe_polarized),
      ('hyb_gga_xc_b97',
       utils.function_sum(gga.e_x_b97_polarized, gga.e_c_b97_polarized)),
  )
  def test_gga_xc_polarized_against_libxc(self, xc_name, xc_fun):
    """Energy density and first derivatives match libxc (spin-polarized)."""
    eps_xc_ref, (vrho_ref, vsigma_ref, _, _), _, _ = libxc.eval_xc(
        xc_name,
        (self.rho_and_derivs_a, self.rho_and_derivs_b),
        spin=1,
        relativity=0,
        deriv=1)
    e_xc_ref = eps_xc_ref * self.rho
    vrhoa_ref, vrhob_ref = vrho_ref[:, 0], vrho_ref[:, 1]
    vsigma_aa_ref, vsigma_ab_ref, vsigma_bb_ref = (
        vsigma_ref[:, 0], vsigma_ref[:, 1], vsigma_ref[:, 2])
    e_xc, grads = jax.vmap(jax.value_and_grad(xc_fun, argnums=(0, 1, 2, 3, 4)))(
        self.rhoa, self.rhob, self.sigma_aa, self.sigma_ab, self.sigma_bb)
    vrhoa, vrhob, vsigma_aa, vsigma_ab, vsigma_bb = grads
    np.testing.assert_allclose(e_xc, e_xc_ref, atol=1e-12)
    np.testing.assert_allclose(vrhoa, vrhoa_ref, atol=1e-6)
    np.testing.assert_allclose(vrhob, vrhob_ref, atol=1e-6)
    np.testing.assert_allclose(vsigma_aa, vsigma_aa_ref, atol=5e-5)
    np.testing.assert_allclose(vsigma_ab, vsigma_ab_ref, atol=5e-5)
    np.testing.assert_allclose(vsigma_bb, vsigma_bb_ref, atol=5e-5)

  # NOTE(htm): For wB97X-V, there is max absolute (relative) difference on
  # the order of .005 (0.01) between vsigma evaluated here and by libxc.
  # Since e_xc and vrho match well, and the comparison to B97 is good,
  # it is suspected that the error comes from libxc.
  @parameterized.parameters(
      ('hyb_gga_xc_wb97x_v', gga.e_xc_wb97xv_unpolarized)
  )
  def test_wb97xv_unpolarized_against_libxc(self, xc_name, xc_fun):
    """wB97X-V matches libxc (looser vsigma tolerance, see NOTE above)."""
    eps_xc_ref, (vrho_ref, vsigma_ref, _, _), _, _ = libxc.eval_xc(
        xc_name, self.rho, spin=0, relativity=0, deriv=1)
    e_xc_ref = eps_xc_ref * self.rho
    e_xc, (vrho, vsigma) = jax.vmap(jax.value_and_grad(xc_fun, argnums=(0, 1)))(
        self.rho, self.sigma)
    np.testing.assert_allclose(e_xc, e_xc_ref, atol=1e-12)
    np.testing.assert_allclose(vrho, vrho_ref, atol=1e-6)
    np.testing.assert_allclose(vsigma, vsigma_ref, atol=1e-2)

  @parameterized.parameters(
      ('hyb_gga_xc_wb97x_v', gga.e_xc_wb97xv_polarized)
  )
  def test_wb97xv_polarized_against_libxc(self, xc_name, xc_fun):
    """Spin-polarized wB97X-V matches libxc (looser vsigma tolerance)."""
    eps_xc_ref, (vrho_ref, vsigma_ref, _, _), _, _ = libxc.eval_xc(
        xc_name,
        (self.rho_and_derivs_a, self.rho_and_derivs_b),
        spin=1,
        relativity=0,
        deriv=1)
    e_xc_ref = eps_xc_ref * self.rho
    vrhoa_ref, vrhob_ref = vrho_ref[:, 0], vrho_ref[:, 1]
    vsigma_aa_ref, vsigma_ab_ref, vsigma_bb_ref = (
        vsigma_ref[:, 0], vsigma_ref[:, 1], vsigma_ref[:, 2])
    e_xc, grads = jax.vmap(jax.value_and_grad(xc_fun, argnums=(0, 1, 2, 3, 4)))(
        self.rhoa, self.rhob, self.sigma_aa, self.sigma_ab, self.sigma_bb)
    vrhoa, vrhob, vsigma_aa, vsigma_ab, vsigma_bb = grads
    np.testing.assert_allclose(e_xc, e_xc_ref, atol=1e-12)
    np.testing.assert_allclose(vrhoa, vrhoa_ref, atol=1e-6)
    np.testing.assert_allclose(vrhob, vrhob_ref, atol=1e-6)
    np.testing.assert_allclose(vsigma_aa, vsigma_aa_ref, atol=1.5e-2)
    np.testing.assert_allclose(vsigma_ab, vsigma_ab_ref, atol=1.5e-2)
    np.testing.assert_allclose(vsigma_bb, vsigma_bb_ref, atol=1.5e-2)
# Standard absltest entry point.
if __name__ == '__main__':
  absltest.main()
|
shiquanwang/numba | deps/pyextensibletype/setup.py | Python | bsd-2-clause | 975 | 0.005128 | import os
from fnmatch import fnmatchcase
from distutils.util import conve | rt_path
from distutils.core import setup
from Cython.Distutils import build_ext
from setupconfig import get_extensions
def find_packages(where='.', exclude=()):
    """Return dotted names of all Python packages found under *where*.

    A directory counts as a package when its name contains no dot and it
    contains an ``__init__.py``.  *exclude* is an iterable of ``fnmatch``
    patterns; ``ez_setup`` and ``distribute_setup`` are always excluded.
    """
    # distutils was removed in Python 3.12; fall back to an equivalent
    # POSIX-path-to-native conversion when it is unavailable.
    try:
        from distutils.util import convert_path
    except ImportError:
        def convert_path(pathname):
            if os.sep == '/' or not pathname:
                return pathname
            return os.path.join(*pathname.split('/'))
    found = []
    # Breadth-first walk: (directory, dotted-prefix) pairs still to visit.
    pending = [(convert_path(where), '')]
    while pending:
        directory, prefix = pending.pop(0)
        for name in os.listdir(directory):
            path = os.path.join(directory, name)
            if ('.' not in name and os.path.isdir(path) and
                    os.path.isfile(os.path.join(path, '__init__.py'))):
                found.append(prefix + name)
                pending.append((path, prefix + name + '.'))
    for pattern in list(exclude) + ['ez_setup', 'distribute_setup']:
        found = [item for item in found if not fnmatchcase(item, pattern)]
    return found
# Build Cython extension modules relative to this setup.py's directory and
# register every package discovered in the source tree.
root = os.path.dirname(os.path.abspath(__file__))
setup(cmdclass={'build_ext': build_ext},
      ext_modules=get_extensions(path_prefix=root),
      packages=find_packages())
|
stvstnfrd/edx-platform | common/lib/xmodule/xmodule/modulestore/tests/test_mixed_modulestore.py | Python | agpl-3.0 | 178,445 | 0.003301 | """
Unit tests for the Mixed Modulestore, with DDT for the various stores (Split, Draft, XML)
"""
import datetime
import itertools
import logging
import mimetypes
from collections import namedtuple
from contextlib import contextmanager
from shutil import rmtree
from tempfile import mkdtemp
from uuid import uuid4
import ddt
import pymongo
import pytest
import six
# Mixed modulestore depends on django, so we'll manually configure some django settings
# before importing the module
# TODO remove this import and the configuration -- xmodule should not depend on django!
from django.conf import settings
from mock import Mock, call, patch
from opaque_keys.edx.keys import CourseKey
from opaque_keys.edx.locator import BlockUsageLocator, CourseLocator, LibraryLocator
from pytz import UTC
from six.moves import range
from web_fragments.fragment import Fragment
from xblock.core import XBlockAside
from xblock.fields import Scope, ScopeIds, String
from xblock.runtime import DictKeyValueStore, KvsFieldData
from xblock.test.tools import TestRuntime
from openedx.core.lib.tests import attr
from xmodule.contentstore.content import StaticContent
from xmodule.exceptions import InvalidVersionError
from xmodule.modulestore import ModuleStoreEnum
from xmodule.modulestore.draft_and_published import DIRECT_ONLY_CATEGORIES, UnsupportedRevisionError
from xmodule.modulestore.edit_info import EditInfoMixin
from xmodule.modulestore.exceptions import (
DuplicateCourseError,
ItemNotFoundError,
NoPathToItem,
ReferentialIntegrityError
)
from xmodule.modulestore.inheritance import InheritanceMixin
from xmodule.modulestore.mixed import MixedModuleStore
from xmodule.modulestore.search import navigation_index, path_to_location
from xmodule.modulestore.split_mongo.split import SplitMongoModuleStore
from xmodule.modulestore.store_utilities import DETACHED_XBLOCK_TYPES
from xmodule.modulestore.tests.factories import check_exact_number_of_calls, check_mongo_calls, mongo_uses_error_check
from xmodule.modulestore.tests.mongo_connection import MONGO_HOST, MONGO_PORT_NUM
from xmodule.modulestore.tests.test_asides import AsideTestType
from xmodule.modulestore.tests.utils import (
LocationMixin,
MongoContentstoreBuilder,
create_modulestore_instance,
mock_tab_from_json
)
from xmodule.modulestore.xml_exporter import export_course_to_xml
from xmodule.modulestore.xml_importer import import_course_from_xml
from xmodule.tests import DATA_DIR, CourseComparisonTest
from xmodule.x_module import XModuleMixin
if not settings.configured:
settings.configure()
log = logging.getLogger(__name__)
class CommonMixedModuleStoreSetup(CourseComparisonTest):
"""
Quasi-superclass which tests Location based apps against both split and mongo dbs (Locator and
Location-based dbs)
"""
HOST = MONGO_HOST
PORT = MONGO_PORT_NUM
DB = 'test_mongo_%s' % uuid4().hex[:5]
COLLECTION = 'modulestore'
ASSET_COLLECTION = 'assetstore'
FS_ROOT = DATA_DIR
DEFAULT_CLASS = 'xmodule.raw_module.RawDescriptor'
RENDER_TEMPLATE = lambda t_n, d, ctx=None, nsp='main': ''
MONGO_COURSEID = 'MITx/999/2013_Spring'
modulestore_options = {
'default_class': DEFAULT_CLASS,
'fs_root': DATA_DIR,
'render_template': RENDER_TEMPLATE,
'xblock_mixins': (EditInfoMixin, InheritanceMixin, LocationMixin, XModuleMixin),
}
DOC_STORE_CONFIG = {
'host': HOST,
'port': PORT,
'db': DB,
'collection': COLLECTION,
'asset_collection': ASSET_COLLECTION,
}
OPTIONS = {
'stores': [
{
'NAME': ModuleStoreEnum.Type.mongo,
'ENGINE': 'xmodule.modulestore.mongo.draft.DraftModuleStore',
'DOC_STORE_CONFIG': DOC_STORE_CONFIG,
'OPTIONS': modulestore_options
},
{
'NAME': ModuleStoreEnum.Type.split,
'ENGINE': 'xmodule.modulestore.split_mongo.split_draft.DraftVersioningModuleStore',
'DOC_STORE_CONFIG': DOC_STORE_CONFIG,
'OPTIONS': modulestore_options
},
],
'xblock_mixins': modulestore_options['xblock_mixins'],
}
def _comp | are_ignore_version(self, loc1, loc2, msg=None):
"""
AssertEqual replacement for CourseLocator
"""
if loc1.for_branch(None) != loc2.for_branch(None):
self.fail(self._formatMessage(msg, u"{} != {}".format(six.text_type(loc1), six.text_type(loc2))))
def setUp(self):
"""
Set up the database for testing
"""
super(CommonMixedModuleStoreSetup, self).setUp() # lint-amnesty, pylint: disable=super-with-arguments
self.exclude | _field(None, 'wiki_slug')
self.exclude_field(None, 'xml_attributes')
self.exclude_field(None, 'parent')
self.ignore_asset_key('_id')
self.ignore_asset_key('uploadDate')
self.ignore_asset_key('content_son')
self.ignore_asset_key('thumbnail_location')
self.options = getattr(self, 'options', self.OPTIONS)
self.connection = pymongo.MongoClient(
host=self.HOST,
port=self.PORT,
tz_aware=True,
)
self.connection.drop_database(self.DB)
self.addCleanup(self.connection.drop_database, self.DB)
self.addCleanup(self.connection.close)
self.addTypeEqualityFunc(BlockUsageLocator, '_compare_ignore_version')
self.addTypeEqualityFunc(CourseLocator, '_compare_ignore_version')
# define attrs which get set in initdb to quell pylint
self.writable_chapter_location = self.store = self.fake_location = None
self.course_locations = {}
self.user_id = ModuleStoreEnum.UserID.test
    def _create_course(self, course_key, asides=None):
        """
        Create a course w/ one item in the persistence store using the given course & item location.

        Sets self.course and self.writable_chapter_location as side effects.
        """
        # create course
        with self.store.bulk_operations(course_key):
            self.course = self.store.create_course(course_key.org, course_key.course, course_key.run, self.user_id)  # lint-amnesty, pylint: disable=attribute-defined-outside-init
            if isinstance(self.course.id, CourseLocator):
                # Locator-style id: presumably recorded under the old-style
                # course id so tests can look the location up -- TODO confirm.
                self.course_locations[self.MONGO_COURSEID] = self.course.location
            else:
                assert self.course.id == course_key
            # create chapter
            chapter = self.store.create_child(self.user_id, self.course.location, 'chapter',
                                              block_id='Overview', asides=asides)
            self.writable_chapter_location = chapter.location
def _create_block_hierarchy(self):
"""
Creates a hierarchy of blocks for testing
Each block's (version_agnostic) location is assigned as a field of the class and can be easily accessed
"""
BlockInfo = namedtuple('BlockInfo', 'field_name, category, display_name, sub_tree')
trees = [
BlockInfo(
'chapter_x', 'chapter', 'Chapter_x', [
BlockInfo(
'sequential_x1', 'sequential', 'Sequential_x1', [
BlockInfo(
'vertical_x1a', 'vertical', 'Vertical_x1a', [
BlockInfo('problem_x1a_1', 'problem', 'Problem_x1a_1', []),
BlockInfo('problem_x1a_2', 'problem', 'Problem_x1a_2', []),
BlockInfo('problem_x1a_3', 'problem', 'Problem_x1a_3', []),
BlockInfo('html_x1a_1', 'html', 'HTML_x1a_1', []),
]
),
BlockInfo(
'vertical_x1b', 'vertical', 'Vertical_x1b', []
)
]
),
BlockInfo(
'sequential_x2', 'sequential', 'Sequential_x2', []
)
]
),
BlockInfo(
|
nicholasmhughes/random-scripts | zeromq_examples/router_to_req.py | Python | gpl-3.0 | 1,519 | 0.002633 |
# encoding: utf-8
#
# Custom routing Router to Mama (ROUTER to REQ)
#
# Author: Jeremy Avnet (brainsik) <spork(dash)zmq(at)theory(dot)org>
#
import time
import random
from threading import Thread
import zmq
import zhelpers
NBR_WORKERS = 10
def worker_thread(context=None):
    """REQ worker: announce readiness, then process workloads until b"END"."""
    context = context or zmq.Context.instance()
    worker = context.socket(zmq.REQ)
    # We use a string identity for ease here
    zhelpers.set_id(worker)
    worker.connect("tcp://localhost:5671")
    total = 0
    while True:
        # Tell the router we're ready for work
        worker.send(b"ready")
        # Get workload from router, until finished
        workload = worker.recv()
        finished = workload == b"END"
        if finished:
            print("Processed: %d tasks" % total)
            break
        total += 1
        # Do some random work
        time.sleep(0.1 * random.random())
# The ROUTER socket hands out work to whichever REQ worker announced itself.
context = zmq.Context.instance()
client = context.socket(zmq.ROUTER)
client.bind("tcp://*:5671")
# Launch the worker pool; each worker connects back to this router.
for _ in range(NBR_WORKERS):
    Thread(target=worker_thread).start()
# Distribute NBR_WORKERS * 10 workloads.
for _ in range(NBR_WORKERS * 10):
    # LRU worker is next waiting in the queue
    address, empty, ready = client.recv_multipart()
    client.send_multipart([
        address,
        b'',
        b'This is the workload',
    ])
# Now ask mama to shut down and report their results
for _ in range(NBR_WORKERS):
    address, empty, ready = client.recv_multipart()
    client.send_multipart([
        address,
        b'',
        b'END',
    ])
|
akaraspt/deepsleepnet | tensorlayer/nlp.py | Python | apache-2.0 | 32,754 | 0.004396 | #! /usr/bin/python
# -*- coding: utf8 -*-
import tensorflow as tf
import os
from sys import platform as _platform
import collections
import random
import numpy as np
import warnings
from six.moves import xrange
from tensorflow.python.platform import gfile
import re
## Iteration functions
def generate_skip_gram_batch(data, batch_size, num_skips, skip_window, data_index=0):
    """Generate a training batch for the Skip-Gram model.
    Parameters
    ----------
    data : a list
        To present context.
    batch_size : an int
        Batch size to return.
    num_skips : an int
        How many times to reuse an input to generate a label.
    skip_window : an int
        How many words to consider left and right.
    data_index : an int
        Index of the context location.
        without using yield, this code use data_index to instead.
    Returns
    --------
    batch : a list
        Inputs
    labels : a list
        Labels
    data_index : an int
        Index of the context location.
    Examples
    --------
    >>> Setting num_skips=2, skip_window=1, use the right and left words.
    >>> In the same way, num_skips=4, skip_window=2 means use the nearby 4 words.
    >>> data = [1,2,3,4,5,6,7,8,9,10,11]
    >>> batch, labels, data_index = tl.nlp.generate_skip_gram_batch(data=data, batch_size=8, num_skips=2, skip_window=1, data_index=0)
    >>> print(batch)
    ... [2 2 3 3 4 4 5 5]
    References
    -----------
    - `TensorFlow word2vec tutorial <https://www.tensorflow.org/versions/r0.9/tutorials/word2vec/index.html#vector-representations-of-words>`_
    """
    # note: without using yield, this code uses (and returns) data_index
    # so the caller can resume the scan where the previous batch stopped.
    assert batch_size % num_skips == 0
    assert num_skips <= 2 * skip_window
    batch = np.ndarray(shape=(batch_size), dtype=np.int32)
    labels = np.ndarray(shape=(batch_size, 1), dtype=np.int32)
    span = 2 * skip_window + 1  # [ skip_window target skip_window ]
    # Sliding window over the data; deque drops the oldest entry on append.
    buffer = collections.deque(maxlen=span)
    for _ in range(span):
        buffer.append(data[data_index])
        data_index = (data_index + 1) % len(data)
    for i in range(batch_size // num_skips):
        target = skip_window  # target label at the center of the buffer
        targets_to_avoid = [skip_window]
        for j in range(num_skips):
            # Pick num_skips distinct context positions around the center.
            while target in targets_to_avoid:
                target = random.randint(0, span - 1)
            targets_to_avoid.append(target)
            batch[i * num_skips + j] = buffer[skip_window]
            labels[i * num_skips + j, 0] = buffer[target]
        buffer.append(data[data_index])
        data_index = (data_index + 1) % len(data)
    return batch, labels, data_index
## Sampling functions
def sample(a=[], temperature=1.0):
    """Sample an index from a probability array.
    Parameters
    ----------
    a : a list
        List of probabilities.
    temperature : float or None
        The higher the more uniform.\n
        When a = [0.1, 0.2, 0.7],\n
        temperature = 0.7, the distribution will be sharpen [ 0.05048273  0.13588945  0.81362782]\n
        temperature = 1.0, the distribution will be the same [0.1    0.2    0.7]\n
        temperature = 1.5, the distribution will be filtered [ 0.16008435  0.25411807  0.58579758]\n
        If None, it will be ``np.argmax(a)``
    Notes
    ------
    No matter what is the temperature and input list, the sum of all probabilities will be one.
    Even if input list = [1, 100, 200], the sum of all probabilities will still be one.
    For large vocabulary_size, choice a higher temperature to avoid error.
    """
    # Keep an untouched copy for the fallback path below.
    b = np.copy(a)
    try:
        if temperature == 1:
            return np.argmax(np.random.multinomial(1, a, 1))
        if temperature is None:
            return np.argmax(a)
        else:
            # Temperature scaling in log space, then renormalize.
            a = np.log(a) / temperature
            a = np.exp(a) / np.sum(np.exp(a))
            return np.argmax(np.random.multinomial(1, a, 1))
    except Exception:
        # multinomial can fail when the rescaled probabilities contain NaNs
        # (large vocabularies); warn and sample from the original array.
        # Narrowed from a bare ``except:`` so Ctrl-C is not swallowed.
        message = "For large vocabulary_size, choice a higher temperature\
        to avoid log error. Hint : use ``sample_top``. "
        warnings.warn(message, Warning)
        return np.argmax(np.random.multinomial(1, b, 1))
def sample_top(a=[], top_k=10):
    """Sample an index from the ``top_k`` largest probabilities.
    Parameters
    ----------
    a : a list
        List of probabilities.
    top_k : int
        Number of candidates to be considered.
    """
    # Indices of the top_k largest entries (order among them is arbitrary).
    top_idx = np.argpartition(a, -top_k)[-top_k:]
    # Renormalize the selected probabilities so they sum to one.
    top_probs = a[top_idx] / np.sum(a[top_idx])
    return np.random.choice(top_idx, p=top_probs)
## Vector representations of words (Advanced) UNDOCUMENT
class SimpleVocabulary(object):
    """Simple vocabulary wrapper, see create_vocab().

    Parameters
    ------------
    vocab : A dictionary of word to word_id.
    unk_id : Id of the special 'unknown' word.
    """

    def __init__(self, vocab, unk_id):
        """Initializes the vocabulary."""
        self._vocab = vocab
        self._unk_id = unk_id

    def word_to_id(self, word):
        """Returns the integer id of a word string."""
        # Unknown words map to the reserved unk id.
        return self._vocab.get(word, self._unk_id)
class Vocabulary(object):
"""Create Vocabulary class from a given vocabulary and its id-word, word-id convert,
see create_vocab() and ``tutorial_tfrecord3.py``.
Parameters
-----------
vocab_file : File containing the vocabulary, where the words are the first
whitespace-separated token on each line (other tokens are ignored) and
the word ids are the corresponding line numbers.
start_word : Special word denoting sentence start.
end_word : Special word denoting sentence end.
unk_word : Special word denoting unknown words.
Properties
------------
vocab : a dictionary from word to id.
reverse_vocab : a list from id to word.
start_id : int of start id
end_id : int of end id
unk_id : int of unk id
pad_id : int of padding id
Vocab_files
-------------
>>> Look as follow, includes `start_word` , `end_word` but no `unk_word` .
>>> a 969108
>>> <S> 586368
>>> </S> 586368
>>> . 440479
>>> on 213612
>>> of 202290
>>> the 196219
>>> in 182598
>>> with 152984
>>> and 139109
>>> is 97322
"""
def __init__(self,
vocab_file,
start_word="<S>",
end_word="</S>",
unk_word="<UNK>",
pad_word="<PAD>"):
if not tf.gfile.Exists(vocab_file):
tf.logging.fatal("Vocab file %s not found.", vocab_file)
tf.logging.info("Initializing vocabulary from file: %s", vocab_file)
with tf.gfile.GFile(vocab_file, mode="r") as f:
reverse_vocab = list(f.readlines())
reverse_vocab = [line.split()[0] for line in reverse_vocab]
assert start_word in reverse_vocab
assert end_word in reverse_vocab
if unk_word not in reverse_vocab:
reverse_vocab.append(unk_word)
vocab = dict([(x, y) for (y, x) in enumerate(reverse_vocab)])
print((" [TL] Vocabulary from %s : %s %s %s" % (vocab_file, start_word, end_word, unk_word)))
print((" vocabulary with %d words (includes start_word, end_word, unk_word)" % len(vocab)))
# tf.logging.info(" vocabulary with %d words" % len(vocab))
self.vocab = vocab # vocab[word] = id
self.reverse_vocab = reverse_vocab # reverse_vocab[id] = word
# Save special word ids.
self.start_id = vocab[star |
zhouhoo/conceptNet_55_client | tools/debug.py | Python | apache-2.0 | 213 | 0 | import inspect
def caller():
    """Return the function name two frames up the call stack.

    Frame 0 is caller() itself, frame 1 is the function that invoked it,
    so frame 2 names that function's own caller -- the name worth reporting.
    """
    return inspect.stack()[2][3]
def print_debug(msg, tag=None):
    """Print *msg* prefixed with the calling function's name and optional *tag*."""
    if tag:
        print('[%s]%s %s' % (caller(), tag, msg))
    else:
        print('[%s] %s' % (caller(), msg))
|
endlessm/chromium-browser | third_party/llvm/lldb/test/API/functionalities/data-formatter/data-formatter-objc/TestDataFormatterObjCNSURL.py | Python | bsd-3-clause | 1,265 | 0 | # encoding: utf-8
"""
Test lldb data formatter subsystem.
"""
import lldb
from lldbsuite.test.dec | orators import *
from lldbsuite.test.lldbtest import *
from lldbsuite.test import lldbutil
from ObjCDataFormatterTestCase import ObjCDataFormatterTestCase
class ObjCDataFormatterNSURL(ObjCDataFormatterTestCase):
@skipUnlessDarwin
def test_nsurl_with_run_command(self):
"""Test formatters for NSURL."""
self.appkit_tester_impl(self.nsurl_data_formatter_commands)
def nsurl_data_formatter_commands(self):
self.expect(
'frame variable cfurl_ref cfchi | ldurl_ref cfgchildurl_ref',
substrs=[
'(CFURLRef) cfurl_ref = ', '@"http://www.foo.bar',
'cfchildurl_ref = ', '@"page.html -- http://www.foo.bar',
'(CFURLRef) cfgchildurl_ref = ',
'@"?whatever -- http://www.foo.bar/page.html"'
])
self.expect(
'frame variable nsurl nsurl2 nsurl3',
substrs=[
'(NSURL *) nsurl = ', '@"http://www.foo.bar',
'(NSURL *) nsurl2 =', '@"page.html -- http://www.foo.bar',
'(NSURL *) nsurl3 = ',
'@"?whatever -- http://www.foo.bar/page.html"'
])
|
macedir/Software-Development | Python/OOP/Decorators.py | Python | gpl-3.0 | 398 | 0.012563 | # DECORATORS
import time
def funcA( | function):
def wrappingPaper():
t1 = time.time()
function()
t2 = time.time()
return 'Runtime was {} seconds.'.format(str(t2 - t1))
return wrappingPaper()
@funcA
def funcB():
workList = []
for x in range(0, 1001):
workList.append(x)
print('Sum is {}.'.format(str(sum(workList))))
prin | t(funcB)
|
andreamartire/gmpy | test2/test_large.py | Python | lgpl-3.0 | 884 | 0.003394 | from gmpy2 import *
from math import log
from time import time
# This test is designed to detect issues when allocating memory for large
# numbers. If it crashes and you need to work with very large numbers,
# you will need to compile GMP from scratch and tr | y a different memory
# allocation option.
def pi(N):
print "Computing pi to %s decimal places." % N
start = time()
N = int(round(log(10,2)*N))
sq2 = fs | qrt(mpf(2, N))
a = mpz(6) - 4*sq2
y = sq2-1
for k in range(0, 10000):
xx = fsqrt(fsqrt(1-y**4))
y = (1-xx)/(1+xx)
anew = a*(1+y)**4 - 2**(2*k+3)*y*(1+y+y**2)
if anew == a:
break
a = anew
print "Computation took %5.2f seconds." % (time() - start)
return 1/a
if __name__ == '__main__':
print "Testing operations with large numbers."
pi(1000000)
|
Qwlouse/Findeco | node_storage/tests/__init__.py | Python | gpl-3.0 | 1,410 | 0.004255 | #!/usr/bin/python
# coding=utf-8
# region License
# Findeco is dually licensed under GPLv3 or later and MPLv2.
#
################################################################################
# Copyright (c) 2012 Klaus Greff <klaus.greff@gmx.net>
# This file is part of Findeco.
#
# Findeco is free software; you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free Software
# Foundation; either version 3 of the License, or (at your option) any later
# version.
#
# Findeco is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along with
# Findeco. If not | , see <http://www.gnu.org/licenses/>.
################################################################################
#
################################################################################
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL w | as not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
#endregion #####################################################################
from __future__ import division, print_function, unicode_literals
|
pgromano/Markov_Models | Markov_Models/coarse_grain/pcca.py | Python | gpl-3.0 | 1,708 | 0 | import numpy as np
from sklearn.preprocessing import normalize
from copy import deepcopy
class PCCA(object):
def __init__(self, n_states=2, psi=None, ncv=None):
self.n_states = n_states
if n_states < 2:
raise ValueError('''Coarse-graining with PCCA+ requires at least
two states for valid decomposition.''')
self.psi = psi
self.ncv = ncv
def fit(self, model):
def spread(v):
return v.max() - v.min()
if self.psi is None:
self.psi = model.eigenv | ectors('right',
k | =self.n_states + 1, ncv=self.ncv)
self.psi = self.psi[:, 1:]
self.chi = np.zeros(self.psi.shape[0], dtype=int)
for ma in range(self.n_states - 1):
v = self.psi[:, ma]
index = np.argmax([spread(v[self.chi == mi])
for mi in range(ma + 1)])
self.chi[(self.chi == index) & (v > 0)] = ma + 1
return self
def transform(self, model):
# Build new coarse-grained model
new_model = deepcopy(model)
new_model.n_states = self.n_states
new_model.crisp_membership = self.chi
S = [np.where(self.chi == ma) for ma in range(self.n_states)]
new_model._T = np.zeros((new_model.n_states, new_model.n_states))
for i in range(new_model.n_states):
for j in range(new_model.n_states):
new_model._T[i, j] = model._T[i, S[j]].sum()
new_model._T = normalize(new_model._T, norm='l1')
return new_model
def fit_transform(self, model):
self.fit(model)
return self.transform(model)
|
excelly/xpy-ml | ex/ml/rpca.py | Python | apache-2.0 | 2,134 | 0.007966 | from ex import *
from ex.alg.common import svdex
def RPCA(D, lam = None, tol = 1e-7, maxIter = 500):
'''Yi Ma's robust pca
return (L, SingularValues(L))
'''
m, n = D.shape
maxmn, minmn = (max(m, n), min(m, n))
lam = float(lam) if lam is not None else 1.0
log.info('RPCA for %dx%d matrix. lambda = %f.' % (m, n, lam))
lam = lam/sqrt(maxmn)
Y = D.copy()
norm_two = svdex(Y, 1); norm_two = norm_two[1][0]
norm_inf = norm(Y.ravel(), inf) / lam
dual_norm = max(norm_two, norm_inf)
Y = Y / dual_norm
A_hat = zeros((m, n))
E_hat = zeros((m, n))
mu = 1.25/norm_two
mu_bar = mu*1e7
rho = 1.5
d_norm = norm(D, 'fro')
sv = 10
for it in range(maxIter):
temp_T = D - A_hat + (1/mu | )*Y;
E_hat[:] = 0;
filt = temp_T > lam/mu;
E_hat[filt] = temp_T[filt] - lam/mu
filt = temp_T < -lam/mu;
E_hat[filt] = temp_T[filt] + lam/mu
U, diagS, Vh = svdex(D - E_hat + (1/mu)*Y, sv)
svp = | sum(diagS > 1/mu);
if svp < sv:
sv = min(svp + 1, minmn)
else:
sv = min(svp + round(0.05*minmn), minmn)
A_hat = mul(U[:,:svp]*(diagS[:svp] - 1/mu), Vh[:svp])
Z = D - A_hat - E_hat
Y = Y + mu*Z
mu = min(mu*rho, mu_bar)
# stop criterion
stop = norm(Z, 'fro') / d_norm
converged = stop < tol
log.info('Iter=%d, rank=%d, |E|_0=%d, Stop=%g' % (
it, svp, sum(fabs(E_hat.ravel()) > 1e-10), stop))
if converged: break
return (A_hat, diagS[:svp])
if __name__ == '__main__':
InitLog()
rk = 10
m = 500
num_ol = int(round(m*m*0.01))
BG = mul(randn(rk, m).T, randn(rk, m))/rk
OL = zeros((m, m))
for ind in range(num_ol):
ij = random.randint(0, m, 2)
OL[ij[0], ij[1]] = 5 + rand(1)*10
A = BG + OL
A_hat = RPCA(A)[0]
A_svd = svdex(A, rk)
A_svd = mul(A_svd[0]*A_svd[1], A_svd[2])
log.info('RPCA RMSE = %f, SVD RMSE = %f' % (
rmse(BG-A_hat), rmse(BG-A_svd)))
test(rmse(BG-A_hat) < 1e-5, 'RPCA recovering L')
|
deonwu/Goku | scripts/GokuCtrl/ipa4django/views/pyExcelerator/BIFFRecords.py | Python | mit | 88,384 | 0.002478 | #!/usr/bin/env python
# -*- coding: windows-1251 -*-
# Copyright (C) 2005 Roman V. Kiseliov
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materials provided with the
# distribution.
#
# 3. All advertising materials mentioning features or use of this
# software must display the following acknowledgment:
# "This product includes software developed by
# Roman V. Kiseliov <roman@kiseliov.ru>."
#
# 4. Redistributions of any form whatsoever must retain the following
# acknowledgment:
# "This product includes software developed by
# Roman V. Kiseliov <roman@kiseliov.ru>."
#
# THIS SOFTWARE IS PROVIDED BY Roman V. Kiseliov ``AS IS'' AND ANY
# EXPRESSED OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL Roman V. Kiseliov OR
# ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
# NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
# HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
# STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
# OF THE POSSIBILITY OF SUCH DAMAGE.
__rev_id__ = """$Id: BIFFRecords.py,v 1.7 2005/10/26 07:44:24 rvk Exp $"""
from struct import pack
from UnicodeUtils import *
import sys
class SharedStringTable(object):
_SST_ID = 0x00FC
_CONTINUE_ID = 0x003C
def __init__(self):
self._sst_record = ''
self._continues = []
self._current_piece = pack('<II', 0, 0)
self._pos = len(self._current_piece)
self._str_indexes = {}
self._add_calls = 0
def add_str(self, s):
self._add_calls += 1
if s not in self._str_indexes:
self._str_indexes[s] = len(self._str_indexes)
self._add_to_sst(s)
return self._str_indexes[s]
def str_index(self, s):
return self._str_indexes[s]
def get_biff_record(self):
self._new_piece()
result = pack('<2HII', self._SST_ID, len(self._sst_record), self._add_calls, len(self._str_indexes))
result += self._sst_record[8:].encode('utf8')
result += ''.join(self._continues)
return result
def _add_to_sst(self, s):
u_str = upack2(s)
if len(u_str) > 0xFFFF:
raise Exception('error: very long string.')
is_unicode_str = u_str[2] == '\x01'
if is_unicode_str:
atom_len = 5 # 2 byte -- len,
# 1 byte -- options,
# 2 byte -- 1st sym
else:
atom_len = 4 # 2 byte -- len,
# 1 byte -- options,
# 1 byte -- 1st sym
self._save_atom(u_str[0:atom_len])
self._save_splitted(u_str[atom_len:], is_unicode_str)
def _new_piece(self):
if self._sst_record == '':
self._sst_record = self._current_piece
else:
curr_piece_len = len(self._current_piece)
self._continues.append(pack('<2H%ds'%curr_piece_len, self._CONTINUE_ID, curr_piece_len, self._current_piece))
self._current_piece = ''
self._pos = len(self._current_piece)
def _save_atom(self, s):
atom_len = len(s)
free_space = 0x2020 - len(self._current_piece)
if free_space < atom_len:
self._new_piece()
self._current_piece += s
def _save_splitted(self, s, is_unicode_str):
i = 0
str_len = len(s)
while i < str_len:
piece_len = len(self._current_piece)
free_space = 0x2020 - piece_len
tail_len = str_len - i
need_more_space = free_space < tail_len
if not need_more_space:
atom_len = tail_len
else:
if is_unicode_str:
atom_len = free_space & 0xFFFE
else:
atom_len = free_space
self._current_piece += s[i:i+atom_len]
if need_more_space:
self._new_piece()
if is_unicode_str:
self._current_piece += '\x01'
else:
self._current_piece += '\x00'
i += atom_len
class BiffRecord(object):
def __init__(self):
self._rec_data = ''
def get_rec_id(self):
return _REC_ID
def get_rec_header(self):
return pack('<2H', self._REC_ID, len(self._rec_data))
def get_rec_data(self):
return self._rec_data
def get(self):
data = self.get_rec_data()
if len(data) > 0x2020: # limit for BIFF7/8
| chunks = []
pos = 0
while pos < len(data):
chunk_pos = pos + 0x2020
chunk = data[pos:chunk_pos]
chunks.append(chunk)
pos = chunk_pos
continues = pack('<2H', self._REC_ID, len(chunks[0])) + chunks[0]
for chunk in chunks[1:]:
| continues += pack('<2H%ds'%len(chunk), 0x003C, len(chunk), chunk)
# 0x003C -- CONTINUE record id
return continues
else:
return self.get_rec_header() + data
class Biff8BOFRecord(BiffRecord):
"""
Offset Size Contents
0 2 Version, contains 0600H for BIFF8 and BIFF8X
2 2 Type of the following data:
0005H = Workbook globals
0006H = Visual Basic module
0010H = Worksheet
0020H = Chart
0040H = Macro sheet
0100H = Workspace file
4 2 Build identifier
6 2 Build year
8 4 File history flags
12 4 Lowest Excel version that can read all records in this file
"""
_REC_ID = 0x0809
# stream types
BOOK_GLOBAL = 0x0005
VB_MODULE = 0x0006
WORKSHEET = 0x0010
CHART = 0x0020
MACROSHEET = 0x0040
WORKSPACE = 0x0100
def __init__(self, rec_type):
BiffRecord.__init__(self)
version = 0x0600
build = 0x0DBB
year = 0x07CC
file_hist_flags = 0x00L
ver_can_read = 0x06L
self._rec_data = pack('<4H2I', version, rec_type, build, year, file_hist_flags, ver_can_read)
class InteraceHdrRecord(BiffRecord):
_REC_ID = 0x00E1
def __init__(self):
BiffRecord.__init__(self)
self._rec_data = pack('BB', 0xB0, 0x04)
class InteraceEndRecord(BiffRecord):
_REC_ID = 0x00E2
def __init__(self):
BiffRecord.__init__(self)
self._rec_data = ''
class MMSRecord(BiffRecord):
_REC_ID = 0x00C1
def __init__(self):
BiffRecord.__init__(self)
self._rec_data = pack('<H', 0x00)
class WriteAccessRecord(BiffRecord):
"""
This record is part of the file protection. It contains the name of the
user that has saved the file. The user name is always stored as an
equal-sized string. All unused characters after the name are filled
with space characters. It is not required to write the mentioned string
length. Every other length will be accepted too.
"""
_REC_ID = 0x005C
def __init__(self, owner):
BiffRecord.__init__(self)
uowner = owner[0:0x30]
uowner_len = len(uowner)
self._rec_data = pack('%ds%ds' % (uowner_len, 0x70 - uowner_len), uowner, ' '*(0x70 - uowner_len))
class DSFRecord(BiffRecord):
"""
This record specifies if the file cont |
BU-PyCon/Meeting-3 | Programs/interpolate.py | Python | mit | 3,010 | 0.015615 | import numpy as np
import matplotlib.pyplot as plt
from astropy.io import fits
from scipy.interpolate import interp1d,splev,splrep
def extractSpectrum(filename):
"""
NAME:
extractSpectrum
PURPOSE:
To open an input fits file from SDSS and extract the relevant
components, namely the flux and corresponding wavelength.
INPUTS:
filename The path and filename (including the extension) to the
file to be read in.
OUTPUTS:
lam The wavelengths, in angstrom, of the flux values
flux The actual flux, in arbitrary units
EXAMPLE:
flux, lam = extractSpectra('path/to/file/filename.fits')
"""
hdu = fits.open(filename) #Open the file using astropy
data = hdu[1].data #Data is in 2nd component of HDU
flux = data['flux'] #Get flux from read in dict
lam = 10**(data['loglam']) #Get wavelength, make it not log10
hdu.close() #Close the file, we're done with it
return lam, flux #Return the values as numpy arrays
def interpolate(points, lam, flux, method):
"""
NAME:
interpolate
PURPOSE:
General purpose function that can call and use various scipy.interpolate
methods. Defi | ned for convienience.
INPUTS:
points Set of new points to get interpolated values for.
lam The wavelengths of the data points
flux The fluxes of the data points
method The method of interpolation to use. Valide values include
'interp1d:linear', 'interp | 1d:quadratic', and 'splrep'.
OUTPUTS:
Interpolated set of values for each corresponding input point.
EXAMPLE:
interpFlux = interpolate(interpLam, lam, flux)
"""
if method == 'interp1d:linear':
f = interp1d(lam, flux, assume_sorted = True)
return f(points)
if method == 'interp1d:quadratic':
f = interp1d(lam, flux, kind = 'quadratic', assume_sorted = True)
return f(points)
if method == 'splrep':
return splev(points, splrep(lam, flux))
raise Exception("You didn't choose a proper interpolating method")
#First extract the flux and corresponding wavelength
fileName = 'spec-4053-55591-0938.fits'
lam, flux = extractSpectrum(fileName)
#Now let's plot it, without any processing
plt.figure(1)
plt.plot(lam, flux, '-o', lw = 1.5, c = (0.694,0.906,0.561),
mec = 'none', ms = 4, label = 'Original data')
plt.xlabel('Wavelength', fontsize = 16)
plt.ylabel('Flux', fontsize = 16)
plt.ylim(0,1.1*max(flux))
#Now let's interpolate and plot that up
interpLam = np.arange(4000,10000,1)
interpFlux = interpolate(interpLam, lam, flux, 'splrep') #This is my own method
plt.plot(interpLam, interpFlux, '-k', label = 'Interpolated')
plt.legend(loc = 0)
plt.show(block = False)
print('Done...')
|
blhughes/forge | forge/commands/group/add.py | Python | gpl-2.0 | 734 | 0.021798 | import forge
from forge.models.groups import Group
class Add(object):
def __init__(self,json_args,session):
if type(json_args) != type({}):
raise TypeError("JSON Arg must be dict type")
if 'name' and 'distribution' not in json_args.keys():
raise forge.ArgumentError()
self.name = json_args['name']
self.dist | ribution = json_args['distribution']
| self.session = session
def call(self):
group = Group(self.name,self.distribution)
self.session.add(group)
self.session.commit()
return {'name':self.name, 'distribution':self.distribution}
|
kishs1991/mythoughts | mythoughts/mythoughts/settings.py | Python | gpl-3.0 | 3,202 | 0.001249 | """
Django settings for mythoughts project.
Generated by 'django-admin startproject' using Django 1.9.6.
For more information on this file, see
https://docs.djangoproject.com/en/1.9/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.9/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.9/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '4z)-mry!_e(hm&% | sn$y&twnk&b_9dpmegvvrks!v*sinbbi)!9'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'me.apps.MeConfig',
'django.contrib.admin',
'django.contrib.auth',
' | django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
]
MIDDLEWARE_CLASSES = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'mythoughts.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'mythoughts.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.9/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/1.9/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.9/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.9/howto/static-files/
STATIC_URL = '/static/'
|
anryko/ansible | lib/ansible/modules/network/onyx/onyx_snmp_hosts.py | Python | gpl-3.0 | 20,657 | 0.003824 | #!/usr/bin/python
#
# Copyright: Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = """
---
module: onyx_snmp_hosts
version_added: "2.10"
author: "Sara Touqan (@sarato)"
short_description: Configures SNMP host parameters
description:
- This module provides declarative management of SNMP hosts protocol params
on Mellanox ONYX network devices.
options:
hosts:
type: list
description:
- List of snmp hosts
suboptions:
name:
description:
- Specifies the name of the host.
required: true
type: str
enabled:
description:
- Temporarily Enables/Disables sending of all notifications to this host.
type: bool
notification_type:
description:
- Configures the type of sending notification to the specified host.
choices: ['trap', 'inform']
type: str
port:
description:
- Overrides default target port for this host.
type: str
version:
description:
- Specifys SNMP version of informs to send.
choices: ['1', '2c', '3']
type: str
user_name:
description:
- Specifys username for this inform sink.
type: str
auth_type:
description:
- Configures SNMP v3 security parameters, specifying passwords in a nother parameter (auth_password) (passwords are always stored encrypted).
choices: ['md5', 'sha', 'sha224', 'sha256', 'sha384', 'sha512']
type: str
auth_password:
description:
- The password needed to configure the auth type.
type: str
privacy_type:
description:
- Specifys SNMP v3 privacy settings for this user.
choices: ['3des', 'aes-128', 'aes-192', 'aes-192-cfb', 'aes-256', 'aes-256-cfb', 'des']
type: str
privacy_password:
description:
- The password needed to configure the privacy type.
type: str
state:
description:
- Used to decide if you want to delete the specified host or not.
choices: ['present' , 'absent']
type: str
"""
EXAMPLES = """
- name: enables snmp host
onyx_snmp_hosts:
hosts:
- name: 1.1.1.1
enabled: true
- name: configures snmp host with version 2c
onyx_snmp_hosts:
hosts:
- name: 2.3.2.4
enabled: true
| notification_type: trap
port: 66
version: 2c
- name: configures snmp host with version 3 and configures it with user as sara
onyx_snmp_hosts:
hosts:
- name: 2.3.2.4
enabled: true
notification_type: trap
port: 66
version: 3
user_name: sara
auth_type: sha
auth_password: jnbdfijbdsf
privacy_type: 3des
privacy_password: nojfd8uherwiug | fh
- name: deletes the snmp host
onyx_snmp_hosts:
hosts:
- name: 2.3.2.4
state: absent
"""
RETURN = """
commands:
description: The list of configuration mode commands to send to the device.
returned: always
type: list
sample:
- snmp-server host <host_name> disable
- no snmp-server host <host_name> disable
- snmp-server host <host_name> informs port <port_number> version <version_number>
- snmp-server host <host_name> traps port <port_number> version <version_number>
- snmp-server host <host_name> informs port <port_number> version <version_number> user <user_name> auth <auth_type>
<auth_password> priv <privacy_type> <privacy_password>
- snmp-server host <host_name> traps port <port_number> version <version_number> user <user_name> auth <auth_type>
<auth_password> priv <privacy_type> <privacy_password>
- no snmp-server host <host_name>.
"""
import re
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.network.onyx.onyx import show_cmd
from ansible.module_utils.network.onyx.onyx import BaseOnyxModule
class OnyxSNMPHostsModule(BaseOnyxModule):
def init_module(self):
""" initialize module
"""
host_spec = dict(name=dict(required=True),
enabled=dict(type='bool'),
notification_type=dict(type='str', choices=['trap', 'inform']),
port=dict(type='str'),
version=dict(type='str', choices=['1', '2c', '3']),
user_name=dict(type='str'),
auth_type=dict(type='str', choices=['md5', 'sha', 'sha224', 'sha256', 'sha384', 'sha512']),
privacy_type=dict(type='str', choices=['3des', 'aes-128', 'aes-192', 'aes-192-cfb', 'aes-256', 'aes-256-cfb', 'des']),
privacy_password=dict(type='str', no_log=True),
auth_password=dict(type='str', no_log=True),
state=dict(type='str', choices=['present', 'absent'])
)
element_spec = dict(
hosts=dict(type='list', elements='dict', options=host_spec),
)
argument_spec = dict()
argument_spec.update(element_spec)
self._module = AnsibleModule(
argument_spec=argument_spec,
supports_check_mode=True)
def validate_snmp_required_params(self):
req_hosts = self._required_config.get("hosts")
if req_hosts:
for host in req_hosts:
version = host.get('version')
if version:
if version == '3':
if host.get('user_name') is None or host.get('auth_type') is None or host.get('auth_password') is None:
self._module.fail_json(msg='user_name, auth_type and auth_password are required when version number is 3.')
if host.get('notification_type') is not None:
if host.get('version') is None or host.get('port') is None:
self._module.fail_json(msg='port and version are required when notification_type is provided.')
if host.get('auth_type') is not None:
if host.get('auth_password') is None:
self._module.fail_json(msg='auth_password is required when auth_type is provided.')
if host.get('privacy_type') is not None:
if host.get('privacy_password') is None:
self._module.fail_json(msg='privacy_password is required when privacy_type is provided.')
def get_required_config(self):
module_params = self._module.params
self._required_config = dict(module_params)
self.validate_param_values(self._required_config)
self.validate_snmp_required_params()
def _set_host_config(self, hosts_config):
hosts = hosts_config.get('Notification sinks')
if hosts[0].get('Lines'):
self._current_config['current_hosts'] = dict()
self._current_config['host_names'] = []
return
current_hosts = dict()
host_names = []
for host in hosts:
host_info = dict()
for host_name in host:
host_names.append(host_name)
enabled = True
first_entry = host.get(host_name)[0]
if first_entry:
if first_entry.get('Enabled') == 'no':
enabled = False
notification_type = first_entry.get('Notification type')
notification_type = notification_type.split()
host_info['notification_type'] = notification_type[2]
version = notification_type[1][1:]
host_inf |
cedadev/ndg_xacml | ndg/xacml/core/context/action.py | Python | bsd-3-clause | 495 | 0.00202 | """NDG XACML Context Action type
NERC DataGrid
"""
__author__ = "P J Kershaw"
__date__ = "24/03/10"
__copyright__ = "(C) 2010 Science and Technology Facilities Council"
__contact__ = "Philip.Kershaw@stfc.ac.uk"
__license__ = "BSD - see | LICENSE file in top-level directory"
__contact | __ = "Philip.Kershaw@stfc.ac.uk"
__revision__ = "$Id$"
from ndg.xacml.core.context import RequestChildBase
class Action(RequestChildBase):
"""XACML Context Action type"""
ELEMENT_LOCAL_NAME = 'Action'
|
gusDuarte/sugar-toolkit-gtk3 | tests/graphics/customdestroy.py | Python | lgpl-2.1 | 918 | 0.002179 | from gi.repository import Gtk
"""
Since GTK+3 Gtk.CellRenderer doesn't have a destroy signal anymore.
We can do the cleanup in the python destructor | method instead.
"""
class MyCellRenderer(Gtk.CellRenderer):
def __init__(self):
Gtk.CellRenderer.__init__(self)
| def __del__(self):
print "cellrenderer destroy"
def do_render(self, cairo_t, widget, background_area, cell_area, flags):
pass
def window_destroy_cb(*kwargs):
print "window destroy"
Gtk.main_quit()
window = Gtk.Window(Gtk.WindowType.TOPLEVEL)
window.connect("destroy", window_destroy_cb)
window.show()
def treeview_destroy_cb(*kwargs):
print "treeview destroy"
treeview = Gtk.TreeView()
treeview.connect("destroy", treeview_destroy_cb)
window.add(treeview)
treeview.show()
col = Gtk.TreeViewColumn()
treeview.append_column(col)
cel = MyCellRenderer()
col.pack_start(cel, expand=True)
Gtk.main()
|
palmtree5/Red-DiscordBot | redbot/cogs/streams/streams.py | Python | gpl-3.0 | 39,323 | 0.00267 | import discord
from redbot.core.bot import Red
from redbot.core import checks, commands, Config
from redbot.core.i18n import cog_i18n, Translator, set_contextual_locales_from_guild
from redbot.core.utils._internal_utils import send_to_owners_with_prefix_replaced
from redbot.core.utils.chat_formatting import escape, pagify
from .streamtypes import (
PicartoStream,
Stream,
TwitchStream,
YoutubeStream,
)
from .errors import (
APIError,
InvalidTwitchCredentials,
InvalidYoutubeCredentials,
OfflineStream,
StreamNotFound,
StreamsError,
YoutubeQuotaExceeded,
)
from . import streamtypes as _streamtypes
import re
import logging
import asyncio
import aiohttp
import contextlib
from datetime import datetime
from collections import defaultdict
from typing import Optional, List, Tuple, Union, Dict
MAX_RETRY_COUNT = 10
_ = Translator("Streams", __file__)
log = logging.getLogger("red.core.cogs.Streams")
@cog_i18n(_)
class Streams(commands.Cog):
"""Various commands relating to streaming platforms.
You can check if a Twitch, YouTube or Picarto stream is
currently live.
"""
global_defaults = {
"refresh_timer": 300,
"tokens": {},
"streams": [],
"notified_owner_missing_twitch_secret": False,
}
guild_defaults = {
"autodelete": False,
"mention_everyone": False,
"mention_here": False,
"live_message_mention": False,
"live_message_nomention": False,
"ignore_reruns": False,
"ignore_schedule": False,
}
role_defaults = {"mention": False}
def __init__(self, bot: Red):
super().__init__()
self.config: Config = Config.get_conf(self, 26262626)
self.ttv_bearer_cache: dict = {}
self.config.register_global(**self.global_defaults)
self.config.register_guild(**self.guild_defaults)
self.config.register_role(**self.role_defaults)
self.bot: Red = bot
self.streams: List[Stream] = []
self.task: Optional[asyncio.Task] = None
self.yt_cid_pattern = re.compile("^UC[-_A-Za-z0-9]{21}[AQgw]$")
self._ready_event: asyncio.Event = asyncio.Event()
self._init_task: asyncio.Task = self.bot.loop.create_task(self.initialize())
async def red_delete_data_for_user(self, **kwargs):
""" Nothing to delete """
return
def check_name_or_id(self, data: str) -> bool:
matched = self.yt_cid_pattern.fullmatch(data)
if matched is None:
return True
return False
async def initialize(self) -> None:
"""Should be called straight after cog instantiation."""
await self.bot.wait_until_ready()
try:
await self.move_api_keys()
await self.get_twitch_bearer_token()
self.streams = await self.load_streams()
self.task = self.bot.loop.create_task(self._stream_alerts())
except Exception as error:
log.exception("Failed to initialize Streams cog:", exc_info=error)
self._ready_event.set()
@commands.Cog.listener()
async def on_red_api_tokens_update(self, service_name, api_tokens):
if service_name == "twitch":
await self.get_twitch_bearer_token(api_tokens)
async def cog_before_invoke(self, ctx: commands.Context):
await self._ready_event.wait()
async def move_api_keys(self) -> None:
"""Move the API keys from cog stored config to core bot config if they exist."""
tokens = await self.config.tokens()
youtube = await self.bot.get_shared_api_tokens("youtube")
twitch = await self.bot.get_shared_api_tokens("twitch")
for token_type, token in tokens.items():
if token_type == "YoutubeStream" and "api_key" not in youtube:
await self.bot.set_shared_api_tokens("youtube", api_key=token)
if token_type == "TwitchStream" and "client_id" not in twitch:
# Don't need to check Community since they're set the same
await self.bot.set_shared_api_tokens("twitch", client_id=token)
await self.config.tokens.clear()
async def get_twitch_bearer_token(self, api_tokens: Optional[Dict] = None) -> None:
tokens = (
await self.bot.get_shared_api_tokens("twitch") if api_tokens is None else api_tokens
)
if tokens.get("client_id"):
notified_owner_missing_twitch_secret = (
await self.config.notified_owner_missing_twitch_secret()
)
try:
tokens["client_secret"]
if notified_owner_missing_twitch_secret is True:
await self.config.notified_owner_missing_twitch_secret.set(False)
except KeyError:
message = _(
"You need a client secret key if you want to use the Twitch API on this cog.\n"
"Follow these steps:\n"
"1. Go to this page: https://dev.twitch.tv/console/apps.\n"
'2. Click "Manage" on your application.\n'
'3. Click on "New secret".\n'
"5. Copy your client ID and your client secret into:\n"
"{command}"
"\n\n"
"Note: These tokens are sensitive and should only be used in a private channel "
"or in DM with the bot."
).format(
command="`[p]set api twitch client_id {} client_secret {}`".format(
_("<your_client_id_here>"), _("<your_client_secret_here>")
)
)
if notified_owner_missing_twitch_secret is False:
await send_to_owners_with_prefix_replaced(self.bot, message)
await self.config.notified_owner_missing_twitch_secret.set(True)
async with aiohttp.ClientSession() as session:
async with session.post(
"https://id.twitch.tv/oauth2/token",
| params={
"client_id": tokens. | get("client_id", ""),
"client_secret": tokens.get("client_secret", ""),
"grant_type": "client_credentials",
},
) as req:
try:
data = await req.json()
except aiohttp.ContentTypeError:
data = {}
if req.status == 200:
pass
elif req.status == 400 and data.get("message") == "invalid client":
log.error(
"Twitch API request failed authentication: set Client ID is invalid."
)
elif req.status == 403 and data.get("message") == "invalid client secret":
log.error(
"Twitch API request failed authentication: set Client Secret is invalid."
)
elif "message" in data:
log.error(
"Twitch OAuth2 API request failed with status code %s"
" and error message: %s",
req.status,
data["message"],
)
else:
log.error("Twitch OAuth2 API request failed with status code %s", req.status)
if req.status != 200:
return
self.ttv_bearer_cache = data
self.ttv_bearer_cache["expires_at"] = datetime.now().timestamp() + data.get("expires_in")
async def maybe_renew_twitch_bearer_token(self) -> None:
    """Refresh the cached Twitch OAuth2 bearer token if it is about to expire.

    A token with less than 60 seconds of validity left is treated as
    expired and re-fetched; an empty cache means no token exists yet, so
    nothing is done here (the fetch happens lazily elsewhere).
    """
    cache = self.ttv_bearer_cache
    if cache and cache["expires_at"] - datetime.now().timestamp() <= 60:
        await self.get_twitch_bearer_token()
@commands.guild_only()
@commands.command()
async def twitchstream(self, ctx: commands.Context, channel_name: str):
"""Check if a Twitch channel is live."""
await self.maybe_renew_twitch_bearer_token()
token = (await self.bot.get_shared_api_tokens("twitch")).get("client_id")
stre |
jasonwee/asus-rt-n14uhp-mrtg | src/lesson_data_structures/enum_aliases.py | Python | apache-2.0 | 470 | 0.002128 | import enum
# Demonstrates enum member aliasing: when two members share a value, the
# later name becomes an alias of the first member defined with that value.
# (This span also contained stray dataset '|' separators, removed here.)
class BugStatus(enum.Enum):
    new = 7
    incomplete = 6
    invalid = 5
    wont_fix = 4
    in_progress = 3
    fix_committed = 2
    fix_released = 1

    by_design = 4  # alias of wont_fix (same value)
    closed = 1     # alias of fix_released (same value)


# Iteration yields only canonical members; aliases are skipped.
for status in BugStatus:
    print('{:15} = {}'.format(status.name, status.value))

# Aliases are the *same object* as their canonical member.
print('\nSame: by_design is wont_fix: ',
      BugStatus.by_design is BugStatus.wont_fix)
print('Same: closed is fix_released: ',
      BugStatus.closed is BugStatus.fix_released)
sbnoemi/notorhot | notorhot/contrib/write_in/_tests/views.py | Python | bsd-3-clause | 4,631 | 0.011445 | import datetime
from mock import Mock, patch, MagicMock, PropertyMock
from django.test import TestCase
from django import forms
from django.http import Http40 | 4
from not | orhot._tests.factories import mixer
from notorhot._tests._utils import setup_view, ViewTestMixin
from notorhot.contrib.write_in._tests.models import SimpleWriteIn
from notorhot.contrib.write_in.models import DefaultWriteIn
from notorhot.contrib.write_in.views import WriteInBaseView, WriteInDefaultView, \
WriteInThanksView
class WriteInBaseViewTestCase(ViewTestMixin, TestCase):
    # Exercises WriteInBaseView: instance-level field overrides and the
    # category-aware generation of the form class.
    view_class = WriteInBaseView

    def test_assign_fields(self):
        # make sure that changing fields / exclude_fields on a view instance
        # doesn't change the attribute on the class
        view = self.make_view('get')
        view.fields = ['field1',]
        view.exclude_fields = ['field2',]
        self.assertNotEqual(self.view_class.fields, ['field1',])
        self.assertNotEqual(self.view_class.exclude_fields, ['field2',])

    def test_get_category(self):
        # An existing public category is resolved from the URL slug.
        cat = mixer.blend('notorhot.CandidateCategory')
        view = self.make_view('get', request_kwargs={ 'category_slug': cat.slug, })
        view_cat = view.get_category()
        self.assertEqual(view_cat, cat)

        # NOTE(review): a non-public category is still returned here, so
        # get_category() apparently does not filter on is_public — confirm
        # that this is intentional.
        cat = mixer.blend('notorhot.CandidateCategory', is_public=False)
        view = self.make_view('get', request_kwargs={ 'category_slug': cat.slug, })
        view_cat = view.get_category()
        self.assertIsNotNone(view_cat)

        # should be able to run with or without category
        view = self.make_view('get')
        view_cat = view.get_category()

    def test_get_form_class_with_category(self):
        cat = mixer.blend('notorhot.CandidateCategory')
        view = self.make_view('get', request_kwargs={ 'category_slug': cat.slug, })
        form_class = view.get_form_class()
        # With a category in the URL the form omits the category field.
        self.assertItemsEqual(form_class.base_fields.keys(),
            ['candidate_name', 'submitter_name', 'submitter_email'])

        # make sure we remove the category from fields / add to exclude
        view = self.make_view('get', request_kwargs={ 'category_slug': cat.slug, })
        view.fields = ['category', 'candidate_name',]
        self.assertIn('category', view.fields)
        self.assertNotIn('category', view.exclude_fields)
        form_class = view.get_form_class()
        self.assertNotIn('category', view.fields)
        self.assertIn('category', view.exclude_fields)

    def test_get_form_class_without_category(self):
        cat = mixer.blend('notorhot.CandidateCategory')
        view = self.make_view('get')
        form_class = view.get_form_class()
        # Without a category in the URL, the form must include a category
        # field so the submitter can choose one.
        self.assertItemsEqual(form_class.base_fields.keys(),
            ['candidate_name', 'submitter_name', 'submitter_email', 'category'])

        # make sure we add the category to fields / remove from exclude
        view = self.make_view('get')
        view.fields = ['candidate_name',]
        view.exclude_fields.append('category')
        self.assertNotIn('category', view.fields)
        self.assertIn('category', view.exclude_fields)
        form_class = view.get_form_class()
        self.assertIn('category', view.fields)
        self.assertNotIn('category', view.exclude_fields)

        # but don't add to fields if no fields set
        view = self.make_view('get')
        self.assertItemsEqual(view.fields, [])
        form_class = view.get_form_class()
        self.assertItemsEqual(view.fields, [])
class WriteInDefaultViewTestCase(ViewTestMixin, TestCase):
    """Checks that WriteInDefaultView sets up its field configuration."""
    view_class = WriteInDefaultView

    def test_init(self):
        instance = self.make_view('get')
        expected_excludes = ['date_submitted', 'date_processed', 'status']
        self.assertEqual(instance.fields, instance.default_fields)
        self.assertEqual(instance.exclude_fields, expected_excludes)
class WriteInThanksViewTestCase(ViewTestMixin, TestCase):
    """Checks category lookup on the write-in 'thanks' view."""
    view_class = WriteInThanksView

    def test_get_category(self):
        existing = mixer.blend('notorhot.CandidateCategory', slug='cat1')

        # An existing slug must resolve without raising.
        thanks_view = self.make_view(
            'get', request_kwargs={'category_slug': 'cat1'})
        try:
            resolved = thanks_view.category
        except Http404:
            self.fail(u"WriteInThanksView should be able to retrieve existing "
                "category.")
        else:
            self.assertEqual(existing, resolved)

        # A slug with no matching category must raise Http404.
        thanks_view = self.make_view(
            'get', request_kwargs={'category_slug': 'cat2'})
        with self.assertRaises(Http404):
            resolved = thanks_view.category
evanw/flatbuffers | tests/namespace_test/NamespaceA/NamespaceB/EnumInNestedNS.py | Python | apache-2.0 | 128 | 0.007813 | # automatically generated, do not mod | ify
# namespace: NamespaceB
class EnumInNestedNS(object | ):
A = 0
B = 1
C = 2
|
kidmaple/CoolWall | user/python/Tools/idle/keydefs.py | Python | gpl-2.0 | 2,235 | 0.000895 | windows_keydefs = \
{'<<Copy>>': ['<Control-c>'],
'<<Cut>>': [ | '<Control-x>'],
'<<Paste>>': ['<Co | ntrol-v>'],
'<<beginning-of-line>>': ['<Control-a>', '<Home>'],
'<<center-insert>>': ['<Control-l>'],
'<<close-all-windows>>': ['<Control-q>'],
'<<close-window>>': ['<Alt-F4>'],
'<<dump-undo-state>>': ['<Control-backslash>'],
'<<end-of-file>>': ['<Control-d>'],
'<<help>>': ['<F1>'],
'<<history-next>>': ['<Alt-n>'],
'<<history-previous>>': ['<Alt-p>'],
'<<interrupt-execution>>': ['<Control-c>'],
'<<open-class-browser>>': ['<Alt-c>'],
'<<open-module>>': ['<Alt-m>'],
'<<open-new-window>>': ['<Control-n>'],
'<<open-window-from-file>>': ['<Control-o>'],
'<<plain-newline-and-indent>>': ['<Control-j>'],
'<<redo>>': ['<Control-y>'],
'<<remove-selection>>': ['<Escape>'],
'<<save-copy-of-window-as-file>>': ['<Alt-Shift-s>'],
'<<save-window-as-file>>': ['<Alt-s>'],
'<<save-window>>': ['<Control-s>'],
'<<select-all>>': ['<Alt-a>'],
'<<toggle-auto-coloring>>': ['<Control-slash>'],
'<<undo>>': ['<Control-z>']}
unix_keydefs = \
{'<<Copy>>': ['<Alt-w>', '<Meta-w>'],
'<<Cut>>': ['<Control-w>'],
'<<Paste>>': ['<Control-y>'],
'<<beginning-of-line>>': ['<Control-a>', '<Home>'],
'<<center-insert>>': ['<Control-l>'],
'<<close-all-windows>>': ['<Control-x><Control-c>'],
'<<close-window>>': ['<Control-x><Control-0>', '<Control-x><Key-0>'],
'<<do-nothing>>': ['<Control-x>'],
'<<dump-undo-state>>': ['<Control-backslash>'],
'<<end-of-file>>': ['<Control-d>'],
'<<help>>': ['<F1>'],
'<<history-next>>': ['<Alt-n>', '<Meta-n>'],
'<<history-previous>>': ['<Alt-p>', '<Meta-p>'],
'<<interrupt-execution>>': ['<Control-c>'],
'<<open-class-browser>>': ['<Control-x><Control-b>'],
'<<open-module>>': ['<Control-x><Control-m>'],
'<<open-new-window>>': ['<Control-x><Control-n>'],
'<<open-window-from-file>>': ['<Control-x><Control-f>'],
'<<plain-newline-and-indent>>': ['<Control-j>'],
'<<redo>>': ['<Alt-z>', '<Meta-z>'],
'<<save-copy-of-window-as-file>>': ['<Control-x><w>'],
'<<save-window-as-file>>': ['<Control-x><Control-w>'],
'<<save-window>>': ['<Control-x><Control-s>'],
'<<select-all>>': ['<Alt-a>', '<Meta-a>'],
'<<toggle-auto-coloring>>': ['<Control-slash>'],
'<<undo>>': ['<Control-z>']}
|
docker/docker-py | tests/integration/api_image_test.py | Python | apache-2.0 | 12,869 | 0 | import contextlib
import json
import shutil
import socket
import tarfile
import tempfile
import threading
import pytest
from http.server import SimpleHTTPRequestHandler
import socketserver
import docker
from ..helpers import requires_api_version, requires_experimental
from .base import BaseAPIIntegrationTest, TEST_IMG
class ListImagesTest(BaseAPIIntegrationTest):
    """Integration tests for ``client.images()`` listings."""

    def test_images(self):
        """Listed images carry the expected keys and match the daemon count."""
        images = self.client.images(all=True)
        first = images[0]
        assert 'Id' in first
        assert 'Created' in first
        assert 'RepoTags' in first
        # An image ID can appear in several list entries (one per tag);
        # the number of distinct IDs should match the daemon's reported
        # image count. A set comprehension replaces the original O(n^2)
        # membership-scan dedup loop.
        distinct_ids = {img['Id'] for img in images}
        assert len(distinct_ids) == self.client.info()['Images']

    def test_images_quiet(self):
        """With quiet=True the listing is bare string IDs."""
        image_ids = self.client.images(quiet=True)
        # isinstance is the idiomatic type check (was: type(...) == str).
        assert isinstance(image_ids[0], str)
class PullImageTest(BaseAPIIntegrationTest):
    # Integration tests for pulling images from a registry.
    # NOTE(review): these require network access to Docker Hub.

    def test_pull(self):
        # Start from a clean slate; ignore the error if the image is absent.
        try:
            self.client.remove_image('hello-world')
        except docker.errors.APIError:
            pass
        res = self.client.pull('hello-world')
        self.tmp_imgs.append('hello-world')
        # A non-streaming pull returns the aggregated log output as one string.
        assert type(res) == str
        assert len(self.client.images('hello-world')) >= 1
        img_info = self.client.inspect_image('hello-world')
        assert 'Id' in img_info

    def test_pull_streaming(self):
        try:
            self.client.remove_image('hello-world')
        except docker.errors.APIError:
            pass
        # stream=True with decode=True yields one parsed JSON dict per chunk.
        stream = self.client.pull(
            'hello-world', stream=True, decode=True)
        self.tmp_imgs.append('hello-world')
        for chunk in stream:
            assert isinstance(chunk, dict)
        assert len(self.client.images('hello-world')) >= 1
        img_info = self.client.inspect_image('hello-world')
        assert 'Id' in img_info

    @requires_api_version('1.32')
    @requires_experimental(until=None)
    def test_pull_invalid_platform(self):
        # Pulling for a bogus platform must fail with an API error.
        with pytest.raises(docker.errors.APIError) as excinfo:
            self.client.pull('hello-world', platform='foobar')
        # Some API versions incorrectly returns 500 status; assert 4xx or 5xx
        assert excinfo.value.is_error()
        # Daemon versions differ in the exact message wording.
        assert 'unknown operating system' in excinfo.exconly() \
            or 'invalid platform' in excinfo.exconly()
class CommitTest(BaseAPIIntegrationTest):
    # Integration tests for committing a container into a new image.

    def test_commit(self):
        container = self.client.create_container(TEST_IMG, ['touch', '/test'])
        id = container['Id']
        self.client.start(id)
        self.tmp_containers.append(id)
        res = self.client.commit(id)
        assert 'Id' in res
        img_id = res['Id']
        self.tmp_imgs.append(img_id)
        img = self.client.inspect_image(img_id)
        # The committed image records the container it was created from...
        assert 'Container' in img
        assert img['Container'].startswith(id)
        assert 'ContainerConfig' in img
        assert 'Image' in img['ContainerConfig']
        assert TEST_IMG == img['ContainerConfig']['Image']
        # ...and its parent must be the image that container was based on.
        busybox_id = self.client.inspect_image(TEST_IMG)['Id']
        assert 'Parent' in img
        assert img['Parent'] == busybox_id

    def test_commit_with_changes(self):
        cid = self.client.create_container(TEST_IMG, ['touch', '/test'])
        self.tmp_containers.append(cid)
        self.client.start(cid)
        # Dockerfile-style change directives are applied during the commit.
        img_id = self.client.commit(
            cid, changes=['EXPOSE 8000', 'CMD ["bash"]']
        )
        self.tmp_imgs.append(img_id)
        img = self.client.inspect_image(img_id)
        assert 'Container' in img
        assert img['Container'].startswith(cid['Id'])
        # Both directives must be reflected in the resulting image config.
        assert '8000/tcp' in img['Config']['ExposedPorts']
        assert img['Config']['Cmd'] == ['bash']
class RemoveImageTest(BaseAPIIntegrationTest):
    """Integration test for force-removing an image."""

    def test_remove(self):
        # Commit a container so there is a disposable image to delete.
        container_id = self.client.create_container(
            TEST_IMG, ['touch', '/test'])['Id']
        self.client.start(container_id)
        self.tmp_containers.append(container_id)

        commit_result = self.client.commit(container_id)
        assert 'Id' in commit_result
        image_id = commit_result['Id']
        self.tmp_imgs.append(image_id)

        # Force-remove and confirm the deletion log mentions our image.
        deletion_log = self.client.remove_image(image_id, force=True)
        assert {"Deleted": image_id} in deletion_log

        # The image must no longer appear in the full listing.
        remaining = [
            img for img in self.client.images(all=True)
            if img['Id'].startswith(image_id)
        ]
        assert not remaining
class ImportImageTest(BaseAPIIntegrationTest):
'''Base class for `docker import` test cases.'''
TAR_SIZE = 512 * 1024
def write_dummy_tar_content(self, n_bytes, tar_fd):
    """Write a valid single-member tar archive to ``tar_fd``.

    The archive holds one member named ``testdata`` of exactly ``n_bytes``
    bytes (zero-filled except for a trailing 'A' byte).
    """
    def extend_file(f, n_bytes):
        # Sparse-extend to the target size: seek past the end, write one
        # byte (65 == 'A'), then rewind so the file can be read from 0.
        f.seek(n_bytes - 1)
        f.write(bytearray([65]))
        f.seek(0)

    # Context managers finalize the archive and remove the temp file even
    # if gettarinfo/addfile raises (the original leaked an open TarFile
    # on error).
    with tarfile.TarFile(fileobj=tar_fd, mode='w') as tar:
        with tempfile.NamedTemporaryFile() as f:
            extend_file(f, n_bytes)
            tarinfo = tar.gettarinfo(name=f.name, arcname='testdata')
            tar.addfile(tarinfo, fileobj=f)
@contextlib.contextmanager
def dummy_tar_stream(self, n_bytes):
    """Yield a readable, seekable stream holding valid tar data of size n_bytes."""
    with tempfile.NamedTemporaryFile() as stream:
        self.write_dummy_tar_content(n_bytes, stream)
        stream.seek(0)
        yield stream
@contextlib.contextmanager
def dummy_tar_file(self, n_bytes):
    """Yield the path of a valid tar file of size n_bytes.

    The file is created with delete=False so it can be reopened by name
    while the handle is open (required on Windows). Previously the file
    was never removed, leaking one temp file per call; it is now closed
    and unlinked when the context exits.
    """
    import os  # local import keeps this fix self-contained

    tar_file = tempfile.NamedTemporaryFile(delete=False)
    try:
        self.write_dummy_tar_content(n_bytes, tar_file)
        tar_file.seek(0)
        yield tar_file.name
    finally:
        tar_file.close()
        os.unlink(tar_file.name)
def test_import_from_bytes(self):
    # Materialize the tar data in memory before the stream closes.
    with self.dummy_tar_stream(n_bytes=500) as f:
        content = f.read()

    # The generic import_image() function cannot import in-memory bytes
    # data that happens to be represented as a string type, because
    # import_image() will try to use it as a filename and usually then
    # trigger an exception. So we test the import_image_from_data()
    # function instead.
    statuses = self.client.import_image_from_data(
        content, repository='test/import-from-bytes')

    # The last status line reports the outcome (or an error).
    result_text = statuses.splitlines()[-1]
    result = json.loads(result_text)
    assert 'error' not in result

    img_id = result['status']
    self.tmp_imgs.append(img_id)
def test_import_from_file(self):
    with self.dummy_tar_file(n_bytes=self.TAR_SIZE) as tar_filename:
        # statuses = self.client.import_image(
        #     src=tar_filename, repository='test/import-from-file')
        statuses = self.client.import_image_from_file(
            tar_filename, repository='test/import-from-file')

    # The last status line reports the outcome (or an error).
    result_text = statuses.splitlines()[-1]
    result = json.loads(result_text)
    assert 'error' not in result
    assert 'status' in result

    img_id = result['status']
    self.tmp_imgs.append(img_id)
def test_import_from_stream(self):
    # import_image() accepts an open file object as the src argument.
    with self.dummy_tar_stream(n_bytes=self.TAR_SIZE) as tar_stream:
        statuses = self.client.import_image(
            src=tar_stream, repository='test/import-from-stream')
        # statuses = self.client.import_image_from_stream(
        #     tar_stream, repository='test/import-from-stream')

    # The last status line reports the outcome (or an error).
    result_text = statuses.splitlines()[-1]
    result = json.loads(result_text)
    assert 'error' not in result
    assert 'status' in result

    img_id = result['status']
    self.tmp_imgs.append(img_id)
def test_import_image_from_data_with_changes(self):
with self.dummy_tar_stream(n_bytes=500) as f:
content = f.read()
statuses = self.client.import_image_from_data(
content, repository='test/import-from-bytes',
changes=['USER foobar', 'CMD ["echo"]']
)
result_text = statuses.splitlines()[-1]
result = json.loads(result_text)
assert 'error' not in result
img_id = result['status']
self.tmp_imgs.append(img_id)
img_data = self.client.inspect_image(img_id)
assert img_data is not None
assert img_data['Config']['Cmd'] == ['echo']
assert |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.