text stringlengths 4 1.02M | meta dict |
|---|---|
%reset
%pylab
%pdb off
# Can do "%pylab" or "%pylab inline"
# Cheat sheet:
# np.array([v1, v2])
# np.array([rVec[0], iVec[0], cVec[0]]) # makes a 3x3 matrix
# np.linspace(v1, v2, numPoints)
# np.concatenate(( a1, a2 ))
# <headingcell level=3>
# Import libraries
# <codecell>
import numpy as np
import scipy.integrate as integrate
#from pycse import odelay
#from IPython.html.widgets import interact, interactive
#from IPython.display import clear_output, display, HTML
from thesis_functions.initialconditions import InputDataDictionary, SetInitialConditions
from thesis_functions.visualization import CreatePlotGrid, SetPlotGridData
from thesis_functions.astro import FindOrbitCenter, ComputeLibrationPoints, stop_yEquals0, stop_zEquals0
from thesis_functions.astro import ComputeNonlinearDerivs, ComputeRelmoDynamicsMatrix
from thesis_functions.astro import odeintNonlinearDerivs, odeintNonlinearDerivsWithLinearRelmoSTM, odeintNonlinearDerivsWithLinearRelmo
from thesis_functions.astro import ComputeRequiredVelocity, PropagateSatelliteAndChaser
from thesis_functions.astro import PropagateSatellite, ComputeOffsets, ConvertOffsets, ConvertOffset, BuildRICFrame, BuildVNBFrame
# <headingcell level=3>
# Initial Conditions
# <codecell>
# First satellite
# The initial condition dictionary contains initial conditions from Barbee, Howell, and Sharp
ICs = InputDataDictionary()
# Barbee's initial conditions are a planar (Lyapunov) orbit at Earth/Moon L1
# NOTE: mu, timespan, and initialstate1 are consumed by every later cell in this script.
mu, timespan, initialstate1 = SetInitialConditions(ICs, ICset = 'Barbee', ICtestcase = 0, numPoints = 200)
# X1 and X2 are positions of larger and smaller bodies along X axis
# L1..L5 are the five libration points of the system (L1 is used as the frame center below).
X1, X2, L1, L2, L3, L4, L5 = ComputeLibrationPoints(mu)
# The FindOrbitCenter function doesn't work if you only propagate a partial orbit, so just treat L1 as the center
center = L1
# Build instantaneous RIC and VNB frames
# (commented out: frames are instead rebuilt per segment inside the loops below)
#x1, y1, z1, xdot1, ydot1, zdot1 = initialstate1
#x1 = np.array([initialstate1[0]])
#y1 = np.array([initialstate1[1]])
#z1 = np.array([initialstate1[2]])
#xdot1 = np.array([initialstate1[3]])
#ydot1 = np.array([initialstate1[4]])
#zdot1 = np.array([initialstate1[5]])
#rVec, iVec, cVec = BuildRICFrame(x1, y1, z1, xdot1, ydot1, zdot1, center)
#vVec, nVec, bVec = BuildVNBFrame(x1, y1, z1, xdot1, ydot1, zdot1, center)
# TODO: start target satellite from different points along its orbit.
# Look at how delta-V changes; also maybe linear relmo will be a better approximation along other parts of the orbit.
# <headingcell level=3>
# Define Waypoints
# <codecell>
# In nondimensional units, r12 = 1, M = 1, timeConst = Period/(2pi) = 1, G = 1
m1 = 5.97219e24; # Earth # kg
m2 = 7.34767309e22; # Moon # kg
M = m1 + m2;
G = 6.67384e-11/1e9; # m3/(kg*s^2) >> converted to km3
r12 = 384400.0; # km
timeConst = r12**(1.5)/(G*M)**(0.5) # units are seconds # this is how you convert between dimensional time (seconds) and non-dimensional time
print 'timeconst', timeConst
T = 2.0*np.pi*timeConst # Period in seconds of Moon around Earth
print 'Period of Moon around Earth in seconds', T
period = np.max(timespan) # Period of libration point orbit (in nondimensional time units)
print 'Period of libration point orbit in seconds', period*timeConst
# TODO: input waypoints in any frame (RLP, RIC, or VNB)
# TODO: get decent test cases in the Sun-Earth-Moon frame
# TODO: report/plot position error at each waypoint
# TODO: report/plot delta-V at each waypoint
Waypoints = dict();
Waypoints[0] = {'t' : 0.0,
'r_RIC': [0.0, 1000.0/r12, 0.0]};
Waypoints[0] = {'t' : 86400.0*2.88/timeConst, # 2.88 days
'r_RIC': [0.0, 275.0/r12, 0.0]}; # move 725 km # 400% errors
Waypoints[0] = {'t' : 86400.0*4.70/timeConst, # 1.82 days
'r_RIC': [0.0, 180.0/r12, 0.0]}; # move 95 km # 400% errors
Waypoints[0] = {'t' : 86400.0*5.31/timeConst,
'r_RIC': [0.0, 100.0/r12, 0.0]}; # 40% errors
Waypoints[1] = {'t' : 86400.0*5.67/timeConst,
'r_RIC': [0.0, 15.0/r12, 0.0]}; # 8% errors
Waypoints[2] = {'t' : 86400.0*6.03/timeConst,
'r_RIC': [0.0, 5.0/r12, 0.0]}; # 10% errors
Waypoints[3] = {'t' : 86400.0*6.64/timeConst,
'r_RIC': [0.0, 1.0/r12, 0.0]}; #
Waypoints[4] = {'t' : 86400.0*7.0/timeConst,
'r_RIC': [0.0, 0.030/r12, 0.0]}; #
Waypoints[5] = {'t' : 86400.0*7.26/timeConst,
'r_RIC': [0.0, 0.0/r12, 0.0]};
# TODO: look at waypoints with different spacing, different approach directions, different amount of time between points
# <headingcell level=3>
# Convert Waypoints from RIC to RLP
# <codecell>
# Build RIC-to-RLP frame (inverse of RLP-to-RIC frame) at each waypoint time and convert waypoints from RIC to RLP
# TODO: would be nice to have a function that generically converts waypoints between frames (e.g. arguments = WaypointDictionary, inputframe, outputframe)
initialState1ForSegment = initialstate1
S = [0, 1, 2, 3, 4]
#S = [0]
for currentPoint in S:
nextPoint = currentPoint + 1;
print currentPoint, Waypoints[currentPoint]
print 'percentage of orbit covered getting to next point (by time):', (Waypoints[nextPoint]['t'] - Waypoints[currentPoint]['t'])/period*100.0
# array of time points
timespanForSegment = np.linspace(Waypoints[currentPoint]['t'], Waypoints[nextPoint]['t'], 500)
# target satellite position and velocity over time in RLP frame from integrating initial state with full nonlinear dynamics
x1, y1, z1, xdot1, ydot1, zdot1 = PropagateSatellite(mu, timespanForSegment, initialState1ForSegment);
# Build RIC and VNB frames
rVec, iVec, cVec = BuildRICFrame(x1, y1, z1, xdot1, ydot1, zdot1, center)
vVec, nVec, bVec = BuildVNBFrame(x1, y1, z1, xdot1, ydot1, zdot1, center)
## for the first point only
if (currentPoint == S[0]):
# TODO: clean up a little because there's very similar repeated code here
# START REPEAT 1
# this matrix converts from RLP coordinates to the RIC frame at the timestamp of the current (first) point
RLPtoRIC = np.array([rVec[0], iVec[0], cVec[0]])
# this matrix converts from RIC to RLP at the timestamp of the current (first) point
RICtoRLP = np.linalg.inv(RLPtoRIC)
RLPxVec = RICtoRLP[:,0]
RLPyVec = RICtoRLP[:,1]
RLPzVec = RICtoRLP[:,2]
# get the coordinates of the current waypoint in RIC
[drW, diW, dcW] = Waypoints[currentPoint]['r_RIC']
# Convert current waypoint from RIC frame to RLP frame at the timestamp of the current (first) point
dxW, dyW, dzW = ConvertOffset(drW, diW, dcW, RLPxVec, RLPyVec, RLPzVec);
Waypoints[currentPoint]['r_RLP'] = [dxW, dyW, dzW]
# Convert current waypoint to VNB frame
dvW, dnW, dbW = ConvertOffset(dxW, dyW, dzW, vVec[0], nVec[0], bVec[0]);
Waypoints[currentPoint]['r_VNB'] = [dvW, dnW, dbW]
# END REPEAT 1
## for all points
# START REPEAT 2
# this matrix converts from RLP coordinates to the RIC frame at the timestamp of the next point
RLPtoRIC = np.array([rVec[-1], iVec[-1], cVec[-1]])
# this matrix converts from RIC to RLP at the timestamp of the next point
RICtoRLP = np.linalg.inv(RLPtoRIC)
RLPxVec = RICtoRLP[:,0]
RLPyVec = RICtoRLP[:,1]
RLPzVec = RICtoRLP[:,2]
# next point
[drW, diW, dcW] = Waypoints[nextPoint]['r_RIC']
# Convert next waypoint from RIC frame to RLP frame at the timestamp of the next point
dxW, dyW, dzW = ConvertOffset(drW, diW, dcW, RLPxVec, RLPyVec, RLPzVec);
Waypoints[nextPoint]['r_RLP'] = [dxW, dyW, dzW]
# Convert waypoint to VNB frame
dvW, dnW, dbW = ConvertOffset(dxW, dyW, dzW, vVec[-1], nVec[-1], bVec[-1]);
Waypoints[nextPoint]['r_VNB'] = [dvW, dnW, dbW]
# END REPEAT 2
# Record updated primary satellite initial state for next segment
initialState1ForSegment = np.array([ x1[-1], y1[-1], z1[-1], xdot1[-1], ydot1[-1], zdot1[-1] ])
# <headingcell level=3>
# Set up plots
# <codecell>
# Create plots
# Allowed colors:
# b: blue
# g: green
# r: red
# c: cyan
# m: magenta
# y: yellow
# k: black
# w: white
#fig1 = plt.figure()
#fig2 = plt.figure()
#ax1 = fig1.add_subplot(111)
#ax2 = fig2.add_subplot(111)
#ax1.set_title('dx_LINEAR vs timespan')
#ax2.set_title('Difference between LINEAR and NONLINEAR: dy vs dx')
# Plots of offset in RLP, RIC, VNB frames
axXZ_RLP, axYZ_RLP, axXY_RLP, ax3D_RLP = CreatePlotGrid('Offset between Satellites 1 and 2 in RLP Frame', 'X', 'Y', 'Z', 'auto')
axXZ_RIC, axYZ_RIC, axXY_RIC, ax3D_RIC = CreatePlotGrid('Offset between Satellites 1 and 2 in RIC Frame', 'R', 'I', 'C', 'auto')
axXZ_VNB, axYZ_VNB, axXY_VNB, ax3D_VNB = CreatePlotGrid('Offset between Satellites 1 and 2 in VNB Frame', 'V', 'N', 'B', 'auto')
# add zero point to plots (this is location of target satellite)
points = {}
data = {}
points['zero'] = {'xyz':[0,0,0], 'color':'k'}
SetPlotGridData(axXZ_RLP, axYZ_RLP, axXY_RLP, ax3D_RLP, data, points)
SetPlotGridData(axXZ_RIC, axYZ_RIC, axXY_RIC, ax3D_RIC, data, points)
SetPlotGridData(axXZ_VNB, axYZ_VNB, axXY_VNB, ax3D_VNB, data, points)
points = {}
# add all waypoints to RLP, RIC, and VNB plots
# (positions are scaled by r12 to convert nondimensional offsets back to km)
for w in Waypoints:
    points['w'] = {'xyz':np.array(Waypoints[w]['r_RLP'])*r12, 'color':'c'}
    SetPlotGridData(axXZ_RLP, axYZ_RLP, axXY_RLP, ax3D_RLP, data, points)
    points['w'] = {'xyz':np.array(Waypoints[w]['r_RIC'])*r12, 'color':'c'}
    SetPlotGridData(axXZ_RIC, axYZ_RIC, axXY_RIC, ax3D_RIC, data, points)
    points['w'] = {'xyz':np.array(Waypoints[w]['r_VNB'])*r12, 'color':'c'}
    SetPlotGridData(axXZ_VNB, axYZ_VNB, axXY_VNB, ax3D_VNB, data, points)
points = {}
# <headingcell level=3>
# Travel between waypoints
# <codecell>
# Main simulation loop: for each segment, solve for the chaser's required
# departure velocity, propagate both satellites, compare linear vs nonlinear
# relative motion, and accumulate delta-V bookkeeping on the Waypoints dict.
initialState1ForSegment = initialstate1
# assume starts exactly from first waypoint with same velocity as target satellite (for lack of any better velocity values at this point)
Waypoints[0]['r_RLP_achieved'] = Waypoints[0]['r_RLP']
Waypoints[0]['v_RLP_abs_premaneuver'] = initialstate1[3:6]
# Travel between waypoints
for currentPoint in S:
    nextPoint = currentPoint + 1
    ## Compute required velocity to travel between waypoints
    # Compute required velocity at point 1 to take us to point 2 within time (t2-t1)
    # This is from Lian et al.
    # Method signature:
    # initialRelativeVelocity = ComputeRequiredVelocity(initialState1ForSegment, initialRelativePosition, initialTime, targetRelativePosition, targetTime)
    Waypoints[currentPoint]['v_RLP'] = ComputeRequiredVelocity(initialState1ForSegment, Waypoints[currentPoint]['r_RLP_achieved'], Waypoints[currentPoint]['t'], Waypoints[nextPoint]['r_RLP'], Waypoints[nextPoint]['t'], mu)
    #print 'initial chaser relative velocity', Waypoints[currentPoint]['v_RLP']
    initialRelativeState = np.concatenate(( Waypoints[currentPoint]['r_RLP_achieved'], Waypoints[currentPoint]['v_RLP'] ))
    ## Integrate first satellite with full nonlinear dynamics and second satellite with linear relmo dynamics
    # array of time points
    timespanForSegment = np.linspace(Waypoints[currentPoint]['t'], Waypoints[nextPoint]['t'], 500)
    # compute target satellite position and velocity over time in RLP frame by integrating initial state with full nonlinear dynamics
    # compute offset between target and chaser satellite over time in RLP frame by integrating initial offset with linearized relmo dynamics
    x1, y1, z1, xdot1, ydot1, zdot1, dx_LINEAR, dy_LINEAR, dz_LINEAR, dxdot_LINEAR, dydot_LINEAR, dzdot_LINEAR = PropagateSatelliteAndChaser(mu, timespanForSegment, initialState1ForSegment, initialRelativeState)
    ## Integrate second satellite with full nonlinear dynamics
    # initial state of second satellite in absolute RLP coordinates (not relative to first satellite)
    # NOTE(review): chaser state is target MINUS relative state here -- presumably the
    # relative-state sign convention is target-minus-chaser; confirm against the relmo functions.
    initialstate2 = np.array(initialState1ForSegment) - np.array(initialRelativeState)
    # compute chaser satellite position and velocity over time in RLP frame by integrating initial state with full nonlinear dynamics
    x2, y2, z2, xdot2, ydot2, zdot2 = PropagateSatellite(mu, timespanForSegment, initialstate2)
    # Compute offsets in RLP frame based on nonlinear motion
    dx_NONLIN, dy_NONLIN, dz_NONLIN = ComputeOffsets(timespanForSegment, x1, y1, z1, xdot1, ydot1, zdot1, x2, y2, z2, xdot2, ydot2, zdot2)
    ## Offsets in RIC and VNB
    # Build RIC and VNB frames
    rVec, iVec, cVec = BuildRICFrame(x1, y1, z1, xdot1, ydot1, zdot1, center)
    vVec, nVec, bVec = BuildVNBFrame(x1, y1, z1, xdot1, ydot1, zdot1, center)
    # Compute offsets in RIC frame
    dr_LINEAR, di_LINEAR, dc_LINEAR = ConvertOffsets(dx_LINEAR, dy_LINEAR, dz_LINEAR, rVec, iVec, cVec)
    dr_NONLIN, di_NONLIN, dc_NONLIN = ConvertOffsets(dx_NONLIN, dy_NONLIN, dz_NONLIN, rVec, iVec, cVec)
    # Compute offsets in VNB frame
    dv_LINEAR, dn_LINEAR, db_LINEAR = ConvertOffsets(dx_LINEAR, dy_LINEAR, dz_LINEAR, vVec, nVec, bVec)
    dv_NONLIN, dn_NONLIN, db_NONLIN = ConvertOffsets(dx_NONLIN, dy_NONLIN, dz_NONLIN, vVec, nVec, bVec)
    ## Compute delta-V
    # post-maneuver velocity at current waypoint
    Waypoints[currentPoint]['v_RLP_abs_postmaneuver'] = np.array([ xdot2[0], ydot2[0], zdot2[0] ])
    # compute delta-V executed at current waypoint
    Waypoints[currentPoint]['deltaV'] = Waypoints[currentPoint]['v_RLP_abs_postmaneuver'] - Waypoints[currentPoint]['v_RLP_abs_premaneuver']
    # pre-maneuver velocity for next waypoint (end of current propagation segment)
    Waypoints[nextPoint]['v_RLP_abs_premaneuver'] = np.array([ xdot2[-1], ydot2[-1], zdot2[-1] ])
    # TODO: also compute the delta-V based only on the linear relmo propagation and compare the delta-V to the nonlinear one currently being computed
    # (this means we would need to propagate forward from the nominal waypoint instead of only propagating forward from the achieved waypoint)
    # pre-maneuver relative velocity when arriving at next waypoint, based on linear propagation
    #Waypoints[nextPoint]['v_RLP_pre_LINEAR'] = np.array([ dxdot_LINEAR[-1], dydot_LINEAR[-1], dzdot_LINEAR[-1] ])
    ## Output that gets fed into next iteration/segment
    # Record updated primary satellite initial state for next segment
    initialState1ForSegment = np.array([ x1[-1], y1[-1], z1[-1], xdot1[-1], ydot1[-1], zdot1[-1] ])
    # Record updated/achieved chaser satellite waypoint for next segment
    Waypoints[nextPoint]['r_RLP_achieved'] = np.array([ dx_NONLIN[-1], dy_NONLIN[-1], dz_NONLIN[-1] ])
    # compute updated/achieved waypoint location in RIC and VNB
    [dxW, dyW, dzW] = Waypoints[nextPoint]['r_RLP_achieved']
    drW, diW, dcW = ConvertOffset(dxW, dyW, dzW, rVec[-1], iVec[-1], cVec[-1])
    dvW, dnW, dbW = ConvertOffset(dxW, dyW, dzW, vVec[-1], nVec[-1], bVec[-1])
    Waypoints[nextPoint]['r_RIC_achieved'] = [drW, diW, dcW]
    Waypoints[nextPoint]['r_VNB_achieved'] = [dvW, dnW, dbW]
    ## VISUALIZATIONS
    #ax1.plot(timespan, dx_LINEAR*r12)
    # Compare linear relmo propagation to nonlinear dynamics
    #ax2.plot((dx_NONLIN - dx_LINEAR)/np.amax(np.absolute(dx_LINEAR))*100.0, (dy_NONLIN - dy_LINEAR)/np.amax(np.absolute(dy_LINEAR))*100.0)
    # create data dictionaries (offsets scaled by r12 to plot in km)
    dataoffsetRLP = {}
    dataoffsetRLP['linear'] = {'x':dx_LINEAR*r12, 'y':dy_LINEAR*r12, 'z':dz_LINEAR*r12, 'color':'g'}
    dataoffsetRLP['nonlin'] = {'x':dx_NONLIN*r12, 'y':dy_NONLIN*r12, 'z':dz_NONLIN*r12, 'color':'r'}
    dataoffsetRIC = {}
    dataoffsetRIC['linear'] = {'x':dr_LINEAR*r12, 'y':di_LINEAR*r12, 'z':dc_LINEAR*r12, 'color':'g'}
    dataoffsetRIC['nonlin'] = {'x':dr_NONLIN*r12, 'y':di_NONLIN*r12, 'z':dc_NONLIN*r12, 'color':'r'}
    dataoffsetVNB = {}
    dataoffsetVNB['linear'] = {'x':dv_LINEAR*r12, 'y':dn_LINEAR*r12, 'z':db_LINEAR*r12, 'color':'g'}
    dataoffsetVNB['nonlin'] = {'x':dv_NONLIN*r12, 'y':dn_NONLIN*r12, 'z':db_NONLIN*r12, 'color':'r'}
    # Plot offset (relative motion) between satellites 1 and 2 in RLP frame and add achieved waypoint (end of current segment) to plot
    points[nextPoint] = {'xyz':np.array(Waypoints[nextPoint]['r_RLP_achieved'])*r12, 'color':'m'}
    SetPlotGridData(axXZ_RLP, axYZ_RLP, axXY_RLP, ax3D_RLP, dataoffsetRLP, points)
    # Plot offset (relative motion) between satellites 1 and 2 in RIC frame and add achieved waypoint (start and end of current segment) to plot
    points[nextPoint] = {'xyz':np.array(Waypoints[nextPoint]['r_RIC_achieved'])*r12, 'color':'m'}
    SetPlotGridData(axXZ_RIC, axYZ_RIC, axXY_RIC, ax3D_RIC, dataoffsetRIC, points)
    # Plot offset (relative motion) between satellites 1 and 2 in VNB frame and add achieved waypoint (start and end of current segment) to plot
    points[nextPoint] = {'xyz':np.array(Waypoints[nextPoint]['r_VNB_achieved'])*r12, 'color':'m'}
    SetPlotGridData(axXZ_VNB, axYZ_VNB, axXY_VNB, ax3D_VNB, dataoffsetVNB, points)
    points = {}
## final delta-V (after the loop: nextPoint is the last waypoint reached)
currentPoint = nextPoint
# final post-maneuver velocity is same as the target satellite's velocity
Waypoints[currentPoint]['v_RLP_abs_postmaneuver'] = np.array([ xdot1[-1], ydot1[-1], zdot1[-1] ])
# compute final delta-V
Waypoints[currentPoint]['deltaV'] = Waypoints[currentPoint]['v_RLP_abs_postmaneuver'] - Waypoints[currentPoint]['v_RLP_abs_premaneuver']
# <codecell>
# compute delta-V magnitude and report to screen
for w in Waypoints:
Waypoints[w]['deltaVmag'] = np.linalg.norm(Waypoints[w]['deltaV'],2)*r12/timeConst*1000 # m/s
print Waypoints[w]['deltaVmag'], Waypoints[w]['deltaV']
# <codecell>
| {
"content_hash": "19353dc7094dbe33c693746190171981",
"timestamp": "",
"source": "github",
"line_count": 418,
"max_line_length": 222,
"avg_line_length": 42.50956937799043,
"alnum_prop": 0.6847881141313523,
"repo_name": "aerosara/thesis",
"id": "5c93480ac8b9c9b7576a58bca26f7bf51a72737a",
"size": "17835",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "notebooks_archive_10112014/Waypoints And DV Computation.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "PostScript",
"bytes": "188340"
},
{
"name": "Python",
"bytes": "147996"
},
{
"name": "TeX",
"bytes": "168575"
}
],
"symlink_target": ""
} |
from django import forms
from django.contrib.auth.models import User
from reserva.models import Libro, Reserva
from functools import partial
import datetime
from django.core.exceptions import ValidationError
from django.utils.translation import ugettext as _
DateInput = partial(forms.DateInput, {'class': 'datepicker'})
class CreateReservaForm(forms.Form):
    """Form to create a book reservation (Reserva) for a Libro.

    Only books currently marked available (disponibilidad=True) are offered.
    Date validation and the side effect of marking the chosen book
    unavailable happen in clean().
    """
    libro = forms.ModelChoiceField(
        queryset=Libro.objects.filter(disponibilidad=True))
    fecha_encargo = forms.DateField(
        widget=DateInput())
    fecha_devolucion = forms.DateField(
        widget=DateInput())

    def clean(self):
        """Cross-field validation of the reservation dates and book availability.

        Raises ValidationError when the date range is invalid, lies in the
        past, exceeds one month, or the user already has this book reserved.
        On success, marks the selected book unavailable and saves it.
        """
        cleaned_data = self.cleaned_data
        # NOTE(review): no 'usuario' field is declared on this form, so this
        # .get() returns None unless the field is added elsewhere -- confirm.
        usuario = cleaned_data.get("usuario")
        libro = cleaned_data.get("libro")
        fecha_devolucion = cleaned_data.get('fecha_devolucion')
        fecha_encargo = cleaned_data.get('fecha_encargo')
        # NOTE(review): if either date field failed its own validation these
        # are None and the comparisons below will misbehave -- confirm.
        if fecha_devolucion < fecha_encargo:
            raise ValidationError(_('La fecha de encargo debe ser anterior a la fecha de devolución'))
        if fecha_devolucion < datetime.date.today():
            raise ValidationError(_('La fecha de devolución es inválida, no puede ponerse una fecha del pasado'))
        if fecha_encargo < datetime.date.today():
            raise ValidationError(_('La fecha de encargo es inválida, no puede ponerse una fecha del pasado'))
        if fecha_devolucion > datetime.date.today() + datetime.timedelta(weeks=4):
            raise ValidationError(_('No se pueden reservar libros por un tiempo mayor a un mes.'))
        if Reserva.objects.filter(usuario=usuario, libro=libro).count() > 0:
            del cleaned_data["usuario"]
            del cleaned_data["libro"]
            raise forms.ValidationError(_("Ya existe un libro asignado al usuario seleccionado"))
        else:
            # Side effect: reserve the book by flagging it unavailable.
            libro = Libro.objects.filter(pk=libro.pk)[0]
            libro.disponibilidad = False
            libro.save()
        return cleaned_data
| {
"content_hash": "408970fb7720fad50f2871052e340577",
"timestamp": "",
"source": "github",
"line_count": 44,
"max_line_length": 112,
"avg_line_length": 43.56818181818182,
"alnum_prop": 0.6864893062076161,
"repo_name": "fmoreyra/ReservaLibrosISOO",
"id": "7ee241b13311f58a5924bf9d1901aea05775d41e",
"size": "1921",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "reserva/forms.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "18449"
},
{
"name": "Python",
"bytes": "24248"
}
],
"symlink_target": ""
} |
from subprocess import call
import sys
import os

# Usage: python runspider.py <spider_name> <output_file>
# Remove any stale output file first; scrapy's -o flag appends, so a leftover
# file from a previous run would pollute the new results.
try:
    os.remove(sys.argv[2])
except OSError:
    # File did not exist (or could not be removed) -- best-effort cleanup.
    # Narrowed from a bare except so real bugs (e.g. missing argv) surface.
    pass

call(["scrapy", "crawl", sys.argv[1], "-o", sys.argv[2]])
| {
"content_hash": "6ab4f131abb0cce3d6bbc847d4decfe7",
"timestamp": "",
"source": "github",
"line_count": 9,
"max_line_length": 57,
"avg_line_length": 17.444444444444443,
"alnum_prop": 0.6496815286624203,
"repo_name": "hovhannest/TMScrappers",
"id": "345d928a7a4e60c6b502b213b412cf438ee49c5d",
"size": "157",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "RottenTomatoes/rt/runspider.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "19810"
},
{
"name": "Visual Basic",
"bytes": "1288"
}
],
"symlink_target": ""
} |
from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
import os
import shutil
import tempfile
from operator import itemgetter
import py
import pytest
import yaml
from ...helpers import build_config_details
from compose.config import config
from compose.config import types
from compose.config.config import resolve_build_args
from compose.config.config import resolve_environment
from compose.config.environment import Environment
from compose.config.errors import ConfigurationError
from compose.config.errors import VERSION_EXPLANATION
from compose.config.serialize import denormalize_service_dict
from compose.config.serialize import serialize_config
from compose.config.serialize import serialize_ns_time_value
from compose.config.types import VolumeSpec
from compose.const import COMPOSEFILE_V1 as V1
from compose.const import COMPOSEFILE_V2_0 as V2_0
from compose.const import COMPOSEFILE_V2_1 as V2_1
from compose.const import COMPOSEFILE_V2_2 as V2_2
from compose.const import COMPOSEFILE_V2_3 as V2_3
from compose.const import COMPOSEFILE_V3_0 as V3_0
from compose.const import COMPOSEFILE_V3_1 as V3_1
from compose.const import COMPOSEFILE_V3_2 as V3_2
from compose.const import COMPOSEFILE_V3_3 as V3_3
from compose.const import IS_WINDOWS_PLATFORM
from compose.utils import nanoseconds_from_time_seconds
from tests import mock
from tests import unittest
DEFAULT_VERSION = V2_0
def make_service_dict(name, service_dict, working_dir, filename=None):
    """Test helper function to construct a ServiceExtendsResolver
    and return the fully processed service dict for it.
    """
    resolver = config.ServiceExtendsResolver(
        config.ServiceConfig(
            working_dir=working_dir,
            filename=filename,
            name=name,
            config=service_dict),
        config.ConfigFile(filename=filename, config={}),
        environment=Environment.from_env_file(working_dir)
    )
    return config.process_service(resolver.run())
def service_sort(services):
    """Return services sorted by their 'name' key, for order-insensitive comparison."""
    return sorted(services, key=itemgetter('name'))
def secret_sort(secrets):
    """Return secrets sorted by their 'source' key, for order-insensitive comparison."""
    return sorted(secrets, key=itemgetter('source'))
class ConfigTest(unittest.TestCase):
def test_load(self):
    """V1 config: load two services and compare against expected dicts."""
    service_dicts = config.load(
        build_config_details(
            {
                'foo': {'image': 'busybox'},
                'bar': {'image': 'busybox', 'environment': ['FOO=1']},
            },
            'tests/fixtures/extends',
            'common.yml'
        )
    ).services
    self.assertEqual(
        service_sort(service_dicts),
        service_sort([
            {
                'name': 'bar',
                'image': 'busybox',
                'environment': {'FOO': '1'},
            },
            {
                'name': 'foo',
                'image': 'busybox',
            }
        ])
    )
def test_load_v2(self):
    """V2 config: services, volumes, and networks are all parsed correctly."""
    config_data = config.load(
        build_config_details({
            'version': '2',
            'services': {
                'foo': {'image': 'busybox'},
                'bar': {'image': 'busybox', 'environment': ['FOO=1']},
            },
            'volumes': {
                'hello': {
                    'driver': 'default',
                    'driver_opts': {'beep': 'boop'}
                }
            },
            'networks': {
                'default': {
                    'driver': 'bridge',
                    'driver_opts': {'beep': 'boop'}
                },
                'with_ipam': {
                    'ipam': {
                        'driver': 'default',
                        'config': [
                            {'subnet': '172.28.0.0/16'}
                        ]
                    }
                },
                'internal': {
                    'driver': 'bridge',
                    'internal': True
                }
            }
        }, 'working_dir', 'filename.yml')
    )
    service_dicts = config_data.services
    volume_dict = config_data.volumes
    networks_dict = config_data.networks
    self.assertEqual(
        service_sort(service_dicts),
        service_sort([
            {
                'name': 'bar',
                'image': 'busybox',
                'environment': {'FOO': '1'},
            },
            {
                'name': 'foo',
                'image': 'busybox',
            }
        ])
    )
    self.assertEqual(volume_dict, {
        'hello': {
            'driver': 'default',
            'driver_opts': {'beep': 'boop'}
        }
    })
    self.assertEqual(networks_dict, {
        'default': {
            'driver': 'bridge',
            'driver_opts': {'beep': 'boop'}
        },
        'with_ipam': {
            'ipam': {
                'driver': 'default',
                'config': [
                    {'subnet': '172.28.0.0/16'}
                ]
            }
        },
        'internal': {
            'driver': 'bridge',
            'internal': True
        }
    })
def test_valid_versions(self):
    """Each accepted version string maps to the expected version constant."""
    for version in ['2', '2.0']:
        cfg = config.load(build_config_details({'version': version}))
        assert cfg.version == V2_0
    cfg = config.load(build_config_details({'version': '2.1'}))
    assert cfg.version == V2_1
    cfg = config.load(build_config_details({'version': '2.2'}))
    assert cfg.version == V2_2
    cfg = config.load(build_config_details({'version': '2.3'}))
    assert cfg.version == V2_3
    for version in ['3', '3.0']:
        cfg = config.load(build_config_details({'version': version}))
        assert cfg.version == V3_0
    cfg = config.load(build_config_details({'version': '3.1'}))
    assert cfg.version == V3_1
def test_v1_file_version(self):
    """A file with no version key is V1, even if a service is named 'version'."""
    cfg = config.load(build_config_details({'web': {'image': 'busybox'}}))
    assert cfg.version == V1
    assert list(s['name'] for s in cfg.services) == ['web']
    cfg = config.load(build_config_details({'version': {'image': 'busybox'}}))
    assert cfg.version == V1
    assert list(s['name'] for s in cfg.services) == ['version']
def test_wrong_version_type(self):
    """Non-string version values are rejected with a clear error."""
    for version in [None, 1, 2, 2.0]:
        with pytest.raises(ConfigurationError) as excinfo:
            config.load(
                build_config_details(
                    {'version': version},
                    filename='filename.yml',
                )
            )
        assert 'Version in "filename.yml" is invalid - it should be a string.' \
            in excinfo.exconly()
def test_unsupported_version(self):
    """An unknown version string raises with the version explanation text."""
    with pytest.raises(ConfigurationError) as excinfo:
        config.load(
            build_config_details(
                {'version': '2.18'},
                filename='filename.yml',
            )
        )
    assert 'Version in "filename.yml" is unsupported' in excinfo.exconly()
    assert VERSION_EXPLANATION in excinfo.exconly()
def test_version_1_is_invalid(self):
    """An explicit version '1' is rejected (V1 files must omit the key)."""
    with pytest.raises(ConfigurationError) as excinfo:
        config.load(
            build_config_details(
                {
                    'version': '1',
                    'web': {'image': 'busybox'},
                },
                filename='filename.yml',
            )
        )
    assert 'Version in "filename.yml" is invalid' in excinfo.exconly()
    assert VERSION_EXPLANATION in excinfo.exconly()
def test_v1_file_with_version_is_invalid(self):
    """V1-style top-level services are invalid when a version key is present."""
    with pytest.raises(ConfigurationError) as excinfo:
        config.load(
            build_config_details(
                {
                    'version': '2',
                    'web': {'image': 'busybox'},
                },
                filename='filename.yml',
            )
        )
    assert 'Invalid top-level property "web"' in excinfo.exconly()
    assert VERSION_EXPLANATION in excinfo.exconly()
def test_named_volume_config_empty(self):
    """Named volumes declared as None or {} normalize to empty dicts."""
    config_details = build_config_details({
        'version': '2',
        'services': {
            'simple': {'image': 'busybox'}
        },
        'volumes': {
            'simple': None,
            'other': {},
        }
    })
    config_result = config.load(config_details)
    volumes = config_result.volumes
    assert 'simple' in volumes
    assert volumes['simple'] == {}
    assert volumes['other'] == {}
def test_named_volume_numeric_driver_opt(self):
    """Numeric driver_opts values are coerced to strings."""
    config_details = build_config_details({
        'version': '2',
        'services': {
            'simple': {'image': 'busybox'}
        },
        'volumes': {
            'simple': {'driver_opts': {'size': 42}},
        }
    })
    cfg = config.load(config_details)
    assert cfg.volumes['simple']['driver_opts']['size'] == '42'
def test_volume_invalid_driver_opt(self):
    """Boolean driver_opts values are rejected as an invalid type."""
    config_details = build_config_details({
        'version': '2',
        'services': {
            'simple': {'image': 'busybox'}
        },
        'volumes': {
            'simple': {'driver_opts': {'size': True}},
        }
    })
    with pytest.raises(ConfigurationError) as exc:
        config.load(config_details)
    assert 'driver_opts.size contains an invalid type' in exc.exconly()
def test_named_volume_invalid_type_list(self):
    """The top-level volumes section must be a mapping, not a list."""
    config_details = build_config_details({
        'version': '2',
        'services': {
            'simple': {'image': 'busybox'}
        },
        'volumes': []
    })
    with pytest.raises(ConfigurationError) as exc:
        config.load(config_details)
    assert "volume must be a mapping, not an array" in exc.exconly()
def test_networks_invalid_type_list(self):
    """The top-level networks section must be a mapping, not a list."""
    config_details = build_config_details({
        'version': '2',
        'services': {
            'simple': {'image': 'busybox'}
        },
        'networks': []
    })
    with pytest.raises(ConfigurationError) as exc:
        config.load(config_details)
    assert "network must be a mapping, not an array" in exc.exconly()
def test_load_service_with_name_version(self):
    """A V1 service literally named 'version' loads, but logs a warning."""
    with mock.patch('compose.config.config.log') as mock_logging:
        config_data = config.load(
            build_config_details({
                'version': {
                    'image': 'busybox'
                }
            }, 'working_dir', 'filename.yml')
        )
    assert 'Unexpected type for "version" key in "filename.yml"' \
        in mock_logging.warn.call_args[0][0]
    service_dicts = config_data.services
    self.assertEqual(
        service_sort(service_dicts),
        service_sort([
            {
                'name': 'version',
                'image': 'busybox',
            }
        ])
    )
def test_load_throws_error_when_not_dict(self):
    """A V1 service defined as a bare string is rejected."""
    with self.assertRaises(ConfigurationError):
        config.load(
            build_config_details(
                {'web': 'busybox:latest'},
                'working_dir',
                'filename.yml'
            )
        )
def test_load_throws_error_when_not_dict_v2(self):
    """A V2 service defined as a bare string is rejected."""
    with self.assertRaises(ConfigurationError):
        config.load(
            build_config_details(
                {'version': '2', 'services': {'web': 'busybox:latest'}},
                'working_dir',
                'filename.yml'
            )
        )
def test_load_throws_error_with_invalid_network_fields(self):
    """A network definition with an invalid type (a set literal) is rejected."""
    with self.assertRaises(ConfigurationError):
        config.load(
            build_config_details({
                'version': '2',
                'services': {'web': 'busybox:latest'},
                'networks': {
                    'invalid': {'foo', 'bar'}
                }
            }, 'working_dir', 'filename.yml')
        )
def test_load_config_link_local_ips_network(self):
    """V2.1 per-service network aliases and link_local_ips survive loading."""
    base_file = config.ConfigFile(
        'base.yaml',
        {
            'version': str(V2_1),
            'services': {
                'web': {
                    'image': 'example/web',
                    'networks': {
                        'foobar': {
                            'aliases': ['foo', 'bar'],
                            'link_local_ips': ['169.254.8.8']
                        }
                    }
                }
            },
            'networks': {'foobar': {}}
        }
    )
    details = config.ConfigDetails('.', [base_file])
    web_service = config.load(details).services[0]
    assert web_service['networks'] == {
        'foobar': {
            'aliases': ['foo', 'bar'],
            'link_local_ips': ['169.254.8.8']
        }
    }
def test_load_config_volume_and_network_labels(self):
    """V2.1 labels on volumes and networks are preserved by load()."""
    base_file = config.ConfigFile(
        'base.yaml',
        {
            'version': '2.1',
            'services': {
                'web': {
                    'image': 'example/web',
                },
            },
            'networks': {
                'with_label': {
                    'labels': {
                        'label_key': 'label_val'
                    }
                }
            },
            'volumes': {
                'with_label': {
                    'labels': {
                        'label_key': 'label_val'
                    }
                }
            }
        }
    )
    details = config.ConfigDetails('.', [base_file])
    network_dict = config.load(details).networks
    volume_dict = config.load(details).volumes
    self.assertEqual(
        network_dict,
        {
            'with_label': {
                'labels': {
                    'label_key': 'label_val'
                }
            }
        }
    )
    self.assertEqual(
        volume_dict,
        {
            'with_label': {
                'labels': {
                    'label_key': 'label_val'
                }
            }
        }
    )
def test_load_config_invalid_service_names(self):
    """V1: service names with disallowed characters are rejected."""
    for invalid_name in ['?not?allowed', ' ', '', '!', '/', '\xe2']:
        with pytest.raises(ConfigurationError) as exc:
            config.load(build_config_details(
                {invalid_name: {'image': 'busybox'}}))
        assert 'Invalid service name \'%s\'' % invalid_name in exc.exconly()
def test_load_config_invalid_service_names_v2(self):
    """V2: service names with disallowed characters are rejected."""
    for invalid_name in ['?not?allowed', ' ', '', '!', '/', '\xe2']:
        with pytest.raises(ConfigurationError) as exc:
            config.load(build_config_details(
                {
                    'version': '2',
                    'services': {invalid_name: {'image': 'busybox'}},
                }))
        assert 'Invalid service name \'%s\'' % invalid_name in exc.exconly()
def test_load_with_invalid_field_name(self):
    """V2: an unsupported service option ('name') produces a targeted error."""
    with pytest.raises(ConfigurationError) as exc:
        config.load(build_config_details(
            {
                'version': '2',
                'services': {
                    'web': {'image': 'busybox', 'name': 'bogus'},
                }
            },
            'working_dir',
            'filename.yml',
        ))
    assert "Unsupported config option for services.web: 'name'" in exc.exconly()
def test_load_with_invalid_field_name_v1(self):
    """V1: an unsupported service option ('name') produces a targeted error."""
    with pytest.raises(ConfigurationError) as exc:
        config.load(build_config_details(
            {
                'web': {'image': 'busybox', 'name': 'bogus'},
            },
            'working_dir',
            'filename.yml',
        ))
    assert "Unsupported config option for web: 'name'" in exc.exconly()
def test_load_invalid_service_definition(self):
    """A string-valued service definition reports 'must be a mapping'."""
    config_details = build_config_details(
        {'web': 'wrong'},
        'working_dir',
        'filename.yml')
    with pytest.raises(ConfigurationError) as exc:
        config.load(config_details)
    assert "service 'web' must be a mapping not a string." in exc.exconly()
def test_load_with_empty_build_args(self):
    """build.args set to null is rejected with a type error."""
    config_details = build_config_details(
        {
            'version': '2',
            'services': {
                'web': {
                    'build': {
                        'context': '.',
                        'args': None,
                    },
                },
            },
        }
    )
    with pytest.raises(ConfigurationError) as exc:
        config.load(config_details)
    assert (
        "services.web.build.args contains an invalid type, it should be an "
        "object, or an array" in exc.exconly()
    )
def test_config_integer_service_name_raise_validation_error(self):
    """An unquoted integer service name yields a quoting hint in the error."""
    with pytest.raises(ConfigurationError) as excinfo:
        config.load(
            build_config_details(
                {1: {'image': 'busybox'}},
                'working_dir',
                'filename.yml'
            )
        )
    assert (
        "In file 'filename.yml', the service name 1 must be a quoted string, i.e. '1'" in
        excinfo.exconly()
    )
def test_config_integer_service_name_raise_validation_error_v2(self):
    """Same integer-service-name quoting hint under the v2 schema."""
    with pytest.raises(ConfigurationError) as excinfo:
        config.load(
            build_config_details(
                {
                    'version': '2',
                    'services': {1: {'image': 'busybox'}}
                },
                'working_dir',
                'filename.yml'
            )
        )
    assert (
        "In file 'filename.yml', the service name 1 must be a quoted string, i.e. '1'." in
        excinfo.exconly()
    )
def test_config_invalid_service_name_raise_validation_error(self):
    """A backslash in a v2 service name fails name validation."""
    with pytest.raises(ConfigurationError) as excinfo:
        config.load(
            build_config_details({
                'version': '2',
                'services': {
                    'test_app': {'build': '.'},
                    'mong\\o': {'image': 'mongo'},
                }
            })
        )
    assert 'Invalid service name \'mong\\o\'' in excinfo.exconly()
def test_config_duplicate_cache_from_values_validation_error(self):
    """Duplicate entries in build.cache_from (schema uniqueItems) are rejected."""
    with pytest.raises(ConfigurationError) as exc:
        config.load(
            build_config_details({
                'version': '2.3',
                'services': {
                    'test': {'build': {'context': '.', 'cache_from': ['a', 'b', 'a']}}
                }
            })
        )
    assert 'build.cache_from contains non-unique items' in exc.exconly()
def test_load_with_multiple_files_v1(self):
    """v1 multi-file merge: override adds build/volumes; base links survive."""
    base_file = config.ConfigFile(
        'base.yaml',
        {
            'web': {
                'image': 'example/web',
                'links': ['db'],
            },
            'db': {
                'image': 'example/db',
            },
        })
    override_file = config.ConfigFile(
        'override.yaml',
        {
            'web': {
                'build': '/',
                'volumes': ['/home/user/project:/code'],
            },
        })
    details = config.ConfigDetails('.', [base_file, override_file])
    service_dicts = config.load(details).services
    expected = [
        {
            'name': 'web',
            # build context is resolved to an absolute path
            'build': {'context': os.path.abspath('/')},
            'volumes': [VolumeSpec.parse('/home/user/project:/code')],
            'links': ['db'],
        },
        {
            'name': 'db',
            'image': 'example/db',
        },
    ]
    assert service_sort(service_dicts) == service_sort(expected)
def test_load_with_multiple_files_and_empty_override(self):
    """An empty (None) override file raises an error naming that file."""
    base_file = config.ConfigFile(
        'base.yml',
        {'web': {'image': 'example/web'}})
    override_file = config.ConfigFile('override.yml', None)
    details = config.ConfigDetails('.', [base_file, override_file])
    with pytest.raises(ConfigurationError) as exc:
        config.load(details)
    error_msg = "Top level object in 'override.yml' needs to be an object"
    assert error_msg in exc.exconly()
def test_load_with_multiple_files_and_empty_override_v2(self):
    """Empty override file is rejected under v2 too."""
    base_file = config.ConfigFile(
        'base.yml',
        {'version': '2', 'services': {'web': {'image': 'example/web'}}})
    override_file = config.ConfigFile('override.yml', None)
    details = config.ConfigDetails('.', [base_file, override_file])
    with pytest.raises(ConfigurationError) as exc:
        config.load(details)
    error_msg = "Top level object in 'override.yml' needs to be an object"
    assert error_msg in exc.exconly()
def test_load_with_multiple_files_and_empty_base(self):
    """An empty (None) base file raises an error naming the base file."""
    base_file = config.ConfigFile('base.yml', None)
    override_file = config.ConfigFile(
        'override.yml',
        {'web': {'image': 'example/web'}})
    details = config.ConfigDetails('.', [base_file, override_file])
    with pytest.raises(ConfigurationError) as exc:
        config.load(details)
    assert "Top level object in 'base.yml' needs to be an object" in exc.exconly()
def test_load_with_multiple_files_and_empty_base_v2(self):
    """An empty (None) v2 base file raises an error naming the base file."""
    base_file = config.ConfigFile('base.yml', None)
    # Fixed fixture filename typo: 'override.tml' -> 'override.yml', matching
    # the naming used by every sibling multi-file test. The filename is not
    # asserted on, so this only restores consistency.
    override_file = config.ConfigFile(
        'override.yml',
        {'version': '2', 'services': {'web': {'image': 'example/web'}}}
    )
    details = config.ConfigDetails('.', [base_file, override_file])
    with pytest.raises(ConfigurationError) as exc:
        config.load(details)
    assert "Top level object in 'base.yml' needs to be an object" in exc.exconly()
def test_load_with_multiple_files_and_extends_in_override_file(self):
    """'extends' in an override file is resolved relative to the cwd."""
    base_file = config.ConfigFile(
        'base.yaml',
        {
            'web': {'image': 'example/web'},
        })
    override_file = config.ConfigFile(
        'override.yaml',
        {
            'web': {
                'extends': {
                    'file': 'common.yml',
                    'service': 'base',
                },
                'volumes': ['/home/user/project:/code'],
            },
        })
    details = config.ConfigDetails('.', [base_file, override_file])
    tmpdir = py.test.ensuretemp('config_test')
    self.addCleanup(tmpdir.remove)
    # The extended service lives in a real file in a temp dir.
    tmpdir.join('common.yml').write("""
base:
  labels: ['label=one']
""")
    with tmpdir.as_cwd():
        service_dicts = config.load(details).services
    expected = [
        {
            'name': 'web',
            'image': 'example/web',
            'volumes': [VolumeSpec.parse('/home/user/project:/code')],
            # label list from the extended service is normalized to a dict
            'labels': {'label': 'one'},
        },
    ]
    self.assertEqual(service_sort(service_dicts), service_sort(expected))
def test_load_mixed_extends_resolution(self):
    """Chained extends across files (prodweb -> web -> base) resolve fully."""
    main_file = config.ConfigFile(
        'main.yml', {
            'version': '2.2',
            'services': {
                'prodweb': {
                    'extends': {
                        'service': 'web',
                        'file': 'base.yml'
                    },
                    'environment': {'PROD': 'true'},
                },
            },
        }
    )
    tmpdir = pytest.ensuretemp('config_test')
    self.addCleanup(tmpdir.remove)
    tmpdir.join('base.yml').write("""
version: '2.2'
services:
  base:
    image: base
  web:
    extends: base
""")
    details = config.ConfigDetails('.', [main_file])
    with tmpdir.as_cwd():
        service_dicts = config.load(details).services
    assert service_dicts[0] == {
        'name': 'prodweb',
        # image inherited through two levels of extends
        'image': 'base',
        'environment': {'PROD': 'true'},
    }
def test_load_with_multiple_files_and_invalid_override(self):
    """An invalid override file error names both the service and the file."""
    base_file = config.ConfigFile(
        'base.yaml',
        {'web': {'image': 'example/web'}})
    override_file = config.ConfigFile(
        'override.yaml',
        {'bogus': 'thing'})
    details = config.ConfigDetails('.', [base_file, override_file])
    with pytest.raises(ConfigurationError) as exc:
        config.load(details)
    assert "service 'bogus' must be a mapping not a string." in exc.exconly()
    assert "In file 'override.yaml'" in exc.exconly()
def test_load_sorts_in_dependency_order(self):
    """Services come back dependency-first: volume <- db (volumes_from) <- web (links)."""
    config_details = build_config_details({
        'web': {
            'image': 'busybox:latest',
            'links': ['db'],
        },
        'db': {
            'image': 'busybox:latest',
            'volumes_from': ['volume:ro']
        },
        'volume': {
            'image': 'busybox:latest',
            'volumes': ['/tmp'],
        }
    })
    services = config.load(config_details).services
    assert services[0]['name'] == 'volume'
    assert services[1]['name'] == 'db'
    assert services[2]['name'] == 'web'
def test_load_with_extensions(self):
    """Top-level 'x-' extension keys are ignored and yield no services."""
    config_details = build_config_details({
        'version': '2.3',
        'x-data': {
            'lambda': 3,
            'excess': [True, {}]
        }
    })
    config_data = config.load(config_details)
    assert config_data.services == []
def test_config_build_configuration(self):
    """v1 top-level 'build' + 'dockerfile' are folded into the build dict."""
    service = config.load(
        build_config_details(
            {'web': {
                'build': '.',
                'dockerfile': 'Dockerfile-alt'
            }},
            'tests/fixtures/extends',
            'filename.yml'
        )
    ).services
    self.assertTrue('context' in service[0]['build'])
    self.assertEqual(service[0]['build']['dockerfile'], 'Dockerfile-alt')
def test_config_build_configuration_v2(self):
    """In v2, 'dockerfile' is only valid nested inside the 'build' mapping."""
    # service.dockerfile is invalid in v2
    with self.assertRaises(ConfigurationError):
        config.load(
            build_config_details(
                {
                    'version': '2',
                    'services': {
                        'web': {
                            'build': '.',
                            'dockerfile': 'Dockerfile-alt'
                        }
                    }
                },
                'tests/fixtures/extends',
                'filename.yml'
            )
        )
    # A bare string build is still allowed and resolved to a context.
    service = config.load(
        build_config_details({
            'version': '2',
            'services': {
                'web': {
                    'build': '.'
                }
            }
        }, 'tests/fixtures/extends', 'filename.yml')
    ).services[0]
    self.assertTrue('context' in service['build'])
    # The nested mapping form carries dockerfile through.
    service = config.load(
        build_config_details(
            {
                'version': '2',
                'services': {
                    'web': {
                        'build': {
                            'context': '.',
                            'dockerfile': 'Dockerfile-alt'
                        }
                    }
                }
            },
            'tests/fixtures/extends',
            'filename.yml'
        )
    ).services
    self.assertTrue('context' in service[0]['build'])
    self.assertEqual(service[0]['build']['dockerfile'], 'Dockerfile-alt')
def test_load_with_buildargs(self):
    """build.args values are coerced to strings (int 42 -> '42')."""
    service = config.load(
        build_config_details(
            {
                'version': '2',
                'services': {
                    'web': {
                        'build': {
                            'context': '.',
                            'dockerfile': 'Dockerfile-alt',
                            'args': {
                                'opt1': 42,
                                'opt2': 'foobar'
                            }
                        }
                    }
                }
            },
            'tests/fixtures/extends',
            'filename.yml'
        )
    ).services[0]
    assert 'args' in service['build']
    assert 'opt1' in service['build']['args']
    assert isinstance(service['build']['args']['opt1'], str)
    assert service['build']['args']['opt1'] == '42'
    assert service['build']['args']['opt2'] == 'foobar'
def test_load_build_labels_dict(self):
    """build.labels given as a dict are preserved as-is (no string coercion)."""
    service = config.load(
        build_config_details(
            {
                'version': str(V3_3),
                'services': {
                    'web': {
                        'build': {
                            'context': '.',
                            'dockerfile': 'Dockerfile-alt',
                            'labels': {
                                'label1': 42,
                                'label2': 'foobar'
                            }
                        }
                    }
                }
            },
            'tests/fixtures/extends',
            'filename.yml'
        )
    ).services[0]
    assert 'labels' in service['build']
    assert 'label1' in service['build']['labels']
    # Unlike build args, label values keep their original type.
    assert service['build']['labels']['label1'] == 42
    assert service['build']['labels']['label2'] == 'foobar'
def test_load_build_labels_list(self):
    """build.labels given as a 'k=v' list are normalized into a string dict."""
    base_file = config.ConfigFile(
        'base.yml',
        {
            'version': '2.3',
            'services': {
                'web': {
                    'build': {
                        'context': '.',
                        'labels': ['foo=bar', 'baz=true', 'foobar=1']
                    },
                },
            },
        }
    )
    details = config.ConfigDetails('.', [base_file])
    service = config.load(details).services[0]
    assert service['build']['labels'] == {
        'foo': 'bar', 'baz': 'true', 'foobar': '1'
    }
def test_build_args_allow_empty_properties(self):
    """A build arg with value None is accepted and becomes the empty string."""
    service = config.load(
        build_config_details(
            {
                'version': '2',
                'services': {
                    'web': {
                        'build': {
                            'context': '.',
                            'dockerfile': 'Dockerfile-alt',
                            'args': {
                                'foo': None
                            }
                        }
                    }
                }
            },
            'tests/fixtures/extends',
            'filename.yml'
        )
    ).services[0]
    assert 'args' in service['build']
    assert 'foo' in service['build']['args']
    assert service['build']['args']['foo'] == ''
# If build argument is None then it will be converted to the empty
# string. Make sure that int zero kept as it is, i.e. not converted to
# the empty string
def test_build_args_check_zero_preserved(self):
    """Build arg int 0 is stringified to '0', not collapsed to ''."""
    service = config.load(
        build_config_details(
            {
                'version': '2',
                'services': {
                    'web': {
                        'build': {
                            'context': '.',
                            'dockerfile': 'Dockerfile-alt',
                            'args': {
                                'foo': 0
                            }
                        }
                    }
                }
            },
            'tests/fixtures/extends',
            'filename.yml'
        )
    ).services[0]
    assert 'args' in service['build']
    assert 'foo' in service['build']['args']
    assert service['build']['args']['foo'] == '0'
def test_load_with_multiple_files_mismatched_networks_format(self):
    """Dict-form and list-form 'networks' merge; list entries map to None."""
    base_file = config.ConfigFile(
        'base.yaml',
        {
            'version': '2',
            'services': {
                'web': {
                    'image': 'example/web',
                    'networks': {
                        'foobar': {'aliases': ['foo', 'bar']}
                    }
                }
            },
            'networks': {'foobar': {}, 'baz': {}}
        }
    )
    override_file = config.ConfigFile(
        'override.yaml',
        {
            'version': '2',
            'services': {
                'web': {
                    'networks': ['baz']
                }
            }
        }
    )
    details = config.ConfigDetails('.', [base_file, override_file])
    web_service = config.load(details).services[0]
    assert web_service['networks'] == {
        'foobar': {'aliases': ['foo', 'bar']},
        'baz': None
    }
def test_load_with_multiple_files_v2(self):
    """v2 merge unions depends_on from both files and pulls in new services."""
    base_file = config.ConfigFile(
        'base.yaml',
        {
            'version': '2',
            'services': {
                'web': {
                    'image': 'example/web',
                    'depends_on': ['db'],
                },
                'db': {
                    'image': 'example/db',
                }
            },
        })
    override_file = config.ConfigFile(
        'override.yaml',
        {
            'version': '2',
            'services': {
                'web': {
                    'build': '/',
                    'volumes': ['/home/user/project:/code'],
                    'depends_on': ['other'],
                },
                'other': {
                    'image': 'example/other',
                }
            }
        })
    details = config.ConfigDetails('.', [base_file, override_file])
    service_dicts = config.load(details).services
    expected = [
        {
            'name': 'web',
            'build': {'context': os.path.abspath('/')},
            'image': 'example/web',
            'volumes': [VolumeSpec.parse('/home/user/project:/code')],
            # list-form depends_on is normalized to the condition form
            'depends_on': {
                'db': {'condition': 'service_started'},
                'other': {'condition': 'service_started'},
            },
        },
        {
            'name': 'db',
            'image': 'example/db',
        },
        {
            'name': 'other',
            'image': 'example/other',
        },
    ]
    assert service_sort(service_dicts) == service_sort(expected)
@mock.patch.dict(os.environ)
def test_load_with_multiple_files_v3_2(self):
    """Long- and short-syntax volumes merge by container path across files."""
    os.environ['COMPOSE_CONVERT_WINDOWS_PATHS'] = 'true'
    base_file = config.ConfigFile(
        'base.yaml',
        {
            'version': '3.2',
            'services': {
                'web': {
                    'image': 'example/web',
                    'volumes': [
                        {'source': '/a', 'target': '/b', 'type': 'bind'},
                        {'source': 'vol', 'target': '/x', 'type': 'volume', 'read_only': True}
                    ]
                }
            },
            'volumes': {'vol': {}}
        }
    )
    override_file = config.ConfigFile(
        'override.yaml',
        {
            'version': '3.2',
            'services': {
                'web': {
                    'volumes': ['/c:/b', '/anonymous']
                }
            }
        }
    )
    details = config.ConfigDetails('.', [base_file, override_file])
    service_dicts = config.load(details).services
    svc_volumes = map(lambda v: v.repr(), service_dicts[0]['volumes'])
    # '/c:/b' replaces the base's '/b' mount; 'vol:/x:ro' survives.
    assert sorted(svc_volumes) == sorted(
        ['/anonymous', '/c:/b:rw', 'vol:/x:ro']
    )
@mock.patch.dict(os.environ)
def test_volume_mode_override(self):
    """An override's access mode (ro) wins over the base's (rw) for the same mount."""
    os.environ['COMPOSE_CONVERT_WINDOWS_PATHS'] = 'true'
    base_file = config.ConfigFile(
        'base.yaml',
        {
            'version': '2.3',
            'services': {
                'web': {
                    'image': 'example/web',
                    'volumes': ['/c:/b:rw']
                }
            },
        }
    )
    override_file = config.ConfigFile(
        'override.yaml',
        {
            'version': '2.3',
            'services': {
                'web': {
                    'volumes': ['/c:/b:ro']
                }
            }
        }
    )
    details = config.ConfigDetails('.', [base_file, override_file])
    service_dicts = config.load(details).services
    svc_volumes = list(map(lambda v: v.repr(), service_dicts[0]['volumes']))
    assert svc_volumes == ['/c:/b:ro']
def test_undeclared_volume_v2(self):
    """v2 requires named volumes to be declared; path-like sources are exempt."""
    base_file = config.ConfigFile(
        'base.yaml',
        {
            'version': '2',
            'services': {
                'web': {
                    'image': 'busybox:latest',
                    'volumes': ['data0028:/data:ro'],
                },
            },
        }
    )
    details = config.ConfigDetails('.', [base_file])
    # Named volume 'data0028' is not in the top-level 'volumes' section.
    with self.assertRaises(ConfigurationError):
        config.load(details)
    base_file = config.ConfigFile(
        'base.yaml',
        {
            'version': '2',
            'services': {
                'web': {
                    'image': 'busybox:latest',
                    'volumes': ['./data0028:/data:ro'],
                },
            },
        }
    )
    details = config.ConfigDetails('.', [base_file])
    config_data = config.load(details)
    volume = config_data.services[0].get('volumes')[0]
    # A './' prefix makes it a host path, not a named volume.
    assert not volume.is_named_volume
def test_undeclared_volume_v1(self):
    """v1 has no top-level volumes section, so named volumes need no declaration."""
    base_file = config.ConfigFile(
        'base.yaml',
        {
            'web': {
                'image': 'busybox:latest',
                'volumes': ['data0028:/data:ro'],
            },
        }
    )
    details = config.ConfigDetails('.', [base_file])
    config_data = config.load(details)
    volume = config_data.services[0].get('volumes')[0]
    assert volume.external == 'data0028'
    assert volume.is_named_volume
def test_config_valid_service_names(self):
    """Names made of word chars, dots and dashes load successfully."""
    for valid_name in ['_', '-', '.__.', '_what-up.', 'what_.up----', 'whatup']:
        services = config.load(
            build_config_details(
                {valid_name: {'image': 'busybox'}},
                'tests/fixtures/extends',
                'common.yml')).services
        assert services[0]['name'] == valid_name
def test_config_hint(self):
    """A misspelled option ('privilige') produces a did-you-mean suggestion."""
    with pytest.raises(ConfigurationError) as excinfo:
        config.load(
            build_config_details(
                {
                    'foo': {'image': 'busybox', 'privilige': 'something'},
                },
                'tests/fixtures/extends',
                'filename.yml'
            )
        )
    assert "(did you mean 'privileged'?)" in excinfo.exconly()
def test_load_errors_on_uppercase_with_no_image(self):
    """Uppercase service names without an explicit image are rejected."""
    with pytest.raises(ConfigurationError) as exc:
        config.load(build_config_details({
            'Foo': {'build': '.'},
        }, 'tests/fixtures/build-ctx'))
    assert "Service 'Foo' contains uppercase characters" in exc.exconly()
def test_invalid_config_v1(self):
    """A non-string image value fails v1 schema validation."""
    with pytest.raises(ConfigurationError) as excinfo:
        config.load(
            build_config_details(
                {
                    'foo': {'image': 1},
                },
                'tests/fixtures/extends',
                'filename.yml'
            )
        )
    assert "foo.image contains an invalid type, it should be a string" \
        in excinfo.exconly()
def test_invalid_config_v2(self):
    """A non-string image value fails v2 validation with the services. prefix."""
    with pytest.raises(ConfigurationError) as excinfo:
        config.load(
            build_config_details(
                {
                    'version': '2',
                    'services': {
                        'foo': {'image': 1},
                    },
                },
                'tests/fixtures/extends',
                'filename.yml'
            )
        )
    assert "services.foo.image contains an invalid type, it should be a string" \
        in excinfo.exconly()
def test_invalid_config_build_and_image_specified_v1(self):
    """In v1 a service may not declare both 'image' and 'build'."""
    with pytest.raises(ConfigurationError) as excinfo:
        config.load(
            build_config_details(
                {
                    'foo': {'image': 'busybox', 'build': '.'},
                },
                'tests/fixtures/extends',
                'filename.yml'
            )
        )
    assert "foo has both an image and build path specified." in excinfo.exconly()
def test_invalid_config_type_should_be_an_array(self):
    """'links' given as a bare string is flagged as a wrong type (array expected)."""
    with pytest.raises(ConfigurationError) as excinfo:
        config.load(
            build_config_details(
                {
                    'foo': {'image': 'busybox', 'links': 'an_link'},
                },
                'tests/fixtures/extends',
                'filename.yml'
            )
        )
    assert "foo.links contains an invalid type, it should be an array" \
        in excinfo.exconly()
def test_invalid_config_not_a_dictionary(self):
    """A top-level list (instead of a mapping) is rejected outright."""
    with pytest.raises(ConfigurationError) as excinfo:
        config.load(
            build_config_details(
                ['foo', 'lol'],
                'tests/fixtures/extends',
                'filename.yml'
            )
        )
    assert "Top level object in 'filename.yml' needs to be an object" \
        in excinfo.exconly()
def test_invalid_config_not_unique_items(self):
    """Duplicate entries in a uniqueItems field ('devices') are rejected."""
    with pytest.raises(ConfigurationError) as excinfo:
        config.load(
            build_config_details(
                {
                    'web': {'build': '.', 'devices': ['/dev/foo:/dev/foo', '/dev/foo:/dev/foo']}
                },
                'tests/fixtures/extends',
                'filename.yml'
            )
        )
    assert "has non-unique elements" in excinfo.exconly()
def test_invalid_list_of_strings_format(self):
    """A non-string element in 'command' is reported with its value."""
    with pytest.raises(ConfigurationError) as excinfo:
        config.load(
            build_config_details(
                {
                    'web': {'build': '.', 'command': [1]}
                },
                'tests/fixtures/extends',
                'filename.yml'
            )
        )
    assert "web.command contains 1, which is an invalid type, it should be a string" \
        in excinfo.exconly()
def test_load_config_dockerfile_without_build_raises_error_v1(self):
    """In v1, 'dockerfile' alongside 'image' (and no 'build') is an error."""
    details = build_config_details({
        'web': {
            'image': 'busybox',
            'dockerfile': 'Dockerfile.alt'
        }
    })
    with pytest.raises(ConfigurationError) as exc:
        config.load(details)
    assert "web has both an image and alternate Dockerfile." in exc.exconly()
def test_config_extra_hosts_string_raises_validation_error(self):
    """'extra_hosts' as one bare string is an invalid type."""
    with pytest.raises(ConfigurationError) as excinfo:
        config.load(
            build_config_details(
                {'web': {
                    'image': 'busybox',
                    'extra_hosts': 'somehost:162.242.195.82'
                }},
                'working_dir',
                'filename.yml'
            )
        )
    assert "web.extra_hosts contains an invalid type" \
        in excinfo.exconly()
def test_config_extra_hosts_list_of_dicts_validation_error(self):
    """'extra_hosts' entries must be strings, not one-key dicts."""
    with pytest.raises(ConfigurationError) as excinfo:
        config.load(
            build_config_details(
                {'web': {
                    'image': 'busybox',
                    'extra_hosts': [
                        {'somehost': '162.242.195.82'},
                        {'otherhost': '50.31.209.229'}
                    ]
                }},
                'working_dir',
                'filename.yml'
            )
        )
    assert "web.extra_hosts contains {\"somehost\": \"162.242.195.82\"}, " \
        "which is an invalid type, it should be a string" \
        in excinfo.exconly()
def test_config_ulimits_invalid_keys_validation_error(self):
    """A ulimit mapping only allows 'soft' and 'hard' keys."""
    with pytest.raises(ConfigurationError) as exc:
        config.load(build_config_details(
            {
                'web': {
                    'image': 'busybox',
                    'ulimits': {
                        'nofile': {
                            "not_soft_or_hard": 100,
                            "soft": 10000,
                            "hard": 20000,
                        }
                    }
                }
            },
            'working_dir',
            'filename.yml'))
    assert "web.ulimits.nofile contains unsupported option: 'not_soft_or_hard'" \
        in exc.exconly()
def test_config_ulimits_required_keys_validation_error(self):
    """A ulimit mapping with 'soft' must also specify 'hard'."""
    with pytest.raises(ConfigurationError) as exc:
        config.load(build_config_details(
            {
                'web': {
                    'image': 'busybox',
                    'ulimits': {'nofile': {"soft": 10000}}
                }
            },
            'working_dir',
            'filename.yml'))
    assert "web.ulimits.nofile" in exc.exconly()
    assert "'hard' is a required property" in exc.exconly()
def test_config_ulimits_soft_greater_than_hard_error(self):
    """A soft ulimit may not exceed its hard limit."""
    expected = "'soft' value can not be greater than 'hard' value"
    with pytest.raises(ConfigurationError) as exc:
        config.load(build_config_details(
            {
                'web': {
                    'image': 'busybox',
                    'ulimits': {
                        'nofile': {"soft": 10000, "hard": 1000}
                    }
                }
            },
            'working_dir',
            'filename.yml'))
    assert expected in exc.exconly()
def test_valid_config_which_allows_two_type_definitions(self):
    """'expose' accepts both string and integer port values."""
    expose_values = [["8000"], [8000]]
    for expose in expose_values:
        service = config.load(
            build_config_details(
                {'web': {
                    'image': 'busybox',
                    'expose': expose
                }},
                'working_dir',
                'filename.yml'
            )
        ).services
        self.assertEqual(service[0]['expose'], expose)
def test_valid_config_oneof_string_or_list(self):
    """'entrypoint' accepts either a list or a single string."""
    entrypoint_values = [["sh"], "sh"]
    for entrypoint in entrypoint_values:
        service = config.load(
            build_config_details(
                {'web': {
                    'image': 'busybox',
                    'entrypoint': entrypoint
                }},
                'working_dir',
                'filename.yml'
            )
        ).services
        self.assertEqual(service[0]['entrypoint'], entrypoint)
def test_logs_warning_for_boolean_in_environment(self):
    """A boolean environment value is a type error (strings only)."""
    config_details = build_config_details({
        'web': {
            'image': 'busybox',
            'environment': {'SHOW_STUFF': True}
        }
    })
    with pytest.raises(ConfigurationError) as exc:
        config.load(config_details)
    assert "contains true, which is an invalid type" in exc.exconly()
def test_config_valid_environment_dict_key_contains_dashes(self):
    """Environment variable names containing dashes are accepted and preserved."""
    services = config.load(
        build_config_details(
            {'web': {
                'image': 'busybox',
                'environment': {'SPRING_JPA_HIBERNATE_DDL-AUTO': 'none'}
            }},
            'working_dir',
            'filename.yml'
        )
    ).services
    # Plain assert for consistency with the pytest style used by the
    # surrounding tests (was a lone self.assertEqual).
    assert services[0]['environment']['SPRING_JPA_HIBERNATE_DDL-AUTO'] == 'none'
def test_load_yaml_with_yaml_error(self):
    """A YAML syntax error is wrapped in ConfigurationError with line/column info."""
    tmpdir = py.test.ensuretemp('invalid_yaml_test')
    self.addCleanup(tmpdir.remove)
    invalid_yaml_file = tmpdir.join('docker-compose.yml')
    # NOTE: the leading whitespace inside this literal is significant — the
    # asserted 'column 32' depends on the indentation of the bogus line.
    invalid_yaml_file.write("""
            web:
              this is bogus: ok: what
    """)
    with pytest.raises(ConfigurationError) as exc:
        config.load_yaml(str(invalid_yaml_file))
    assert 'line 3, column 32' in exc.exconly()
def test_validate_extra_hosts_invalid(self):
    """A single mapping string for 'extra_hosts' is an invalid type."""
    with pytest.raises(ConfigurationError) as exc:
        config.load(build_config_details({
            'web': {
                'image': 'alpine',
                'extra_hosts': "www.example.com: 192.168.0.17",
            }
        }))
    assert "web.extra_hosts contains an invalid type" in exc.exconly()
def test_validate_extra_hosts_invalid_list(self):
    """A list of dicts for 'extra_hosts' is an invalid type (strings expected)."""
    with pytest.raises(ConfigurationError) as exc:
        config.load(build_config_details({
            'web': {
                'image': 'alpine',
                'extra_hosts': [
                    {'www.example.com': '192.168.0.17'},
                    {'api.example.com': '192.168.0.18'}
                ],
            }
        }))
    assert "which is an invalid type" in exc.exconly()
def test_normalize_dns_options(self):
    """Scalar 'dns' and 'dns_search' values are normalized into lists."""
    actual = config.load(build_config_details({
        'web': {
            'image': 'alpine',
            'dns': '8.8.8.8',
            'dns_search': 'domain.local',
        }
    }))
    assert actual.services == [
        {
            'name': 'web',
            'image': 'alpine',
            'dns': ['8.8.8.8'],
            'dns_search': ['domain.local'],
        }
    ]
def test_tmpfs_option(self):
    """A scalar 'tmpfs' value is normalized into a list."""
    actual = config.load(build_config_details({
        'version': '2',
        'services': {
            'web': {
                'image': 'alpine',
                'tmpfs': '/run',
            }
        }
    }))
    assert actual.services == [
        {
            'name': 'web',
            'image': 'alpine',
            'tmpfs': ['/run'],
        }
    ]
def test_oom_score_adj_option(self):
    """'oom_score_adj' passes through unchanged as an integer."""
    actual = config.load(build_config_details({
        'version': '2',
        'services': {
            'web': {
                'image': 'alpine',
                'oom_score_adj': 500
            }
        }
    }))
    assert actual.services == [
        {
            'name': 'web',
            'image': 'alpine',
            'oom_score_adj': 500
        }
    ]
def test_swappiness_option(self):
    """'mem_swappiness' passes through unchanged as an integer."""
    actual = config.load(build_config_details({
        'version': '2',
        'services': {
            'web': {
                'image': 'alpine',
                'mem_swappiness': 10,
            }
        }
    }))
    assert actual.services == [
        {
            'name': 'web',
            'image': 'alpine',
            'mem_swappiness': 10,
        }
    ]
def test_group_add_option(self):
    """'group_add' accepts mixed string and integer group identifiers."""
    actual = config.load(build_config_details({
        'version': '2',
        'services': {
            'web': {
                'image': 'alpine',
                'group_add': ["docker", 777]
            }
        }
    }))
    assert actual.services == [
        {
            'name': 'web',
            'image': 'alpine',
            'group_add': ["docker", 777]
        }
    ]
def test_dns_opt_option(self):
    """'dns_opt' list values pass through unchanged."""
    actual = config.load(build_config_details({
        'version': '2',
        'services': {
            'web': {
                'image': 'alpine',
                'dns_opt': ["use-vc", "no-tld-query"]
            }
        }
    }))
    assert actual.services == [
        {
            'name': 'web',
            'image': 'alpine',
            'dns_opt': ["use-vc", "no-tld-query"]
        }
    ]
def test_isolation_option(self):
    """'isolation' (v2.1+) passes through unchanged."""
    actual = config.load(build_config_details({
        'version': str(V2_1),
        'services': {
            'web': {
                'image': 'win10',
                'isolation': 'hyperv'
            }
        }
    }))
    assert actual.services == [
        {
            'name': 'web',
            'image': 'win10',
            'isolation': 'hyperv',
        }
    ]
def test_merge_service_dicts_from_files_with_extends_in_base(self):
    """'extends' declared only in the base survives a file-level merge."""
    base = {
        'volumes': ['.:/app'],
        'extends': {'service': 'app'}
    }
    override = {
        'image': 'alpine:edge',
    }
    actual = config.merge_service_dicts_from_files(
        base,
        override,
        DEFAULT_VERSION)
    assert actual == {
        'image': 'alpine:edge',
        'volumes': ['.:/app'],
        'extends': {'service': 'app'}
    }
def test_merge_service_dicts_from_files_with_extends_in_override(self):
    """'extends' in the override replaces the base's 'extends' wholesale."""
    base = {
        'volumes': ['.:/app'],
        'extends': {'service': 'app'}
    }
    override = {
        'image': 'alpine:edge',
        'extends': {'service': 'foo'}
    }
    actual = config.merge_service_dicts_from_files(
        base,
        override,
        DEFAULT_VERSION)
    assert actual == {
        'image': 'alpine:edge',
        'volumes': ['.:/app'],
        'extends': {'service': 'foo'}
    }
def test_merge_service_dicts_heterogeneous(self):
    """String base ports and int override ports merge into parsed ServicePorts."""
    base = {
        'volumes': ['.:/app'],
        'ports': ['5432']
    }
    override = {
        'image': 'alpine:edge',
        'ports': [5432]
    }
    actual = config.merge_service_dicts_from_files(
        base,
        override,
        DEFAULT_VERSION)
    assert actual == {
        'image': 'alpine:edge',
        'volumes': ['.:/app'],
        'ports': types.ServicePort.parse('5432')
    }
def test_merge_service_dicts_heterogeneous_2(self):
    """Same as above with the int/string port types swapped between files."""
    base = {
        'volumes': ['.:/app'],
        'ports': [5432]
    }
    override = {
        'image': 'alpine:edge',
        'ports': ['5432']
    }
    actual = config.merge_service_dicts_from_files(
        base,
        override,
        DEFAULT_VERSION)
    assert actual == {
        'image': 'alpine:edge',
        'volumes': ['.:/app'],
        'ports': types.ServicePort.parse('5432')
    }
def test_merge_service_dicts_ports_sorting(self):
    """TCP and UDP bindings of the same port number are kept as distinct entries."""
    base = {
        'ports': [5432]
    }
    override = {
        'image': 'alpine:edge',
        'ports': ['5432/udp']
    }
    actual = config.merge_service_dicts_from_files(
        base,
        override,
        DEFAULT_VERSION)
    assert len(actual['ports']) == 2
    assert types.ServicePort.parse('5432')[0] in actual['ports']
    assert types.ServicePort.parse('5432/udp')[0] in actual['ports']
def test_merge_service_dicts_heterogeneous_volumes(self):
    """Long-syntax override volumes replace short-syntax mounts at the same target."""
    base = {
        'volumes': ['/a:/b', '/x:/z'],
    }
    override = {
        'image': 'alpine:edge',
        'volumes': [
            {'source': '/e', 'target': '/b', 'type': 'bind'},
            {'source': '/c', 'target': '/d', 'type': 'bind'}
        ]
    }
    actual = config.merge_service_dicts_from_files(
        base, override, V3_2
    )
    # '/b' is overridden, '/d' is added, '/x:/z' survives untouched.
    assert actual['volumes'] == [
        {'source': '/e', 'target': '/b', 'type': 'bind'},
        {'source': '/c', 'target': '/d', 'type': 'bind'},
        '/x:/z'
    ]
def test_merge_logging_v1(self):
    """v1 log_driver/log_opt in the base survive merging with an override."""
    base = {
        'image': 'alpine:edge',
        'log_driver': 'something',
        'log_opt': {'foo': 'three'},
    }
    override = {
        'image': 'alpine:edge',
        'command': 'true',
    }
    actual = config.merge_service_dicts(base, override, V1)
    assert actual == {
        'image': 'alpine:edge',
        'log_driver': 'something',
        'log_opt': {'foo': 'three'},
        'command': 'true',
    }
def test_merge_logging_v2(self):
    """With the same driver, logging options are merged key-by-key."""
    base = {
        'image': 'alpine:edge',
        'logging': {
            'driver': 'json-file',
            'options': {
                'frequency': '2000',
                'timeout': '23'
            }
        }
    }
    override = {
        'logging': {
            'options': {
                'timeout': '360',
                'pretty-print': 'on'
            }
        }
    }
    actual = config.merge_service_dicts(base, override, V2_0)
    assert actual == {
        'image': 'alpine:edge',
        'logging': {
            'driver': 'json-file',
            'options': {
                'frequency': '2000',
                'timeout': '360',
                'pretty-print': 'on'
            }
        }
    }
def test_merge_logging_v2_override_driver(self):
    """Changing the logging driver discards the base's options entirely."""
    base = {
        'image': 'alpine:edge',
        'logging': {
            'driver': 'json-file',
            'options': {
                'frequency': '2000',
                'timeout': '23'
            }
        }
    }
    override = {
        'logging': {
            'driver': 'syslog',
            'options': {
                'timeout': '360',
                'pretty-print': 'on'
            }
        }
    }
    actual = config.merge_service_dicts(base, override, V2_0)
    assert actual == {
        'image': 'alpine:edge',
        'logging': {
            'driver': 'syslog',
            'options': {
                'timeout': '360',
                'pretty-print': 'on'
            }
        }
    }
def test_merge_logging_v2_no_base_driver(self):
    """If only the override names a driver, options still merge key-by-key."""
    base = {
        'image': 'alpine:edge',
        'logging': {
            'options': {
                'frequency': '2000',
                'timeout': '23'
            }
        }
    }
    override = {
        'logging': {
            'driver': 'json-file',
            'options': {
                'timeout': '360',
                'pretty-print': 'on'
            }
        }
    }
    actual = config.merge_service_dicts(base, override, V2_0)
    assert actual == {
        'image': 'alpine:edge',
        'logging': {
            'driver': 'json-file',
            'options': {
                'frequency': '2000',
                'timeout': '360',
                'pretty-print': 'on'
            }
        }
    }
def test_merge_logging_v2_no_drivers(self):
    """With no driver on either side, options merge key-by-key."""
    base = {
        'image': 'alpine:edge',
        'logging': {
            'options': {
                'frequency': '2000',
                'timeout': '23'
            }
        }
    }
    override = {
        'logging': {
            'options': {
                'timeout': '360',
                'pretty-print': 'on'
            }
        }
    }
    actual = config.merge_service_dicts(base, override, V2_0)
    assert actual == {
        'image': 'alpine:edge',
        'logging': {
            'options': {
                'frequency': '2000',
                'timeout': '360',
                'pretty-print': 'on'
            }
        }
    }
def test_merge_logging_v2_no_override_options(self):
    """A driver change with no new options drops the base's options."""
    base = {
        'image': 'alpine:edge',
        'logging': {
            'driver': 'json-file',
            'options': {
                'frequency': '2000',
                'timeout': '23'
            }
        }
    }
    override = {
        'logging': {
            'driver': 'syslog'
        }
    }
    actual = config.merge_service_dicts(base, override, V2_0)
    assert actual == {
        'image': 'alpine:edge',
        'logging': {
            'driver': 'syslog',
        }
    }
def test_merge_logging_v2_no_base(self):
    """With no logging in the base, the override's logging is taken verbatim."""
    base = {
        'image': 'alpine:edge'
    }
    override = {
        'logging': {
            'driver': 'json-file',
            'options': {
                'frequency': '2000'
            }
        }
    }
    actual = config.merge_service_dicts(base, override, V2_0)
    assert actual == {
        'image': 'alpine:edge',
        'logging': {
            'driver': 'json-file',
            'options': {
                'frequency': '2000'
            }
        }
    }
def test_merge_logging_v2_no_override(self):
    """An empty override leaves the base's logging untouched."""
    base = {
        'image': 'alpine:edge',
        'logging': {
            'driver': 'syslog',
            'options': {
                'frequency': '2000'
            }
        }
    }
    override = {}
    actual = config.merge_service_dicts(base, override, V2_0)
    assert actual == {
        'image': 'alpine:edge',
        'logging': {
            'driver': 'syslog',
            'options': {
                'frequency': '2000'
            }
        }
    }
def test_merge_mixed_ports(self):
    """Long-syntax and short-syntax forms of the same port dedupe on merge."""
    base = {
        'image': 'busybox:latest',
        'command': 'top',
        'ports': [
            {
                'target': '1245',
                'published': '1245',
                'protocol': 'udp',
            }
        ]
    }
    override = {
        'ports': ['1245:1245/udp']
    }
    actual = config.merge_service_dicts(base, override, V3_1)
    assert actual == {
        'image': 'busybox:latest',
        'command': 'top',
        'ports': [types.ServicePort('1245', '1245', 'udp', None, None)]
    }
def test_merge_depends_on_no_override(self):
    """Merging with an empty override leaves depends_on untouched."""
    base = {
        'image': 'busybox',
        'depends_on': {
            'app1': {'condition': 'service_started'},
            'app2': {'condition': 'service_healthy'}
        }
    }
    merged = config.merge_service_dicts(base, {}, V2_1)
    assert merged == base
def test_merge_depends_on_mixed_syntax(self):
    """List-form depends_on in the override is normalized and unioned in."""
    base = {
        'image': 'busybox',
        'depends_on': {
            'app1': {'condition': 'service_started'},
            'app2': {'condition': 'service_healthy'}
        }
    }
    override = {
        'depends_on': ['app3']
    }
    actual = config.merge_service_dicts(base, override, V2_1)
    assert actual == {
        'image': 'busybox',
        'depends_on': {
            'app1': {'condition': 'service_started'},
            'app2': {'condition': 'service_healthy'},
            'app3': {'condition': 'service_started'}
        }
    }
def test_empty_environment_key_allowed(self):
    """An environment variable may be explicitly set to the empty string."""
    service_dict = config.load(
        build_config_details(
            {
                'web': {
                    'build': '.',
                    'environment': {
                        'POSTGRES_PASSWORD': ''
                    },
                },
            },
            '.',
            None,
        )
    ).services[0]
    self.assertEqual(service_dict['environment']['POSTGRES_PASSWORD'], '')
def test_merge_pid(self):
    """'pid' in the base survives a merge that only adds labels."""
    # Regression: https://github.com/docker/compose/issues/4184
    base = {
        'image': 'busybox',
        'pid': 'host'
    }
    override = {
        'labels': {'com.docker.compose.test': 'yes'}
    }
    actual = config.merge_service_dicts(base, override, V2_0)
    assert actual == {
        'image': 'busybox',
        'pid': 'host',
        'labels': {'com.docker.compose.test': 'yes'}
    }
def test_merge_different_secrets(self):
    """Distinct secrets from base and override are unioned."""
    base = {
        'image': 'busybox',
        'secrets': [
            {'source': 'src.txt'}
        ]
    }
    override = {'secrets': ['other-src.txt']}
    actual = config.merge_service_dicts(base, override, V3_1)
    assert secret_sort(actual['secrets']) == secret_sort([
        {'source': 'src.txt'},
        {'source': 'other-src.txt'}
    ])
def test_merge_secrets_override(self):
    """A long-syntax secret in the override replaces the base's short form."""
    base = {
        'image': 'busybox',
        'secrets': ['src.txt'],
    }
    override = {
        'secrets': [
            {
                'source': 'src.txt',
                'target': 'data.txt',
                'mode': 0o400
            }
        ]
    }
    actual = config.merge_service_dicts(base, override, V3_1)
    assert actual['secrets'] == override['secrets']
def test_merge_different_configs(self):
    """Distinct configs from base and override are unioned (mirrors secrets)."""
    base = {
        'image': 'busybox',
        'configs': [
            {'source': 'src.txt'}
        ]
    }
    override = {'configs': ['other-src.txt']}
    actual = config.merge_service_dicts(base, override, V3_3)
    assert secret_sort(actual['configs']) == secret_sort([
        {'source': 'src.txt'},
        {'source': 'other-src.txt'}
    ])
def test_merge_configs_override(self):
    """A long-syntax config in the override replaces the base's short form."""
    base = {
        'image': 'busybox',
        'configs': ['src.txt'],
    }
    override = {
        'configs': [
            {
                'source': 'src.txt',
                'target': 'data.txt',
                'mode': 0o400
            }
        ]
    }
    actual = config.merge_service_dicts(base, override, V3_3)
    assert actual['configs'] == override['configs']
def test_merge_deploy(self):
    """With no deploy in the base, the override's deploy is taken verbatim."""
    base = {
        'image': 'busybox',
    }
    override = {
        'deploy': {
            'mode': 'global',
            'restart_policy': {
                'condition': 'on-failure'
            }
        }
    }
    actual = config.merge_service_dicts(base, override, V3_0)
    assert actual['deploy'] == override['deploy']
def test_merge_deploy_override(self):
    """Deploy merges deeply: overridden keys replaced, base-only keys kept."""
    base = {
        'image': 'busybox',
        'deploy': {
            'mode': 'global',
            'restart_policy': {
                'condition': 'on-failure'
            },
            'placement': {
                'constraints': [
                    'node.role == manager'
                ]
            }
        }
    }
    override = {
        'deploy': {
            'mode': 'replicated',
            'restart_policy': {
                'condition': 'any'
            }
        }
    }
    actual = config.merge_service_dicts(base, override, V3_0)
    assert actual['deploy'] == {
        'mode': 'replicated',
        'restart_policy': {
            'condition': 'any'
        },
        # placement only existed in the base and is preserved
        'placement': {
            'constraints': [
                'node.role == manager'
            ]
        }
    }
def test_merge_credential_spec(self):
base = {
'image': 'bb',
'credential_spec': {
'file': '/hello-world',
}
}
override = {
'credential_spec': {
'registry': 'revolution.com',
}
}
actual = config.merge_service_dicts(base, override, V3_3)
assert actual['credential_spec'] == override['credential_spec']
def test_merge_scale(self):
base = {
'image': 'bar',
'scale': 2,
}
override = {
'scale': 4,
}
actual = config.merge_service_dicts(base, override, V2_2)
assert actual == {'image': 'bar', 'scale': 4}
    def test_merge_blkio_config(self):
        """blkio_config merges key-by-key: the scalar 'weight' and the
        per-device iops lists are replaced by the override, 'weight_device'
        entries (distinct devices here) are concatenated, and keys absent
        from the override ('device_write_iops') are preserved."""
        base = {
            'image': 'bar',
            'blkio_config': {
                'weight': 300,
                'weight_device': [
                    {'path': '/dev/sda1', 'weight': 200}
                ],
                'device_read_iops': [
                    {'path': '/dev/sda1', 'rate': 300}
                ],
                'device_write_iops': [
                    {'path': '/dev/sda1', 'rate': 1000}
                ]
            }
        }
        override = {
            'blkio_config': {
                'weight': 450,
                'weight_device': [
                    {'path': '/dev/sda2', 'weight': 400}
                ],
                'device_read_iops': [
                    {'path': '/dev/sda1', 'rate': 2000}
                ],
                'device_read_bps': [
                    {'path': '/dev/sda1', 'rate': 1024}
                ]
            }
        }
        actual = config.merge_service_dicts(base, override, V2_2)
        assert actual == {
            'image': 'bar',
            'blkio_config': {
                'weight': override['blkio_config']['weight'],
                # weight_device lists for different devices are combined.
                'weight_device': (
                    base['blkio_config']['weight_device'] +
                    override['blkio_config']['weight_device']
                ),
                'device_read_iops': override['blkio_config']['device_read_iops'],
                'device_read_bps': override['blkio_config']['device_read_bps'],
                'device_write_iops': base['blkio_config']['device_write_iops']
            }
        }
def test_merge_extra_hosts(self):
base = {
'image': 'bar',
'extra_hosts': {
'foo': '1.2.3.4',
}
}
override = {
'extra_hosts': ['bar:5.6.7.8', 'foo:127.0.0.1']
}
actual = config.merge_service_dicts(base, override, V2_0)
assert actual['extra_hosts'] == {
'foo': '127.0.0.1',
'bar': '5.6.7.8',
}
def test_merge_healthcheck_config(self):
base = {
'image': 'bar',
'healthcheck': {
'start_period': 1000,
'interval': 3000,
'test': ['true']
}
}
override = {
'healthcheck': {
'interval': 5000,
'timeout': 10000,
'test': ['echo', 'OK'],
}
}
actual = config.merge_service_dicts(base, override, V2_3)
assert actual['healthcheck'] == {
'start_period': base['healthcheck']['start_period'],
'test': override['healthcheck']['test'],
'interval': override['healthcheck']['interval'],
'timeout': override['healthcheck']['timeout'],
}
def test_merge_healthcheck_override_disables(self):
base = {
'image': 'bar',
'healthcheck': {
'start_period': 1000,
'interval': 3000,
'timeout': 2000,
'retries': 3,
'test': ['true']
}
}
override = {
'healthcheck': {
'disabled': True
}
}
actual = config.merge_service_dicts(base, override, V2_3)
assert actual['healthcheck'] == {'disabled': True}
def test_merge_healthcheck_override_enables(self):
base = {
'image': 'bar',
'healthcheck': {
'disabled': True
}
}
override = {
'healthcheck': {
'disabled': False,
'start_period': 1000,
'interval': 3000,
'timeout': 2000,
'retries': 3,
'test': ['true']
}
}
actual = config.merge_service_dicts(base, override, V2_3)
assert actual['healthcheck'] == override['healthcheck']
def test_external_volume_config(self):
config_details = build_config_details({
'version': '2',
'services': {
'bogus': {'image': 'busybox'}
},
'volumes': {
'ext': {'external': True},
'ext2': {'external': {'name': 'aliased'}}
}
})
config_result = config.load(config_details)
volumes = config_result.volumes
assert 'ext' in volumes
assert volumes['ext']['external'] is True
assert 'ext2' in volumes
assert volumes['ext2']['external']['name'] == 'aliased'
def test_external_volume_invalid_config(self):
config_details = build_config_details({
'version': '2',
'services': {
'bogus': {'image': 'busybox'}
},
'volumes': {
'ext': {'external': True, 'driver': 'foo'}
}
})
with pytest.raises(ConfigurationError):
config.load(config_details)
def test_depends_on_orders_services(self):
config_details = build_config_details({
'version': '2',
'services': {
'one': {'image': 'busybox', 'depends_on': ['three', 'two']},
'two': {'image': 'busybox', 'depends_on': ['three']},
'three': {'image': 'busybox'},
},
})
actual = config.load(config_details)
assert (
[service['name'] for service in actual.services] ==
['three', 'two', 'one']
)
def test_depends_on_unknown_service_errors(self):
config_details = build_config_details({
'version': '2',
'services': {
'one': {'image': 'busybox', 'depends_on': ['three']},
},
})
with pytest.raises(ConfigurationError) as exc:
config.load(config_details)
assert "Service 'one' depends on service 'three'" in exc.exconly()
def test_linked_service_is_undefined(self):
with self.assertRaises(ConfigurationError):
config.load(
build_config_details({
'version': '2',
'services': {
'web': {'image': 'busybox', 'links': ['db:db']},
},
})
)
def test_load_dockerfile_without_context(self):
config_details = build_config_details({
'version': '2',
'services': {
'one': {'build': {'dockerfile': 'Dockerfile.foo'}},
},
})
with pytest.raises(ConfigurationError) as exc:
config.load(config_details)
assert 'has neither an image nor a build context' in exc.exconly()
    def test_load_secrets(self):
        """Both short (name only) and long (source/target/uid/gid/mode)
        secret syntax load into ServiceSecret tuples."""
        base_file = config.ConfigFile(
            'base.yaml',
            {
                'version': '3.1',
                'services': {
                    'web': {
                        'image': 'example/web',
                        'secrets': [
                            'one',
                            {
                                'source': 'source',
                                'target': 'target',
                                'uid': '100',
                                'gid': '200',
                                'mode': 0o777,
                            },
                        ],
                    },
                },
                'secrets': {
                    'one': {'file': 'secret.txt'},
                },
            })
        details = config.ConfigDetails('.', [base_file])
        service_dicts = config.load(details).services
        expected = [
            {
                'name': 'web',
                'image': 'example/web',
                'secrets': [
                    # Short syntax: only the source is set.
                    types.ServiceSecret('one', None, None, None, None),
                    types.ServiceSecret('source', 'target', '100', '200', 0o777),
                ],
            },
        ]
        assert service_sort(service_dicts) == service_sort(expected)
    def test_load_secrets_multi_file(self):
        """Secrets from a base file and an override file accumulate on the
        service."""
        base_file = config.ConfigFile(
            'base.yaml',
            {
                'version': '3.1',
                'services': {
                    'web': {
                        'image': 'example/web',
                        'secrets': ['one'],
                    },
                },
                'secrets': {
                    'one': {'file': 'secret.txt'},
                },
            })
        # NOTE(review): the override reuses the name 'base.yaml'; merging is
        # by position in the ConfigDetails list, not by filename.
        override_file = config.ConfigFile(
            'base.yaml',
            {
                'version': '3.1',
                'services': {
                    'web': {
                        'secrets': [
                            {
                                'source': 'source',
                                'target': 'target',
                                'uid': '100',
                                'gid': '200',
                                'mode': 0o777,
                            },
                        ],
                    },
                },
            })
        details = config.ConfigDetails('.', [base_file, override_file])
        service_dicts = config.load(details).services
        expected = [
            {
                'name': 'web',
                'image': 'example/web',
                'secrets': [
                    types.ServiceSecret('one', None, None, None, None),
                    types.ServiceSecret('source', 'target', '100', '200', 0o777),
                ],
            },
        ]
        assert service_sort(service_dicts) == service_sort(expected)
    def test_load_configs(self):
        """Both short and long config syntax load into ServiceConfig tuples
        (mirrors test_load_secrets for the v3.3 'configs' key)."""
        base_file = config.ConfigFile(
            'base.yaml',
            {
                'version': '3.3',
                'services': {
                    'web': {
                        'image': 'example/web',
                        'configs': [
                            'one',
                            {
                                'source': 'source',
                                'target': 'target',
                                'uid': '100',
                                'gid': '200',
                                'mode': 0o777,
                            },
                        ],
                    },
                },
                'configs': {
                    'one': {'file': 'secret.txt'},
                },
            })
        details = config.ConfigDetails('.', [base_file])
        service_dicts = config.load(details).services
        expected = [
            {
                'name': 'web',
                'image': 'example/web',
                'configs': [
                    types.ServiceConfig('one', None, None, None, None),
                    types.ServiceConfig('source', 'target', '100', '200', 0o777),
                ],
            },
        ]
        assert service_sort(service_dicts) == service_sort(expected)
    def test_load_configs_multi_file(self):
        """Configs from a base file and an override file accumulate on the
        service (mirrors test_load_secrets_multi_file)."""
        base_file = config.ConfigFile(
            'base.yaml',
            {
                'version': '3.3',
                'services': {
                    'web': {
                        'image': 'example/web',
                        'configs': ['one'],
                    },
                },
                'configs': {
                    'one': {'file': 'secret.txt'},
                },
            })
        override_file = config.ConfigFile(
            'base.yaml',
            {
                'version': '3.3',
                'services': {
                    'web': {
                        'configs': [
                            {
                                'source': 'source',
                                'target': 'target',
                                'uid': '100',
                                'gid': '200',
                                'mode': 0o777,
                            },
                        ],
                    },
                },
            })
        details = config.ConfigDetails('.', [base_file, override_file])
        service_dicts = config.load(details).services
        expected = [
            {
                'name': 'web',
                'image': 'example/web',
                'configs': [
                    types.ServiceConfig('one', None, None, None, None),
                    types.ServiceConfig('source', 'target', '100', '200', 0o777),
                ],
            },
        ]
        assert service_sort(service_dicts) == service_sort(expected)
class NetworkModeTest(unittest.TestCase):
    """Loading of 'network_mode' (compose v2) and the legacy v1 'net' key."""
    def test_network_mode_standard(self):
        config_data = config.load(build_config_details({
            'version': '2',
            'services': {
                'web': {
                    'image': 'busybox',
                    'command': "top",
                    'network_mode': 'bridge',
                },
            },
        }))
        assert config_data.services[0]['network_mode'] == 'bridge'
    def test_network_mode_standard_v1(self):
        """The v1 'net' key is renamed to 'network_mode' during loading."""
        config_data = config.load(build_config_details({
            'web': {
                'image': 'busybox',
                'command': "top",
                'net': 'bridge',
            },
        }))
        assert config_data.services[0]['network_mode'] == 'bridge'
        assert 'net' not in config_data.services[0]
    def test_network_mode_container(self):
        config_data = config.load(build_config_details({
            'version': '2',
            'services': {
                'web': {
                    'image': 'busybox',
                    'command': "top",
                    'network_mode': 'container:foo',
                },
            },
        }))
        assert config_data.services[0]['network_mode'] == 'container:foo'
    def test_network_mode_container_v1(self):
        config_data = config.load(build_config_details({
            'web': {
                'image': 'busybox',
                'command': "top",
                'net': 'container:foo',
            },
        }))
        assert config_data.services[0]['network_mode'] == 'container:foo'
    def test_network_mode_service(self):
        config_data = config.load(build_config_details({
            'version': '2',
            'services': {
                'web': {
                    'image': 'busybox',
                    'command': "top",
                    'network_mode': 'service:foo',
                },
                'foo': {
                    'image': 'busybox',
                    'command': "top",
                },
            },
        }))
        assert config_data.services[1]['network_mode'] == 'service:foo'
    def test_network_mode_service_v1(self):
        """v1 'net: container:<name>' naming a defined service is rewritten
        to 'service:<name>'."""
        config_data = config.load(build_config_details({
            'web': {
                'image': 'busybox',
                'command': "top",
                'net': 'container:foo',
            },
            'foo': {
                'image': 'busybox',
                'command': "top",
            },
        }))
        assert config_data.services[1]['network_mode'] == 'service:foo'
    def test_network_mode_service_nonexistent(self):
        with pytest.raises(ConfigurationError) as excinfo:
            config.load(build_config_details({
                'version': '2',
                'services': {
                    'web': {
                        'image': 'busybox',
                        'command': "top",
                        'network_mode': 'service:foo',
                    },
                },
            }))
        assert "service 'foo' which is undefined" in excinfo.exconly()
    def test_network_mode_plus_networks_is_invalid(self):
        with pytest.raises(ConfigurationError) as excinfo:
            config.load(build_config_details({
                'version': '2',
                'services': {
                    'web': {
                        'image': 'busybox',
                        'command': "top",
                        'network_mode': 'bridge',
                        'networks': ['front'],
                    },
                },
                'networks': {
                    'front': None,
                }
            }))
        assert "'network_mode' and 'networks' cannot be combined" in excinfo.exconly()
class PortsTest(unittest.TestCase):
    """Schema validation of 'ports' and 'expose' entries."""
    # Values that are not a valid list of ports (mapping, bool, bare scalar).
    INVALID_PORTS_TYPES = [
        {"1": "8000"},
        False,
        "8000",
        8000,
    ]
    NON_UNIQUE_SINGLE_PORTS = [
        ["8000", "8000"],
    ]
    # Host/container ranges whose lengths differ.
    INVALID_PORT_MAPPINGS = [
        ["8000-8004:8000-8002"],
        ["4242:4242-4244"],
    ]
    VALID_SINGLE_PORTS = [
        ["8000"],
        ["8000/tcp"],
        ["8000", "9000"],
        [8000],
        [8000, 9000],
    ]
    VALID_PORT_MAPPINGS = [
        ["8000:8050"],
        ["49153-49154:3002-3003"],
    ]
    def test_config_invalid_ports_type_validation(self):
        for invalid_ports in self.INVALID_PORTS_TYPES:
            with pytest.raises(ConfigurationError) as exc:
                self.check_config({'ports': invalid_ports})
            assert "contains an invalid type" in exc.value.msg
    def test_config_non_unique_ports_validation(self):
        for invalid_ports in self.NON_UNIQUE_SINGLE_PORTS:
            with pytest.raises(ConfigurationError) as exc:
                self.check_config({'ports': invalid_ports})
            assert "non-unique" in exc.value.msg
    def test_config_invalid_ports_format_validation(self):
        for invalid_ports in self.INVALID_PORT_MAPPINGS:
            with pytest.raises(ConfigurationError) as exc:
                self.check_config({'ports': invalid_ports})
            assert "Port ranges don't match in length" in exc.value.msg
    def test_config_valid_ports_format_validation(self):
        for valid_ports in self.VALID_SINGLE_PORTS + self.VALID_PORT_MAPPINGS:
            self.check_config({'ports': valid_ports})
    def test_config_invalid_expose_type_validation(self):
        for invalid_expose in self.INVALID_PORTS_TYPES:
            with pytest.raises(ConfigurationError) as exc:
                self.check_config({'expose': invalid_expose})
            assert "contains an invalid type" in exc.value.msg
    def test_config_non_unique_expose_validation(self):
        for invalid_expose in self.NON_UNIQUE_SINGLE_PORTS:
            with pytest.raises(ConfigurationError) as exc:
                self.check_config({'expose': invalid_expose})
            assert "non-unique" in exc.value.msg
    def test_config_invalid_expose_format_validation(self):
        # Valid port mappings ARE NOT valid 'expose' entries
        for invalid_expose in self.INVALID_PORT_MAPPINGS + self.VALID_PORT_MAPPINGS:
            with pytest.raises(ConfigurationError) as exc:
                self.check_config({'expose': invalid_expose})
            assert "should be of the format" in exc.value.msg
    def test_config_valid_expose_format_validation(self):
        # Valid single ports ARE valid 'expose' entries
        for valid_expose in self.VALID_SINGLE_PORTS:
            self.check_config({'expose': valid_expose})
    def check_config(self, cfg):
        """Load a minimal v2.3 config with *cfg* merged into one service."""
        config.load(
            build_config_details({
                'version': '2.3',
                'services': {
                    'web': dict(image='busybox', **cfg)
                },
            }, 'working_dir', 'filename.yml')
        )
class InterpolationTest(unittest.TestCase):
    """Environment-variable interpolation in compose files."""
    @mock.patch.dict(os.environ)
    def test_config_file_with_environment_file(self):
        # Values come from the fixture project's .env file.
        project_dir = 'tests/fixtures/default-env-file'
        service_dicts = config.load(
            config.find(
                project_dir, None, Environment.from_env_file(project_dir)
            )
        ).services
        self.assertEqual(service_dicts[0], {
            'name': 'web',
            'image': 'alpine:latest',
            'ports': [
                types.ServicePort.parse('5643')[0],
                types.ServicePort.parse('9999')[0]
            ],
            'command': 'true'
        })
    @mock.patch.dict(os.environ)
    def test_config_file_with_environment_variable(self):
        project_dir = 'tests/fixtures/environment-interpolation'
        os.environ.update(
            IMAGE="busybox",
            HOST_PORT="80",
            LABEL_VALUE="myvalue",
        )
        service_dicts = config.load(
            config.find(
                project_dir, None, Environment.from_env_file(project_dir)
            )
        ).services
        self.assertEqual(service_dicts, [
            {
                'name': 'web',
                'image': 'busybox',
                'ports': types.ServicePort.parse('80:8000'),
                'labels': {'mylabel': 'myvalue'},
                # Unset variables interpolate to empty; '$$' escapes survive
                # as a literal '${...}' (values defined by the fixture file).
                'hostname': 'host-',
                'command': '${ESCAPED}',
            }
        ])
    @mock.patch.dict(os.environ)
    def test_unset_variable_produces_warning(self):
        os.environ.pop('FOO', None)
        os.environ.pop('BAR', None)
        config_details = build_config_details(
            {
                'web': {
                    'image': '${FOO}',
                    'command': '${BAR}',
                    'container_name': '${BAR}',
                },
            },
            '.',
            None,
        )
        with mock.patch('compose.config.environment.log') as log:
            config.load(config_details)
            # One warning per distinct missing variable, not per occurrence.
            self.assertEqual(2, log.warn.call_count)
            warnings = sorted(args[0][0] for args in log.warn.call_args_list)
            self.assertIn('BAR', warnings[0])
            self.assertIn('FOO', warnings[1])
    @mock.patch.dict(os.environ)
    def test_invalid_interpolation(self):
        with self.assertRaises(config.ConfigurationError) as cm:
            config.load(
                build_config_details(
                    {'web': {'image': '${'}},
                    'working_dir',
                    'filename.yml'
                )
            )
        self.assertIn('Invalid', cm.exception.msg)
        self.assertIn('for "image" option', cm.exception.msg)
        self.assertIn('in service "web"', cm.exception.msg)
        self.assertIn('"${"', cm.exception.msg)
    @mock.patch.dict(os.environ)
    def test_interpolation_secrets_section(self):
        """Interpolation also applies to the top-level secrets section."""
        os.environ['FOO'] = 'baz.bar'
        config_dict = config.load(build_config_details({
            'version': '3.1',
            'secrets': {
                'secretdata': {
                    'external': {'name': '$FOO'}
                }
            }
        }))
        assert config_dict.secrets == {
            'secretdata': {
                'external': {'name': 'baz.bar'},
                'external_name': 'baz.bar'
            }
        }
    @mock.patch.dict(os.environ)
    def test_interpolation_configs_section(self):
        """Interpolation also applies to the top-level configs section."""
        os.environ['FOO'] = 'baz.bar'
        config_dict = config.load(build_config_details({
            'version': '3.3',
            'configs': {
                'configdata': {
                    'external': {'name': '$FOO'}
                }
            }
        }))
        assert config_dict.configs == {
            'configdata': {
                'external': {'name': 'baz.bar'},
                'external_name': 'baz.bar'
            }
        }
class VolumeConfigTest(unittest.TestCase):
    """Volume path expansion rules: env vars and '~' expand, names and
    absolute paths do not; relative paths resolve against working_dir."""
    def test_no_binding(self):
        d = make_service_dict('foo', {'build': '.', 'volumes': ['/data']}, working_dir='.')
        self.assertEqual(d['volumes'], ['/data'])
    @mock.patch.dict(os.environ)
    def test_volume_binding_with_environment_variable(self):
        os.environ['VOLUME_PATH'] = '/host/path'
        d = config.load(
            build_config_details(
                {'foo': {'build': '.', 'volumes': ['${VOLUME_PATH}:/container/path']}},
                '.',
                None,
            )
        ).services[0]
        self.assertEqual(d['volumes'], [VolumeSpec.parse('/host/path:/container/path')])
    @pytest.mark.skipif(IS_WINDOWS_PLATFORM, reason='posix paths')
    @mock.patch.dict(os.environ)
    def test_volume_binding_with_home(self):
        os.environ['HOME'] = '/home/user'
        d = make_service_dict('foo', {'build': '.', 'volumes': ['~:/container/path']}, working_dir='.')
        self.assertEqual(d['volumes'], ['/home/user:/container/path'])
    def test_name_does_not_expand(self):
        d = make_service_dict('foo', {'build': '.', 'volumes': ['mydatavolume:/data']}, working_dir='.')
        self.assertEqual(d['volumes'], ['mydatavolume:/data'])
    def test_absolute_posix_path_does_not_expand(self):
        d = make_service_dict('foo', {'build': '.', 'volumes': ['/var/lib/data:/data']}, working_dir='.')
        self.assertEqual(d['volumes'], ['/var/lib/data:/data'])
    def test_absolute_windows_path_does_not_expand(self):
        d = make_service_dict('foo', {'build': '.', 'volumes': ['c:\\data:/data']}, working_dir='.')
        self.assertEqual(d['volumes'], ['c:\\data:/data'])
    @pytest.mark.skipif(IS_WINDOWS_PLATFORM, reason='posix paths')
    def test_relative_path_does_expand_posix(self):
        d = make_service_dict(
            'foo',
            {'build': '.', 'volumes': ['./data:/data']},
            working_dir='/home/me/myproject')
        self.assertEqual(d['volumes'], ['/home/me/myproject/data:/data'])
        d = make_service_dict(
            'foo',
            {'build': '.', 'volumes': ['.:/data']},
            working_dir='/home/me/myproject')
        self.assertEqual(d['volumes'], ['/home/me/myproject:/data'])
        d = make_service_dict(
            'foo',
            {'build': '.', 'volumes': ['../otherproject:/data']},
            working_dir='/home/me/myproject')
        self.assertEqual(d['volumes'], ['/home/me/otherproject:/data'])
    @pytest.mark.skipif(not IS_WINDOWS_PLATFORM, reason='windows paths')
    def test_relative_path_does_expand_windows(self):
        d = make_service_dict(
            'foo',
            {'build': '.', 'volumes': ['./data:/data']},
            working_dir='c:\\Users\\me\\myproject')
        self.assertEqual(d['volumes'], ['c:\\Users\\me\\myproject\\data:/data'])
        d = make_service_dict(
            'foo',
            {'build': '.', 'volumes': ['.:/data']},
            working_dir='c:\\Users\\me\\myproject')
        self.assertEqual(d['volumes'], ['c:\\Users\\me\\myproject:/data'])
        d = make_service_dict(
            'foo',
            {'build': '.', 'volumes': ['../otherproject:/data']},
            working_dir='c:\\Users\\me\\myproject')
        self.assertEqual(d['volumes'], ['c:\\Users\\me\\otherproject:/data'])
    @mock.patch.dict(os.environ)
    def test_home_directory_with_driver_does_not_expand(self):
        # With a volume_driver set, '~' is left verbatim for the driver.
        os.environ['NAME'] = 'surprise!'
        d = make_service_dict('foo', {
            'build': '.',
            'volumes': ['~:/data'],
            'volume_driver': 'foodriver',
        }, working_dir='.')
        self.assertEqual(d['volumes'], ['~:/data'])
    def test_volume_path_with_non_ascii_directory(self):
        volume = u'/Füü/data:/data'
        container_path = config.resolve_volume_path(".", volume)
        self.assertEqual(container_path, volume)
class MergePathMappingTest(object):
    """Mixin exercising merge behaviour for path-mapping options; concrete
    subclasses set ``config_name`` (e.g. 'volumes', 'devices')."""
    config_name = ""

    def test_empty(self):
        merged = config.merge_service_dicts({}, {}, DEFAULT_VERSION)
        assert self.config_name not in merged

    def test_no_override(self):
        merged = config.merge_service_dicts(
            {self.config_name: ['/foo:/code', '/data']},
            {},
            DEFAULT_VERSION)
        assert set(merged[self.config_name]) == {'/foo:/code', '/data'}

    def test_no_base(self):
        merged = config.merge_service_dicts(
            {},
            {self.config_name: ['/bar:/code']},
            DEFAULT_VERSION)
        assert set(merged[self.config_name]) == {'/bar:/code'}

    def test_override_explicit_path(self):
        # The override replaces the mapping for the same container path.
        merged = config.merge_service_dicts(
            {self.config_name: ['/foo:/code', '/data']},
            {self.config_name: ['/bar:/code']},
            DEFAULT_VERSION)
        assert set(merged[self.config_name]) == {'/bar:/code', '/data'}

    def test_add_explicit_path(self):
        merged = config.merge_service_dicts(
            {self.config_name: ['/foo:/code', '/data']},
            {self.config_name: ['/bar:/code', '/quux:/data']},
            DEFAULT_VERSION)
        assert set(merged[self.config_name]) == {'/bar:/code', '/quux:/data'}

    def test_remove_explicit_path(self):
        merged = config.merge_service_dicts(
            {self.config_name: ['/foo:/code', '/quux:/data']},
            {self.config_name: ['/bar:/code', '/data']},
            DEFAULT_VERSION)
        assert set(merged[self.config_name]) == {'/bar:/code', '/data'}
class MergeVolumesTest(unittest.TestCase, MergePathMappingTest):
    """Run the path-mapping merge tests against the 'volumes' option."""
    config_name = 'volumes'
class MergeDevicesTest(unittest.TestCase, MergePathMappingTest):
    """Run the path-mapping merge tests against the 'devices' option."""
    config_name = 'devices'
class BuildOrImageMergeTest(unittest.TestCase):
    """Merging of the mutually-exclusive 'build' and 'image' keys (V1).
    Plain asserts are used for consistency with the pytest-style assertions
    in the rest of this file."""

    def test_merge_build_or_image_no_override(self):
        """Without an override, whichever key the base has is kept."""
        assert config.merge_service_dicts({'build': '.'}, {}, V1) == {'build': '.'}
        assert config.merge_service_dicts({'image': 'redis'}, {}, V1) == {'image': 'redis'}

    def test_merge_build_or_image_override_with_same(self):
        """An override carrying the same key replaces the base value."""
        assert config.merge_service_dicts(
            {'build': '.'}, {'build': './web'}, V1
        ) == {'build': './web'}
        assert config.merge_service_dicts(
            {'image': 'redis'}, {'image': 'postgres'}, V1
        ) == {'image': 'postgres'}

    def test_merge_build_or_image_override_with_other(self):
        """An override carrying the opposite key wins and drops the base key."""
        assert config.merge_service_dicts(
            {'build': '.'}, {'image': 'redis'}, V1
        ) == {'image': 'redis'}
        assert config.merge_service_dicts(
            {'image': 'redis'}, {'build': '.'}, V1
        ) == {'build': '.'}
class MergeListsTest(object):
    """Mixin for options whose base and override lists are unioned; concrete
    subclasses set ``config_name``, ``base_config`` and ``override_config``."""
    config_name = ""
    base_config = []
    override_config = []

    def merged_config(self):
        """Expected union of base and override entries."""
        return set(self.base_config) | set(self.override_config)

    def test_empty(self):
        merged = config.merge_service_dicts({}, {}, DEFAULT_VERSION)
        assert self.config_name not in merged

    def test_no_override(self):
        merged = config.merge_service_dicts(
            {self.config_name: self.base_config},
            {},
            DEFAULT_VERSION)
        assert set(merged[self.config_name]) == set(self.base_config)

    def test_no_base(self):
        merged = config.merge_service_dicts(
            {},
            {self.config_name: self.base_config},
            DEFAULT_VERSION)
        assert set(merged[self.config_name]) == set(self.base_config)

    def test_add_item(self):
        merged = config.merge_service_dicts(
            {self.config_name: self.base_config},
            {self.config_name: self.override_config},
            DEFAULT_VERSION)
        assert set(merged[self.config_name]) == set(self.merged_config())
class MergePortsTest(unittest.TestCase, MergeListsTest):
    """Port lists are normalised during merging, so expectations are built
    through :meth:`convert` instead of a raw set()."""
    config_name = 'ports'
    base_config = ['10:8000', '9000']
    override_config = ['20:8000']

    def merged_config(self):
        return self.convert(self.base_config) | self.convert(self.override_config)

    def convert(self, port_config):
        """Normalise a port list the same way merge_service_dicts does."""
        merged = config.merge_service_dicts(
            {self.config_name: port_config},
            {self.config_name: []},
            DEFAULT_VERSION
        )
        return set(merged[self.config_name])

    def test_duplicate_port_mappings(self):
        merged = config.merge_service_dicts(
            {self.config_name: self.base_config},
            {self.config_name: self.base_config},
            DEFAULT_VERSION
        )
        assert set(merged[self.config_name]) == self.convert(self.base_config)

    def test_no_override(self):
        merged = config.merge_service_dicts(
            {self.config_name: self.base_config},
            {},
            DEFAULT_VERSION)
        assert set(merged[self.config_name]) == self.convert(self.base_config)

    def test_no_base(self):
        merged = config.merge_service_dicts(
            {},
            {self.config_name: self.base_config},
            DEFAULT_VERSION)
        assert set(merged[self.config_name]) == self.convert(self.base_config)
class MergeNetworksTest(unittest.TestCase, MergeListsTest):
    """Run the list-union merge tests against the 'networks' option."""
    config_name = 'networks'
    base_config = ['frontend', 'backend']
    override_config = ['monitoring']
class MergeStringsOrListsTest(unittest.TestCase):
    """Options like 'dns' accept a string or a list; merging unions them."""

    def test_no_override(self):
        merged = config.merge_service_dicts(
            {'dns': '8.8.8.8'},
            {},
            DEFAULT_VERSION)
        assert set(merged['dns']) == {'8.8.8.8'}

    def test_no_base(self):
        merged = config.merge_service_dicts(
            {},
            {'dns': '8.8.8.8'},
            DEFAULT_VERSION)
        assert set(merged['dns']) == {'8.8.8.8'}

    def test_add_string(self):
        # List in the base, bare string in the override.
        merged = config.merge_service_dicts(
            {'dns': ['8.8.8.8']},
            {'dns': '9.9.9.9'},
            DEFAULT_VERSION)
        assert set(merged['dns']) == {'8.8.8.8', '9.9.9.9'}

    def test_add_list(self):
        # Bare string in the base, list in the override.
        merged = config.merge_service_dicts(
            {'dns': '8.8.8.8'},
            {'dns': ['9.9.9.9']},
            DEFAULT_VERSION)
        assert set(merged['dns']) == {'8.8.8.8', '9.9.9.9'}
class MergeLabelsTest(unittest.TestCase):
    """Merging of 'labels': list entries are parsed to a dict ('key' alone
    maps to ''), then base and override dicts merge with override winning."""

    def test_empty(self):
        assert 'labels' not in config.merge_service_dicts({}, {}, DEFAULT_VERSION)

    def test_no_override(self):
        merged = config.merge_service_dicts(
            make_service_dict('foo', {'build': '.', 'labels': ['foo=1', 'bar']}, 'tests/'),
            make_service_dict('foo', {'build': '.'}, 'tests/'),
            DEFAULT_VERSION)
        assert merged['labels'] == {'foo': '1', 'bar': ''}

    def test_no_base(self):
        merged = config.merge_service_dicts(
            make_service_dict('foo', {'build': '.'}, 'tests/'),
            make_service_dict('foo', {'build': '.', 'labels': ['foo=2']}, 'tests/'),
            DEFAULT_VERSION)
        assert merged['labels'] == {'foo': '2'}

    def test_override_explicit_value(self):
        merged = config.merge_service_dicts(
            make_service_dict('foo', {'build': '.', 'labels': ['foo=1', 'bar']}, 'tests/'),
            make_service_dict('foo', {'build': '.', 'labels': ['foo=2']}, 'tests/'),
            DEFAULT_VERSION)
        assert merged['labels'] == {'foo': '2', 'bar': ''}

    def test_add_explicit_value(self):
        merged = config.merge_service_dicts(
            make_service_dict('foo', {'build': '.', 'labels': ['foo=1', 'bar']}, 'tests/'),
            make_service_dict('foo', {'build': '.', 'labels': ['bar=2']}, 'tests/'),
            DEFAULT_VERSION)
        assert merged['labels'] == {'foo': '1', 'bar': '2'}

    def test_remove_explicit_value(self):
        merged = config.merge_service_dicts(
            make_service_dict('foo', {'build': '.', 'labels': ['foo=1', 'bar=2']}, 'tests/'),
            make_service_dict('foo', {'build': '.', 'labels': ['bar']}, 'tests/'),
            DEFAULT_VERSION)
        assert merged['labels'] == {'foo': '1', 'bar': ''}
class MergeBuildTest(unittest.TestCase):
    """Merging of the 'build' section via config.merge_build."""
    def test_full(self):
        """Scalars are replaced, args/labels merge per key, cache_from lists
        are unioned."""
        base = {
            'context': '.',
            'dockerfile': 'Dockerfile',
            'args': {
                'x': '1',
                'y': '2',
            },
            'cache_from': ['ubuntu'],
            'labels': ['com.docker.compose.test=true']
        }
        override = {
            'context': './prod',
            'dockerfile': 'Dockerfile.prod',
            'args': ['x=12'],
            'cache_from': ['debian'],
            'labels': {
                'com.docker.compose.test': 'false',
                'com.docker.compose.prod': 'true',
            }
        }
        result = config.merge_build(None, {'build': base}, {'build': override})
        assert result['context'] == override['context']
        assert result['dockerfile'] == override['dockerfile']
        assert result['args'] == {'x': '12', 'y': '2'}
        assert set(result['cache_from']) == set(['ubuntu', 'debian'])
        assert result['labels'] == override['labels']
    def test_empty_override(self):
        """An empty override leaves the base build section unchanged."""
        base = {
            'context': '.',
            'dockerfile': 'Dockerfile',
            'args': {
                'x': '1',
                'y': '2',
            },
            'cache_from': ['ubuntu'],
            'labels': {
                'com.docker.compose.test': 'true'
            }
        }
        override = {}
        result = config.merge_build(None, {'build': base}, {'build': override})
        assert result == base
    def test_empty_base(self):
        """An empty base takes the override build section wholesale."""
        base = {}
        override = {
            'context': './prod',
            'dockerfile': 'Dockerfile.prod',
            'args': {'x': '12'},
            'cache_from': ['debian'],
            'labels': {
                'com.docker.compose.test': 'false',
                'com.docker.compose.prod': 'true',
            }
        }
        result = config.merge_build(None, {'build': base}, {'build': override})
        assert result == override
class MemoryOptionsTest(unittest.TestCase):
    """Validation of mem_limit / memswap_limit combinations."""
    def test_validation_fails_with_just_memswap_limit(self):
        """
        When you set a 'memswap_limit' it is invalid config unless you also set
        a mem_limit
        """
        with pytest.raises(ConfigurationError) as excinfo:
            config.load(
                build_config_details(
                    {
                        'foo': {'image': 'busybox', 'memswap_limit': 2000000},
                    },
                    'tests/fixtures/extends',
                    'filename.yml'
                )
            )
        assert "foo.memswap_limit is invalid: when defining " \
            "'memswap_limit' you must set 'mem_limit' as well" \
            in excinfo.exconly()
    def test_validation_with_correct_memswap_values(self):
        service_dict = config.load(
            build_config_details(
                {'foo': {'image': 'busybox', 'mem_limit': 1000000, 'memswap_limit': 2000000}},
                'tests/fixtures/extends',
                'common.yml'
            )
        ).services
        self.assertEqual(service_dict[0]['memswap_limit'], 2000000)
    def test_memswap_can_be_a_string(self):
        # Byte-unit suffixes ("1G", "512M") are accepted and kept verbatim.
        service_dict = config.load(
            build_config_details(
                {'foo': {'image': 'busybox', 'mem_limit': "1G", 'memswap_limit': "512M"}},
                'tests/fixtures/extends',
                'common.yml'
            )
        ).services
        self.assertEqual(service_dict[0]['memswap_limit'], "512M")
class EnvTest(unittest.TestCase):
    """Parsing and resolution of 'environment', 'env_file' and build args."""
    def test_parse_environment_as_list(self):
        environment = [
            'NORMAL=F1',
            'CONTAINS_EQUALS=F=2',
            'TRAILING_EQUALS=',
        ]
        self.assertEqual(
            config.parse_environment(environment),
            # Only the first '=' splits; a trailing '=' yields ''.
            {'NORMAL': 'F1', 'CONTAINS_EQUALS': 'F=2', 'TRAILING_EQUALS': ''},
        )
    def test_parse_environment_as_dict(self):
        environment = {
            'NORMAL': 'F1',
            'CONTAINS_EQUALS': 'F=2',
            'TRAILING_EQUALS': None,
        }
        self.assertEqual(config.parse_environment(environment), environment)
    def test_parse_environment_invalid(self):
        # A bare string (not list/dict) is rejected.
        with self.assertRaises(ConfigurationError):
            config.parse_environment('a=b')
    def test_parse_environment_empty(self):
        self.assertEqual(config.parse_environment(None), {})
    @mock.patch.dict(os.environ)
    def test_resolve_environment(self):
        """None-valued entries resolve from the host environment; explicit
        values win over it; unknown names stay None."""
        os.environ['FILE_DEF'] = 'E1'
        os.environ['FILE_DEF_EMPTY'] = 'E2'
        os.environ['ENV_DEF'] = 'E3'
        service_dict = {
            'build': '.',
            'environment': {
                'FILE_DEF': 'F1',
                'FILE_DEF_EMPTY': '',
                'ENV_DEF': None,
                'NO_DEF': None
            },
        }
        self.assertEqual(
            resolve_environment(
                service_dict, Environment.from_env_file(None)
            ),
            {'FILE_DEF': 'F1', 'FILE_DEF_EMPTY': '', 'ENV_DEF': 'E3', 'NO_DEF': None},
        )
    def test_resolve_environment_from_env_file(self):
        self.assertEqual(
            resolve_environment({'env_file': ['tests/fixtures/env/one.env']}),
            {'ONE': '2', 'TWO': '1', 'THREE': '3', 'FOO': 'bar'},
        )
    def test_environment_overrides_env_file(self):
        self.assertEqual(
            resolve_environment({
                'environment': {'FOO': 'baz'},
                'env_file': ['tests/fixtures/env/one.env'],
            }),
            {'ONE': '2', 'TWO': '1', 'THREE': '3', 'FOO': 'baz'},
        )
    def test_resolve_environment_with_multiple_env_files(self):
        # Later files win over earlier ones (two.env overrides FOO).
        service_dict = {
            'env_file': [
                'tests/fixtures/env/one.env',
                'tests/fixtures/env/two.env'
            ]
        }
        self.assertEqual(
            resolve_environment(service_dict),
            {'ONE': '2', 'TWO': '1', 'THREE': '3', 'FOO': 'baz', 'DOO': 'dah'},
        )
    def test_resolve_environment_nonexistent_file(self):
        with pytest.raises(ConfigurationError) as exc:
            config.load(build_config_details(
                {'foo': {'image': 'example', 'env_file': 'nonexistent.env'}},
                working_dir='tests/fixtures/env'))
        assert 'Couldn\'t find env file' in exc.exconly()
        assert 'nonexistent.env' in exc.exconly()
    @mock.patch.dict(os.environ)
    def test_resolve_environment_from_env_file_with_empty_values(self):
        os.environ['FILE_DEF'] = 'E1'
        os.environ['FILE_DEF_EMPTY'] = 'E2'
        os.environ['ENV_DEF'] = 'E3'
        self.assertEqual(
            resolve_environment(
                {'env_file': ['tests/fixtures/env/resolve.env']},
                Environment.from_env_file(None)
            ),
            {
                'FILE_DEF': u'bär',
                'FILE_DEF_EMPTY': '',
                'ENV_DEF': 'E3',
                'NO_DEF': None
            },
        )
    @mock.patch.dict(os.environ)
    def test_resolve_build_args(self):
        # Build args resolve like environment entries: None pulls from the
        # host environment, unknown names stay None.
        os.environ['env_arg'] = 'value2'
        build = {
            'context': '.',
            'args': {
                'arg1': 'value1',
                'empty_arg': '',
                'env_arg': None,
                'no_env': None
            }
        }
        self.assertEqual(
            resolve_build_args(build['args'], Environment.from_env_file(build['context'])),
            {'arg1': 'value1', 'empty_arg': '', 'env_arg': 'value2', 'no_env': None},
        )
    @pytest.mark.xfail(IS_WINDOWS_PLATFORM, reason='paths use slash')
    @mock.patch.dict(os.environ)
    def test_resolve_path(self):
        os.environ['HOSTENV'] = '/tmp'
        os.environ['CONTAINERENV'] = '/host/tmp'
        service_dict = config.load(
            build_config_details(
                {'foo': {'build': '.', 'volumes': ['$HOSTENV:$CONTAINERENV']}},
                "tests/fixtures/env",
            )
        ).services[0]
        self.assertEqual(
            set(service_dict['volumes']),
            set([VolumeSpec.parse('/tmp:/host/tmp')]))
        service_dict = config.load(
            build_config_details(
                {'foo': {'build': '.', 'volumes': ['/opt${HOSTENV}:/opt${CONTAINERENV}']}},
                "tests/fixtures/env",
            )
        ).services[0]
        self.assertEqual(
            set(service_dict['volumes']),
            set([VolumeSpec.parse('/opt/tmp:/opt/host/tmp')]))
def load_from_filename(filename, override_dir=None):
    """Load the services of a single compose file, rooted at the cwd."""
    env = Environment.from_env_file('.')
    details = config.find('.', [filename], env, override_dir=override_dir)
    return config.load(details).services
class ExtendsTest(unittest.TestCase):
    def test_extends(self):
        """Basic extends: the extending service inherits and overrides the
        extended service's settings (expected values come from the fixture)."""
        service_dicts = load_from_filename('tests/fixtures/extends/docker-compose.yml')
        self.assertEqual(service_sort(service_dicts), service_sort([
            {
                'name': 'mydb',
                'image': 'busybox',
                'command': 'top',
            },
            {
                'name': 'myweb',
                'image': 'busybox',
                'command': 'top',
                'network_mode': 'bridge',
                'links': ['mydb:db'],
                'environment': {
                    "FOO": "1",
                    "BAR": "2",
                    "BAZ": "2",
                },
            }
        ]))
def test_merging_env_labels_ulimits(self):
service_dicts = load_from_filename('tests/fixtures/extends/common-env-labels-ulimits.yml')
self.assertEqual(service_sort(service_dicts), service_sort([
{
'name': 'web',
'image': 'busybox',
'command': '/bin/true',
'network_mode': 'host',
'environment': {
"FOO": "2",
"BAR": "1",
"BAZ": "3",
},
'labels': {'label': 'one'},
'ulimits': {'nproc': 65535, 'memlock': {'soft': 1024, 'hard': 2048}}
}
]))
def test_nested(self):
service_dicts = load_from_filename('tests/fixtures/extends/nested.yml')
self.assertEqual(service_dicts, [
{
'name': 'myweb',
'image': 'busybox',
'command': '/bin/true',
'network_mode': 'host',
'environment': {
"FOO": "2",
"BAR": "2",
},
},
])
    def test_self_referencing_file(self):
        """
        We specify a 'file' key that is the filename we're already in.
        """
        service_dicts = load_from_filename('tests/fixtures/extends/specify-file-as-self.yml')
        self.assertEqual(service_sort(service_dicts), service_sort([
            {
                'environment':
                {
                    'YEP': '1', 'BAR': '1', 'BAZ': '3'
                },
                'image': 'busybox',
                'name': 'myweb'
            },
            {
                'environment':
                {'YEP': '1'},
                'image': 'busybox',
                'name': 'otherweb'
            },
            {
                'environment':
                {'YEP': '1', 'BAZ': '3'},
                'image': 'busybox',
                'name': 'web'
            }
        ]))

    def test_circular(self):
        # A circular extends chain raises CircularReference; its trail
        # records each (file, service) hop, ending back where it started.
        with pytest.raises(config.CircularReference) as exc:
            load_from_filename('tests/fixtures/extends/circle-1.yml')

        path = [
            (os.path.basename(filename), service_name)
            for (filename, service_name) in exc.value.trail
        ]
        expected = [
            ('circle-1.yml', 'web'),
            ('circle-2.yml', 'other'),
            ('circle-1.yml', 'web'),
        ]
        self.assertEqual(path, expected)
    def test_extends_validation_empty_dictionary(self):
        # `extends: {}` is rejected; the error mentions the missing 'service'.
        with pytest.raises(ConfigurationError) as excinfo:
            config.load(
                build_config_details(
                    {
                        'web': {'image': 'busybox', 'extends': {}},
                    },
                    'tests/fixtures/extends',
                    'filename.yml'
                )
            )

        assert 'service' in excinfo.exconly()

    def test_extends_validation_missing_service_key(self):
        # A 'file' key alone is not enough: 'service' is required.
        with pytest.raises(ConfigurationError) as excinfo:
            config.load(
                build_config_details(
                    {
                        'web': {'image': 'busybox', 'extends': {'file': 'common.yml'}},
                    },
                    'tests/fixtures/extends',
                    'filename.yml'
                )
            )

        assert "'service' is a required property" in excinfo.exconly()

    def test_extends_validation_invalid_key(self):
        # Unknown keys under extends are reported by name.
        with pytest.raises(ConfigurationError) as excinfo:
            config.load(
                build_config_details(
                    {
                        'web': {
                            'image': 'busybox',
                            'extends': {
                                'file': 'common.yml',
                                'service': 'web',
                                'rogue_key': 'is not allowed'
                            }
                        },
                    },
                    'tests/fixtures/extends',
                    'filename.yml'
                )
            )

        assert "web.extends contains unsupported option: 'rogue_key'" \
            in excinfo.exconly()

    def test_extends_validation_sub_property_key(self):
        # Type errors inside extends carry the full key path in the message.
        with pytest.raises(ConfigurationError) as excinfo:
            config.load(
                build_config_details(
                    {
                        'web': {
                            'image': 'busybox',
                            'extends': {
                                'file': 1,
                                'service': 'web',
                            }
                        },
                    },
                    'tests/fixtures/extends',
                    'filename.yml'
                )
            )

        assert "web.extends.file contains 1, which is an invalid type, it should be a string" \
            in excinfo.exconly()

    def test_extends_validation_no_file_key_no_filename_set(self):
        # Without a filename in the loading context, extends must give 'file'.
        dictionary = {'extends': {'service': 'web'}}

        with pytest.raises(ConfigurationError) as excinfo:
            make_service_dict('myweb', dictionary, working_dir='tests/fixtures/extends')

        assert 'file' in excinfo.exconly()
    def test_extends_validation_valid_config(self):
        # A well-formed extends block loads and pulls 'command' from the
        # base service.
        service = config.load(
            build_config_details(
                {
                    'web': {'image': 'busybox', 'extends': {'service': 'web', 'file': 'common.yml'}},
                },
                'tests/fixtures/extends',
                'common.yml'
            )
        ).services

        self.assertEqual(len(service), 1)
        self.assertIsInstance(service[0], dict)
        self.assertEqual(service[0]['command'], "/bin/true")

    def test_extended_service_with_invalid_config(self):
        # The *merged* result is still validated and errors are reported.
        with pytest.raises(ConfigurationError) as exc:
            load_from_filename('tests/fixtures/extends/service-with-invalid-schema.yml')
        assert (
            "myweb has neither an image nor a build context specified" in
            exc.exconly()
        )

    def test_extended_service_with_valid_config(self):
        service = load_from_filename('tests/fixtures/extends/service-with-valid-composite-extends.yml')
        self.assertEqual(service[0]['command'], "top")
    def test_extends_file_defaults_to_self(self):
        """
        Test not specifying a file in our extends options that the
        config is valid and correctly extends from itself.
        """
        service_dicts = load_from_filename('tests/fixtures/extends/no-file-specified.yml')
        self.assertEqual(service_sort(service_dicts), service_sort([
            {
                'name': 'myweb',
                'image': 'busybox',
                'environment': {
                    "BAR": "1",
                    "BAZ": "3",
                }
            },
            {
                'name': 'web',
                'image': 'busybox',
                'environment': {
                    "BAZ": "3",
                }
            }
        ]))

    def test_invalid_links_in_extended_service(self):
        # Options that tie a service to its siblings (links, volumes_from,
        # container-mode net) make the service non-extendable.
        with pytest.raises(ConfigurationError) as excinfo:
            load_from_filename('tests/fixtures/extends/invalid-links.yml')

        assert "services with 'links' cannot be extended" in excinfo.exconly()

    def test_invalid_volumes_from_in_extended_service(self):
        with pytest.raises(ConfigurationError) as excinfo:
            load_from_filename('tests/fixtures/extends/invalid-volumes.yml')

        assert "services with 'volumes_from' cannot be extended" in excinfo.exconly()

    def test_invalid_net_in_extended_service(self):
        # Both the v2 (network_mode: service:x) and v1 (net: container:x)
        # spellings are rejected.
        with pytest.raises(ConfigurationError) as excinfo:
            load_from_filename('tests/fixtures/extends/invalid-net-v2.yml')

        assert 'network_mode: service' in excinfo.exconly()
        assert 'cannot be extended' in excinfo.exconly()

        with pytest.raises(ConfigurationError) as excinfo:
            load_from_filename('tests/fixtures/extends/invalid-net.yml')

        assert 'net: container' in excinfo.exconly()
        assert 'cannot be extended' in excinfo.exconly()
    @mock.patch.dict(os.environ)
    def test_load_config_runs_interpolation_in_extended_service(self):
        # Variable interpolation also applies to values inherited via extends.
        os.environ.update(HOSTNAME_VALUE="penguin")
        expected_interpolated_value = "host-penguin"
        service_dicts = load_from_filename(
            'tests/fixtures/extends/valid-interpolation.yml')
        for service in service_dicts:
            assert service['hostname'] == expected_interpolated_value

    @pytest.mark.xfail(IS_WINDOWS_PLATFORM, reason='paths use slash')
    def test_volume_path(self):
        # Relative host paths resolve against the directory of the file that
        # declares them (common/ for the inherited one, top level otherwise).
        dicts = load_from_filename('tests/fixtures/volume-path/docker-compose.yml')

        paths = [
            VolumeSpec(
                os.path.abspath('tests/fixtures/volume-path/common/foo'),
                '/foo',
                'rw'),
            VolumeSpec(
                os.path.abspath('tests/fixtures/volume-path/bar'),
                '/bar',
                'rw')
        ]

        self.assertEqual(set(dicts[0]['volumes']), set(paths))
    def test_parent_build_path_dne(self):
        # Fixture extends from a parent whose build path does not exist;
        # loading still succeeds for the child.
        child = load_from_filename('tests/fixtures/extends/nonexistent-path-child.yml')

        self.assertEqual(child, [
            {
                'name': 'dnechild',
                'image': 'busybox',
                'command': '/bin/true',
                'environment': {
                    "FOO": "1",
                    "BAR": "2",
                },
            },
        ])

    def test_load_throws_error_when_base_service_does_not_exist(self):
        with pytest.raises(ConfigurationError) as excinfo:
            load_from_filename('tests/fixtures/extends/nonexistent-service.yml')

        assert "Cannot extend service 'foo'" in excinfo.exconly()
        assert "Service not found" in excinfo.exconly()

    def test_partial_service_config_in_extends_is_still_valid(self):
        # The base file's service need not be complete on its own;
        # validation happens on the merged result.
        dicts = load_from_filename('tests/fixtures/extends/valid-common-config.yml')
        self.assertEqual(dicts[0]['environment'], {'FOO': '1'})

    def test_extended_service_with_verbose_and_shorthand_way(self):
        # The dict form ({service: ...}) and the shorthand string form of
        # extends produce equivalent results.
        services = load_from_filename('tests/fixtures/extends/verbose-and-shorthand.yml')
        self.assertEqual(service_sort(services), service_sort([
            {
                'name': 'base',
                'image': 'busybox',
                'environment': {'BAR': '1'},
            },
            {
                'name': 'verbose',
                'image': 'busybox',
                'environment': {'BAR': '1', 'FOO': '1'},
            },
            {
                'name': 'shorthand',
                'image': 'busybox',
                'environment': {'BAR': '1', 'FOO': '2'},
            },
        ]))
@mock.patch.dict(os.environ)
def test_extends_with_environment_and_env_files(self):
tmpdir = py.test.ensuretemp('test_extends_with_environment')
self.addCleanup(tmpdir.remove)
commondir = tmpdir.mkdir('common')
commondir.join('base.yml').write("""
app:
image: 'example/app'
env_file:
- 'envs'
environment:
- SECRET
- TEST_ONE=common
- TEST_TWO=common
""")
tmpdir.join('docker-compose.yml').write("""
ext:
extends:
file: common/base.yml
service: app
env_file:
- 'envs'
environment:
- THING
- TEST_ONE=top
""")
commondir.join('envs').write("""
COMMON_ENV_FILE
TEST_ONE=common-env-file
TEST_TWO=common-env-file
TEST_THREE=common-env-file
TEST_FOUR=common-env-file
""")
tmpdir.join('envs').write("""
TOP_ENV_FILE
TEST_ONE=top-env-file
TEST_TWO=top-env-file
TEST_THREE=top-env-file
""")
expected = [
{
'name': 'ext',
'image': 'example/app',
'environment': {
'SECRET': 'secret',
'TOP_ENV_FILE': 'secret',
'COMMON_ENV_FILE': 'secret',
'THING': 'thing',
'TEST_ONE': 'top',
'TEST_TWO': 'common',
'TEST_THREE': 'top-env-file',
'TEST_FOUR': 'common-env-file',
},
},
]
os.environ['SECRET'] = 'secret'
os.environ['THING'] = 'thing'
os.environ['COMMON_ENV_FILE'] = 'secret'
os.environ['TOP_ENV_FILE'] = 'secret'
config = load_from_filename(str(tmpdir.join('docker-compose.yml')))
assert config == expected
    def test_extends_with_mixed_versions_is_error(self):
        # A v2 file cannot extend from a version-less (v1) file.
        tmpdir = py.test.ensuretemp('test_extends_with_mixed_version')
        self.addCleanup(tmpdir.remove)
        tmpdir.join('docker-compose.yml').write("""
            version: "2"
            services:
              web:
                extends:
                  file: base.yml
                  service: base
                image: busybox
        """)
        tmpdir.join('base.yml').write("""
            base:
              volumes: ['/foo']
              ports: ['3000:3000']
        """)

        with pytest.raises(ConfigurationError) as exc:
            load_from_filename(str(tmpdir.join('docker-compose.yml')))
        assert 'Version mismatch' in exc.exconly()

    def test_extends_with_defined_version_passes(self):
        # Matching versions in both files load cleanly.
        tmpdir = py.test.ensuretemp('test_extends_with_defined_version')
        self.addCleanup(tmpdir.remove)
        tmpdir.join('docker-compose.yml').write("""
            version: "2"
            services:
              web:
                extends:
                  file: base.yml
                  service: base
                image: busybox
        """)
        tmpdir.join('base.yml').write("""
            version: "2"
            services:
              base:
                volumes: ['/foo']
                ports: ['3000:3000']
                command: top
        """)

        service = load_from_filename(str(tmpdir.join('docker-compose.yml')))
        self.assertEqual(service[0]['command'], "top")
    def test_extends_with_depends_on(self):
        # depends_on declared on the extending service is preserved and
        # normalized to the dict-with-condition form.
        tmpdir = py.test.ensuretemp('test_extends_with_depends_on')
        self.addCleanup(tmpdir.remove)
        tmpdir.join('docker-compose.yml').write("""
            version: "2"
            services:
              base:
                image: example
              web:
                extends: base
                image: busybox
                depends_on: ['other']
              other:
                image: example
        """)
        services = load_from_filename(str(tmpdir.join('docker-compose.yml')))
        assert service_sort(services)[2]['depends_on'] == {
            'other': {'condition': 'service_started'}
        }

    def test_extends_with_healthcheck(self):
        # Healthcheck values survive extends; durations are normalized to
        # integer nanoseconds.
        service_dicts = load_from_filename('tests/fixtures/extends/healthcheck-2.yml')
        assert service_sort(service_dicts) == [{
            'name': 'demo',
            'image': 'foobar:latest',
            'healthcheck': {
                'test': ['CMD', '/health.sh'],
                'interval': 10000000000,
                'timeout': 5000000000,
                'retries': 36,
            }
        }]
    def test_extends_with_ports(self):
        # Ports from the base service are inherited and parsed into
        # ServicePort tuples for both services.
        tmpdir = py.test.ensuretemp('test_extends_with_ports')
        self.addCleanup(tmpdir.remove)
        tmpdir.join('docker-compose.yml').write("""
            version: '2'
            services:
              a:
                image: nginx
                ports:
                  - 80
              b:
                extends:
                  service: a
        """)
        services = load_from_filename(str(tmpdir.join('docker-compose.yml')))

        assert len(services) == 2
        for svc in services:
            assert svc['ports'] == [types.ServicePort('80', None, None, None, None)]
@pytest.mark.xfail(IS_WINDOWS_PLATFORM, reason='paths use slash')
class ExpandPathTest(unittest.TestCase):
    """config.expand_path: relative names are anchored at the working dir,
    absolute paths pass through unchanged, and ~ expands against $HOME."""

    working_dir = '/home/user/somedir'

    def test_expand_path_normal(self):
        expanded = config.expand_path(self.working_dir, 'myfile')
        expected = self.working_dir + '/' + 'myfile'
        self.assertEqual(expanded, expected)

    def test_expand_path_absolute(self):
        abs_path = '/home/user/otherdir/somefile'
        expanded = config.expand_path(self.working_dir, abs_path)
        self.assertEqual(expanded, abs_path)

    def test_expand_path_with_tilde(self):
        with mock.patch.dict(os.environ):
            os.environ['HOME'] = user_path = '/home/user/'
            expanded = config.expand_path(self.working_dir, '~/otherdir/somefile')
            self.assertEqual(expanded, user_path + 'otherdir/somefile')
class VolumePathTest(unittest.TestCase):
    """split_path_mapping must cope with Windows drive-letter paths, whose
    colons would otherwise be mistaken for the host:container separator."""

    def test_split_path_mapping_with_windows_path(self):
        host_path = "c:\\Users\\msamblanet\\Documents\\anvil\\connect\\config"
        mapping = config.split_path_mapping(host_path + ":/opt/connect/config:ro")
        assert mapping == ("/opt/connect/config", (host_path, 'ro'))

    def test_split_path_mapping_with_windows_path_in_container(self):
        host_path = 'c:\\Users\\remilia\\data'
        container_path = 'c:\\scarletdevil\\data'
        mapping = config.split_path_mapping('{0}:{1}'.format(host_path, container_path))
        assert mapping == (container_path, (host_path, None))

    def test_split_path_mapping_with_root_mount(self):
        # A bare '/' host path must survive splitting intact.
        mapping = config.split_path_mapping('{0}:{1}'.format('/', '/var/hostroot'))
        assert mapping == ('/var/hostroot', ('/', None))
@pytest.mark.xfail(IS_WINDOWS_PLATFORM, reason='paths use slash')
class BuildPathTest(unittest.TestCase):
    """Resolution and validation of the `build` path: local directories
    (relative and absolute) and remote URL contexts."""

    def setUp(self):
        self.abs_context_path = os.path.join(os.getcwd(), 'tests/fixtures/build-ctx')

    def test_nonexistent_path(self):
        # A local build path that does not exist is a configuration error.
        with self.assertRaises(ConfigurationError):
            config.load(
                build_config_details(
                    {
                        'foo': {'build': 'nonexistent.path'},
                    },
                    'working_dir',
                    'filename.yml'
                )
            )

    def test_relative_path(self):
        # Relative build paths resolve against the declaring file's dir.
        relative_build_path = '../build-ctx/'
        service_dict = make_service_dict(
            'relpath',
            {'build': relative_build_path},
            working_dir='tests/fixtures/build-path'
        )
        self.assertEqual(service_dict['build'], self.abs_context_path)

    def test_absolute_path(self):
        service_dict = make_service_dict(
            'abspath',
            {'build': self.abs_context_path},
            working_dir='tests/fixtures/build-path'
        )
        self.assertEqual(service_dict['build'], self.abs_context_path)

    def test_from_file(self):
        service_dict = load_from_filename('tests/fixtures/build-path/docker-compose.yml')
        self.assertEqual(service_dict, [{'name': 'foo', 'build': {'context': self.abs_context_path}}])

    def test_from_file_override_dir(self):
        # override_dir redirects relative-path resolution away from the
        # file's own directory.
        override_dir = os.path.join(os.getcwd(), 'tests/fixtures/')
        service_dict = load_from_filename(
            'tests/fixtures/build-path-override-dir/docker-compose.yml', override_dir=override_dir)
        # Fix: assertEquals is a deprecated alias of assertEqual (raises
        # DeprecationWarning; removed in Python 3.12).
        self.assertEqual(service_dict, [{'name': 'foo', 'build': {'context': self.abs_context_path}}])

    def test_valid_url_in_build_path(self):
        # Git/HTTP URL contexts are accepted verbatim.
        valid_urls = [
            'git://github.com/docker/docker',
            'git@github.com:docker/docker.git',
            'git@bitbucket.org:atlassianlabs/atlassian-docker.git',
            'https://github.com/docker/docker.git',
            'http://github.com/docker/docker.git',
            'github.com/docker/docker.git',
        ]
        for valid_url in valid_urls:
            service_dict = config.load(build_config_details({
                'validurl': {'build': valid_url},
            }, '.', None)).services
            assert service_dict[0]['build'] == {'context': valid_url}

    def test_invalid_url_in_build_path(self):
        invalid_urls = [
            'example.com/bogus',
            'ftp://example.com/',
            '/path/does/not/exist',
        ]
        for invalid_url in invalid_urls:
            with pytest.raises(ConfigurationError) as exc:
                config.load(build_config_details({
                    'invalidurl': {'build': invalid_url},
                }, '.', None))
            assert 'build path' in exc.exconly()
class HealthcheckTest(unittest.TestCase):
    """Healthcheck normalization: duration strings become nanoseconds and
    `disable` collapses to the sentinel NONE test."""

    def test_healthcheck(self):
        cfg = make_service_dict(
            'test',
            {'healthcheck': {
                'test': ['CMD', 'true'],
                'interval': '1s',
                'timeout': '1m',
                'retries': 3,
                'start_period': '10s'
            }},
            '.',
        )

        expected = {
            'test': ['CMD', 'true'],
            'interval': nanoseconds_from_time_seconds(1),
            'timeout': nanoseconds_from_time_seconds(60),
            'retries': 3,
            'start_period': nanoseconds_from_time_seconds(10),
        }
        assert cfg['healthcheck'] == expected

    def test_disable(self):
        cfg = make_service_dict(
            'test',
            {'healthcheck': {
                'disable': True,
            }},
            '.',
        )

        assert cfg['healthcheck'] == {'test': ['NONE']}

    def test_disable_with_other_config_is_invalid(self):
        # disable=True is exclusive: combining it with other healthcheck
        # options must be rejected, naming both the service and the key.
        with pytest.raises(ConfigurationError) as excinfo:
            make_service_dict(
                'invalid-healthcheck',
                {'healthcheck': {
                    'disable': True,
                    'interval': '1s',
                }},
                '.',
            )

        message = excinfo.exconly()
        assert 'invalid-healthcheck' in message
        assert 'disable' in message
class GetDefaultConfigFilesTestCase(unittest.TestCase):
    """Default compose-file discovery order and the not-found error."""

    # Preference order: .yml is found before .yaml.
    files = [
        'docker-compose.yml',
        'docker-compose.yaml',
    ]

    def test_get_config_path_default_file_in_basedir(self):
        # Each filename is picked when only it and lower-priority names
        # exist; an empty directory raises ComposeFileNotFound.
        for index, filename in enumerate(self.files):
            self.assertEqual(
                filename,
                get_config_filename_for_files(self.files[index:]))

        with self.assertRaises(config.ComposeFileNotFound):
            get_config_filename_for_files([])

    def test_get_config_path_default_file_in_parent_dir(self):
        """Test with files placed in the subdir"""

        def get_config_in_subdir(files):
            return get_config_filename_for_files(files, subdir=True)

        for index, filename in enumerate(self.files):
            self.assertEqual(filename, get_config_in_subdir(self.files[index:]))

        with self.assertRaises(config.ComposeFileNotFound):
            get_config_in_subdir([])
def get_config_filename_for_files(filenames, subdir=None):
    """Create empty files named *filenames* in a temp project dir and return
    the basename of the single default config file discovered there.

    When *subdir* is truthy, discovery starts from a fresh child directory
    instead of the project dir itself.
    """
    project_dir = tempfile.mkdtemp()
    try:
        for name in filenames:
            open(os.path.join(project_dir, name), 'w').close()
        base_dir = tempfile.mkdtemp(dir=project_dir) if subdir else project_dir
        # Exactly one config file is expected; unpacking enforces that.
        found, = config.get_default_config_files(base_dir)
        return os.path.basename(found)
    finally:
        shutil.rmtree(project_dir)
class SerializeTest(unittest.TestCase):
    """serialize_config / denormalize_service_dict behaviour across schema
    versions."""

    def test_denormalize_depends_on_v3(self):
        # v3 has no per-dependency conditions: the dict form collapses back
        # to a plain list of service names.
        service_dict = {
            'image': 'busybox',
            'command': 'true',
            'depends_on': {
                'service2': {'condition': 'service_started'},
                'service3': {'condition': 'service_started'},
            }
        }

        assert denormalize_service_dict(service_dict, V3_0) == {
            'image': 'busybox',
            'command': 'true',
            'depends_on': ['service2', 'service3']
        }

    def test_denormalize_depends_on_v2_1(self):
        # v2.1 supports conditions, so the dict form is preserved as-is.
        service_dict = {
            'image': 'busybox',
            'command': 'true',
            'depends_on': {
                'service2': {'condition': 'service_started'},
                'service3': {'condition': 'service_started'},
            }
        }

        assert denormalize_service_dict(service_dict, V2_1) == service_dict

    def test_serialize_time(self):
        # Nanosecond counts serialize to the largest unit that divides
        # evenly; anything else stays expressed in nanoseconds.
        data = {
            9: '9ns',
            9000: '9us',
            9000000: '9ms',
            90000000: '90ms',
            900000000: '900ms',
            999999999: '999999999ns',
            1000000000: '1s',
            60000000000: '1m',
            60000000001: '60000000001ns',
            9000000000000: '150m',
            90000000000000: '25h',
        }
        for k, v in data.items():
            assert serialize_ns_time_value(k) == v
    def test_denormalize_healthcheck(self):
        # Normalized nanosecond durations denormalize back to compact time
        # strings for serialization.
        service_dict = {
            'image': 'test',
            'healthcheck': {
                'test': 'exit 1',
                'interval': '1m40s',
                'timeout': '30s',
                'retries': 5,
                'start_period': '2s90ms'
            }
        }
        processed_service = config.process_service(config.ServiceConfig(
            '.', 'test', 'test', service_dict
        ))
        denormalized_service = denormalize_service_dict(processed_service, V2_3)
        assert denormalized_service['healthcheck']['interval'] == '100s'
        assert denormalized_service['healthcheck']['timeout'] == '30s'
        assert denormalized_service['healthcheck']['start_period'] == '2090ms'

    def test_denormalize_image_has_digest(self):
        # A supplied digest replaces the bare image reference.
        service_dict = {
            'image': 'busybox'
        }
        image_digest = 'busybox@sha256:abcde'

        assert denormalize_service_dict(service_dict, V3_0, image_digest) == {
            'image': 'busybox@sha256:abcde'
        }

    def test_denormalize_image_no_digest(self):
        # Without a digest the image reference is untouched.
        service_dict = {
            'image': 'busybox'
        }

        assert denormalize_service_dict(service_dict, V3_0) == {
            'image': 'busybox'
        }
    def test_serialize_secrets(self):
        # Secrets survive a load/serialize round trip in both short and
        # long syntax; external secrets keep their definition.
        service_dict = {
            'image': 'example/web',
            'secrets': [
                {'source': 'one'},
                {
                    'source': 'source',
                    'target': 'target',
                    'uid': '100',
                    'gid': '200',
                    'mode': 0o777,
                }
            ]
        }
        secrets_dict = {
            'one': {'file': '/one.txt'},
            'source': {'file': '/source.pem'},
            'two': {'external': True},
        }
        config_dict = config.load(build_config_details({
            'version': '3.1',
            'services': {'web': service_dict},
            'secrets': secrets_dict
        }))

        # NOTE(review): yaml.load without an explicit Loader is deprecated;
        # yaml.safe_load would do since the input is our own serialized output.
        serialized_config = yaml.load(serialize_config(config_dict))
        serialized_service = serialized_config['services']['web']
        assert secret_sort(serialized_service['secrets']) == secret_sort(service_dict['secrets'])
        assert 'secrets' in serialized_config
        assert serialized_config['secrets']['two'] == secrets_dict['two']

    def test_serialize_ports(self):
        # ServicePort tuples render in the canonical
        # "published:target/protocol" string form.
        config_dict = config.Config(version=V2_0, services=[
            {
                'ports': [types.ServicePort('80', '8080', None, None, None)],
                'image': 'alpine',
                'name': 'web'
            }
        ], volumes={}, networks={}, secrets={}, configs={})

        serialized_config = yaml.load(serialize_config(config_dict))
        assert '8080:80/tcp' in serialized_config['services']['web']['ports']
    def test_serialize_configs(self):
        # Mirror of test_serialize_secrets for the v3.3 `configs` key.
        service_dict = {
            'image': 'example/web',
            'configs': [
                {'source': 'one'},
                {
                    'source': 'source',
                    'target': 'target',
                    'uid': '100',
                    'gid': '200',
                    'mode': 0o777,
                }
            ]
        }
        configs_dict = {
            'one': {'file': '/one.txt'},
            'source': {'file': '/source.pem'},
            'two': {'external': True},
        }
        config_dict = config.load(build_config_details({
            'version': '3.3',
            'services': {'web': service_dict},
            'configs': configs_dict
        }))

        serialized_config = yaml.load(serialize_config(config_dict))
        serialized_service = serialized_config['services']['web']
        assert secret_sort(serialized_service['configs']) == secret_sort(service_dict['configs'])
        assert 'configs' in serialized_config
        assert serialized_config['configs']['two'] == configs_dict['two']
    def test_serialize_bool_string(self):
        # Strings that YAML would parse as booleans ('true', 'Y', 'on')
        # must be quoted on output so they round-trip as strings.
        cfg = {
            'version': '2.2',
            'services': {
                'web': {
                    'image': 'example/web',
                    'command': 'true',
                    'environment': {'FOO': 'Y', 'BAR': 'on'}
                }
            }
        }
        config_dict = config.load(build_config_details(cfg))

        serialized_config = serialize_config(config_dict)
        assert 'command: "true"\n' in serialized_config
        assert 'FOO: "Y"\n' in serialized_config
        assert 'BAR: "on"\n' in serialized_config

    def test_serialize_escape_dollar_sign(self):
        # '$$' is the literal-dollar escape; it must not be interpolated
        # away during the load/serialize round trip.
        cfg = {
            'version': '2.2',
            'services': {
                'web': {
                    'image': 'busybox',
                    'command': 'echo $$FOO',
                    'environment': {
                        'CURRENCY': '$$'
                    },
                    'entrypoint': ['$$SHELL', '-c'],
                }
            }
        }
        config_dict = config.load(build_config_details(cfg))

        serialized_config = yaml.load(serialize_config(config_dict))
        serialized_service = serialized_config['services']['web']
        assert serialized_service['environment']['CURRENCY'] == '$$'
        assert serialized_service['command'] == 'echo $$FOO'
        assert serialized_service['entrypoint'][0] == '$$SHELL'
| {
"content_hash": "bb7c180c80979c1e4390d1fae99a9b5d",
"timestamp": "",
"source": "github",
"line_count": 4462,
"max_line_length": 105,
"avg_line_length": 33.104437471985655,
"alnum_prop": 0.4513038886481802,
"repo_name": "shakamunyi/fig",
"id": "8f2266ed8d5cdd2fe8424e55634596bac178a24e",
"size": "147733",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/unit/config/config_test.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "PowerShell",
"bytes": "4307"
},
{
"name": "Python",
"bytes": "884799"
},
{
"name": "Shell",
"bytes": "34030"
}
],
"symlink_target": ""
} |
from pandac.PandaModules import *
from direct.showbase import DirectObject
from direct.fsm import ClassicFSM, State
from toontown.toonbase import ToontownGlobals
from toontown.coghq import StageRoomSpecs
import random
class StageRoom(DirectObject.DirectObject):
    """One room of a Cog stage: holds the room geometry, door attachment
    logic, floor-collision naming, and a small FSM tracking whether the
    local toon is inside the room."""

    # Prefix for floor-collision node names; getFloorCollName appends roomNum.
    FloorCollPrefix = 'stageFloorColl'
    CashbotStageDoorFrame = 'phase_10/models/cashbotHQ/DoorFrame'

    def __init__(self, path = None):
        # Optionally load geometry from *path*.  Connector rooms use
        # loadModelCopy, other rooms loadModel -- presumably so each
        # connector instance gets its own copy (TODO confirm).
        if path is not None:
            if path in StageRoomSpecs.CashbotStageConnectorRooms:
                loadFunc = loader.loadModelCopy
            else:
                loadFunc = loader.loadModel
            self.setGeom(loadFunc(path))
        # FSM: off -> notPresent <-> present; starts (and restarts) in
        # 'notPresent'.  The state handlers below are empty hooks.
        self.localToonFSM = ClassicFSM.ClassicFSM('StageRoomLocalToonPresent', [State.State('off', self.enterLtOff, self.exitLtOff, ['notPresent']), State.State('notPresent', self.enterLtNotPresent, self.exitLtNotPresent, ['present']), State.State('present', self.enterLtPresent, self.exitLtPresent, ['notPresent'])], 'notPresent', 'notPresent')
        self.localToonFSM.enterInitialState()
        return

    def delete(self):
        # Drop the FSM reference; geometry cleanup is not handled here.
        del self.localToonFSM

    def enter(self):
        self.localToonFSM.request('notPresent')

    def exit(self):
        self.localToonFSM.requestFinalState()

    def setRoomNum(self, num):
        self.roomNum = num

    def getRoomNum(self):
        return self.roomNum

    def setGeom(self, geom):
        self.__geom = geom
        # Push any 'underground' geometry into an early bin so it renders
        # beneath the rest of the scene.
        ug = self.__geom.find('**/underground')
        if not ug.isEmpty():
            ug.setBin('ground', -10)

    def getGeom(self):
        return self.__geom

    def _getEntrances(self):
        # Door nodes are tagged ENTRANCE*/EXIT* in the model.
        return self.__geom.findAllMatches('**/ENTRANCE*')

    def _getExits(self):
        return self.__geom.findAllMatches('**/EXIT*')

    def attachTo(self, other, rng):
        # Snap one of this room's entrances (chosen by rng) onto the other
        # room's first exit, aligning the two doorways.
        otherExits = other._getExits()
        entrances = self._getEntrances()
        otherDoor = otherExits[0]
        thisDoor = rng.choice(entrances)
        geom = self.getGeom()
        otherGeom = other.getGeom()
        # Temporary pivot node at the other door lets us rotate this
        # room's geometry about the doorway.
        tempNode = otherDoor.attachNewNode('tempRotNode')
        geom.reparentTo(tempNode)
        geom.clearMat()
        # Translate so thisDoor sits at the pivot's origin...
        geom.setPos(Vec3(0) - thisDoor.getPos(geom))
        # ...then rotate the pivot so the door headings line up.
        tempNode.setH(-thisDoor.getH(otherDoor))
        # Reparent next to the other room while keeping the world transform.
        geom.wrtReparentTo(otherGeom.getParent())
        tempNode.removeNode()

    def getFloorCollName(self):
        # Per-room unique collision name, e.g. 'stageFloorColl3'.
        return '%s%s' % (StageRoom.FloorCollPrefix, self.roomNum)

    def initFloorCollisions(self):
        # Rename every floor-collision node to this room's unique name so
        # collision events can be attributed to the room.  Any pre-existing
        # node already using that name is renamed out of the way first.
        allColls = self.getGeom().findAllMatches('**/+CollisionNode')
        floorColls = []
        for coll in allColls:
            bitmask = coll.node().getIntoCollideMask()
            if not (bitmask & ToontownGlobals.FloorBitmask).isZero():
                floorColls.append(coll)
        if len(floorColls) > 0:
            floorCollName = self.getFloorCollName()
            others = self.getGeom().findAllMatches('**/%s' % floorCollName)
            for other in others:
                other.setName('%s_renamed' % floorCollName)
            for floorColl in floorColls:
                floorColl.setName(floorCollName)

    # Empty FSM state hooks; subclasses or future code may override.
    def enterLtOff(self):
        pass

    def exitLtOff(self):
        pass

    def enterLtNotPresent(self):
        pass

    def exitLtNotPresent(self):
        pass

    def enterLtPresent(self):
        pass

    def exitLtPresent(self):
        pass
| {
"content_hash": "626945c35c5197ed8172e80a8610f27c",
"timestamp": "",
"source": "github",
"line_count": 104,
"max_line_length": 345,
"avg_line_length": 32.08653846153846,
"alnum_prop": 0.6356008390770153,
"repo_name": "linktlh/Toontown-journey",
"id": "04da9f44838ba58c5d1b984f9785131f9dec1d2e",
"size": "3337",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "toontown/coghq/StageRoom.py",
"mode": "33261",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
} |
"""
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
"""
import pytest
from typepy import RealNumber
from pingparsing import PingTransmitter
from pingparsing._parser import IcmpReplyKey
from .common import ping_parser # noqa: W0611
@pytest.fixture
def transmitter():
    # Fresh transmitter per test; tests mutate its attributes freely.
    return PingTransmitter()
class Test_PingTransmitter_ping:
    # Tests that would actually invoke the system ping are marked
    # xfail(run=False), so they are never executed by default --
    # presumably to avoid depending on the host's network; confirm.

    @pytest.mark.xfail(run=False)
    @pytest.mark.parametrize(["host"], [["localhost"], ["127.0.0.1"], ["::1"]])
    def test_normal_dest(self, transmitter, host):
        # Hostname, IPv4 and IPv6 loopback destinations all succeed.
        transmitter.destination = host
        result = transmitter.ping()

        assert result.returncode == 0
        assert len(result.stdout) > 0

    @pytest.mark.xfail(run=False)
    @pytest.mark.parametrize(["host", "deadline"], [["localhost", 1]])
    def test_normal_deadline(self, transmitter, host, deadline):
        transmitter.destination = host
        transmitter.deadline = deadline
        result = transmitter.ping()

        assert result.returncode == 0
        assert len(result.stdout) > 0

    @pytest.mark.xfail(run=False)
    @pytest.mark.parametrize(["host", "timeout"], [["localhost", 1]])
    def test_normal_timeout(self, transmitter, host, timeout):
        transmitter.destination = host
        transmitter.timeout = timeout
        result = transmitter.ping()

        assert result.returncode == 0
        assert len(result.stdout) > 0

    @pytest.mark.xfail(run=False)
    @pytest.mark.parametrize(
        ["host", "count", "deadline"], [["localhost", 1, None], ["localhost", 1, 1000]]
    )
    def test_normal_count(self, transmitter, host, count, deadline):
        # count works on its own and combined with a deadline.
        transmitter.destination = host
        transmitter.deadline = deadline
        transmitter.count = count
        result = transmitter.ping()

        assert result.returncode == 0
        assert len(result.stdout) > 0
@pytest.mark.xfail(run=False)
@pytest.mark.parametrize(["host", "count"], [["localhost", 3]])
def test_normal_send_parse(self, transmitter, ping_parser, host, count):
transmitter.destination = host
transmitter.count = count
result = transmitter.ping()
stats = ping_parser.parse(result.stdout)
assert stats.packet_transmit >= count
assert RealNumber(stats.packet_receive).is_type()
assert RealNumber(stats.packet_loss_rate).is_type()
assert RealNumber(stats.packet_loss_count).is_type()
assert RealNumber(stats.packet_duplicate_rate).is_type()
assert RealNumber(stats.packet_duplicate_count).is_type()
assert RealNumber(stats.rtt_min).is_type()
assert RealNumber(stats.rtt_avg).is_type()
assert RealNumber(stats.rtt_max).is_type()
assert RealNumber(stats.rtt_mdev).is_type()
assert IcmpReplyKey.TIMESTAMP not in stats.icmp_replies[0]
@pytest.mark.xfail(run=False)
@pytest.mark.parametrize(["host", "count"], [["localhost", 3]])
def test_normal_send_parse_timestamp(self, transmitter, ping_parser, host, count):
transmitter.destination = host
transmitter.count = count
transmitter.timestamp = True
result = transmitter.ping()
stats = ping_parser.parse(result.stdout)
assert stats.packet_transmit >= count
assert RealNumber(stats.packet_receive).is_type()
assert RealNumber(stats.packet_loss_rate).is_type()
assert RealNumber(stats.packet_loss_count).is_type()
assert RealNumber(stats.packet_duplicate_rate).is_type()
assert RealNumber(stats.packet_duplicate_count).is_type()
assert RealNumber(stats.rtt_min).is_type()
assert RealNumber(stats.rtt_avg).is_type()
assert RealNumber(stats.rtt_max).is_type()
assert RealNumber(stats.rtt_mdev).is_type()
assert IcmpReplyKey.TIMESTAMP in stats.icmp_replies[0]
    @pytest.mark.parametrize(
        ["dest", "expected"], [["", ValueError], [None, ValueError], [1, ValueError]]
    )
    def test_except_destination(self, transmitter, dest, expected):
        # Empty, None, and non-string destinations are rejected on assignment.
        with pytest.raises(expected):
            transmitter.destination = dest

    @pytest.mark.parametrize(
        ["host", "deadline", "expected"],
        [
            ["localhost", 0, ValueError],
            ["localhost", -1, ValueError],
            ["localhost", "a", ValueError],
        ],
    )
    def test_except_deadline(self, transmitter, host, deadline, expected):
        # Non-positive or non-numeric deadlines raise (on assignment or ping).
        transmitter.destination = host
        with pytest.raises(expected):
            transmitter.deadline = deadline
            transmitter.ping()

    @pytest.mark.parametrize(
        ["host", "timeout", "expected"],
        [
            ["localhost", 0, ValueError],
            ["localhost", -1, ValueError],
            ["localhost", "a", ValueError],
        ],
    )
    def test_except_timeout(self, transmitter, host, timeout, expected):
        transmitter.destination = host
        with pytest.raises(expected):
            transmitter.timeout = timeout
            transmitter.ping()

    @pytest.mark.parametrize(
        ["host", "count", "expected"],
        [
            ["localhost", 0, ValueError],
            ["localhost", -1, ValueError],
            ["localhost", "a", ValueError],
        ],
    )
    def test_except_count(self, transmitter, host, count, expected):
        # Invalid counts are accepted on assignment but fail at ping time.
        transmitter.destination = host
        transmitter.count = count
        with pytest.raises(expected):
            transmitter.ping()
| {
"content_hash": "e5d9abf5f5ceaaf94e762063a7f99806",
"timestamp": "",
"source": "github",
"line_count": 154,
"max_line_length": 87,
"avg_line_length": 35.33116883116883,
"alnum_prop": 0.629663664767506,
"repo_name": "thombashi/pingparsing",
"id": "4388c8e09829c324f4f0de4d41b0e77be8834af5",
"size": "5441",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "test/test_pingtransmitter.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "509"
},
{
"name": "Python",
"bytes": "100698"
},
{
"name": "Shell",
"bytes": "265"
}
],
"symlink_target": ""
} |
from django.db.backends.base.features import BaseDatabaseFeatures
from django.db.utils import InterfaceError
class DatabaseFeatures(BaseDatabaseFeatures):
    """Capability flags for the PostgreSQL (psycopg2) backend.

    Each attribute overrides the default declared on BaseDatabaseFeatures;
    the ORM consults these flags to choose supported SQL constructs.
    """

    allows_group_by_selected_pks = True
    can_return_id_from_insert = True
    has_real_datatype = True
    has_native_uuid_field = True
    has_native_duration_field = True
    driver_supports_timedelta_args = True
    can_defer_constraint_checks = True
    has_select_for_update = True
    has_select_for_update_nowait = True
    has_bulk_insert = True
    uses_savepoints = True
    can_release_savepoints = True
    supports_tablespaces = True
    supports_transactions = True
    can_introspect_autofield = True
    can_introspect_ip_address_field = True
    can_introspect_small_integer_field = True
    can_distinct_on_fields = True
    can_rollback_ddl = True
    supports_combined_alters = True
    nulls_order_largest = True
    # Exception psycopg2 raises when operating on a closed cursor.
    closed_cursor_error_class = InterfaceError
    # LIKE is case-sensitive on PostgreSQL.
    has_case_insensitive_like = False
    requires_sqlparse_for_splitting = False
| {
"content_hash": "1d606a473783bae9a523e19fe197b3c6",
"timestamp": "",
"source": "github",
"line_count": 29,
"max_line_length": 65,
"avg_line_length": 35.275862068965516,
"alnum_prop": 0.729227761485826,
"repo_name": "gannetson/django",
"id": "3c5aebfef861d77ca1ef62ad42aeb026cdd080e9",
"size": "1023",
"binary": false,
"copies": "7",
"ref": "refs/heads/master",
"path": "django/db/backends/postgresql_psycopg2/features.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "43177"
},
{
"name": "HTML",
"bytes": "171768"
},
{
"name": "JavaScript",
"bytes": "105066"
},
{
"name": "Makefile",
"bytes": "125"
},
{
"name": "Python",
"bytes": "10940438"
},
{
"name": "Shell",
"bytes": "809"
},
{
"name": "Smarty",
"bytes": "130"
}
],
"symlink_target": ""
} |
import unittest
import os
from pydoop.mapreduce.string_utils import quote_string, unquote_string
from pydoop.mapreduce.string_utils import create_digest
from pydoop.utils.serialize import deserialize
from pydoop.mapreduce.binary_streams import BinaryDownStreamFilter
_CURRENT_DIR = os.path.dirname(__file__)
JOB_TOKEN = os.path.join(_CURRENT_DIR, 'data/jobToken')
MAP_JAVA_DOWNLINK_DATA = os.path.join(
_CURRENT_DIR, 'data/mapper_downlink.data'
)
class TestUtils(unittest.TestCase):
    """Round-trip and authentication-digest tests for pydoop string utils."""
    def test_quote(self):
        # quote_string/unquote_string must be exact inverses for strings
        # containing tabs, newlines, backslashes and control characters.
        for x in ['dfskjfdjsalk', 'sdkfj\ta\t\n', 'dfssd\t\n', '\adsfsdfa\t\n',
                  'dsjfkjewrwerwerwe8239489238492\n \t dfasd \\',
                  'jdsfkj\\hsdjhfjh\\\t\n']:
            self.assertEqual(x, unquote_string(quote_string(x)))
    def test_digest(self):
        # Read the job-token file: skip magic/protocol/label/job fields and
        # keep only the password used for digest creation.
        with open(JOB_TOKEN) as f:
            f.read(4)  # magic
            deserialize(int, f)  # prot
            deserialize(int, f)  # n
            deserialize(str, f)  # label
            deserialize(str, f)  # job
            passwd = deserialize(str, f)
        # Replay a recorded downlink stream; the first command must be the
        # authentication request carrying the expected digest/challenge.
        # NOTE(review): `cmd_stream.next()` is the Python 2 iterator
        # protocol — confirm the target Python version before porting.
        with open(MAP_JAVA_DOWNLINK_DATA) as istream:
            cmd_stream = BinaryDownStreamFilter(istream)
            cmd, args = cmd_stream.next()
            self.assertEqual(cmd, 'authenticationReq')
            xdigest = '5bMR7RdwmkLvK582eYWEK8X6jDA='
            xchallenge = '1593317824749889452062285518813742155'
            digest, challenge = args
            self.assertEqual(digest, xdigest)
            self.assertEqual(challenge, xchallenge)
            self.assertEqual(digest, create_digest(passwd, challenge))
def suite():
    """Assemble the two TestUtils cases into a runnable suite."""
    cases = [TestUtils(name) for name in ('test_quote', 'test_digest')]
    return unittest.TestSuite(cases)
if __name__ == '__main__':
    # Run the suite directly with a verbose text runner when executed as a script.
    _RUNNER = unittest.TextTestRunner(verbosity=2)
    _RUNNER.run((suite()))
| {
"content_hash": "eb94463a66febd505c1a37c8380a9c1d",
"timestamp": "",
"source": "github",
"line_count": 53,
"max_line_length": 79,
"avg_line_length": 34.58490566037736,
"alnum_prop": 0.6475722858701582,
"repo_name": "ilveroluca/pydoop",
"id": "b684909d0084fecbb7c56bb29cf5bc1fead32528",
"size": "2444",
"binary": false,
"copies": "3",
"ref": "refs/heads/develop",
"path": "test/mapreduce/test_utils.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "6685"
},
{
"name": "C",
"bytes": "505872"
},
{
"name": "C++",
"bytes": "105384"
},
{
"name": "Java",
"bytes": "355906"
},
{
"name": "Makefile",
"bytes": "2351"
},
{
"name": "Python",
"bytes": "534144"
},
{
"name": "Shell",
"bytes": "45693"
},
{
"name": "XSLT",
"bytes": "1335"
}
],
"symlink_target": ""
} |
import json
def open_json_data_file(path):
    """Read and parse a JSON data file.

    Prints a status message (in Russian, as the rest of the script's UI),
    then returns the parsed data, or ``None`` when the file is missing,
    unreadable, or does not contain valid JSON.

    Args:
        path: path to the JSON file; the file is decoded as UTF-8.

    Returns:
        The deserialized JSON value, or ``None`` on any failure.
    """
    print('Читаем данные из файла: {}'.format(path))
    # Catch only the specific failures we expect; the original bare
    # `except:` clauses also swallowed KeyboardInterrupt/SystemExit.
    try:
        with open(path, 'r', encoding='utf8') as f:
            raw = f.read()
    except OSError:
        # File missing or unreadable.  The original called
        # `.format(path)` on this message, but it has no placeholder,
        # so the printed output is identical without it.
        print('Файл данных не обнаружен.')
        return None
    try:
        return json.loads(raw)
    except ValueError:
        # json.JSONDecodeError subclasses ValueError.
        print('Данных в файле не обнаружено.')
        return None
def get_shop_list_by_dishes(dishes, people_count):
    """Aggregate the ingredients of several dishes into one shopping list.

    Each ingredient's quantity is scaled by *people_count*; entries for the
    same product are merged by summing their quantities.
    """
    shopping = {}
    for dish in dishes:
        for raw_item in dish['ingridients']:
            item = dict(raw_item)
            # Scale the amount for the requested head count.
            item['quantity'] = item['quantity'] * people_count
            product = item['product']
            if product in shopping:
                shopping[product]['quantity'] += item['quantity']
            else:
                shopping[product] = item
    return shopping
def print_shop_list(shop_list):
    """Print every shopping-list entry as 'product quantity unit'."""
    for entry in shop_list.values():
        print("{product} {quantity} {unit}".format(**entry))
def create_shop_list(data, people_count, first_dish, second_dish, third_dish):
    """Look up three dishes in the cookbook and print the combined shopping list."""
    # Pull the selected dishes out of the cookbook data.
    dishes = [data[name] for name in (first_dish, second_dish, third_dish)]
    # Build the aggregated list and display it.
    print_shop_list(get_shop_list_by_dishes(dishes, people_count))
# Load the cookbook from the JSON file shipped with the lesson.
path = 'lesson2-3.json'
data = open_json_data_file(path)
# Interactive prompts (in Russian): ask for three dishes and a head count.
print('Выберите первое блюдо: ')
first_dish = input()
print('Выберите второе блюдо: ')
second_dish = input()
print('Выберите третье блюдо: ')
third_dish = input()
print('На сколько человек?')
people_count = int(input())
# Compute and print the aggregated shopping list.
print('\nСписок покупок: ')
create_shop_list(data, people_count, first_dish, second_dish, third_dish)
| {
"content_hash": "4af07011be30c73981819bb34adf7c4c",
"timestamp": "",
"source": "github",
"line_count": 61,
"max_line_length": 92,
"avg_line_length": 31.672131147540984,
"alnum_prop": 0.6211180124223602,
"repo_name": "martysyuk/PY-3-Learning",
"id": "10fdc42dc16f5c8872e43dcdf45481f59ef1715f",
"size": "2391",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "homeworks/lesson2-3.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "1532"
},
{
"name": "Jupyter Notebook",
"bytes": "146817"
},
{
"name": "Python",
"bytes": "49223"
}
],
"symlink_target": ""
} |
from libs import parser, printerFeatures
def getParseDict():
    """Build the template substitution dict of printer option combo boxes."""
    def grouped(label, values):
        # Wrap a list of options in a labelled <optgroup> section.
        return ('<optgroup label="{}">'.format(label)
                + parser.getCmbFromList(values)
                + '</optgroup>')

    parseDict = {}
    parseDict['sizesCmb'] = (grouped('Outline Sizes', printerFeatures.sizesOutline)
                             + grouped('Bitmap Sizes', printerFeatures.sizesBitmap))
    parseDict['fontsCmb'] = (grouped('Outline Fonts', printerFeatures.fontsOutline)
                             + grouped('Bitmap Fonts', printerFeatures.fontsBitMap))
    parseDict['alignsCmb'] = parser.getCmbFromList(printerFeatures.aligns)
    parseDict['charStylesCmb'] = parser.getCmbFromList(printerFeatures.charStyles)
    parseDict['cutsCmb'] = parser.getCmbFromList(printerFeatures.cuts)
    return parseDict
"content_hash": "162e8964d9f08cf6e681220436de79a7",
"timestamp": "",
"source": "github",
"line_count": 29,
"max_line_length": 82,
"avg_line_length": 37.41379310344828,
"alnum_prop": 0.7142857142857143,
"repo_name": "chaosdorf/labello",
"id": "54c54f278771e20d99a29e262479374ff576e8fd",
"size": "1085",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "templates/base.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "221"
},
{
"name": "Dockerfile",
"bytes": "140"
},
{
"name": "HTML",
"bytes": "12525"
},
{
"name": "JavaScript",
"bytes": "4970"
},
{
"name": "Python",
"bytes": "54497"
}
],
"symlink_target": ""
} |
"""Support for deCONZ siren."""
from pydeconz.light import Siren
from homeassistant.components.siren import (
ATTR_DURATION,
DOMAIN,
SUPPORT_DURATION,
SUPPORT_TURN_OFF,
SUPPORT_TURN_ON,
SirenEntity,
)
from homeassistant.core import callback
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from .deconz_device import DeconzDevice
from .gateway import get_gateway_from_config_entry
async def async_setup_entry(hass, config_entry, async_add_entities):
    """Set up sirens for deCONZ component."""
    gateway = get_gateway_from_config_entry(hass, config_entry)
    # Registry of unique IDs already added for this platform; consulted
    # below so discovery never creates duplicate entities.
    gateway.entities[DOMAIN] = set()
    @callback
    def async_add_siren(lights=gateway.api.lights.values()):
        """Add siren from deCONZ."""
        # NOTE(review): the default argument binds the gateway's lights view
        # at definition time — presumably intentional so a bare call scans
        # every known light; confirm against the dispatcher's call sites.
        entities = []
        for light in lights:
            if (
                isinstance(light, Siren)
                and light.unique_id not in gateway.entities[DOMAIN]
            ):
                entities.append(DeconzSiren(light, gateway))
        if entities:
            async_add_entities(entities)
    # Re-run discovery whenever the gateway signals new lights; the listener
    # is removed automatically when the config entry unloads.
    config_entry.async_on_unload(
        async_dispatcher_connect(
            hass,
            gateway.signal_new_light,
            async_add_siren,
        )
    )
    # Initial discovery pass over the lights already known to the gateway.
    async_add_siren()
class DeconzSiren(DeconzDevice, SirenEntity):
    """Representation of a deCONZ siren."""

    TYPE = DOMAIN

    def __init__(self, device, gateway) -> None:
        """Set up siren."""
        super().__init__(device, gateway)
        self._attr_supported_features = (
            SUPPORT_DURATION | SUPPORT_TURN_ON | SUPPORT_TURN_OFF
        )

    @property
    def is_on(self):
        """Return true if siren is on."""
        return self._device.is_on

    async def async_turn_on(self, **kwargs):
        """Turn on siren."""
        duration = kwargs.get(ATTR_DURATION)
        # deCONZ expects the duration in tenths of a second.
        data = {} if duration is None else {"duration": duration * 10}
        await self._device.turn_on(**data)

    async def async_turn_off(self, **kwargs):
        """Turn off siren."""
        await self._device.turn_off()
| {
"content_hash": "03526ddad39bb12d2120510b2b58a444",
"timestamp": "",
"source": "github",
"line_count": 79,
"max_line_length": 69,
"avg_line_length": 26.49367088607595,
"alnum_prop": 0.6067845198279981,
"repo_name": "lukas-hetzenecker/home-assistant",
"id": "c3679b6ad892d5a666d06806a4c944fb1d74fcaf",
"size": "2093",
"binary": false,
"copies": "2",
"ref": "refs/heads/dev",
"path": "homeassistant/components/deconz/siren.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "2443"
},
{
"name": "Python",
"bytes": "38023745"
},
{
"name": "Shell",
"bytes": "4910"
}
],
"symlink_target": ""
} |
import codecs
import os
import re
import sys
import setuptools
import setuptools.command.test
try:
    from platform import python_implementation as _pyimp
except (AttributeError, ImportError):
    # Fall back to a stub when platform.python_implementation is unavailable.
    def _pyimp():
        return 'Python (unknown)'
NAME = 'celery'
# -*- Python Versions -*-
# Error text shown when installing on an unsupported interpreter; the two
# %s slots receive the implementation name and the minimum version.
E_UNSUPPORTED_PYTHON = """
----------------------------------------
Celery 4.0 requires %s %s or later
----------------------------------------
- For CPython 2.6, PyPy 1.x, Jython 2.6, CPython 3.2->3.3; use Celery 3.1:
    $ pip install 'celery<4'
- For CPython 2.5, Jython 2.5; use Celery 3.0:
    $ pip install 'celery<3.1'
- For CPython 2.4; use Celery 2.2:
    $ pip install 'celery<2.3'
"""
PYIMP = _pyimp()
PY26_OR_LESS = sys.version_info < (2, 7)
PY3 = sys.version_info[0] == 3
PY33_OR_LESS = PY3 and sys.version_info < (3, 4)
PYPY_VERSION = getattr(sys, 'pypy_version_info', None)
PYPY = PYPY_VERSION is not None
PYPY24_ATLEAST = PYPY_VERSION and PYPY_VERSION >= (2, 4)
# Refuse to install on interpreters older than the supported floor.
if PY26_OR_LESS:
    raise Exception(E_UNSUPPORTED_PYTHON % (PYIMP, '2.7'))
elif PY33_OR_LESS and not PYPY24_ATLEAST:
    raise Exception(E_UNSUPPORTED_PYTHON % (PYIMP, '3.4'))
# -*- Extras -*-
# Names of optional feature sets; each maps to requirements/extras/<name>.txt.
EXTENSIONS = {
    'auth',
    'cassandra',
    'django',
    'elasticsearch',
    'memcache',
    'pymemcache',
    'couchbase',
    'eventlet',
    'gevent',
    'msgpack',
    'yaml',
    'redis',
    'sqs',
    'couchdb',
    'riak',
    'zookeeper',
    'solar',
    'sqlalchemy',
    'librabbitmq',
    'pyro',
    'slmq',
    'tblib',
    'consul',
    'dynamodb',
    'mongodb',
}
# -*- Classifiers -*-
# Trove classifiers, one per line; split and stripped in the setup() call.
classes = """
    Development Status :: 5 - Production/Stable
    License :: OSI Approved :: BSD License
    Topic :: System :: Distributed Computing
    Topic :: Software Development :: Object Brokering
    Programming Language :: Python
    Programming Language :: Python :: 2
    Programming Language :: Python :: 2.7
    Programming Language :: Python :: 3
    Programming Language :: Python :: 3.4
    Programming Language :: Python :: 3.5
    Programming Language :: Python :: 3.6
    Programming Language :: Python :: Implementation :: CPython
    Programming Language :: Python :: Implementation :: PyPy
    Operating System :: OS Independent
"""
# -*- Distribution Meta -*-
# Matches `__key__ = value` assignment lines in the package __init__.
re_meta = re.compile(r'__(\w+?)__\s*=\s*(.*)')
# Matches a single-line module docstring.
re_doc = re.compile(r'^"""(.+?)"""')
def _add_default(m):
attr_name, attr_value = m.groups()
return ((attr_name, attr_value.strip("\"'")),)
def _add_doc(m):
return (('doc', m.groups()[0]),)
def parse_dist_meta():
    """Extract metadata information from ``$dist/__init__.py``."""
    handlers = {re_meta: _add_default, re_doc: _add_doc}
    pkg_init = os.path.join(
        os.path.abspath(os.path.dirname(__file__)), NAME, '__init__.py')
    distmeta = {}
    with open(pkg_init) as meta_fh:
        for line in meta_fh:
            stripped = line.strip()
            # The metadata section ends at this sentinel comment.
            if stripped == '# -eof meta-':
                break
            for pattern, handler in handlers.items():
                match = pattern.match(stripped)
                if match:
                    distmeta.update(handler(match))
    return distmeta
# -*- Requirements -*-
def _strip_comments(l):
return l.split('#', 1)[0].strip()
def _pip_requirement(req):
    """Expand a ``-r path`` include into its requirements; wrap others in a list."""
    if not req.startswith('-r '):
        return [req]
    _, path = req.split()
    return reqs(*path.split('/'))
def _reqs(*f):
    """Parse one requirements file into a list of requirement lists.

    Each element is the list produced by :func:`_pip_requirement`, so
    ``-r`` includes expand recursively; blank lines and comment-only
    lines are dropped.
    """
    path = os.path.join(os.getcwd(), 'requirements', *f)
    # Use a context manager so the file handle is closed deterministically;
    # the original `open(...).readlines()` leaked it until GC.
    with open(path) as fh:
        lines = fh.readlines()
    return [
        _pip_requirement(r)
        for r in (_strip_comments(line) for line in lines)
        if r
    ]
def reqs(*f):
    """Parse requirement file.

    Example:
        reqs('default.txt')          # requirements/default.txt
        reqs('extras', 'redis.txt')  # requirements/extras/redis.txt

    Returns:
        List[str]: list of requirements specified in the file.
    """
    flat = []
    for subreq in _reqs(*f):
        flat.extend(subreq)
    return flat
def extras(*p):
    """Parse a requirement file in the requirements/extras/ directory."""
    return reqs(*(('extras',) + p))
def install_requires():
    """Return the base requirements needed to install the package."""
    default_file = 'default.txt'
    return reqs(default_file)
def extras_require():
    """Map each extra's name to its parsed requirements list."""
    return dict((name, extras(name + '.txt')) for name in EXTENSIONS)
# -*- Long Description -*-
def long_description():
    """Return the contents of README.rst, or a placeholder when unreadable.

    The file is decoded as UTF-8.  A context manager closes the handle
    deterministically; the original ``codecs.open(...).read()`` leaked it
    until garbage collection.
    """
    try:
        with codecs.open('README.rst', 'r', 'utf-8') as fh:
            return fh.read()
    except IOError:
        return 'Long description error: Missing README.rst file'
# -*- Command: setup.py test -*-
class pytest(setuptools.command.test.test):
    """Custom ``setup.py test`` command that delegates to py.test."""
    user_options = [('pytest-args=', 'a', 'Arguments to pass to py.test')]
    def initialize_options(self):
        # Let the base command initialize, then add our option's default.
        setuptools.command.test.test.initialize_options(self)
        self.pytest_args = []
    def run_tests(self):
        # Import locally so pytest is only required when tests actually run.
        import pytest as _pytest
        sys.exit(_pytest.main(self.pytest_args))
# -*- %%% -*-
# Metadata (version, author, ...) parsed from celery/__init__.py.
meta = parse_dist_meta()
setuptools.setup(
    name=NAME,
    packages=setuptools.find_packages(exclude=['t', 't.*']),
    version=meta['version'],
    description=meta['doc'],
    long_description=long_description(),
    keywords=meta['keywords'],
    author=meta['author'],
    author_email=meta['contact'],
    url=meta['homepage'],
    license='BSD',
    platforms=['any'],
    install_requires=install_requires(),
    python_requires=">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*",
    tests_require=reqs('test.txt'),
    extras_require=extras_require(),
    classifiers=[s.strip() for s in classes.split('\n') if s],
    # Route `setup.py test` through the py.test command defined above.
    cmdclass={'test': pytest},
    include_package_data=True,
    zip_safe=False,
    entry_points={
        'console_scripts': [
            'celery = celery.__main__:main',
        ],
        'pytest11': [
            'celery = celery.contrib.pytest',
        ],
    },
)
| {
"content_hash": "42b8b00d35f099ad693bfb930781ed71",
"timestamp": "",
"source": "github",
"line_count": 227,
"max_line_length": 74,
"avg_line_length": 25.303964757709252,
"alnum_prop": 0.5840877437325905,
"repo_name": "cloudera/hue",
"id": "e1da0647efd37b28ba1c77cfb2618a834fd606e8",
"size": "5790",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "desktop/core/ext-py/celery-4.2.1/setup.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ABAP",
"bytes": "962"
},
{
"name": "ActionScript",
"bytes": "1133"
},
{
"name": "Ada",
"bytes": "99"
},
{
"name": "Assembly",
"bytes": "2347"
},
{
"name": "AutoHotkey",
"bytes": "720"
},
{
"name": "BASIC",
"bytes": "2884"
},
{
"name": "Batchfile",
"bytes": "143575"
},
{
"name": "C",
"bytes": "5129166"
},
{
"name": "C#",
"bytes": "83"
},
{
"name": "C++",
"bytes": "718011"
},
{
"name": "COBOL",
"bytes": "4"
},
{
"name": "CSS",
"bytes": "680715"
},
{
"name": "Cirru",
"bytes": "520"
},
{
"name": "Clojure",
"bytes": "794"
},
{
"name": "Closure Templates",
"bytes": "1072"
},
{
"name": "CoffeeScript",
"bytes": "403"
},
{
"name": "ColdFusion",
"bytes": "86"
},
{
"name": "Common Lisp",
"bytes": "632"
},
{
"name": "Cython",
"bytes": "1016963"
},
{
"name": "D",
"bytes": "324"
},
{
"name": "Dart",
"bytes": "489"
},
{
"name": "Dockerfile",
"bytes": "13576"
},
{
"name": "EJS",
"bytes": "752"
},
{
"name": "Eiffel",
"bytes": "375"
},
{
"name": "Elixir",
"bytes": "692"
},
{
"name": "Elm",
"bytes": "487"
},
{
"name": "Emacs Lisp",
"bytes": "411907"
},
{
"name": "Erlang",
"bytes": "487"
},
{
"name": "Forth",
"bytes": "979"
},
{
"name": "FreeMarker",
"bytes": "1017"
},
{
"name": "G-code",
"bytes": "521"
},
{
"name": "GAP",
"bytes": "29873"
},
{
"name": "GLSL",
"bytes": "512"
},
{
"name": "Genshi",
"bytes": "946"
},
{
"name": "Gherkin",
"bytes": "699"
},
{
"name": "Go",
"bytes": "641"
},
{
"name": "Groovy",
"bytes": "1080"
},
{
"name": "HTML",
"bytes": "28328425"
},
{
"name": "Haml",
"bytes": "920"
},
{
"name": "Handlebars",
"bytes": "173"
},
{
"name": "Haskell",
"bytes": "512"
},
{
"name": "Haxe",
"bytes": "447"
},
{
"name": "HiveQL",
"bytes": "43"
},
{
"name": "Io",
"bytes": "140"
},
{
"name": "Java",
"bytes": "457398"
},
{
"name": "JavaScript",
"bytes": "39181239"
},
{
"name": "Jinja",
"bytes": "356"
},
{
"name": "Julia",
"bytes": "210"
},
{
"name": "LSL",
"bytes": "2080"
},
{
"name": "Lean",
"bytes": "213"
},
{
"name": "Less",
"bytes": "396102"
},
{
"name": "Lex",
"bytes": "218764"
},
{
"name": "Liquid",
"bytes": "1883"
},
{
"name": "LiveScript",
"bytes": "5747"
},
{
"name": "Lua",
"bytes": "78382"
},
{
"name": "M4",
"bytes": "1751"
},
{
"name": "MATLAB",
"bytes": "203"
},
{
"name": "Makefile",
"bytes": "1025937"
},
{
"name": "Mako",
"bytes": "3644004"
},
{
"name": "Mask",
"bytes": "597"
},
{
"name": "Myghty",
"bytes": "936"
},
{
"name": "Nix",
"bytes": "2212"
},
{
"name": "OCaml",
"bytes": "539"
},
{
"name": "Objective-C",
"bytes": "2672"
},
{
"name": "OpenSCAD",
"bytes": "333"
},
{
"name": "PHP",
"bytes": "662"
},
{
"name": "PLSQL",
"bytes": "29403"
},
{
"name": "PLpgSQL",
"bytes": "6006"
},
{
"name": "Pascal",
"bytes": "84273"
},
{
"name": "Perl",
"bytes": "4327"
},
{
"name": "PigLatin",
"bytes": "371"
},
{
"name": "PowerShell",
"bytes": "6235"
},
{
"name": "Procfile",
"bytes": "47"
},
{
"name": "Pug",
"bytes": "584"
},
{
"name": "Python",
"bytes": "92881549"
},
{
"name": "R",
"bytes": "2445"
},
{
"name": "Roff",
"bytes": "484108"
},
{
"name": "Ruby",
"bytes": "1098"
},
{
"name": "Rust",
"bytes": "495"
},
{
"name": "SCSS",
"bytes": "78508"
},
{
"name": "Sass",
"bytes": "770"
},
{
"name": "Scala",
"bytes": "1541"
},
{
"name": "Scheme",
"bytes": "559"
},
{
"name": "Shell",
"bytes": "249165"
},
{
"name": "Smarty",
"bytes": "130"
},
{
"name": "SourcePawn",
"bytes": "948"
},
{
"name": "Stylus",
"bytes": "682"
},
{
"name": "Tcl",
"bytes": "899"
},
{
"name": "TeX",
"bytes": "165743"
},
{
"name": "Thrift",
"bytes": "341963"
},
{
"name": "Twig",
"bytes": "761"
},
{
"name": "TypeScript",
"bytes": "1241396"
},
{
"name": "VBScript",
"bytes": "938"
},
{
"name": "VHDL",
"bytes": "830"
},
{
"name": "Vala",
"bytes": "485"
},
{
"name": "Verilog",
"bytes": "274"
},
{
"name": "Vim Snippet",
"bytes": "226931"
},
{
"name": "Vue",
"bytes": "350385"
},
{
"name": "XQuery",
"bytes": "114"
},
{
"name": "XSLT",
"bytes": "522199"
},
{
"name": "Yacc",
"bytes": "1070437"
},
{
"name": "jq",
"bytes": "4"
}
],
"symlink_target": ""
} |
from contracts import ContractNotRespected
from synthetic import DuplicateMemberNameError, InvalidPropertyOverrideError, \
NamingConventionCamelCase, NamingConventionUnderscore, \
synthesizeProperty, synthesize_member, \
synthesizeProperty, synthesize_property, \
namingConvention, naming_convention
import contracts
import unittest
# Fixture with three synthesized properties exercising the default value and
# custom private member name options (camelCase and underscore aliases of the
# same decorator are mixed deliberately).
@synthesizeProperty('minimalistProperty')
@synthesize_property('propertyWithDefaultValue', default = "default")
@synthesizeProperty('customProperty',
                    privateMemberName = '_internalPrivateSecretMemberThatShouldNeverBeUsedOutsideThisClass')
class TestBasic(object):
    pass
# Also exercises the 'naming_convention' decorator: when it recreates the
# accessors it must not attempt to remove the setter, because the member is
# read-only and never had one.
@naming_convention(NamingConventionCamelCase())
@synthesizeProperty('readOnlyProperty', readOnly = True)
class TestReadOnly(object):
    pass
# Properties constrained by PyContracts expressions: a plain Python type and
# a parameterized 'list(str)' contract string.
@synthesizeProperty('propertyString', contract = str)
@synthesizeProperty('propertyStringList', contract = 'list(str)')
class TestContract(object):
    pass
# The explicit property definitions below must take precedence over the
# synthesized accessors.
@synthesizeProperty('propertyWithOverriddenGetterSetter')
@synthesizeProperty('propertyWithOverriddenGetter')
class TestOverriddenProperties(object):
    @property
    def propertyWithOverriddenGetterSetter(self):
        return 'property_with_custom_getter_setter_value'
    @propertyWithOverriddenGetterSetter.setter
    def propertyWithOverriddenGetterSetter(self, value):
        # Ignores *value* on purpose: always stores the fixed marker string
        # so the tests can tell this setter ran.
        self._propertyWithOverriddenGetterSetter = 'property_with_custom_getter_setter_value'
    @property
    def propertyWithOverriddenGetter(self):
        return 'property_with_custom_getter_value'
class TestInvalidPropertyOverride(object):
    # A plain method bearing the target name makes property synthesis invalid.
    def member(self):
        pass
class TestClass(object):
    """Empty class used as a fresh target for dynamic decoration in tests."""
    pass
class TestSynthesizeProperty(unittest.TestCase):
    """Tests for the synthesizeProperty decorator family."""
    def setUp(self):
        # A previous test may have disabled contract checking; reset it.
        contracts.enable_all()
    def testOK(self):
        instance = TestBasic()
        # Default default ;) member value is None.
        self.assertEqual(None, instance.minimalistProperty)
        # Default set and get test.
        instance.minimalistProperty = 10
        self.assertEqual(10, instance.minimalistProperty)
        # Checking custom default value.
        self.assertEqual("default", instance.propertyWithDefaultValue)
        # Custom private member name: the default backing name must not exist.
        instance.customProperty = "newValue"
        self.assertFalse(hasattr(instance, '_customProperty'))
        self.assertEqual("newValue", instance._internalPrivateSecretMemberThatShouldNeverBeUsedOutsideThisClass)
        self.assertEqual("newValue", instance.customProperty)
    def testReadOnly(self):
        instance = TestReadOnly()
        self.assertTrue(hasattr(instance, 'readOnlyProperty'))
        # Assigning to a read-only synthesized property must fail.
        with self.assertRaises(AttributeError):
            instance.readOnlyProperty = 10
    def testOverridenProperties(self):
        """If accessors are overridden, they should not be synthesized.
        We also check that there's no bug if the naming convention is changed.
        """
        instance = TestOverriddenProperties()
        self.assertEqual(None, instance._propertyWithOverriddenGetterSetter)
        self.assertEqual(None, instance._propertyWithOverriddenGetter)
        # Testing custom setters: the overridden setter ignores its input.
        instance.propertyWithOverriddenGetterSetter = "placeholder"
        instance.propertyWithOverriddenGetter = "value"
        self.assertEqual('property_with_custom_getter_setter_value', instance._propertyWithOverriddenGetterSetter)
        self.assertEqual('value', instance._propertyWithOverriddenGetter)
        # Testing custom getters.
        instance = TestOverriddenProperties()
        self.assertEqual(None, instance._propertyWithOverriddenGetterSetter)
        self.assertEqual(None, instance._propertyWithOverriddenGetter)
        self.assertEqual('property_with_custom_getter_setter_value', instance.propertyWithOverriddenGetterSetter)
        self.assertEqual('property_with_custom_getter_value', instance.propertyWithOverriddenGetter)
    def testContract(self):
        instance = TestContract()
        # OK.
        instance.propertyString = "I love CamelCase!!!"
        instance.propertyStringList = ["a", "b"]
        # Not OK.
        with self.assertRaises(ContractNotRespected):
            instance.propertyString = 10
        with self.assertRaises(ContractNotRespected):
            instance.propertyStringList = ["a", 2]
        # Checking exception message.
        # NOTE(review): the expected "<type 'int'>" repr is Python 2 specific
        # (Python 3 renders "<class 'int'>") — confirm the target version.
        with self.assertRaisesRegexp(ContractNotRespected, r"""Expected type 'str', got <type 'int'>"""):
            instance.propertyString = 10
    def testContractDisabled(self):
        instance = TestContract()
        contracts.disable_all()
        # No exception is raised
        instance.propertyString = 10
        instance.propertyStringList = ["a", 2]
    def testInvalidPropertyOverride(self):
        # Equivalent to:
        # @synthesizeProperty('member')
        # class TestInvalidOverride(object):
        #
        #     def member(self):
        #         return 10
        self.assertRaises(InvalidPropertyOverrideError, synthesizeProperty('member'), TestInvalidPropertyOverride)
    def testDuplicateMemberName(self):
        # Equivalent to:
        # @syntheticMember('member')
        # @syntheticMember('member')
        # class TestClass:
        #     pass
        ClassWithSynthesizedProperty = synthesizeProperty('property')(TestClass)
        self.assertRaises(DuplicateMemberNameError, synthesizeProperty('property'), ClassWithSynthesizedProperty)
| {
"content_hash": "36d17fbee212c1144b6d5bc5f668e278",
"timestamp": "",
"source": "github",
"line_count": 155,
"max_line_length": 114,
"avg_line_length": 37.174193548387095,
"alnum_prop": 0.7025338424158278,
"repo_name": "yjaaidi/pysynthetic",
"id": "095fd671644e171d3cfa226a0cb3d3ff9798e5be",
"size": "5852",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "tests/test_synthetic_property.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "52869"
},
{
"name": "Python",
"bytes": "83474"
},
{
"name": "Shell",
"bytes": "5263"
}
],
"symlink_target": ""
} |
from django.conf import settings
from django.conf.urls.i18n import i18n_patterns
from django.contrib import admin
from django.urls import include, re_path
from django.views.i18n import JavaScriptCatalog
from django.views.static import serve
from cms.test_utils.project.placeholderapp.views import detail_view
from cms.utils.conf import get_cms_setting
# Populate the admin registry before the patterns reference admin.site.urls.
admin.autodiscover()
urlpatterns = [
    re_path(r'^admin/', admin.site.urls),
    # Serve user-uploaded media for this test project.
    re_path(r'^media/(?P<path>.*)$', serve,
            {'document_root': settings.MEDIA_ROOT, 'show_indexes': True}),
    # Serve the CMS's own media files.
    re_path(r'^media/cms/(?P<path>.*)$', serve,
            {'document_root': get_cms_setting('MEDIA_ROOT'), 'show_indexes': True}),
    # JavaScript translation catalog for the requested packages.
    re_path(r'^jsi18n/(?P<packages>\S+?)/$', JavaScriptCatalog.as_view()),
]
# Language-prefixed URLs: two detail routes (keyed by ``id`` and by ``pk``)
# plus the CMS catch-all, which must come last.
urlpatterns += i18n_patterns(
    re_path(r'^detail/(?P<id>[0-9]+)/$', detail_view, name="detail"),
    re_path(r'^detail/(?P<pk>[0-9]+)/$', detail_view, name="example_detail"),
    re_path(r'^', include('cms.urls')),
)
| {
"content_hash": "3e5588d43da7f8ce8be0b878be1c827c",
"timestamp": "",
"source": "github",
"line_count": 26,
"max_line_length": 84,
"avg_line_length": 37.84615384615385,
"alnum_prop": 0.6727642276422764,
"repo_name": "datakortet/django-cms",
"id": "331bc4b0017cea9c1e1c2183f99a57172f7b849d",
"size": "984",
"binary": false,
"copies": "2",
"ref": "refs/heads/develop",
"path": "cms/test_utils/project/placeholderapp_urls.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "HTML",
"bytes": "203975"
},
{
"name": "JavaScript",
"bytes": "1249081"
},
{
"name": "Python",
"bytes": "2374270"
},
{
"name": "SCSS",
"bytes": "137720"
},
{
"name": "Shell",
"bytes": "22511"
}
],
"symlink_target": ""
} |
from ghostlines.storage.app_storage import AppStorage
from ghostlines.windows.sign_in_window import SignInWindow
from ghostlines.windows.deferred_window import DeferredWindow
class Authentication(object):
    """Decorator-style gate that requires a sign-in before opening a window."""
    @staticmethod
    def require(cls):
        # Wrap *cls* so that opening it first checks for a stored access token.
        class AuthenticatedWindow(object):
            def __init__(self, *args, **kwargs):
                # Defer construction of the real window until authentication
                # has succeeded.
                self.window = DeferredWindow(cls, *args, **kwargs)
            def open(self):
                token = AppStorage("accessToken").retrieve()
                # TODO: Retrieve returns NSNull if set to None. Empty string is
                # used to clear password for now, so check for None or ''
                if token != '' and token is not None:
                    self.window().open()
                else:
                    # No usable token: show the sign-in window, which opens
                    # the deferred target window on success.
                    SignInWindow(success_window=self.window).open()
        return AuthenticatedWindow
| {
"content_hash": "33815c777ab4b723801c8f90f8f5098c",
"timestamp": "",
"source": "github",
"line_count": 25,
"max_line_length": 79,
"avg_line_length": 34.96,
"alnum_prop": 0.6086956521739131,
"repo_name": "ghostlines/ghostlines-robofont",
"id": "62c343b8ea5298d838e9234aeee4038034dd68f0",
"size": "874",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "src/lib/ghostlines/authentication.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "1324"
},
{
"name": "Python",
"bytes": "2565712"
},
{
"name": "Ruby",
"bytes": "2824"
},
{
"name": "Shell",
"bytes": "183"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
import django.contrib.auth.models
from django.db import migrations
class Migration(migrations.Migration):
    # Creates RealEstateGroup as a proxy of auth.Group: no new table or
    # fields, only Russian verbose names for the admin UI and the standard
    # GroupManager.
    dependencies = [
        ('auth', '0008_alter_user_username_max_length'),
        ('accounts', '0002_add_extra_user_info'),
    ]
    operations = [
        migrations.CreateModel(
            name='RealEstateGroup',
            fields=[
            ],
            options={
                'indexes': [],
                'proxy': True,
                'verbose_name_plural': 'группы',
                'verbose_name': 'группа',
            },
            bases=('auth.group',),
            managers=[
                ('objects', django.contrib.auth.models.GroupManager()),
            ],
        ),
    ]
| {
"content_hash": "2f1225950e491b79fe64426ea601c3e2",
"timestamp": "",
"source": "github",
"line_count": 30,
"max_line_length": 71,
"avg_line_length": 25.333333333333332,
"alnum_prop": 0.49736842105263157,
"repo_name": "Dybov/real_estate_agency",
"id": "c40698e6f7c270849fe50c608e3489161a8f21de",
"size": "843",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "real_estate_agency/accounts/migrations/0003_realestategroup.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "102329"
},
{
"name": "HTML",
"bytes": "104357"
},
{
"name": "JavaScript",
"bytes": "86459"
},
{
"name": "Python",
"bytes": "259967"
}
],
"symlink_target": ""
} |
"""
--- Day 25: The Halting Problem ---
Following the twisty passageways deeper and deeper into the CPU, you finally reach the core of the computer. Here, in the expansive central chamber, you find a grand apparatus that fills the entire room, suspended nanometers above your head.
You had always imagined CPUs to be noisy, chaotic places, bustling with activity. Instead, the room is quiet, motionless, and dark.
Suddenly, you and the CPU's garbage collector startle each other. "It's not often we get many visitors here!", he says. You inquire about the stopped machinery.
"It stopped milliseconds ago; not sure why. I'm a garbage collector, not a doctor." You ask what the machine is for.
"Programs these days, don't know their origins. That's the Turing machine! It's what makes the whole computer work." You try to explain that Turing machines are merely models of computation, but he cuts you off. "No, see, that's just what they want you to think. Ultimately, inside every CPU, there's a Turing machine driving the whole thing! Too bad this one's broken. We're doomed!"
You ask how you can help. "Well, unfortunately, the only way to get the computer running again would be to create a whole new Turing machine from scratch, but there's no way you can-" He notices the look on your face, gives you a curious glance, shrugs, and goes back to sweeping the floor.
You find the Turing machine blueprints (your puzzle input) on a tablet in a nearby pile of debris. Looking back up at the broken Turing machine above, you can start to identify its parts:
A tape which contains 0 repeated infinitely to the left and right.
A cursor, which can move left or right along the tape and read or write values at its current position.
A set of states, each containing rules about what to do based on the current value under the cursor.
Each slot on the tape has two possible values: 0 (the starting value for all slots) and 1. Based on whether the cursor is pointing at a 0 or a 1, the current state says what value to write at the current position of the cursor, whether to move the cursor left or right one slot, and which state to use next.
For example, suppose you found the following blueprint:
Begin in state A.
Perform a diagnostic checksum after 6 steps.
In state A:
If the current value is 0:
- Write the value 1.
- Move one slot to the right.
- Continue with state B.
If the current value is 1:
- Write the value 0.
- Move one slot to the left.
- Continue with state B.
In state B:
If the current value is 0:
- Write the value 1.
- Move one slot to the left.
- Continue with state A.
If the current value is 1:
- Write the value 1.
- Move one slot to the right.
- Continue with state A.
Running it until the number of steps required to take the listed diagnostic checksum would result in the following tape configurations (with the cursor marked in square brackets):
... 0 0 0 [0] 0 0 ... (before any steps; about to run state A)
... 0 0 0 1 [0] 0 ... (after 1 step; about to run state B)
... 0 0 0 [1] 1 0 ... (after 2 steps; about to run state A)
... 0 0 [0] 0 1 0 ... (after 3 steps; about to run state B)
... 0 [0] 1 0 1 0 ... (after 4 steps; about to run state A)
... 0 1 [1] 0 1 0 ... (after 5 steps; about to run state B)
... 0 1 1 [0] 1 0 ... (after 6 steps; about to run state A)
The CPU can confirm that the Turing machine is working by taking a diagnostic checksum after a specific number of steps (given in the blueprint). Once the specified number of steps have been executed, the Turing machine should pause; once it does, count the number of times 1 appears on the tape. In the above example, the diagnostic checksum is 3.
Recreate the Turing machine and save the computer! What is the diagnostic checksum it produces once it's working again?
--- Part Two ---
The Turing machine, and soon the entire computer, springs back to life. A console glows dimly nearby, awaiting your command.
> reboot printer
Error: That command requires priority 50. You currently have priority 0.
You must deposit 50 stars to increase your priority to the required level.
The console flickers for a moment, and then prints another message:
Star accepted.
You must deposit 49 stars to increase your priority to the required level.
The garbage collector winks at you, then continues sweeping.
You deposit all fifty stars and reboot the printer. Suddenly, everything seems a lot less pixelated than before.
"--raise your priority level enough to send the reboot command and... hey look, it's printing! I'll bring it to Santa. Thanks!" She runs off.
Congratulations! You've finished every puzzle in Advent of Code 2017! I hope you had as much fun solving them as I had making them for you. I'd love to hear about your adventure; you can get in touch with me via contact info on my website or through Twitter.
If you'd like to see more things like this in the future, please consider supporting Advent of Code and sharing it with others.
To hear about future projects, you can follow me on Twitter.
I've highlighted the easter eggs in each puzzle, just in case you missed any. Hover your mouse over them, and the easter egg will appear.
"""
class TuringMachine():
    """Sparse tape of a Turing machine.

    Only the positions of cells currently set to 1 are stored in
    ``self.ones``; every other cell is implicitly 0.  The diagnostic
    checksum is therefore ``len(machine.ones)``.
    """
    def __init__(self, state):
        self.state = state  # current state label, e.g. 'A'
        self.pos = 0        # cursor position on the (infinite) tape
        # A set instead of a list: O(1) membership tests per step, no
        # possibility of duplicate entries for the same cell (a list would
        # double-count a cell if 1 were ever written onto an existing 1),
        # and clearing an already-0 cell is a harmless no-op instead of a
        # ValueError from list.index().
        self.ones = set()
    def _move(self, move, next_state=None, op=None):
        """Apply one transition.

        Optionally switch to ``next_state``, optionally write ``op``
        (0 or 1; None leaves the cell untouched) at the current cell,
        then shift the cursor by ``move`` positions.
        """
        if next_state is not None:
            self.state = next_state
        if op == 1:
            self.ones.add(self.pos)
        elif op == 0:
            self.ones.discard(self.pos)
        self.pos += move
class TestTuringMachine(TuringMachine):
    """The two-state example machine (states A and B) from the puzzle text."""
    # (state, value read) -> (cursor shift, next state, value to write or None)
    _RULES = {
        ('A', 0): (1, 'B', 1),
        ('A', 1): (-1, 'B', 0),
        ('B', 0): (-1, 'A', 1),
        ('B', 1): (1, 'A', None),
    }
    def move(self):
        """Execute a single step by looking up the transition table."""
        current = 1 if self.pos in self.ones else 0
        shift, nxt, write = self._RULES[(self.state, current)]
        self._move(shift, nxt, write)
def test1():
    """Run the example machine for six steps; its checksum must be 3."""
    machine = TestTuringMachine('A')
    remaining = 6
    while remaining:
        machine.move()
        remaining -= 1
    assert len(machine.ones) == 3
class Part1TuringMachine(TuringMachine):
    """The six-state machine (A-F) described by the puzzle input blueprint."""
    # (state, value read) -> (cursor shift, next state, value to write or None)
    _BLUEPRINT = {
        ('A', 0): (1, 'B', 1),
        ('A', 1): (-1, 'C', 0),
        ('B', 0): (-1, 'A', 1),
        ('B', 1): (1, 'D', None),
        ('C', 0): (-1, 'B', None),
        ('C', 1): (-1, 'E', 0),
        ('D', 0): (1, 'A', 1),
        ('D', 1): (1, 'B', 0),
        ('E', 0): (-1, 'F', 1),
        ('E', 1): (-1, 'C', None),
        ('F', 0): (1, 'D', 1),
        ('F', 1): (1, 'A', None),
    }
    def move(self):
        """Execute one step of the blueprint via table lookup."""
        value = 1 if self.pos in self.ones else 0
        shift, nxt, write = self._BLUEPRINT[(self.state, value)]
        self._move(shift, nxt, write)
def part1():
    """Run the blueprint for its full step count and print the checksum."""
    step_count = 12667664  # number of steps given in the puzzle blueprint
    machine = Part1TuringMachine('A')
    for _ in range(step_count):
        machine.move()
    print(len(machine.ones))
if __name__ == '__main__':
    # test1() exercises the worked example; part1() runs the real blueprint.
    # test1()
    part1()
| {
"content_hash": "04dfca44e008c727e6c127129d3ebd70",
"timestamp": "",
"source": "github",
"line_count": 180,
"max_line_length": 384,
"avg_line_length": 41.272222222222226,
"alnum_prop": 0.6467896082918293,
"repo_name": "bbglab/adventofcode",
"id": "f08f713013236bd405840a1a1593853087904114",
"size": "7429",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "2017/iker/day25.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C++",
"bytes": "37799"
},
{
"name": "Go",
"bytes": "3094"
},
{
"name": "Haskell",
"bytes": "10240"
},
{
"name": "Jupyter Notebook",
"bytes": "13754648"
},
{
"name": "Python",
"bytes": "194710"
},
{
"name": "R",
"bytes": "18289"
},
{
"name": "Rust",
"bytes": "2682"
},
{
"name": "Shell",
"bytes": "1190"
}
],
"symlink_target": ""
} |
# Recipe-engine dependency declaration: the recipe modules this module
# may load (resolved by the recipe engine at runtime).
DEPS = [
    'depot_tools/bot_update',
    'chromium',
    'chromium_tests',
    'depot_tools/gclient',
    'recipe_engine/json',
    'recipe_engine/path',
    'recipe_engine/properties',
    'recipe_engine/step',
    'depot_tools/tryserver',
]
| {
"content_hash": "e8f9fffb5d4b51c9a3be4391ed062ade",
"timestamp": "",
"source": "github",
"line_count": 11,
"max_line_length": 29,
"avg_line_length": 20.636363636363637,
"alnum_prop": 0.6607929515418502,
"repo_name": "eunchong/build",
"id": "15b2b35fb91ba103dc02a8c1602f0a0b3c78449b",
"size": "390",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "scripts/slave/recipe_modules/ios/__init__.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "3128"
},
{
"name": "CSS",
"bytes": "211818"
},
{
"name": "HTML",
"bytes": "429981"
},
{
"name": "JavaScript",
"bytes": "75624"
},
{
"name": "Makefile",
"bytes": "21204"
},
{
"name": "Python",
"bytes": "6143109"
},
{
"name": "Shell",
"bytes": "23512"
}
],
"symlink_target": ""
} |
from bolt.local.construct import ConstructLocal
from bolt.spark.construct import ConstructSpark
# Registry of (mode name, constructor class) pairs; lookup() scans these
# in order when routing a factory call to a back end.
constructors = [
    ('local', ConstructLocal),
    ('spark', ConstructSpark)
]
def wrapped(f):
    """
    Decorator to append routed docstrings

    Extends ``f.__doc__`` with the rendered signatures of the same-named
    methods on ConstructLocal and ConstructSpark, so the routed factory
    function documents both back ends.
    """
    import inspect
    def extract(func):
        """Render func's arguments as a string like 'a, b=1, c=f)'."""
        # inspect.getargspec() was removed in Python 3.11; prefer the
        # full spec (same .args/.defaults fields) when available.
        getspec = getattr(inspect, 'getfullargspec', None) or inspect.getargspec
        spec = getspec(func)
        defaults = spec.defaults or ()  # .defaults is None when absent
        # Index of the first argument that carries a default value.
        # (The original compared against len(spec) -- the number of
        # ArgSpec fields, always 4 -- instead of len(spec.args).)
        first_default = len(spec.args) - len(defaults)
        append = ""
        for i, a in enumerate(spec.args):
            if i < first_default:
                append += str(a) + ", "
            else:
                default = defaults[i - first_default]
                if hasattr(default, "__name__"):
                    default = default.__name__
                else:
                    default = str(default)
                append += str(a) + "=" + default + ", "
        append = append[:-2] + ")"
        return append
    doc = f.__doc__ + "\n"
    doc += "    local -> array(" + extract(getattr(ConstructLocal, f.__name__)) + "\n"
    doc += "    spark -> array(" + extract(getattr(ConstructSpark, f.__name__)) + "\n"
    f.__doc__ = doc
    return f
def lookup(*args, **kwargs):
    """
    Use arguments to route constructor.

    Applies a series of checks on arguments to identify constructor,
    starting with known keyword arguments, and then applying
    constructor-specific checks.

    An explicit ``mode`` keyword ('local' or 'spark') is consumed here
    and not forwarded to the constructor.  Raises ValueError for an
    unknown mode; falls back to ConstructLocal when no back end claims
    the arguments.
    """
    if 'mode' in kwargs:
        mode = kwargs.pop('mode')
        # constructors is a list of (name, class) pairs, so neither a
        # plain membership test nor list indexing by name works -- the
        # original `constructors[mode]` raised TypeError.  Scan by name.
        for name, constructor in constructors:
            if name == mode:
                return constructor
        raise ValueError('Mode %s not supported' % mode)
    else:
        for mode, constructor in constructors:
            if constructor._argcheck(*args, **kwargs):
                return constructor
        return ConstructLocal
@wrapped
def array(*args, **kwargs):
    """
    Create a bolt array.
    """
    constructor = lookup(*args, **kwargs)
    return constructor.dispatch('array', *args, **kwargs)
@wrapped
def ones(*args, **kwargs):
    """
    Create a bolt array of ones.
    """
    constructor = lookup(*args, **kwargs)
    return constructor.dispatch('ones', *args, **kwargs)
@wrapped
def zeros(*args, **kwargs):
    """
    Create a bolt array of zeros.
    """
    constructor = lookup(*args, **kwargs)
    return constructor.dispatch('zeros', *args, **kwargs)
@wrapped
def concatenate(*args, **kwargs):
    """
    Concatenate bolt arrays.
    """
    return lookup(*args, **kwargs).dispatch('concatenate', *args, **kwargs) | {
"content_hash": "bd8bd9c20325961a13e121c9f4b22bdf",
"timestamp": "",
"source": "github",
"line_count": 83,
"max_line_length": 86,
"avg_line_length": 28.253012048192772,
"alnum_prop": 0.5654584221748401,
"repo_name": "jwittenbach/bolt",
"id": "e8e781bcf642ebfc59be650dcbd56a9c162efc30",
"size": "2345",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "bolt/factory.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "147087"
}
],
"symlink_target": ""
} |
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import *
import pdb
import traceback
class TxnMallTest(BitcoinTestFramework):
    """Wallet accounting when a transaction is maliciously cloned (malleated).

    tx1 is cloned with a different sighash type on one half of a split
    network, the clone is mined, the network is rejoined, and every
    account balance on node0/node1 is re-checked against the clone.
    """
    def add_options(self, parser):
        # --mineblock runs the variant where tx1/tx2 get one confirmation
        # before the clone is broadcast.
        parser.add_option("--mineblock", dest="mine_block", default=False, action="store_true",
                          help="Test double-spend of 1-confirmed transaction")
    def setup_network(self):
        # Start with split network:
        return super(TxnMallTest, self).setup_network(True)
    def run_test(self):
        """Build tx1's malleated clone, confirm the clone, verify balances."""
        # All nodes should start with 1,250 BTC:
        starting_balance = 1250
        for i in range(4):
            assert_equal(self.nodes[i].getbalance(), starting_balance)
            self.nodes[i].getnewaddress("") # bug workaround, coins generated assigned to first getnewaddress!
        # Assign coins to foo and bar accounts:
        self.nodes[0].settxfee(.001)
        node0_address_foo = self.nodes[0].getnewaddress("foo")
        fund_foo_txid = self.nodes[0].sendfrom("", node0_address_foo, 1219)
        fund_foo_tx = self.nodes[0].gettransaction(fund_foo_txid)
        node0_address_bar = self.nodes[0].getnewaddress("bar")
        fund_bar_txid = self.nodes[0].sendfrom("", node0_address_bar, 29)
        fund_bar_tx = self.nodes[0].gettransaction(fund_bar_txid)
        assert_equal(self.nodes[0].getbalance(""),
                     starting_balance - 1219 - 29 + fund_foo_tx["fee"] + fund_bar_tx["fee"])
        # Coins are sent to node1_address
        node1_address = self.nodes[1].getnewaddress("from0")
        # Send tx1, and another transaction tx2 that won't be cloned
        txid1 = self.nodes[0].sendfrom("foo", node1_address, 40, 0)
        txid2 = self.nodes[0].sendfrom("bar", node1_address, 20, 0)
        # Construct a clone of tx1, to be malleated
        rawtx1 = self.nodes[0].getrawtransaction(txid1,1)
        clone_inputs = [{"txid":rawtx1["vin"][0]["txid"],"vout":rawtx1["vin"][0]["vout"]}]
        clone_outputs = {rawtx1["vout"][0]["scriptPubKey"]["addresses"][0]:rawtx1["vout"][0]["value"],
                         rawtx1["vout"][1]["scriptPubKey"]["addresses"][0]:rawtx1["vout"][1]["value"]}
        clone_raw = self.nodes[0].createrawtransaction(clone_inputs, clone_outputs)
        # 3 hex manipulations on the clone are required
        # manipulation 1. sequence is at version+#inputs+input+sigstub
        # Byte offsets are doubled because clone_raw is a hex string
        # (two characters per serialized byte).
        posseq = 2*(4+1+36+1)
        seqbe = '%08x' % rawtx1["vin"][0]["sequence"]
        # Re-insert the sequence little-endian, byte-swapped from the big-endian hex.
        clone_raw = clone_raw[:posseq] + seqbe[6:8] + seqbe[4:6] + seqbe[2:4] + seqbe[0:2] + clone_raw[posseq + 8:]
        # manipulation 2. createrawtransaction randomizes the order of its outputs, so swap them if necessary.
        # output 0 is at version+#inputs+input+sigstub+sequence+#outputs
        # 40 BTC serialized is 00286bee00000000
        pos0 = 2*(4+1+36+1+4+1)
        hex40 = "00286bee00000000"
        # 16 hex chars of value + 2 of script length + the script itself.
        output_len = 16 + 2 + 2 * int("0x" + clone_raw[pos0 + 16 : pos0 + 16 + 2], 0)
        if (rawtx1["vout"][0]["value"] == 40 and clone_raw[pos0 : pos0 + 16] != hex40 or
                rawtx1["vout"][0]["value"] != 40 and clone_raw[pos0 : pos0 + 16] == hex40):
            output0 = clone_raw[pos0 : pos0 + output_len]
            output1 = clone_raw[pos0 + output_len : pos0 + 2 * output_len]
            clone_raw = clone_raw[:pos0] + output1 + output0 + clone_raw[pos0 + 2 * output_len:]
        # manipulation 3. locktime is after outputs
        poslt = pos0 + 2 * output_len
        ltbe = '%08x' % rawtx1["locktime"]
        clone_raw = clone_raw[:poslt] + ltbe[6:8] + ltbe[4:6] + ltbe[2:4] + ltbe[0:2] + clone_raw[poslt + 8:]
        # Use a different signature hash type to sign. This creates an equivalent but malleated clone.
        # Don't send the clone anywhere yet
        tx1_clone = self.nodes[0].signrawtransaction(clone_raw, None, None, "ALL|ANYONECANPAY")
        assert_equal(tx1_clone["complete"], True)
        # Have node0 mine a block, if requested:
        if (self.options.mine_block):
            self.nodes[0].generate(1)
            sync_blocks(self.nodes[0:2])
        tx1 = self.nodes[0].gettransaction(txid1)
        tx2 = self.nodes[0].gettransaction(txid2)
        # Node0's balance should be starting balance, plus 50BTC for another
        # matured block, minus tx1 and tx2 amounts, and minus transaction fees:
        expected = starting_balance + fund_foo_tx["fee"] + fund_bar_tx["fee"]
        if self.options.mine_block: expected += 50
        expected += tx1["amount"] + tx1["fee"]
        expected += tx2["amount"] + tx2["fee"]
        assert_equal(self.nodes[0].getbalance(), expected)
        # foo and bar accounts should be debited:
        assert_equal(self.nodes[0].getbalance("foo", 0), 1219 + tx1["amount"] + tx1["fee"])
        assert_equal(self.nodes[0].getbalance("bar", 0), 29 + tx2["amount"] + tx2["fee"])
        if self.options.mine_block:
            assert_equal(tx1["confirmations"], 1)
            assert_equal(tx2["confirmations"], 1)
            # Node1's "from0" balance should be both transaction amounts:
            assert_equal(self.nodes[1].getbalance("from0"), -(tx1["amount"] + tx2["amount"]))
        else:
            assert_equal(tx1["confirmations"], 0)
            assert_equal(tx2["confirmations"], 0)
        # Send clone and its parent to miner
        self.nodes[2].sendrawtransaction(fund_foo_tx["hex"])
        txid1_clone = self.nodes[2].sendrawtransaction(tx1_clone["hex"])
        # ... mine a block...
        self.nodes[2].generate(1)
        # Reconnect the split network, and sync chain:
        connect_nodes(self.nodes[1], 2)
        self.nodes[2].sendrawtransaction(fund_bar_tx["hex"])
        self.nodes[2].sendrawtransaction(tx2["hex"])
        self.nodes[2].generate(1) # Mine another block to make sure we sync
        sync_blocks(self.nodes)
        # Re-fetch transaction info:
        tx1 = self.nodes[0].gettransaction(txid1)
        tx1_clone = self.nodes[0].gettransaction(txid1_clone)
        tx2 = self.nodes[0].gettransaction(txid2)
        # Verify expected confirmations
        # NOTE(review): -2 appears to mean "conflicted, two blocks deep" --
        # confirm against the wallet's confirmations semantics.
        assert_equal(tx1["confirmations"], -2)
        assert_equal(tx1_clone["confirmations"], 2)
        assert_equal(tx2["confirmations"], 1)
        # Check node0's total balance; should be same as before the clone, + 100 BTC for 2 matured,
        # less possible orphaned matured subsidy
        expected += 100
        if (self.options.mine_block):
            expected -= 50
        assert_equal(self.nodes[0].getbalance(), expected)
        assert_equal(self.nodes[0].getbalance("*", 0), expected)
        # Check node0's individual account balances.
        # "foo" should have been debited by the equivalent clone of tx1
        assert_equal(self.nodes[0].getbalance("foo"), 1219 + tx1["amount"] + tx1["fee"])
        # "bar" should have been debited by (possibly unconfirmed) tx2
        assert_equal(self.nodes[0].getbalance("bar", 0), 29 + tx2["amount"] + tx2["fee"])
        # "" should have starting balance, less funding txes, plus subsidies
        assert_equal(self.nodes[0].getbalance("", 0), starting_balance
                     - 1219
                     + fund_foo_tx["fee"]
                     - 29
                     + fund_bar_tx["fee"]
                     + 100)
        # Node1's "from0" account balance
        assert_equal(self.nodes[1].getbalance("from0", 0), -(tx1["amount"] + tx2["amount"]))
if __name__ == '__main__':
    # Standard framework entry point (argument parsing, node setup, run_test).
    TxnMallTest().main()
def Test():
    """Interactive helper: run TxnMallTest by hand, dropping to pdb on failure."""
    conf = {
        "debug": ["net", "blk", "thin", "mempool", "req", "bench", "evict"],  # "lck"
        "blockprioritysize": 2000000,  # we don't want any transactions rejected due to insufficient fees...
    }
    runner = TxnMallTest()
    runner.drop_to_pdb = True
    flags = ["--tmpdir=/ramdisk/test", "--nocleanup", "--noshutdown"]
    runner.main(flags, conf, None)  # , "--tracerpc"])
| {
"content_hash": "8f3a454fe2a02a44665511f917e92898",
"timestamp": "",
"source": "github",
"line_count": 166,
"max_line_length": 115,
"avg_line_length": 48.74096385542169,
"alnum_prop": 0.5863304906686442,
"repo_name": "Bitcoin-com/BUcash",
"id": "e4e941fd3b1566a8442354d0d2d1a8fbf6e8d424",
"size": "8433",
"binary": false,
"copies": "1",
"ref": "refs/heads/BitcoinCash",
"path": "qa/rpc-tests/txn_clone.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Assembly",
"bytes": "28456"
},
{
"name": "C",
"bytes": "678782"
},
{
"name": "C++",
"bytes": "5254834"
},
{
"name": "HTML",
"bytes": "20970"
},
{
"name": "Java",
"bytes": "30290"
},
{
"name": "M4",
"bytes": "190876"
},
{
"name": "Makefile",
"bytes": "110407"
},
{
"name": "Objective-C",
"bytes": "92442"
},
{
"name": "Objective-C++",
"bytes": "7360"
},
{
"name": "Python",
"bytes": "1006659"
},
{
"name": "QMake",
"bytes": "2067"
},
{
"name": "Roff",
"bytes": "3821"
},
{
"name": "Shell",
"bytes": "45308"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
import gettext
import os
from datetime import datetime, timedelta
from importlib import import_module
from unittest import TestCase, skipIf
from django import forms
from django.conf import settings
from django.contrib import admin
from django.contrib.admin import widgets
from django.contrib.admin.tests import AdminSeleniumWebDriverTestCase
from django.contrib.auth.models import User
from django.core.files.storage import default_storage
from django.core.files.uploadedfile import SimpleUploadedFile
from django.core.urlresolvers import reverse
from django.db.models import CharField, DateField
from django.test import TestCase as DjangoTestCase, override_settings
from django.utils import six, translation
from . import models
from .widgetadmin import site as widget_admin_site
# pytz is optional; tests needing timezone support check for None and skip.
try:
    import pytz
except ImportError:
    pytz = None
class TestDataMixin(object):
    """Class-level fixtures: a superuser, a second staff user, and one Car each."""
    @classmethod
    def setUpTestData(cls):
        cls.u1 = User.objects.create(
            pk=100, username='super', first_name='Super', last_name='User', email='super@example.com',
            password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158', is_active=True, is_superuser=True,
            is_staff=True, last_login=datetime(2007, 5, 30, 13, 20, 10),
            date_joined=datetime(2007, 5, 30, 13, 20, 10)
        )
        cls.u2 = User.objects.create(
            pk=101, username='testser', first_name='Add', last_name='User', email='auser@example.com',
            password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158', is_active=True, is_superuser=False,
            is_staff=True, last_login=datetime(2007, 5, 30, 13, 20, 10),
            date_joined=datetime(2007, 5, 30, 13, 20, 10)
        )
        # One car per user so per-user filtering can be asserted.
        models.Car.objects.create(id=1, owner=cls.u1, make='Volkswagon', model='Passat')
        models.Car.objects.create(id=2, owner=cls.u2, make='BMW', model='M3')
class SeleniumDataMixin(object):
    """Per-test superuser fixture (Selenium tests cannot use setUpTestData)."""
    def setUp(self):
        self.u1 = User.objects.create(
            pk=100, username='super', first_name='Super', last_name='User', email='super@example.com',
            password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158', is_active=True, is_superuser=True,
            is_staff=True, last_login=datetime(2007, 5, 30, 13, 20, 10),
            date_joined=datetime(2007, 5, 30, 13, 20, 10)
        )
class AdminFormfieldForDBFieldTests(TestCase):
    """
    Tests for correct behavior of ModelAdmin.formfield_for_dbfield
    """
    def assertFormfield(self, model, fieldname, widgetclass, **admin_overrides):
        """
        Helper to call formfield_for_dbfield for a given model and field name
        and verify that the returned formfield is appropriate.

        Any keyword arguments are set as attributes on a throwaway
        ModelAdmin subclass before the formfield is requested.  Returns
        the formfield so callers can make further assertions.
        """
        # Override any settings on the model admin
        class MyModelAdmin(admin.ModelAdmin):
            pass
        for k in admin_overrides:
            setattr(MyModelAdmin, k, admin_overrides[k])
        # Construct the admin, and ask it for a formfield
        ma = MyModelAdmin(model, admin.site)
        ff = ma.formfield_for_dbfield(model._meta.get_field(fieldname), request=None)
        # "unwrap" the widget wrapper, if needed
        if isinstance(ff.widget, widgets.RelatedFieldWidgetWrapper):
            widget = ff.widget.widget
        else:
            widget = ff.widget
        # Check that we got a field of the right type
        self.assertTrue(
            isinstance(widget, widgetclass),
            "Wrong widget for %s.%s: expected %s, got %s" % (
                model.__class__.__name__,
                fieldname,
                widgetclass,
                type(widget),
            )
        )
        # Return the formfield so that other tests can continue
        return ff
    # One test per field type: each checks the admin picks its specialized widget.
    def test_DateField(self):
        self.assertFormfield(models.Event, 'start_date', widgets.AdminDateWidget)
    def test_DateTimeField(self):
        self.assertFormfield(models.Member, 'birthdate', widgets.AdminSplitDateTime)
    def test_TimeField(self):
        self.assertFormfield(models.Event, 'start_time', widgets.AdminTimeWidget)
    def test_TextField(self):
        self.assertFormfield(models.Event, 'description', widgets.AdminTextareaWidget)
    def test_URLField(self):
        self.assertFormfield(models.Event, 'link', widgets.AdminURLFieldWidget)
    def test_IntegerField(self):
        self.assertFormfield(models.Event, 'min_age', widgets.AdminIntegerFieldWidget)
    def test_CharField(self):
        self.assertFormfield(models.Member, 'name', widgets.AdminTextInputWidget)
    def test_EmailField(self):
        self.assertFormfield(models.Member, 'email', widgets.AdminEmailInputWidget)
    def test_FileField(self):
        self.assertFormfield(models.Album, 'cover_art', widgets.AdminFileWidget)
    def test_ForeignKey(self):
        self.assertFormfield(models.Event, 'main_band', forms.Select)
    def test_raw_id_ForeignKey(self):
        self.assertFormfield(models.Event, 'main_band', widgets.ForeignKeyRawIdWidget,
                             raw_id_fields=['main_band'])
    def test_radio_fields_ForeignKey(self):
        ff = self.assertFormfield(models.Event, 'main_band', widgets.AdminRadioSelect,
                                  radio_fields={'main_band': admin.VERTICAL})
        self.assertEqual(ff.empty_label, None)
    def test_many_to_many(self):
        self.assertFormfield(models.Band, 'members', forms.SelectMultiple)
    def test_raw_id_many_to_many(self):
        self.assertFormfield(models.Band, 'members', widgets.ManyToManyRawIdWidget,
                             raw_id_fields=['members'])
    def test_filtered_many_to_many(self):
        self.assertFormfield(models.Band, 'members', widgets.FilteredSelectMultiple,
                             filter_vertical=['members'])
    def test_formfield_overrides(self):
        self.assertFormfield(models.Event, 'start_date', forms.TextInput,
                             formfield_overrides={DateField: {'widget': forms.TextInput}})
    def test_formfield_overrides_widget_instances(self):
        """
        Test that widget instances in formfield_overrides are not shared between
        different fields. (#19423)
        """
        class BandAdmin(admin.ModelAdmin):
            formfield_overrides = {
                CharField: {'widget': forms.TextInput(attrs={'size': '10'})}
            }
        ma = BandAdmin(models.Band, admin.site)
        f1 = ma.formfield_for_dbfield(models.Band._meta.get_field('name'), request=None)
        f2 = ma.formfield_for_dbfield(models.Band._meta.get_field('style'), request=None)
        # Distinct widget objects, each keeping its own field's maxlength
        # plus the shared override attrs.
        self.assertNotEqual(f1.widget, f2.widget)
        self.assertEqual(f1.widget.attrs['maxlength'], '100')
        self.assertEqual(f2.widget.attrs['maxlength'], '20')
        self.assertEqual(f2.widget.attrs['size'], '10')
    def test_field_with_choices(self):
        self.assertFormfield(models.Member, 'gender', forms.Select)
    def test_choices_with_radio_fields(self):
        self.assertFormfield(models.Member, 'gender', widgets.AdminRadioSelect,
                             radio_fields={'gender': admin.VERTICAL})
    def test_inheritance(self):
        self.assertFormfield(models.Album, 'backside_art', widgets.AdminFileWidget)
    def test_m2m_widgets(self):
        """m2m fields help text as it applies to admin app (#9321)."""
        class AdvisorAdmin(admin.ModelAdmin):
            filter_vertical = ['companies']
        self.assertFormfield(models.Advisor, 'companies', widgets.FilteredSelectMultiple,
                             filter_vertical=['companies'])
        ma = AdvisorAdmin(models.Advisor, admin.site)
        f = ma.formfield_for_dbfield(models.Advisor._meta.get_field('companies'), request=None)
        self.assertEqual(six.text_type(f.help_text), 'Hold down "Control", or "Command" on a Mac, to select more than one.')
@override_settings(PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher'],
                   ROOT_URLCONF='admin_widgets.urls')
class AdminFormfieldForDBFieldWithRequestTests(TestDataMixin, DjangoTestCase):
    """formfield_for_dbfield behavior that depends on the current request."""
    def test_filter_choices_by_request_user(self):
        """
        Ensure the user can only see their own cars in the foreign key dropdown.
        """
        self.client.login(username="super", password="secret")
        response = self.client.get(reverse('admin:admin_widgets_cartire_add'))
        self.assertNotContains(response, "BMW M3")
        self.assertContains(response, "Volkswagon Passat")
@override_settings(PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher'],
                   ROOT_URLCONF='admin_widgets.urls')
class AdminForeignKeyWidgetChangeList(TestDataMixin, DjangoTestCase):
    """Changelist rendering for a model with a ForeignKey column."""
    def setUp(self):
        self.client.login(username="super", password="secret")
    def test_changelist_ForeignKey(self):
        # The changelist page links to the related model's add form.
        response = self.client.get(reverse('admin:admin_widgets_car_changelist'))
        self.assertContains(response, '/auth/user/add/')
@override_settings(PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher'],
                   ROOT_URLCONF='admin_widgets.urls')
class AdminForeignKeyRawIdWidget(TestDataMixin, DjangoTestCase):
    """Validation and helpers for raw-id ForeignKey inputs in the admin."""
    def setUp(self):
        self.client.login(username="super", password="secret")
    def test_nonexistent_target_id(self):
        # Post the pk of a deleted object.
        band = models.Band.objects.create(name='Bogey Blues')
        pk = band.pk
        band.delete()
        post_data = {
            "main_band": '%s' % pk,
        }
        # Try posting with a non-existent pk in a raw id field: this
        # should result in an error message, not a server exception.
        response = self.client.post(reverse('admin:admin_widgets_event_add'), post_data)
        self.assertContains(response,
                            'Select a valid choice. That choice is not one of the available choices.')
    def test_invalid_target_id(self):
        # Non-numeric, quote-bearing, and negative values must all be rejected gracefully.
        for test_str in ('Iñtërnâtiônàlizætiøn', "1234'", -1234):
            # This should result in an error message, not a server exception.
            response = self.client.post(reverse('admin:admin_widgets_event_add'),
                                        {"main_band": test_str})
            self.assertContains(response,
                                'Select a valid choice. That choice is not one of the available choices.')
    def test_url_params_from_lookup_dict_any_iterable(self):
        # Tuple and list values serialize identically (comma-joined).
        lookup1 = widgets.url_params_from_lookup_dict({'color__in': ('red', 'blue')})
        lookup2 = widgets.url_params_from_lookup_dict({'color__in': ['red', 'blue']})
        self.assertEqual(lookup1, {'color__in': 'red,blue'})
        self.assertEqual(lookup1, lookup2)
    def test_url_params_from_lookup_dict_callable(self):
        # A callable limit value is called; result matches passing its return value.
        def my_callable():
            return 'works'
        lookup1 = widgets.url_params_from_lookup_dict({'myfield': my_callable})
        lookup2 = widgets.url_params_from_lookup_dict({'myfield': my_callable()})
        self.assertEqual(lookup1, lookup2)
class FilteredSelectMultipleWidgetTest(DjangoTestCase):
    """Rendered HTML of FilteredSelectMultiple (horizontal and stacked)."""
    def test_render(self):
        # is_stacked=False renders the "selectfilter" class and init flag 0.
        w = widgets.FilteredSelectMultiple('test', False)
        self.assertHTMLEqual(
            w.render('test', 'test'),
            '<select multiple="multiple" name="test" class="selectfilter">\n</select><script type="text/javascript">addEvent(window, "load", function(e) {SelectFilter.init("id_test", "test", 0); });</script>\n'
        )
    def test_stacked_render(self):
        # is_stacked=True renders "selectfilterstacked" and init flag 1.
        w = widgets.FilteredSelectMultiple('test', True)
        self.assertHTMLEqual(
            w.render('test', 'test'),
            '<select multiple="multiple" name="test" class="selectfilterstacked">\n</select><script type="text/javascript">addEvent(window, "load", function(e) {SelectFilter.init("id_test", "test", 1); });</script>\n'
        )
class AdminDateWidgetTest(DjangoTestCase):
    """AdminDateWidget rendering, with default and user-supplied attrs."""
    def test_attrs(self):
        """
        Ensure that user-supplied attrs are used.
        Refs #12073.
        """
        w = widgets.AdminDateWidget()
        self.assertHTMLEqual(
            w.render('test', datetime(2007, 12, 1, 9, 30)),
            '<input value="2007-12-01" type="text" class="vDateField" name="test" size="10" />',
        )
        # pass attrs to widget
        w = widgets.AdminDateWidget(attrs={'size': 20, 'class': 'myDateField'})
        self.assertHTMLEqual(
            w.render('test', datetime(2007, 12, 1, 9, 30)),
            '<input value="2007-12-01" type="text" class="myDateField" name="test" size="20" />',
        )
class AdminTimeWidgetTest(DjangoTestCase):
    """AdminTimeWidget rendering, with default and user-supplied attrs."""
    def test_attrs(self):
        """
        Ensure that user-supplied attrs are used.
        Refs #12073.
        """
        w = widgets.AdminTimeWidget()
        self.assertHTMLEqual(
            w.render('test', datetime(2007, 12, 1, 9, 30)),
            '<input value="09:30:00" type="text" class="vTimeField" name="test" size="8" />',
        )
        # pass attrs to widget
        w = widgets.AdminTimeWidget(attrs={'size': 20, 'class': 'myTimeField'})
        self.assertHTMLEqual(
            w.render('test', datetime(2007, 12, 1, 9, 30)),
            '<input value="09:30:00" type="text" class="myTimeField" name="test" size="20" />',
        )
class AdminSplitDateTimeWidgetTest(DjangoTestCase):
    """AdminSplitDateTime rendering, including localized labels/formats."""
    def test_render(self):
        w = widgets.AdminSplitDateTime()
        self.assertHTMLEqual(
            w.render('test', datetime(2007, 12, 1, 9, 30)),
            '<p class="datetime">Date: <input value="2007-12-01" type="text" class="vDateField" name="test_0" size="10" /><br />Time: <input value="09:30:00" type="text" class="vTimeField" name="test_1" size="8" /></p>',
        )
    def test_localization(self):
        w = widgets.AdminSplitDateTime()
        # Under de-at localization the labels and the date format change.
        with self.settings(USE_L10N=True), translation.override('de-at'):
            w.is_localized = True
            self.assertHTMLEqual(
                w.render('test', datetime(2007, 12, 1, 9, 30)),
                '<p class="datetime">Datum: <input value="01.12.2007" type="text" class="vDateField" name="test_0" size="10" /><br />Zeit: <input value="09:30:00" type="text" class="vTimeField" name="test_1" size="8" /></p>',
            )
class AdminURLWidgetTest(DjangoTestCase):
    """AdminURLFieldWidget rendering: empty, filled, IDN, and escaping cases."""
    def test_render(self):
        w = widgets.AdminURLFieldWidget()
        self.assertHTMLEqual(
            w.render('test', ''),
            '<input class="vURLField" name="test" type="url" />'
        )
        # A non-empty value adds a "Currently:" link plus the change input.
        self.assertHTMLEqual(
            w.render('test', 'http://example.com'),
            '<p class="url">Currently:<a href="http://example.com">http://example.com</a><br />Change:<input class="vURLField" name="test" type="url" value="http://example.com" /></p>'
        )
    def test_render_idn(self):
        # Internationalized domain: href is punycode, display text stays unicode.
        w = widgets.AdminURLFieldWidget()
        self.assertHTMLEqual(
            w.render('test', 'http://example-äüö.com'),
            '<p class="url">Currently: <a href="http://xn--example--7za4pnc.com">http://example-äüö.com</a><br />Change:<input class="vURLField" name="test" type="url" value="http://example-äüö.com" /></p>'
        )
    def test_render_quoting(self):
        # WARNING: Don't use assertHTMLEqual in that testcase!
        # assertHTMLEqual will get rid of some escapes which are tested here!
        w = widgets.AdminURLFieldWidget()
        self.assertEqual(
            w.render('test', 'http://example.com/<sometag>some text</sometag>'),
            '<p class="url">Currently: <a href="http://example.com/%3Csometag%3Esome%20text%3C/sometag%3E">http://example.com/<sometag>some text</sometag></a><br />Change: <input class="vURLField" name="test" type="url" value="http://example.com/<sometag>some text</sometag>" /></p>'
        )
        self.assertEqual(
            w.render('test', 'http://example-äüö.com/<sometag>some text</sometag>'),
            '<p class="url">Currently: <a href="http://xn--example--7za4pnc.com/%3Csometag%3Esome%20text%3C/sometag%3E">http://example-äüö.com/<sometag>some text</sometag></a><br />Change: <input class="vURLField" name="test" type="url" value="http://example-äüö.com/<sometag>some text</sometag>" /></p>'
        )
        self.assertEqual(
            w.render('test', 'http://www.example.com/%C3%A4"><script>alert("XSS!")</script>"'),
            '<p class="url">Currently: <a href="http://www.example.com/%C3%A4%22%3E%3Cscript%3Ealert(%22XSS!%22)%3C/script%3E%22">http://www.example.com/%C3%A4"><script>alert("XSS!")</script>"</a><br />Change: <input class="vURLField" name="test" type="url" value="http://www.example.com/%C3%A4"><script>alert("XSS!")</script>"" /></p>'
        )
@override_settings(
    PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher'],
    ROOT_URLCONF='admin_widgets.urls',
)
class AdminFileWidgetTests(TestDataMixin, DjangoTestCase):
    """AdminFileWidget rendering for saved files, uploads, and readonly fields."""
    @classmethod
    def setUpTestData(cls):
        super(AdminFileWidgetTests, cls).setUpTestData()
        band = models.Band.objects.create(name='Linkin Park')
        cls.album = band.album_set.create(
            name='Hybrid Theory', cover_art=r'albums\hybrid_theory.jpg'
        )
    def test_render(self):
        w = widgets.AdminFileWidget()
        # A saved file renders a "Currently" link, a clear checkbox, and a change input.
        self.assertHTMLEqual(
            w.render('test', self.album.cover_art),
            '<p class="file-upload">Currently: <a href="%(STORAGE_URL)salbums/'
            'hybrid_theory.jpg">albums\hybrid_theory.jpg</a> '
            '<span class="clearable-file-input">'
            '<input type="checkbox" name="test-clear" id="test-clear_id" /> '
            '<label for="test-clear_id">Clear</label></span><br />'
            'Change: <input type="file" name="test" /></p>' % {
                'STORAGE_URL': default_storage.url(''),
            },
        )
        # An in-memory upload renders only the bare file input.
        self.assertHTMLEqual(
            w.render('test', SimpleUploadedFile('test', b'content')),
            '<input type="file" name="test" />',
        )
    def test_readonly_fields(self):
        """
        File widgets should render as a link when they're marked "read only."
        """
        self.client.login(username="super", password="secret")
        response = self.client.get(reverse('admin:admin_widgets_album_change', args=(self.album.id,)))
        self.assertContains(
            response,
            '<p><a href="%(STORAGE_URL)salbums/hybrid_theory.jpg">'
            'albums\hybrid_theory.jpg</a></p>' % {'STORAGE_URL': default_storage.url('')},
            html=True,
        )
        self.assertNotContains(
            response,
            '<input type="file" name="cover_art" id="id_cover_art" />',
            html=True,
        )
        # The add form has no file yet, so the readonly cell is empty.
        response = self.client.get(reverse('admin:admin_widgets_album_add'))
        self.assertContains(
            response,
            '<p></p>',
            html=True,
        )
@override_settings(ROOT_URLCONF='admin_widgets.urls')
class ForeignKeyRawIdWidgetTest(DjangoTestCase):
def test_render(self):
band = models.Band.objects.create(name='Linkin Park')
band.album_set.create(
name='Hybrid Theory', cover_art=r'albums\hybrid_theory.jpg'
)
rel = models.Album._meta.get_field('band').rel
w = widgets.ForeignKeyRawIdWidget(rel, widget_admin_site)
self.assertHTMLEqual(
w.render('test', band.pk, attrs={}), (
'<input type="text" name="test" value="%(bandpk)s" class="vForeignKeyRawIdAdminField" />'
'<a href="/admin_widgets/band/?_to_field=id" class="related-lookup" id="lookup_id_test" title="Lookup"></a>'
' <strong>Linkin Park</strong>'
) % {'bandpk': band.pk}
)
def test_relations_to_non_primary_key(self):
# Check that ForeignKeyRawIdWidget works with fields which aren't
# related to the model's primary key.
apple = models.Inventory.objects.create(barcode=86, name='Apple')
models.Inventory.objects.create(barcode=22, name='Pear')
core = models.Inventory.objects.create(
barcode=87, name='Core', parent=apple
)
rel = models.Inventory._meta.get_field('parent').rel
w = widgets.ForeignKeyRawIdWidget(rel, widget_admin_site)
self.assertHTMLEqual(
w.render('test', core.parent_id, attrs={}), (
'<input type="text" name="test" value="86" class="vForeignKeyRawIdAdminField" />'
'<a href="/admin_widgets/inventory/?_to_field=barcode" class="related-lookup" id="lookup_id_test" title="Lookup">'
'</a> <strong>Apple</strong>'
)
)
def test_fk_related_model_not_in_admin(self):
# FK to a model not registered with admin site. Raw ID widget should
# have no magnifying glass link. See #16542
big_honeycomb = models.Honeycomb.objects.create(location='Old tree')
big_honeycomb.bee_set.create()
rel = models.Bee._meta.get_field('honeycomb').rel
w = widgets.ForeignKeyRawIdWidget(rel, widget_admin_site)
self.assertHTMLEqual(
w.render('honeycomb_widget', big_honeycomb.pk, attrs={}),
'<input type="text" name="honeycomb_widget" value="%(hcombpk)s" /> <strong>Honeycomb object</strong>' % {'hcombpk': big_honeycomb.pk}
)
def test_fk_to_self_model_not_in_admin(self):
# FK to self, not registered with admin site. Raw ID widget should have
# no magnifying glass link. See #16542
subject1 = models.Individual.objects.create(name='Subject #1')
models.Individual.objects.create(name='Child', parent=subject1)
rel = models.Individual._meta.get_field('parent').rel
w = widgets.ForeignKeyRawIdWidget(rel, widget_admin_site)
self.assertHTMLEqual(
w.render('individual_widget', subject1.pk, attrs={}),
'<input type="text" name="individual_widget" value="%(subj1pk)s" /> <strong>Individual object</strong>' % {'subj1pk': subject1.pk}
)
def test_proper_manager_for_label_lookup(self):
    """The widget label is resolved even for objects hidden by a custom
    manager, so "Hidden" still appears next to the raw id. See #9258."""
    widget = widgets.ForeignKeyRawIdWidget(
        models.Inventory._meta.get_field('parent').rel, widget_admin_site)
    hidden_parent = models.Inventory.objects.create(
        barcode=93, name='Hidden', hidden=True
    )
    visible_child = models.Inventory.objects.create(
        barcode=94, name='Child of hidden', parent=hidden_parent
    )
    self.assertHTMLEqual(
        widget.render('test', visible_child.parent_id, attrs={}), (
            '<input type="text" name="test" value="93" class="vForeignKeyRawIdAdminField" />'
            '<a href="/admin_widgets/inventory/?_to_field=barcode" class="related-lookup" id="lookup_id_test" title="Lookup">'
            '</a> <strong>Hidden</strong>'
        )
    )
@override_settings(ROOT_URLCONF='admin_widgets.urls')
class ManyToManyRawIdWidgetTest(DjangoTestCase):
    """Rendering behaviour of ManyToManyRawIdWidget."""

    def test_render(self):
        """The widget renders a comma-separated pk list plus a lookup
        link for models registered with the admin site."""
        band = models.Band.objects.create(name='Linkin Park')
        chester = models.Member.objects.create(name='Chester')
        mike = models.Member.objects.create(name='Mike')
        band.members.add(chester, mike)
        widget = widgets.ManyToManyRawIdWidget(
            models.Band._meta.get_field('members').rel, widget_admin_site)
        self.assertHTMLEqual(
            widget.render('test', [chester.pk, mike.pk], attrs={}), (
                '<input type="text" name="test" value="%(m1pk)s,%(m2pk)s" class="vManyToManyRawIdAdminField" />'
                '<a href="/admin_widgets/member/" class="related-lookup" id="lookup_id_test" title="Lookup"></a>'
            ) % dict(m1pk=chester.pk, m2pk=mike.pk)
        )
        # A single value renders without a separator; attrs is left at its
        # default here.
        self.assertHTMLEqual(
            widget.render('test', [chester.pk]), (
                '<input type="text" name="test" value="%(m1pk)s" class="vManyToManyRawIdAdminField">'
                '<a href="/admin_widgets/member/" class="related-lookup" id="lookup_id_test" title="Lookup"></a>'
            ) % dict(m1pk=chester.pk)
        )

    def test_m2m_related_model_not_in_admin(self):
        """No magnifying-glass lookup link when the related model is not
        registered with the admin site. See #16542."""
        advisor = models.Advisor.objects.create(name='Rockstar Techie')
        doodle = models.Company.objects.create(name='Doodle')
        pear = models.Company.objects.create(name='Pear')
        advisor.companies.add(doodle, pear)
        widget = widgets.ManyToManyRawIdWidget(
            models.Advisor._meta.get_field('companies').rel, widget_admin_site)
        self.assertHTMLEqual(
            widget.render('company_widget1', [doodle.pk, pear.pk], attrs={}),
            '<input type="text" name="company_widget1" value="%(c1pk)s,%(c2pk)s" />'
            % {'c1pk': doodle.pk, 'c2pk': pear.pk}
        )
        self.assertHTMLEqual(
            widget.render('company_widget2', [doodle.pk]),
            '<input type="text" name="company_widget2" value="%(c1pk)s" />'
            % {'c1pk': doodle.pk}
        )
class RelatedFieldWidgetWrapperTests(DjangoTestCase):
    """Behaviour of RelatedFieldWidgetWrapper's add/change/delete flags."""

    def test_no_can_add_related(self):
        """Wrapping a widget without can_add_related leaves the flag off
        (this used to fail with a NameError)."""
        relation = models.Individual._meta.get_field('parent').rel
        wrapped = widgets.RelatedFieldWidgetWrapper(
            widgets.AdminRadioSelect(), relation, widget_admin_site)
        self.assertFalse(wrapped.can_add_related)

    def test_select_multiple_widget_cant_change_delete_related(self):
        """Multi-select widgets never expose the change/delete shortcuts,
        even when explicitly requested; add remains allowed."""
        relation = models.Individual._meta.get_field('parent').rel
        wrapper = widgets.RelatedFieldWidgetWrapper(
            forms.SelectMultiple(), relation, widget_admin_site,
            can_add_related=True,
            can_change_related=True,
            can_delete_related=True,
        )
        self.assertTrue(wrapper.can_add_related)
        self.assertFalse(wrapper.can_change_related)
        self.assertFalse(wrapper.can_delete_related)

    def test_on_delete_cascade_rel_cant_delete_related(self):
        """A relation whose deletion cascades (the 'soulmate' field) must
        not expose the delete shortcut; add/change stay enabled."""
        relation = models.Individual._meta.get_field('soulmate').rel
        wrapper = widgets.RelatedFieldWidgetWrapper(
            forms.Select(), relation, widget_admin_site,
            can_add_related=True,
            can_change_related=True,
            can_delete_related=True,
        )
        self.assertTrue(wrapper.can_add_related)
        self.assertTrue(wrapper.can_change_related)
        self.assertFalse(wrapper.can_delete_related)
@override_settings(PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher'],
                   ROOT_URLCONF='admin_widgets.urls')
class DateTimePickerSeleniumFirefoxTests(SeleniumDataMixin, AdminSeleniumWebDriverTestCase):
    """Browser tests for the admin date/time picker widgets, run in Firefox.

    Subclasses swap ``webdriver_class`` to re-run the same suite in other
    browsers.
    """

    available_apps = ['admin_widgets'] + AdminSeleniumWebDriverTestCase.available_apps
    webdriver_class = 'selenium.webdriver.firefox.webdriver.WebDriver'

    def test_show_hide_date_time_picker_widgets(self):
        """
        Ensure that pressing the ESC key closes the date and time picker
        widgets.
        Refs #17064.
        """
        from selenium.webdriver.common.keys import Keys
        self.admin_login(username='super', password='secret', login_url='/')
        # Open a page that has a date and time picker widgets
        self.selenium.get('%s%s' % (self.live_server_url,
            reverse('admin:admin_widgets_member_add')))
        # First, with the date picker widget ---------------------------------
        # Check that the date picker is hidden
        self.assertEqual(
            self.get_css_value('#calendarbox0', 'display'), 'none')
        # Click the calendar icon
        self.selenium.find_element_by_id('calendarlink0').click()
        # Check that the date picker is visible
        self.assertEqual(
            self.get_css_value('#calendarbox0', 'display'), 'block')
        # Press the ESC key
        self.selenium.find_element_by_tag_name('body').send_keys([Keys.ESCAPE])
        # Check that the date picker is hidden again
        self.assertEqual(
            self.get_css_value('#calendarbox0', 'display'), 'none')
        # Then, with the time picker widget ----------------------------------
        # Check that the time picker is hidden
        self.assertEqual(
            self.get_css_value('#clockbox0', 'display'), 'none')
        # Click the time icon
        self.selenium.find_element_by_id('clocklink0').click()
        # Check that the time picker is visible
        self.assertEqual(
            self.get_css_value('#clockbox0', 'display'), 'block')
        # Press the ESC key
        self.selenium.find_element_by_tag_name('body').send_keys([Keys.ESCAPE])
        # Check that the time picker is hidden again
        self.assertEqual(
            self.get_css_value('#clockbox0', 'display'), 'none')

    def test_calendar_nonday_class(self):
        """
        Ensure cells that are not days of the month have the `nonday` CSS class.
        Refs #4574.
        """
        self.admin_login(username='super', password='secret', login_url='/')
        # Open a page that has a date and time picker widgets
        self.selenium.get('%s%s' % (self.live_server_url,
            reverse('admin:admin_widgets_member_add')))
        # fill in the birth date.
        self.selenium.find_element_by_id('id_birthdate_0').send_keys('2013-06-01')
        # Click the calendar icon
        self.selenium.find_element_by_id('calendarlink0').click()
        # get all the tds within the calendar
        calendar0 = self.selenium.find_element_by_id('calendarin0')
        tds = calendar0.find_elements_by_tag_name('td')
        # make sure the first and last 6 cells have class nonday
        # (for June 2013 the month grid has 6 leading and 6 trailing filler
        # cells).
        for td in tds[:6] + tds[-6:]:
            self.assertEqual(td.get_attribute('class'), 'nonday')

    def test_calendar_selected_class(self):
        """
        Ensure cell for the day in the input has the `selected` CSS class.
        Refs #4574.
        """
        self.admin_login(username='super', password='secret', login_url='/')
        # Open a page that has a date and time picker widgets
        self.selenium.get('%s%s' % (self.live_server_url,
            reverse('admin:admin_widgets_member_add')))
        # fill in the birth date.
        self.selenium.find_element_by_id('id_birthdate_0').send_keys('2013-06-01')
        # Click the calendar icon
        self.selenium.find_element_by_id('calendarlink0').click()
        # get all the tds within the calendar
        calendar0 = self.selenium.find_element_by_id('calendarin0')
        tds = calendar0.find_elements_by_tag_name('td')
        # verify the selected cell
        # (index 6 follows the six leading nonday cells checked above, so it
        # holds June 1st, 2013 — the date typed into the input)
        selected = tds[6]
        self.assertEqual(selected.get_attribute('class'), 'selected')
        self.assertEqual(selected.text, '1')

    def test_calendar_no_selected_class(self):
        """
        Ensure no cells are given the selected class when the field is empty.
        Refs #4574.
        """
        self.admin_login(username='super', password='secret', login_url='/')
        # Open a page that has a date and time picker widgets
        self.selenium.get('%s%s' % (self.live_server_url,
            reverse('admin:admin_widgets_member_add')))
        # Click the calendar icon
        self.selenium.find_element_by_id('calendarlink0').click()
        # get all the tds within the calendar
        calendar0 = self.selenium.find_element_by_id('calendarin0')
        tds = calendar0.find_elements_by_tag_name('td')
        # verify there are no cells with the selected class
        selected = [td for td in tds if td.get_attribute('class') == 'selected']
        self.assertEqual(len(selected), 0)

    def test_calendar_show_date_from_input(self):
        """
        Ensure that the calendar show the date from the input field for every
        locale supported by django.
        """
        self.admin_login(username='super', password='secret', login_url='/')
        # Enter test data
        member = models.Member.objects.create(name='Bob', birthdate=datetime(1984, 5, 15), gender='M')
        # Get month names translations for every locales
        month_string = 'January February March April May June July August September October November December'
        path = os.path.join(os.path.dirname(import_module('django.contrib.admin').__file__), 'locale')
        for language_code, language_name in settings.LANGUAGES:
            try:
                catalog = gettext.translation('djangojs', path, [language_code])
            except IOError:
                # No compiled djangojs catalog for this locale; skip it.
                continue
            if month_string in catalog._catalog:
                month_names = catalog._catalog[month_string]
            else:
                month_names = month_string
            # Get the expected caption
            may_translation = month_names.split(' ')[4]
            expected_caption = '{0:s} {1:d}'.format(may_translation, 1984)
            # Test with every locale
            with override_settings(LANGUAGE_CODE=language_code, USE_L10N=True):
                # Open a page that has a date picker widget
                self.selenium.get('{}{}'.format(self.live_server_url,
                    reverse('admin:admin_widgets_member_change', args=(member.pk,))))
                # Click on the calendar icon
                self.selenium.find_element_by_id('calendarlink0').click()
                # Get the calendar caption
                calendar0 = self.selenium.find_element_by_id('calendarin0')
                caption = calendar0.find_element_by_tag_name('caption')
                # Make sure that the right month and year are displayed
                self.assertEqual(caption.text, expected_caption)
class DateTimePickerSeleniumChromeTests(DateTimePickerSeleniumFirefoxTests):
    # Same date/time picker suite, run against Chrome.
    webdriver_class = 'selenium.webdriver.chrome.webdriver.WebDriver'


class DateTimePickerSeleniumIETests(DateTimePickerSeleniumFirefoxTests):
    # Same date/time picker suite, run against Internet Explorer.
    webdriver_class = 'selenium.webdriver.ie.webdriver.WebDriver'
@skipIf(pytz is None, "this test requires pytz")
@override_settings(TIME_ZONE='Asia/Singapore')
@override_settings(PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher'],
                   ROOT_URLCONF='admin_widgets.urls')
class DateTimePickerShortcutsSeleniumFirefoxTests(SeleniumDataMixin, AdminSeleniumWebDriverTestCase):
    """Tests for the "Today"/"Now" shortcuts of the date/time picker,
    exercised across a server/browser time-zone mismatch (Firefox)."""

    available_apps = ['admin_widgets'] + AdminSeleniumWebDriverTestCase.available_apps
    webdriver_class = 'selenium.webdriver.firefox.webdriver.WebDriver'

    def test_date_time_picker_shortcuts(self):
        """
        Ensure that date/time/datetime picker shortcuts work in the current time zone.
        Refs #20663.
        This test case is fairly tricky, it relies on selenium still running the browser
        in the default time zone "America/Chicago" despite `override_settings` changing
        the time zone to "Asia/Singapore".
        """
        self.admin_login(username='super', password='secret', login_url='/')
        error_margin = timedelta(seconds=10)
        # If we are neighbouring a DST, we add an hour of error margin.
        tz = pytz.timezone('America/Chicago')
        utc_now = datetime.now(pytz.utc)
        # A DST boundary within +/- one day changes the tzname, so widen the
        # acceptable window by an hour in that case.
        tz_yesterday = (utc_now - timedelta(days=1)).astimezone(tz).tzname()
        tz_tomorrow = (utc_now + timedelta(days=1)).astimezone(tz).tzname()
        if tz_yesterday != tz_tomorrow:
            error_margin += timedelta(hours=1)
        now = datetime.now()
        self.selenium.get('%s%s' % (self.live_server_url,
            reverse('admin:admin_widgets_member_add')))
        self.selenium.find_element_by_id('id_name').send_keys('test')
        # Click on the "today" and "now" shortcuts.
        shortcuts = self.selenium.find_elements_by_css_selector(
            '.field-birthdate .datetimeshortcuts')
        for shortcut in shortcuts:
            shortcut.find_element_by_tag_name('a').click()
        # Check that there is a time zone mismatch warning.
        # Warning: This would effectively fail if the TIME_ZONE defined in the
        # settings has the same UTC offset as "Asia/Singapore" because the
        # mismatch warning would be rightfully missing from the page.
        self.selenium.find_elements_by_css_selector(
            '.field-birthdate .timezonewarning')
        # Submit the form.
        self.selenium.find_element_by_tag_name('form').submit()
        self.wait_page_loaded()
        # Make sure that "now" in javascript is within 10 seconds
        # from "now" on the server side.
        member = models.Member.objects.get(name='test')
        self.assertGreater(member.birthdate, now - error_margin)
        self.assertLess(member.birthdate, now + error_margin)
class DateTimePickerShortcutsSeleniumChromeTests(DateTimePickerShortcutsSeleniumFirefoxTests):
    # Same shortcuts suite, run against Chrome.
    webdriver_class = 'selenium.webdriver.chrome.webdriver.WebDriver'


class DateTimePickerShortcutsSeleniumIETests(DateTimePickerShortcutsSeleniumFirefoxTests):
    # Same shortcuts suite, run against Internet Explorer.
    webdriver_class = 'selenium.webdriver.ie.webdriver.WebDriver'
@override_settings(PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher'],
                   ROOT_URLCONF='admin_widgets.urls')
class HorizontalVerticalFilterSeleniumFirefoxTests(SeleniumDataMixin, AdminSeleniumWebDriverTestCase):
    """Browser tests for the filter_horizontal / filter_vertical dual
    select widgets (choose/remove arrows, "choose all"/"remove all",
    and the text filter box), run in Firefox."""

    available_apps = ['admin_widgets'] + AdminSeleniumWebDriverTestCase.available_apps
    webdriver_class = 'selenium.webdriver.firefox.webdriver.WebDriver'

    def setUp(self):
        # Fixture: eight students and one school; tests assign two students
        # to the school's m2m fields before driving the widget.
        super(HorizontalVerticalFilterSeleniumFirefoxTests, self).setUp()
        self.lisa = models.Student.objects.create(name='Lisa')
        self.john = models.Student.objects.create(name='John')
        self.bob = models.Student.objects.create(name='Bob')
        self.peter = models.Student.objects.create(name='Peter')
        self.jenny = models.Student.objects.create(name='Jenny')
        self.jason = models.Student.objects.create(name='Jason')
        self.cliff = models.Student.objects.create(name='Cliff')
        self.arthur = models.Student.objects.create(name='Arthur')
        self.school = models.School.objects.create(name='School of Awesome')

    def assertActiveButtons(self, mode, field_name, choose, remove,
                            choose_all=None, remove_all=None):
        # Assert which of the widget's action links carry the 'active' CSS
        # class. choose_all/remove_all only exist in horizontal mode.
        choose_link = '#id_%s_add_link' % field_name
        choose_all_link = '#id_%s_add_all_link' % field_name
        remove_link = '#id_%s_remove_link' % field_name
        remove_all_link = '#id_%s_remove_all_link' % field_name
        self.assertEqual(self.has_css_class(choose_link, 'active'), choose)
        self.assertEqual(self.has_css_class(remove_link, 'active'), remove)
        if mode == 'horizontal':
            self.assertEqual(self.has_css_class(choose_all_link, 'active'), choose_all)
            self.assertEqual(self.has_css_class(remove_all_link, 'active'), remove_all)

    def execute_basic_operations(self, mode, field_name):
        # Drive one dual-select widget through choose-all, remove-all, and
        # partial choose/remove cycles, asserting box contents and button
        # states after each step. The steps are strictly order-dependent.
        from_box = '#id_%s_from' % field_name
        to_box = '#id_%s_to' % field_name
        choose_link = 'id_%s_add_link' % field_name
        choose_all_link = 'id_%s_add_all_link' % field_name
        remove_link = 'id_%s_remove_link' % field_name
        remove_all_link = 'id_%s_remove_all_link' % field_name
        # Initial positions ---------------------------------------------------
        self.assertSelectOptions(from_box,
                                 [str(self.arthur.id), str(self.bob.id),
                                  str(self.cliff.id), str(self.jason.id),
                                  str(self.jenny.id), str(self.john.id)])
        self.assertSelectOptions(to_box,
                                 [str(self.lisa.id), str(self.peter.id)])
        self.assertActiveButtons(mode, field_name, False, False, True, True)
        # Click 'Choose all' --------------------------------------------------
        if mode == 'horizontal':
            self.selenium.find_element_by_id(choose_all_link).click()
        elif mode == 'vertical':
            # There 's no 'Choose all' button in vertical mode, so individually
            # select all options and click 'Choose'.
            for option in self.selenium.find_elements_by_css_selector(from_box + ' > option'):
                option.click()
            self.selenium.find_element_by_id(choose_link).click()
        self.assertSelectOptions(from_box, [])
        self.assertSelectOptions(to_box,
                                 [str(self.lisa.id), str(self.peter.id),
                                  str(self.arthur.id), str(self.bob.id),
                                  str(self.cliff.id), str(self.jason.id),
                                  str(self.jenny.id), str(self.john.id)])
        self.assertActiveButtons(mode, field_name, False, False, False, True)
        # Click 'Remove all' --------------------------------------------------
        if mode == 'horizontal':
            self.selenium.find_element_by_id(remove_all_link).click()
        elif mode == 'vertical':
            # There 's no 'Remove all' button in vertical mode, so individually
            # select all options and click 'Remove'.
            for option in self.selenium.find_elements_by_css_selector(to_box + ' > option'):
                option.click()
            self.selenium.find_element_by_id(remove_link).click()
        self.assertSelectOptions(from_box,
                                 [str(self.lisa.id), str(self.peter.id),
                                  str(self.arthur.id), str(self.bob.id),
                                  str(self.cliff.id), str(self.jason.id),
                                  str(self.jenny.id), str(self.john.id)])
        self.assertSelectOptions(to_box, [])
        self.assertActiveButtons(mode, field_name, False, False, True, False)
        # Choose some options ------------------------------------------------
        from_lisa_select_option = self.get_select_option(from_box, str(self.lisa.id))
        # Check the title attribute is there for tool tips: ticket #20821
        self.assertEqual(from_lisa_select_option.get_attribute('title'), from_lisa_select_option.get_attribute('text'))
        from_lisa_select_option.click()
        self.get_select_option(from_box, str(self.jason.id)).click()
        self.get_select_option(from_box, str(self.bob.id)).click()
        self.get_select_option(from_box, str(self.john.id)).click()
        self.assertActiveButtons(mode, field_name, True, False, True, False)
        self.selenium.find_element_by_id(choose_link).click()
        self.assertActiveButtons(mode, field_name, False, False, True, True)
        self.assertSelectOptions(from_box,
                                 [str(self.peter.id), str(self.arthur.id),
                                  str(self.cliff.id), str(self.jenny.id)])
        self.assertSelectOptions(to_box,
                                 [str(self.lisa.id), str(self.bob.id),
                                  str(self.jason.id), str(self.john.id)])
        # Check the tooltip is still there after moving: ticket #20821
        to_lisa_select_option = self.get_select_option(to_box, str(self.lisa.id))
        self.assertEqual(to_lisa_select_option.get_attribute('title'), to_lisa_select_option.get_attribute('text'))
        # Remove some options -------------------------------------------------
        self.get_select_option(to_box, str(self.lisa.id)).click()
        self.get_select_option(to_box, str(self.bob.id)).click()
        self.assertActiveButtons(mode, field_name, False, True, True, True)
        self.selenium.find_element_by_id(remove_link).click()
        self.assertActiveButtons(mode, field_name, False, False, True, True)
        self.assertSelectOptions(from_box,
                                 [str(self.peter.id), str(self.arthur.id),
                                  str(self.cliff.id), str(self.jenny.id),
                                  str(self.lisa.id), str(self.bob.id)])
        self.assertSelectOptions(to_box,
                                 [str(self.jason.id), str(self.john.id)])
        # Choose some more options --------------------------------------------
        self.get_select_option(from_box, str(self.arthur.id)).click()
        self.get_select_option(from_box, str(self.cliff.id)).click()
        self.selenium.find_element_by_id(choose_link).click()
        self.assertSelectOptions(from_box,
                                 [str(self.peter.id), str(self.jenny.id),
                                  str(self.lisa.id), str(self.bob.id)])
        self.assertSelectOptions(to_box,
                                 [str(self.jason.id), str(self.john.id),
                                  str(self.arthur.id), str(self.cliff.id)])

    def test_basic(self):
        """Run the basic operation sequence against both the vertical and
        horizontal widget variants and verify the saved m2m contents."""
        self.school.students = [self.lisa, self.peter]
        self.school.alumni = [self.lisa, self.peter]
        self.school.save()
        self.admin_login(username='super', password='secret', login_url='/')
        self.selenium.get('%s%s' % (
            self.live_server_url, reverse('admin:admin_widgets_school_change', args=(self.school.id,))))
        self.wait_page_loaded()
        self.execute_basic_operations('vertical', 'students')
        self.execute_basic_operations('horizontal', 'alumni')
        # Save and check that everything is properly stored in the database ---
        self.selenium.find_element_by_xpath('//input[@value="Save"]').click()
        self.wait_page_loaded()
        self.school = models.School.objects.get(id=self.school.id)  # Reload from database
        self.assertEqual(list(self.school.students.all()),
                         [self.arthur, self.cliff, self.jason, self.john])
        self.assertEqual(list(self.school.alumni.all()),
                         [self.arthur, self.cliff, self.jason, self.john])

    def test_filter(self):
        """
        Ensure that typing in the search box filters out options displayed in
        the 'from' box.
        """
        from selenium.webdriver.common.keys import Keys
        self.school.students = [self.lisa, self.peter]
        self.school.alumni = [self.lisa, self.peter]
        self.school.save()
        self.admin_login(username='super', password='secret', login_url='/')
        self.selenium.get(
            '%s%s' % (self.live_server_url, reverse('admin:admin_widgets_school_change', args=(self.school.id,))))
        for field_name in ['students', 'alumni']:
            from_box = '#id_%s_from' % field_name
            to_box = '#id_%s_to' % field_name
            choose_link = '#id_%s_add_link' % field_name
            remove_link = '#id_%s_remove_link' % field_name
            input = self.selenium.find_element_by_css_selector('#id_%s_input' % field_name)
            # Initial values
            self.assertSelectOptions(from_box,
                                     [str(self.arthur.id), str(self.bob.id),
                                      str(self.cliff.id), str(self.jason.id),
                                      str(self.jenny.id), str(self.john.id)])
            # Typing in some characters filters out non-matching options
            # (the filter is case-insensitive: 'aR' still matches 'Arthur').
            input.send_keys('a')
            self.assertSelectOptions(from_box, [str(self.arthur.id), str(self.jason.id)])
            input.send_keys('R')
            self.assertSelectOptions(from_box, [str(self.arthur.id)])
            # Clearing the text box makes the other options reappear
            input.send_keys([Keys.BACK_SPACE])
            self.assertSelectOptions(from_box, [str(self.arthur.id), str(self.jason.id)])
            input.send_keys([Keys.BACK_SPACE])
            self.assertSelectOptions(from_box,
                                     [str(self.arthur.id), str(self.bob.id),
                                      str(self.cliff.id), str(self.jason.id),
                                      str(self.jenny.id), str(self.john.id)])
            # -----------------------------------------------------------------
            # Check that choosing a filtered option sends it properly to the
            # 'to' box.
            input.send_keys('a')
            self.assertSelectOptions(from_box, [str(self.arthur.id), str(self.jason.id)])
            self.get_select_option(from_box, str(self.jason.id)).click()
            self.selenium.find_element_by_css_selector(choose_link).click()
            self.assertSelectOptions(from_box, [str(self.arthur.id)])
            self.assertSelectOptions(to_box,
                                     [str(self.lisa.id), str(self.peter.id),
                                      str(self.jason.id)])
            self.get_select_option(to_box, str(self.lisa.id)).click()
            self.selenium.find_element_by_css_selector(remove_link).click()
            self.assertSelectOptions(from_box,
                                     [str(self.arthur.id), str(self.lisa.id)])
            self.assertSelectOptions(to_box,
                                     [str(self.peter.id), str(self.jason.id)])
            input.send_keys([Keys.BACK_SPACE])  # Clear text box
            self.assertSelectOptions(from_box,
                                     [str(self.arthur.id), str(self.bob.id),
                                      str(self.cliff.id), str(self.jenny.id),
                                      str(self.john.id), str(self.lisa.id)])
            self.assertSelectOptions(to_box,
                                     [str(self.peter.id), str(self.jason.id)])
            # -----------------------------------------------------------------
            # Check that pressing enter on a filtered option sends it properly
            # to the 'to' box.
            self.get_select_option(to_box, str(self.jason.id)).click()
            self.selenium.find_element_by_css_selector(remove_link).click()
            input.send_keys('ja')
            self.assertSelectOptions(from_box, [str(self.jason.id)])
            input.send_keys([Keys.ENTER])
            self.assertSelectOptions(to_box, [str(self.peter.id), str(self.jason.id)])
            input.send_keys([Keys.BACK_SPACE, Keys.BACK_SPACE])
        # Save and check that everything is properly stored in the database ---
        self.selenium.find_element_by_xpath('//input[@value="Save"]').click()
        self.wait_page_loaded()
        self.school = models.School.objects.get(id=self.school.id)  # Reload from database
        self.assertEqual(list(self.school.students.all()),
                         [self.jason, self.peter])
        self.assertEqual(list(self.school.alumni.all()),
                         [self.jason, self.peter])
class HorizontalVerticalFilterSeleniumChromeTests(HorizontalVerticalFilterSeleniumFirefoxTests):
    # Same dual-select filter suite, run against Chrome.
    webdriver_class = 'selenium.webdriver.chrome.webdriver.WebDriver'


class HorizontalVerticalFilterSeleniumIETests(HorizontalVerticalFilterSeleniumFirefoxTests):
    # Same dual-select filter suite, run against Internet Explorer.
    webdriver_class = 'selenium.webdriver.ie.webdriver.WebDriver'
@override_settings(PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher'],
                   ROOT_URLCONF='admin_widgets.urls')
class AdminRawIdWidgetSeleniumFirefoxTests(SeleniumDataMixin, AdminSeleniumWebDriverTestCase):
    """Browser tests for raw-id FK and m2m widgets: selecting values via
    the lookup popup window (Firefox)."""

    available_apps = ['admin_widgets'] + AdminSeleniumWebDriverTestCase.available_apps
    webdriver_class = 'selenium.webdriver.firefox.webdriver.WebDriver'

    def setUp(self):
        # Two bands with fixed ids so the popup links and the expected
        # field values ('42', '98') are deterministic.
        super(AdminRawIdWidgetSeleniumFirefoxTests, self).setUp()
        models.Band.objects.create(id=42, name='Bogey Blues')
        models.Band.objects.create(id=98, name='Green Potatoes')

    def test_ForeignKey(self):
        # Picking a band in the raw-id popup fills the FK input; picking
        # another one replaces the value.
        self.admin_login(username='super', password='secret', login_url='/')
        self.selenium.get(
            '%s%s' % (self.live_server_url, reverse('admin:admin_widgets_event_add')))
        main_window = self.selenium.current_window_handle
        # No value has been selected yet
        self.assertEqual(
            self.selenium.find_element_by_id('id_main_band').get_attribute('value'),
            '')
        # Open the popup window and click on a band
        self.selenium.find_element_by_id('lookup_id_main_band').click()
        self.selenium.switch_to.window('id_main_band')
        self.wait_page_loaded()
        link = self.selenium.find_element_by_link_text('Bogey Blues')
        self.assertIn('/band/42/', link.get_attribute('href'))
        link.click()
        # The field now contains the selected band's id
        self.selenium.switch_to.window(main_window)
        self.wait_for_value('#id_main_band', '42')
        # Reopen the popup window and click on another band
        self.selenium.find_element_by_id('lookup_id_main_band').click()
        self.selenium.switch_to.window('id_main_band')
        self.wait_page_loaded()
        link = self.selenium.find_element_by_link_text('Green Potatoes')
        self.assertIn('/band/98/', link.get_attribute('href'))
        link.click()
        # The field now contains the other selected band's id
        self.selenium.switch_to.window(main_window)
        self.wait_for_value('#id_main_band', '98')

    def test_many_to_many(self):
        # For an m2m raw-id field, each popup selection is appended to the
        # comma-separated id list instead of replacing it.
        self.admin_login(username='super', password='secret', login_url='/')
        self.selenium.get(
            '%s%s' % (self.live_server_url, reverse('admin:admin_widgets_event_add')))
        main_window = self.selenium.current_window_handle
        # No value has been selected yet
        self.assertEqual(
            self.selenium.find_element_by_id('id_supporting_bands').get_attribute('value'),
            '')
        # Open the popup window and click on a band
        self.selenium.find_element_by_id('lookup_id_supporting_bands').click()
        self.selenium.switch_to.window('id_supporting_bands')
        self.wait_page_loaded()
        link = self.selenium.find_element_by_link_text('Bogey Blues')
        self.assertIn('/band/42/', link.get_attribute('href'))
        link.click()
        # The field now contains the selected band's id
        self.selenium.switch_to.window(main_window)
        self.wait_for_value('#id_supporting_bands', '42')
        # Reopen the popup window and click on another band
        self.selenium.find_element_by_id('lookup_id_supporting_bands').click()
        self.selenium.switch_to.window('id_supporting_bands')
        self.wait_page_loaded()
        link = self.selenium.find_element_by_link_text('Green Potatoes')
        self.assertIn('/band/98/', link.get_attribute('href'))
        link.click()
        # The field now contains the two selected bands' ids
        self.selenium.switch_to.window(main_window)
        self.wait_for_value('#id_supporting_bands', '42,98')
class AdminRawIdWidgetSeleniumChromeTests(AdminRawIdWidgetSeleniumFirefoxTests):
    # Same raw-id widget suite, run against Chrome.
    webdriver_class = 'selenium.webdriver.chrome.webdriver.WebDriver'


class AdminRawIdWidgetSeleniumIETests(AdminRawIdWidgetSeleniumFirefoxTests):
    # Same raw-id widget suite, run against Internet Explorer.
    webdriver_class = 'selenium.webdriver.ie.webdriver.WebDriver'
@override_settings(PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher'],
                   ROOT_URLCONF='admin_widgets.urls')
class RelatedFieldWidgetSeleniumFirefoxTests(SeleniumDataMixin, AdminSeleniumWebDriverTestCase):
    """Browser tests for the related-field widget's add/change popups on a
    FK that uses to_field (the related User is keyed by username)."""

    available_apps = ['admin_widgets'] + AdminSeleniumWebDriverTestCase.available_apps
    webdriver_class = 'selenium.webdriver.firefox.webdriver.WebDriver'

    def test_ForeignKey_using_to_field(self):
        """Add a User through the "+" popup, rename it through the change
        popup, and verify the select option tracks the to_field value."""
        self.admin_login(username='super', password='secret', login_url='/')
        self.selenium.get('%s%s' % (
            self.live_server_url,
            reverse('admin:admin_widgets_profile_add')))

        main_window = self.selenium.current_window_handle
        # Click the Add User button to add new
        self.selenium.find_element_by_id('add_id_user').click()
        self.selenium.switch_to.window('id_user')
        self.wait_for('#id_password')
        password_field = self.selenium.find_element_by_id('id_password')
        password_field.send_keys('password')

        username_field = self.selenium.find_element_by_id('id_username')
        username_value = 'newuser'
        username_field.send_keys(username_value)

        save_button_css_selector = '.submit-row > input[type=submit]'
        self.selenium.find_element_by_css_selector(save_button_css_selector).click()
        self.selenium.switch_to.window(main_window)
        # The field now contains the new user
        self.wait_for('#id_user option[value="newuser"]')

        # Click the Change User button to change it
        self.selenium.find_element_by_id('change_id_user').click()
        # Use switch_to.window consistently (switch_to_window() is the
        # deprecated spelling of the same call).
        self.selenium.switch_to.window('id_user')
        self.wait_page_loaded()

        username_field = self.selenium.find_element_by_id('id_username')
        username_value = 'changednewuser'
        username_field.clear()
        username_field.send_keys(username_value)

        save_button_css_selector = '.submit-row > input[type=submit]'
        self.selenium.find_element_by_css_selector(save_button_css_selector).click()
        self.selenium.switch_to.window(main_window)
        # Wait up to 2 seconds for the new option to show up after clicking save in the popup.
        self.selenium.implicitly_wait(2)
        self.selenium.find_element_by_css_selector('#id_user option[value=changednewuser]')
        self.selenium.implicitly_wait(0)

        # Go ahead and submit the form to make sure it works
        self.selenium.find_element_by_css_selector(save_button_css_selector).click()
        self.wait_for_text('li.success', 'The profile "changednewuser" was added successfully.')
        profiles = models.Profile.objects.all()
        self.assertEqual(len(profiles), 1)
        self.assertEqual(profiles[0].user.username, username_value)
class RelatedFieldWidgetSeleniumChromeTests(RelatedFieldWidgetSeleniumFirefoxTests):
    # Same related-field widget suite, run against Chrome.
    webdriver_class = 'selenium.webdriver.chrome.webdriver.WebDriver'


class RelatedFieldWidgetSeleniumIETests(RelatedFieldWidgetSeleniumFirefoxTests):
    # Same related-field widget suite, run against Internet Explorer.
    webdriver_class = 'selenium.webdriver.ie.webdriver.WebDriver'
| {
"content_hash": "1b26b39b793b41fb4eded0f34ddfc8fd",
"timestamp": "",
"source": "github",
"line_count": 1246,
"max_line_length": 406,
"avg_line_length": 46.03531300160514,
"alnum_prop": 0.6249651324965132,
"repo_name": "beni55/django",
"id": "d32411ef674484810484cd51cd53eed3f662e3a3",
"size": "57409",
"binary": false,
"copies": "9",
"ref": "refs/heads/master",
"path": "tests/admin_widgets/tests.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "43000"
},
{
"name": "HTML",
"bytes": "168786"
},
{
"name": "JavaScript",
"bytes": "105614"
},
{
"name": "Makefile",
"bytes": "125"
},
{
"name": "Python",
"bytes": "10569287"
},
{
"name": "Shell",
"bytes": "809"
},
{
"name": "Smarty",
"bytes": "130"
}
],
"symlink_target": ""
} |
"""This module contains the ChosenInlineResultHandler class."""
from .handler import Handler
from telegram import Update
from telegram.utils.deprecate import deprecate
class ChosenInlineResultHandler(Handler):
"""Handler class to handle Telegram updates that contain a chosen inline result.
Attributes:
callback (:obj:`callable`): The callback function for this handler.
pass_update_queue (:obj:`bool`): Optional. Determines whether ``update_queue`` will be
passed to the callback function.
pass_job_queue (:obj:`bool`): Optional. Determines whether ``job_queue`` will be passed to
the callback function.
pass_user_data (:obj:`bool`): Optional. Determines whether ``user_data`` will be passed to
the callback function.
pass_chat_data (:obj:`bool`): Optional. Determines whether ``chat_data`` will be passed to
the callback function.
Note:
:attr:`pass_user_data` and :attr:`pass_chat_data` determine whether a ``dict`` you
can use to keep any data in will be sent to the :attr:`callback` function.. Related to
either the user or the chat that the update was sent in. For each update from the same user
or in the same chat, it will be the same ``dict``.
Args:
callback (:obj:`callable`): A function that takes ``bot, update`` as positional arguments.
It will be called when the :attr:`check_update` has determined that an update should be
processed by this handler.
pass_update_queue (:obj:`bool`, optional): If set to ``True``, a keyword argument called
``update_queue`` will be passed to the callback function. It will be the ``Queue``
instance used by the :class:`telegram.ext.Updater` and :class:`telegram.ext.Dispatcher`
that contains new updates which can be used to insert updates. Default is ``False``.
pass_job_queue (:obj:`bool`, optional): If set to ``True``, a keyword argument called
``job_queue`` will be passed to the callback function. It will be a
:class:`telegram.ext.JobQueue` instance created by the :class:`telegram.ext.Updater`
which can be used to schedule new jobs. Default is ``False``.
pass_user_data (:obj:`bool`, optional): If set to ``True``, a keyword argument called
``user_data`` will be passed to the callback function. Default is ``False``.
pass_chat_data (:obj:`bool`, optional): If set to ``True``, a keyword argument called
``chat_data`` will be passed to the callback function. Default is ``False``.
"""
def __init__(self,
             callback,
             pass_update_queue=False,
             pass_job_queue=False,
             pass_user_data=False,
             pass_chat_data=False):
    # Pure delegation: all handler wiring (callback storage, optional-arg
    # flags) is performed by the base Handler class.
    super(ChosenInlineResultHandler, self).__init__(
        callback,
        pass_update_queue=pass_update_queue,
        pass_job_queue=pass_job_queue,
        pass_user_data=pass_user_data,
        pass_chat_data=pass_chat_data)
def check_update(self, update):
    """Determine whether an update should be passed to this handler's :attr:`callback`.

    Args:
        update (:class:`telegram.Update`): Incoming telegram update.

    Returns:
        :obj:`bool`: Truthy only for :class:`telegram.Update` instances that
        carry a ``chosen_inline_result``.
    """
    # Guard-clause form of: isinstance(update, Update) and update.chosen_inline_result
    if not isinstance(update, Update):
        return False
    return update.chosen_inline_result
def handle_update(self, update, dispatcher):
    """Send the update to the :attr:`callback`.

    Args:
        update (:class:`telegram.Update`): Incoming telegram update.
        dispatcher (:class:`telegram.ext.Dispatcher`): Dispatcher that originated the update.
    """
    # collect_optional_args honours the pass_* flags set on construction.
    extra_kwargs = self.collect_optional_args(dispatcher, update)
    return self.callback(dispatcher.bot, update, **extra_kwargs)
# old non-PEP8 Handler methods
# Deprecated camelCase aliases kept for backward compatibility; calling them
# emits a deprecation warning pointing at the snake_case replacements.
m = "telegram.ChosenInlineResultHandler."
checkUpdate = deprecate(check_update, m + "checkUpdate", m + "check_update")
handleUpdate = deprecate(handle_update, m + "handleUpdate", m + "handle_update")
| {
"content_hash": "186bf1ff26bbcdf5e7b94a5174062e4b",
"timestamp": "",
"source": "github",
"line_count": 87,
"max_line_length": 99,
"avg_line_length": 48.724137931034484,
"alnum_prop": 0.6272705826845955,
"repo_name": "rogerscristo/BotFWD",
"id": "81f1bb4c392c8e7c2395b186f96c28cbed9ad484",
"size": "5066",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "env/lib/python3.6/site-packages/telegram/ext/choseninlineresulthandler.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "13999"
}
],
"symlink_target": ""
} |
import pyaf.Bench.TS_datasets as tsds
import tests.artificial.process_artificial_dataset as art
# Generate and process one artificial daily (FREQ='D') series of length 32:
# MovingMedian trend, 5-step cycle, Box-Cox transform, zero noise, no
# exogenous variables, no autoregressive terms.
art.process_dataset(N = 32 , FREQ = 'D', seed = 0, trendtype = "MovingMedian", cycle_length = 5, transform = "BoxCox", sigma = 0.0, exog_count = 0, ar_order = 0); | {
"content_hash": "4ef78bfc4c6285a0e7e45b42eb986661",
"timestamp": "",
"source": "github",
"line_count": 7,
"max_line_length": 162,
"avg_line_length": 37.42857142857143,
"alnum_prop": 0.7022900763358778,
"repo_name": "antoinecarme/pyaf",
"id": "d44644cd0038a88833afb71424c3a5ec554c738a",
"size": "262",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/artificial/transf_BoxCox/trend_MovingMedian/cycle_5/ar_/test_artificial_32_BoxCox_MovingMedian_5__0.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Makefile",
"bytes": "6773299"
},
{
"name": "Procfile",
"bytes": "24"
},
{
"name": "Python",
"bytes": "54209093"
},
{
"name": "R",
"bytes": "807"
},
{
"name": "Shell",
"bytes": "3619"
}
],
"symlink_target": ""
} |
from setuptools import setup, find_packages

# Trove classifiers describing the package for PyPI.
CLASSIFIERS = [
    'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
    'Intended Audience :: Developers',
    'License :: OSI Approved :: BSD License',
    'Programming Language :: Python',
    'Topic :: Software Development :: Libraries :: Python Modules',
    'Development Status :: 4 - Beta',
    'Operating System :: OS Independent',
]

setup(
    name='django-file-picker',
    version='0.6.0',
    author='Caktus Consulting Group',
    author_email='solutions@caktusgroup.com',
    packages=find_packages(exclude=['sample_project']),
    include_package_data=True,
    url='http://django-file-picker.readthedocs.org/',
    license='BSD',
    description='Pluggable file picker',
    classifiers=CLASSIFIERS,
    long_description=open('README.rst').read(),
    install_requires=['sorl-thumbnail==12.3'],
    zip_safe=False,  # ships media files that Django must read from disk
)
| {
"content_hash": "b145a53589e686ecede909d939c6646e",
"timestamp": "",
"source": "github",
"line_count": 27,
"max_line_length": 71,
"avg_line_length": 36.888888888888886,
"alnum_prop": 0.6485943775100401,
"repo_name": "Capstrat/django-file-picker",
"id": "78ec0c31576cbca34dfad56737a8bd9e77543664",
"size": "996",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "setup.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "45853"
},
{
"name": "HTML",
"bytes": "867"
},
{
"name": "JavaScript",
"bytes": "242024"
},
{
"name": "PHP",
"bytes": "1052"
},
{
"name": "Python",
"bytes": "34910"
}
],
"symlink_target": ""
} |
"""fabric commands for building sphinx docs
note 1): tested with Fabric 0.9.0
full command: $ fab deploy
"""
import os
from fabric.api import *
cur_dir = os.path.dirname(os.path.abspath(__file__))
def sphinxbuild():
    """Render the HTML documentation from source/ into build/html."""
    src = os.path.join(cur_dir, 'source')
    dst = os.path.join(cur_dir, 'build')
    local('sphinx-build -b html %s %s/html' % (src, dst))
def create_zip():
    """Zip the rendered HTML docs (e.g. for a PyPI upload)."""
    html_dir = os.path.join(cur_dir, 'build/html')
    local('cd %s && zip -r github-cli *' % html_dir)
def clean():
    """Delete the build/ output directory."""
    build_dir = os.path.join(cur_dir, 'build')
    local('rm -rf %s' % build_dir)
def build():
    # Full rebuild: wipe previous output, render the HTML, then package it.
    clean()
    sphinxbuild()
    create_zip()
| {
"content_hash": "8ecc822248e755ebd731a7942239f6b3",
"timestamp": "",
"source": "github",
"line_count": 33,
"max_line_length": 79,
"avg_line_length": 19.636363636363637,
"alnum_prop": 0.5895061728395061,
"repo_name": "jsmits/github-cli",
"id": "ce5656a7388911269b21fde10b26320ae1f1ff1b",
"size": "648",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "docs/fabfile.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "37425"
}
],
"symlink_target": ""
} |
from aiohttp.web import Request as Req, Response as Res
class Request(Req):
    """
    Thin subclass of the aiohttp request object so user code depends on this
    module's name rather than on aiohttp directly.
    """
    pass
class Response(Res):
    """
    Thin subclass of the aiohttp response object so user code depends on this
    module's name rather than on aiohttp directly.
    """
    pass
| {
"content_hash": "d823de597c05cc749a32188ecd468b91",
"timestamp": "",
"source": "github",
"line_count": 15,
"max_line_length": 58,
"avg_line_length": 17.866666666666667,
"alnum_prop": 0.6455223880597015,
"repo_name": "bitsabhi/vyked",
"id": "2c945fa300be5f6473aa63a2df1416cc87dc9dd3",
"size": "268",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "vyked/wrappers.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "79661"
}
],
"symlink_target": ""
} |
import sys
import os
# print(sys.copyright)
# sys.path: the executed file's directory plus Python's other configured paths
print(sys.path)
# __file__: the script name (including any path) as given on the command line,
# e.g. `python path/filename` prints `path/filename`
print(__file__)
# os.getcwd(): the directory the command was launched from (i.e. `pwd`)
print(os.getcwd()) | {
"content_hash": "c49a8b3f1c2ae2d5fbf87c446186c570",
"timestamp": "",
"source": "github",
"line_count": 9,
"max_line_length": 54,
"avg_line_length": 24.88888888888889,
"alnum_prop": 0.7857142857142857,
"repo_name": "dianshen/python_day",
"id": "1d42367ff84262a3509532089688b0433a4b464a",
"size": "314",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "day5/python-sys.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "2505"
},
{
"name": "HTML",
"bytes": "74003"
},
{
"name": "JavaScript",
"bytes": "484"
},
{
"name": "Python",
"bytes": "317154"
}
],
"symlink_target": ""
} |
import os.path
# Django madness
from django.db import transaction
from django import forms
from django.conf import settings
from django.contrib import messages
from django.contrib.auth.decorators import permission_required
from django.contrib.auth.models import User
from django.core.exceptions import ObjectDoesNotExist
from django.core.urlresolvers import reverse
from django.http import HttpResponse, HttpResponseRedirect
from django.shortcuts import get_object_or_404, render_to_response, get_object_or_404, redirect
from django.template import RequestContext
from django.template.loader import render_to_string
from django.utils import simplejson
from django.utils.html import escape
from django.utils.safestring import mark_safe, SafeUnicode
from django.utils.translation import ugettext_lazy as _
from django.views.decorators.csrf import csrf_protect
from django.views.generic import ListView, TemplateView, UpdateView
from django.middleware.csrf import get_token as csrf_token
# lfs's spaghetti :D
from lfs.caching.utils import lfs_get_object_or_404
from lfs.catalog.models import Product
from lfs.core.signals import product_changed
from lfs.core.utils import LazyEncoder
import lfs.core.utils
# My own mess
from django_tables2 import RequestConfig
from django_tables2.utils import Accessor
import django_tables2 as tables
from lfs_downloads.models import DownloadDelivery, DigitalAsset, DigitalProductOrder
from lfs_downloads.sendfile import xsendfileserve
from .security import ManageMixin
# Admin pages and views
class ProductsListView(ManageMixin, ListView):
    """Management page listing products, with a per-product file list in context."""
    model = Product
    template_name = 'lfs_downloads/manage_products_list.html'

    def file_list(self):
        # NOTE(review): hard-coded placeholder rows (duplicated names, fake
        # sizes/dates) — presumably meant to be replaced by real uploaded-file
        # data; confirm before relying on this view.
        return [
            {'name': 'LMTEPU.pdf', 'size': '645Kb', 'uploaded': '10/23/12 05:59 PM'},
            {'name': 'LPDLM.pdf', 'size': '645Kb', 'uploaded': '10/23/12 05:59 PM'},
            {'name': 'Seminario I - Argentina.avi', 'size': '45645Kb', 'uploaded': '10/23/12 05:59 PM'},
            {'name': 'Seminario I - Argentina.avi', 'size': '545645Kb', 'uploaded': '10/23/12 05:59 PM'},
        ]

    def get_context_data(self, **kwargs):
        """
        Get the context for this view, adding 'file_list' for the template.
        """
        context = super(ProductsListView, self).get_context_data(**kwargs)
        context['file_list'] = self.file_list()
        return context
class DigitalAssetsListView(ManageMixin, ListView):
    """Management page listing all DigitalAsset records."""
    model = DigitalAsset
    template_name = 'lfs_downloads/manage_files_list.html'
class UploadView(ManageMixin, TemplateView):
    """Static management page hosting the file-upload form."""
    template_name = 'lfs_downloads/manage_upload.html'
class RelatedEditView(ManageMixin, UpdateView):
    """Edit view for a DigitalAsset's related data.

    Bug fix: every other management view in this module mixes in
    ``ManageMixin``; this one did not, leaving an admin edit view without the
    shop-management access restriction.
    """
    model = DigitalAsset
    template_name = 'lfs_downloads/manage_related.html'
@permission_required("core.manage_shop", login_url="/login/")
def manage_download_digital_product(request, asset_id):
    """Serve a DigitalAsset's file to shop managers via X-Sendfile.

    404s when no asset with ``asset_id`` exists.
    """
    asset = get_object_or_404(DigitalAsset, pk=asset_id)
    # xsendfileserve wants the directory and the file name separately.
    directory, filename = os.path.split(asset.file.path)
    return xsendfileserve(request=request, path=filename, document_root=directory)
@permission_required("core.manage_shop", login_url="/login/")
def manage_digital_products(request, product_id, as_string=False,
                            template_name="lfs_downloads/manage_digital_products.html"):
    """Render the digital-products management panel for one product.

    Returns the rendered HTML directly when ``as_string`` is True; otherwise
    wraps it in a JSON payload (``html_data`` + ``message``) for AJAX callers.
    Raises Http404 when the product does not exist.
    """
    product = lfs_get_object_or_404(Product, pk=product_id)
    dp = product.digitalproduct
    result = render_to_string(template_name, RequestContext(request, {
        "product": product,
        "dp": dp,
        # Context entries below were disabled at some point; the template is
        # expected to read them off ``dp`` directly instead.
        # "digiproducts": dp.digital_assets.all(),
        # "has_digiproducts": dp.digital_assets.exists(),
        # "limitation_mode": dp.limitation_mode,
        # 'minimum_price': dp.minimum_price,
        # 'suggested_price': dp.suggested_price,
        # 'download_count_limit': dp.max_download_count,
        # 'download_expiry': 1,
        # 'download_expiry_scale': 'days',
    }))
    if as_string:
        return result
    else:
        result = simplejson.dumps({
            "html_data": result,
            "message": _(u"New attachment."),
        }, cls=LazyEncoder)
        return HttpResponse(result)
@permission_required("core.manage_shop", login_url="/login/")
def handle_upload(request, product_id):
    """Attach every uploaded file (POST key ``files``) to the product as a
    DigitalAsset, then re-render the digital-products panel.
    """
    product = lfs_get_object_or_404(Product, pk=product_id)
    if request.method == "POST":
        for upload in request.FILES.getlist("files"):
            asset = DigitalAsset(file=upload, product=product)
            # save=True persists the model row along with the file contents
            asset.file.save(upload.name, upload, save=True)
        product_changed.send(product, request=request)
    return manage_digital_products(request, product_id)
@permission_required("core.manage_shop", login_url="/login/")
def update_digiproducts(request, product_id):
    """
    Handle POSTed bulk actions on a product's digital assets and return the
    refreshed panel as JSON.

    ``action=delete`` removes every asset whose ``delete-<id>`` checkbox was
    posted; ``action=update_donation_mode`` updates donation pricing fields on
    all of the product's assets.
    """
    product = lfs_get_object_or_404(Product, pk=product_id)
    action = request.POST.get("action")
    message = _(u"Digital Product has been updated.")
    if action == "delete":
        message = _(u"Digital Product has been deleted.")
        for key in request.POST.keys():
            if key.startswith("delete-"):
                try:
                    # key format is "delete-<asset id>"
                    id = key.split("-")[1]
                    DigitalAsset.objects.get(pk=id).delete()
                except (IndexError, ObjectDoesNotExist):
                    # malformed key or already-deleted asset: skip silently
                    pass
    if action == 'update_donation_mode':
        product = lfs_get_object_or_404(Product, pk=product_id)
        message = _(u"DigitalProductOrder mode has been updated.")
        DigitalAsset.objects.filter(product=product).update(
            donation_mode=request.POST.get('donation_mode', False),
            minimum_price=request.POST.get('minimum_price', '1.0'),
            suggested_price=request.POST.get('suggested_price', '1.0'),
        )
    product_changed.send(product, request=request)
    # Refresh the management panel client-side via the returned HTML fragment.
    html = [["#lfs_downloads", manage_digital_products(request, product_id, as_string=True)]]
    result = simplejson.dumps({
        "html": html,
        "message": message,
    }, cls=LazyEncoder)
    return HttpResponse(result)
# Digital Orders' management
class OrderResetForm(forms.Form):
    # NOTE(review): empty placeholder form; it is not referenced elsewhere in
    # this module (the reset button is rendered by OrdersTable.render_reset) —
    # confirm whether it can be removed.
    pass
class OrdersTable(tables.Table):
    """django-tables2 table of DigitalProductOrder rows for the admin list.

    ``user``/``product`` cells render as filter links back to the list view;
    ``reset`` renders an inline POST form targeting the order-reset view.
    """
    id = tables.Column('Id')
    created = tables.DateColumn(verbose_name="Fecha", format='d/N/Y - P')
    user = tables.Column(verbose_name="Usuario")
    payment_amount = tables.Column(verbose_name="Monto")
    product = tables.Column(verbose_name="Producto")
    fulfilled = tables.BooleanColumn(verbose_name="Pagada")
    associated_files = tables.Column(verbose_name='No. de Archivos')
    reset = tables.Column(accessor=Accessor('pk'), verbose_name=" ", orderable=False)

    class Meta:
        model = DigitalProductOrder
        fields = ('id', "created", "user", "payment_amount", "product", "fulfilled")
        attrs = {"class": "paleblue"}
        order_by = "-created"
        template = "django_tables2/table.html"

    def __init__(self, request, *args, **kwargs):
        # The request is kept so render_reset can mint a CSRF token.
        self.request = request
        super(OrdersTable, self).__init__(*args, **kwargs)

    def render_user(self, value):
        # Link filtering the order list down to this user.
        return SafeUnicode("<a href=\"%s?user_id=%s\"> %s <%s></a>" % (
            reverse("lfsd_orders_list"), value.id, value.get_full_name(), value.email
        ))

    def render_product(self, value):
        # Link filtering the order list down to this product.
        return SafeUnicode(u"<a href=\"%s?product_id=%s\">%s</a>" % (
            reverse("lfsd_orders_list"), value.id, value.get_name()
        ))

    def render_reset(self, value):
        # ``value`` is the order pk (via the Accessor above).
        html = u"""
        <form method="POST" action="{}">
            <input type="hidden" name="csrfmiddlewaretoken" value="{}"/>
            <input type="submit" value="Reset">
        </form>
        """.format(
            reverse("lfsd_order_reset", kwargs={'order_id': value}),
            csrf_token(self.request)
        )
        return SafeUnicode(html)
class OrderUpdateForm(forms.Form):
    """Form used by order_update to edit fulfilment state and download quota."""
    fulfilled = forms.BooleanField(label='Orden confirmada y/o pagada', required=False)
    downloads_left = forms.IntegerField(label='Descargas disponibles')
class ProductChoiceField(forms.ModelChoiceField):
    """ModelChoiceField labelling each product as "<pk> - <name> [<sku>]"."""
    def label_from_instance(self, obj):
        return '%i - %s [%s]' % (obj.pk, obj.name, obj.sku)
class OrderAddForm(forms.ModelForm):
    """Form for manually creating a DigitalProductOrder.

    The product choices are limited to products that actually have digital
    assets; payment_amount is hidden because the view fills it server-side.
    """
    product = ProductChoiceField(queryset=DigitalAsset.objects.related_products())

    class Meta:
        model = DigitalProductOrder
        fields = ['user', 'product', 'payment_amount']
        widgets = {
            'payment_amount': forms.HiddenInput(),
        }
@csrf_protect
@permission_required("core.manage_shop", login_url="/login/")
def orders_list(request, template_name="lfs_downloads/orders_list.html"):
    """Paginated admin list of digital orders, filterable via GET params."""
    user_id = request.GET.get('user_id', None)
    product_id = request.GET.get('product_id', None)
    # NOTE(review): start_date/end_date are read but never applied to the
    # queryset — confirm whether date filtering was intended.
    start_date = request.GET.get('start_date', None)
    end_date = request.GET.get('end_date', None)
    qset = DigitalProductOrder.objects.all()
    if user_id:
        qset = qset.filter(user__id=user_id)
    if product_id:
        qset = qset.filter(product__id=product_id)
    table = OrdersTable(request, qset)
    table.paginate(page=request.GET.get('page', 1), per_page=25)
    RequestConfig(request).configure(table)
    # NOTE(review): 'context_instance' passed as a template variable (inside
    # the context dict) rather than as render_to_response's keyword argument —
    # it is redundant here since a RequestContext is already supplied.
    return render_to_response(template_name, RequestContext(request, {
        'table': table,
        'context_instance': RequestContext(request)
    }))
@permission_required("core.manage_shop", login_url="/login/")
def order_update(request, order_id, template_name="lfs_downloads/order_update.html"):
    """Display and process the edit form for one DigitalProductOrder.

    On a valid POST the fulfilment state and remaining-download counter are
    updated; otherwise the bound/unbound form is re-rendered.

    Bug fix: ``template_name`` was referenced on the final line but never
    defined, so every GET (and every invalid POST) raised ``NameError``. It is
    now a keyword parameter with a default, following the convention of the
    sibling views (orders_list, order_add).
    TODO(review): confirm the default template path against the templates dir.
    """
    order = get_object_or_404(DigitalProductOrder, pk=order_id)
    if request.method == 'POST':
        form = OrderUpdateForm(request.POST)
        if form.is_valid():
            # NOTE(review): ``SENT`` is not defined or imported anywhere in
            # this module, so this branch raises NameError when reached —
            # confirm the intended state constant and import it.
            if order.state is SENT:
                order.create_delivery()
            if 'fulfilled' in form.changed_data and order.fulfilled is False:
                if order.delivery is None:
                    order.fulfill()
                else:
                    order.fulfilled = True
                    order.delivery.downloads_left = form.cleaned_data['downloads_left']
                    order.delivery.save()
            order.save()
            messages.add_message(request, messages.SUCCESS, 'Se guardo la informacion')
            return HttpResponseRedirect(reverse('lfsd_orders_list'))
    else:
        try:
            downloads_left = order.delivery.downloads_left
        except AttributeError:
            # No delivery yet: fulfilled orders get the configured limit,
            # unfulfilled ones have nothing left to download.
            if order.fulfilled:
                downloads_left = settings.LFS_DOWNLOADS_DOWNLOAD_LIMIT
            else:
                downloads_left = 0
        form = OrderUpdateForm({
            'fulfilled': order.fulfilled,
            'downloads_left': downloads_left
        })
    return render_to_response(template_name, RequestContext(request, {'pk': order_id, 'form': form}))
@permission_required("core.manage_shop", login_url="/login/")
def order_add(request, template_name='lfs_downloads/order_add.html'):
    """Manually create a DigitalProductOrder and deliver it immediately.

    Bug fix: the unbound form was initialised with ``{'amount': 0}``, but the
    form has no ``amount`` field (its fields are user/product/payment_amount),
    so the initial value was silently ignored; it now targets
    ``payment_amount``.
    """
    if request.method == 'POST':
        product = Product.objects.get(pk=request.POST['product'])
        # Rebuild the payload server-side so payment_amount always comes from
        # the product's configured minimum price, not from the client.
        form = OrderAddForm({
            'product': request.POST['product'],
            'csrfmiddlewaretoken': request.POST['csrfmiddlewaretoken'],
            'user': request.POST['user'],
            'payment_amount': product.digitalproduct.minimum_price,
        })
        if form.is_valid():
            order = form.save()
            order.clear_payment()
            order.deliver()
            messages.add_message(request, messages.SUCCESS, 'Se creo la descarga')
            return HttpResponseRedirect(reverse('lfsd_orders_list'))
    else:
        form = OrderAddForm(initial={'payment_amount': 0})
    return render_to_response(template_name, RequestContext(request, {'form': form}))
@permission_required("core.manage_shop", login_url="/login/")
def order_resend_email(request, uuid):
    """Resend the download-notification email for the order identified by *uuid*.

    Bug fix: the original looked up ``Order``, a name never imported in this
    module (NameError at runtime); the order model used everywhere else here
    is ``DigitalProductOrder``.
    """
    # NOTE(review): assumes DigitalProductOrder has a ``uuid`` field — confirm.
    order = get_object_or_404(DigitalProductOrder, uuid=uuid)
    order.send_download_notification()
    messages.add_message(request, messages.SUCCESS, 'Se acaba de reenviar el correo')
    return HttpResponseRedirect(reverse('lfsd_orders_list'))
@transaction.commit_on_success
@permission_required("core.manage_shop", login_url="/login/")
def order_reset(request, order_id):
    """Restart the download counter and resend the notification email."""
    digital_order = get_object_or_404(DigitalProductOrder, pk=order_id)
    digital_order.reset_limits()
    messages.add_message(request, messages.SUCCESS, 'Digital Order reset')
    return HttpResponseRedirect(reverse('lfsd_orders_list'))
| {
"content_hash": "8a11a4b2f20e098a1c0cba0d4b8e7960",
"timestamp": "",
"source": "github",
"line_count": 333,
"max_line_length": 111,
"avg_line_length": 37.97597597597598,
"alnum_prop": 0.6564921714376087,
"repo_name": "misaelnieto/lfs_downloads",
"id": "f6ef789b46f58792ed2a8c8bfc29b100549cc934",
"size": "12682",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "lfs_downloads/views/admin_tools.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "4990"
},
{
"name": "HTML",
"bytes": "40300"
},
{
"name": "JavaScript",
"bytes": "8084"
},
{
"name": "Python",
"bytes": "87192"
},
{
"name": "Shell",
"bytes": "71"
}
],
"symlink_target": ""
} |
import sys
from os.path import abspath, dirname, join
sys.path.insert(0, join(dirname(dirname(dirname(abspath(__file__)))),'py-bindings') )
from functools import partial
from os.path import dirname
from time import clock
from math import fabs
import unittest
import copy
import ompl.util as ou
import ompl.base as ob
import ompl.geometric as og
from ompl.util import setLogLevel, LogLevel
SOLUTION_TIME = 10.0
class Environment(object):
    """2D grid world parsed from a text file.

    Expected file layout: line 0 holds ``<tag> width height``, line 1 the
    start cell, line 2 the goal cell, line 3 is skipped, and the following
    ``width`` lines each hold ``height`` space-separated cell values
    (0 = free, 1 = obstacle; 2/3 are used for path rendering).
    """

    def __init__(self, fname):
        with open(fname, 'r') as handle:
            lines = handle.readlines()
        header = lines[0].split(' ')
        self.width, self.height = int(header[1]), int(header[2])
        self.start = [int(tok) for tok in lines[1].split(' ')[1:3]]
        self.goal = [int(tok) for tok in lines[2].split(' ')[1:3]]
        self.grid = [
            [int(tok) for tok in lines[4 + row].split(' ')[0:self.height]]
            for row in range(self.width)
        ]
        # Two-character glyph per cell value, used by __str__.
        self.char_mapping = ['__', '##', 'oo', 'XX']

    def __str__(self):
        return ''.join(
            ''.join(self.char_mapping[cell] for cell in row) + '\n'
            for row in self.grid
        )
def isValid(grid, state):
    """Collision check: True when the continuous *state* lies on a free cell.

    Planning happens in a continuous space while the collision map is a
    discrete grid, so coordinates are truncated to cell indices first.
    """
    col = int(state[0])
    row = int(state[1])
    return grid[col][row] == 0  # 0 marks a free cell
class mySpace(ob.RealVectorStateSpace):
    """2D real-vector state space whose metric is Manhattan distance over grid cells."""

    def __init__(self):
        super(mySpace, self).__init__(2)

    def distance(self, state1, state2):
        # Truncate to cell indices, then sum per-axis absolute differences.
        dx = fabs(int(state1[0]) - int(state2[0]))
        dy = fabs(int(state1[1]) - int(state2[1]))
        return dx + dy
class mySpaceInformation(ob.SpaceInformation):
    """SpaceInformation wired to mySpace with bounds and validity checking
    derived from a grid Environment."""

    def __init__(self, env):
        self.sMan = mySpace()
        super(mySpaceInformation, self).__init__(self.sMan)
        sbounds = ob.RealVectorBounds(2)
        # dimension 0 (x) spans between [0, width)
        # dimension 1 (y) spans between [0, height)
        # since sampling is continuous and we round down, we allow values until
        # just under the max limit
        # the resolution is 1.0 since we check cells only
        sbounds.low[0] = 0.0
        sbounds.high[0] = float(env.width) - 0.000000001
        sbounds.low[1] = 0.0
        sbounds.high[1] = float(env.height) - 0.000000001
        self.sMan.setBounds(sbounds)
        self.setStateValidityCheckingResolution(0.5)
        # Bind the grid into the module-level isValid collision checker.
        isValidFn = ob.StateValidityCheckerFn(partial(isValid, env.grid))
        self.setStateValidityChecker(isValidFn)
        self.setup()
class TestPlanner(object):
    """Base class for per-planner smoke tests.

    Subclasses implement :meth:`newplanner` to return a configured planner;
    :meth:`execute` then solves the grid environment once and accumulates
    runtime and path-length statistics.
    """

    def execute(self, env, time, pathLength, show=False):
        """Plan once in *env*.

        Returns ``(solved, accumulated time, accumulated path length)``, where
        the last two add this run's figures onto the passed-in accumulators.
        """
        result = True
        # instantiate space information
        si = mySpaceInformation(env)
        # instantiate problem definition
        pdef = ob.ProblemDefinition(si)
        # instantiate motion planner
        planner = self.newplanner(si)
        planner.setProblemDefinition(pdef)
        planner.setup()
        # the initial state
        state = ob.State(si)
        state()[0] = env.start[0]
        state()[1] = env.start[1]
        pdef.addStartState(state)
        goal = ob.GoalState(si)
        gstate = ob.State(si)
        gstate()[0] = env.goal[0]
        gstate()[1] = env.goal[1]
        goal.setState(gstate)
        goal.threshold = 1e-3  # require near-exact arrival at the goal cell
        pdef.setGoal(goal)
        startTime = clock()
        if planner.solve(SOLUTION_TIME):
            elapsed = clock() - startTime
            time = time + elapsed
            if show:
                print('Found solution in %f seconds!' % elapsed)
            path = pdef.getSolutionPath()
            sm = og.PathSimplifier(si)
            startTime = clock()
            sm.reduceVertices(path)
            elapsed = clock() - startTime
            time = time + elapsed
            if show:
                print('Simplified solution in %f seconds!' % elapsed)
            path.interpolate(100)
            pathLength = pathLength + path.length()
            if show:
                print(env, '\n')
                temp = copy.deepcopy(env)
                for i in range(len(path.states)):
                    x = int(path.states[i][0])
                    y = int(path.states[i][1])
                    if temp.grid[x][y] in [0, 2]:
                        temp.grid[x][y] = 2  # mark the path on free cells
                    else:
                        temp.grid[x][y] = 3  # path crossed a non-free cell
                print(temp, '\n')
        else:
            result = False
        return (result, time, pathLength)

    def newplanner(self, si):
        """Return a planner instance for *si*; must be overridden by subclasses.

        Bug fix: this stub was previously named ``newPlanner`` (and lacked
        ``self``), so it never served as the abstract hook that ``execute``
        actually invokes (``self.newplanner``); a subclass missing the
        override would have raised AttributeError instead of the intended
        NotImplementedError.
        """
        raise NotImplementedError('pure virtual method')
class RRTTest(TestPlanner):
    """Smoke-test configuration for the RRT planner (range 10.0)."""
    def newplanner(self, si):
        planner = og.RRT(si)
        planner.setRange(10.0)
        return planner
class TRRTTest(TestPlanner):
    """Smoke-test configuration for the T-RRT planner (range 10.0)."""
    def newplanner(self, si):
        planner = og.TRRT(si)
        planner.setRange(10.0)
        return planner
class RRTConnectTest(TestPlanner):
    """Smoke-test configuration for the RRTConnect planner (range 10.0)."""
    def newplanner(self, si):
        planner = og.RRTConnect(si)
        planner.setRange(10.0)
        return planner
class pRRTTest(TestPlanner):
    """Smoke-test configuration for the parallel RRT planner (4 threads)."""
    def newplanner(self, si):
        planner = og.pRRT(si)
        planner.setRange(10.0)
        planner.setThreadCount(4)
        return planner
class LazyRRTTest(TestPlanner):
    """Smoke-test configuration for the LazyRRT planner (range 10.0)."""
    def newplanner(self, si):
        planner = og.LazyRRT(si)
        planner.setRange(10.0)
        return planner
class SBLTest(TestPlanner):
    """Smoke-test configuration for SBL with an (x, y) orthogonal projection."""
    def newplanner(self, si):
        planner = og.SBL(si)
        planner.setRange(10.0)
        # Project states onto dimensions 0 and 1 with unit cell sizes.
        projection = ou.vectorUint()
        projection.extend([0, 1])
        cdim = ou.vectorDouble()
        cdim.extend([1, 1])
        proj = ob.RealVectorOrthogonalProjectionEvaluator(si.getStateSpace(), cdim, projection)
        planner.setProjectionEvaluator(proj)
        return planner
class pSBLTest(TestPlanner):
    """Smoke-test configuration for parallel SBL (4 threads, (x, y) projection)."""
    def newplanner(self, si):
        planner = og.pSBL(si)
        planner.setRange(10.0)
        planner.setThreadCount(4)
        # Project states onto dimensions 0 and 1 with unit cell sizes.
        projection = ou.vectorUint()
        projection.extend([0, 1])
        cdim = ou.vectorDouble()
        cdim.extend([1, 1])
        proj = ob.RealVectorOrthogonalProjectionEvaluator(si.getStateSpace(), cdim, projection)
        planner.setProjectionEvaluator(proj)
        return planner
class KPIECE1Test(TestPlanner):
    """Smoke-test configuration for KPIECE1 with an (x, y) orthogonal projection."""
    def newplanner(self, si):
        planner = og.KPIECE1(si)
        planner.setRange(10.0)
        # Project states onto dimensions 0 and 1 with unit cell sizes.
        projection = ou.vectorUint()
        projection.extend([0, 1])
        cdim = ou.vectorDouble()
        cdim.extend([1, 1])
        proj = ob.RealVectorOrthogonalProjectionEvaluator(si.getStateSpace(), cdim, projection)
        planner.setProjectionEvaluator(proj)
        return planner
class LBKPIECE1Test(TestPlanner):
    """Smoke-test configuration for LBKPIECE1 with an (x, y) orthogonal projection."""
    def newplanner(self, si):
        planner = og.LBKPIECE1(si)
        planner.setRange(10.0)
        # Project states onto dimensions 0 and 1 with unit cell sizes.
        projection = ou.vectorUint()
        projection.extend([0, 1])
        cdim = ou.vectorDouble()
        cdim.extend([1, 1])
        proj = ob.RealVectorOrthogonalProjectionEvaluator(si.getStateSpace(), cdim, projection)
        planner.setProjectionEvaluator(proj)
        return planner
class ESTTest(TestPlanner):
    """Smoke-test configuration for the EST planner (range 10.0)."""
    def newplanner(self, si):
        planner = og.EST(si)
        planner.setRange(10.0)
        return planner
class BiESTTest(TestPlanner):
    """Smoke-test configuration for the bidirectional EST planner (range 10.0)."""
    def newplanner(self, si):
        planner = og.BiEST(si)
        planner.setRange(10.0)
        return planner
class ProjESTTest(TestPlanner):
    """Smoke-test configuration for projection-based EST ((x, y) projection)."""
    def newplanner(self, si):
        planner = og.ProjEST(si)
        planner.setRange(10.0)
        # Project states onto dimensions 0 and 1 with unit cell sizes.
        projection = ou.vectorUint()
        projection.extend([0, 1])
        cdim = ou.vectorDouble()
        cdim.extend([1, 1])
        proj = ob.RealVectorOrthogonalProjectionEvaluator(si.getStateSpace(), cdim, projection)
        planner.setProjectionEvaluator(proj)
        return planner
class PRMTest(TestPlanner):
    """Smoke-test configuration for the PRM planner (default settings)."""
    def newplanner(self, si):
        planner = og.PRM(si)
        return planner
class PlanTest(unittest.TestCase):
    """Runs each planner 50 times on the shared grid environment and asserts
    minimum success rate plus maximum average runtime and path length.

    NOTE(review): the runtime thresholds below are machine-dependent —
    they may need loosening on slow CI hosts.
    """

    def setUp(self):
        # Grid world shared by all planner tests.
        self.env = Environment(dirname(abspath(__file__))+'/../../tests/resources/env1.txt')
        if self.env.width * self.env.height == 0:
            self.fail('The environment has a 0 dimension. Cannot continue')
        self.verbose = True

    def runPlanTest(self, planner):
        """Run *planner* N times; return (success %, avg runtime, avg path length)."""
        time = 0.0
        length = 0.0
        good = 0
        N = 50
        for i in range(N):
            (result, time, length) = planner.execute(self.env, time, length, False)
            if result: good = good + 1
        success = 100.0 * float(good) / float(N)
        avgruntime = time / float(N)
        avglength = length / float(N)
        if self.verbose:
            print('    Success rate: %f%%' % success)
            print('    Average runtime: %f' % avgruntime)
            print('    Average path length: %f' % avglength)
        return (success, avgruntime, avglength)

    def testGeometric_RRT(self):
        planner = RRTTest()
        (success, avgruntime, avglength) = self.runPlanTest(planner)
        self.assertTrue(success >= 99.0)
        self.assertTrue(avgruntime < 0.3)
        self.assertTrue(avglength < 100.0)

    def testGeometric_TRRT(self):
        planner = TRRTTest()
        (success, avgruntime, avglength) = self.runPlanTest(planner)
        self.assertTrue(success >= 99.0)
        self.assertTrue(avgruntime < 0.3)
        self.assertTrue(avglength < 100.0)

    def testGeometric_RRTConnect(self):
        planner = RRTConnectTest()
        (success, avgruntime, avglength) = self.runPlanTest(planner)
        self.assertTrue(success >= 99.0)
        self.assertTrue(avgruntime < 0.5)
        self.assertTrue(avglength < 100.0)

    # need to make bindings threadsafe
    # see http://wiki.python.org/moin/boost.python/HowTo#MultithreadingSupportformyfunction
    # def testGeometric_pRRT(self):
    #     planner = pRRTTest()
    #     (success, avgruntime, avglength) = self.runPlanTest(planner)
    #     self.assertTrue(success >= 99.0)
    #     self.assertTrue(avgruntime < 2.5)
    #     self.assertTrue(avglength < 100.0)

    def testGeometric_LazyRRT(self):
        # LazyRRT is allowed a much lower success rate than the other planners.
        planner = LazyRRTTest()
        (success, avgruntime, avglength) = self.runPlanTest(planner)
        self.assertTrue(success >= 60.0)
        self.assertTrue(avgruntime < 1)
        self.assertTrue(avglength < 100.0)

    def testGeometric_SBL(self):
        planner = SBLTest()
        (success, avgruntime, avglength) = self.runPlanTest(planner)
        self.assertTrue(success >= 99.0)
        self.assertTrue(avgruntime < 0.1)
        self.assertTrue(avglength < 100.0)

    # need to make bindings threadsafe
    # see http://wiki.python.org/moin/boost.python/HowTo#MultithreadingSupportformyfunction
    # def testGeometric_pSBL(self):
    #     planner = pSBLTest()
    #     (success, avgruntime, avglength) = self.runPlanTest(planner)
    #     self.assertTrue(success >= 99.0)
    #     self.assertTrue(avgruntime < 0.1)
    #     self.assertTrue(avglength < 100.0)

    def testGeometric_KPIECE1(self):
        planner = KPIECE1Test()
        (success, avgruntime, avglength) = self.runPlanTest(planner)
        self.assertTrue(success >= 99.0)
        self.assertTrue(avgruntime < 0.1)
        self.assertTrue(avglength < 100.0)

    def testGeometric_LBKPIECE1(self):
        planner = LBKPIECE1Test()
        (success, avgruntime, avglength) = self.runPlanTest(planner)
        self.assertTrue(success >= 99.0)
        self.assertTrue(avgruntime < 0.1)
        self.assertTrue(avglength < 100.0)

    def testGeometric_EST(self):
        planner = ESTTest()
        (success, avgruntime, avglength) = self.runPlanTest(planner)
        self.assertTrue(success >= 99.0)
        self.assertTrue(avgruntime < 0.1)
        self.assertTrue(avglength < 100.0)

    def testGeometric_PRM(self):
        planner = PRMTest()
        (success, avgruntime, avglength) = self.runPlanTest(planner)
        self.assertTrue(success >= 99.0)
        self.assertTrue(avgruntime < 2.0)
        self.assertTrue(avglength < 100.0)
def suite():
    """Return a TestSuite containing all planning tests.

    Bug fix: ``( ... )`` without a trailing comma does not create a tuple, so
    the original passed a single TestSuite where an iterable of suites was
    intended; the trailing comma makes the one-element tuple explicit.
    """
    suites = (unittest.makeSuite(PlanTest), )
    return unittest.TestSuite(suites)
if __name__ == '__main__':
    # Silence OMPL's informational output so only test results are printed.
    setLogLevel(LogLevel.LOG_ERROR)
    unittest.main()
| {
"content_hash": "9eba73994ad27f99b20a9f329a6e9628",
"timestamp": "",
"source": "github",
"line_count": 366,
"max_line_length": 95,
"avg_line_length": 33.27322404371585,
"alnum_prop": 0.6034652652323863,
"repo_name": "jvgomez/ompl",
"id": "0e951ab5cc5e2e950cf94d49aa95f7519e154785",
"size": "13991",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "tests/geometric/test_geometric.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "8268"
},
{
"name": "C++",
"bytes": "3946941"
},
{
"name": "CMake",
"bytes": "55919"
},
{
"name": "CSS",
"bytes": "2410"
},
{
"name": "JavaScript",
"bytes": "766"
},
{
"name": "Python",
"bytes": "198836"
},
{
"name": "R",
"bytes": "43530"
},
{
"name": "Shell",
"bytes": "8537"
}
],
"symlink_target": ""
} |
from datetime import date, timedelta
from model_bakery import baker
from django.conf import settings
from django.test import TestCase
from ..models import Sponsorship, SponsorBenefit, LogoPlacement, TieredQuantity, RequiredTextAsset, RequiredImgAsset, \
BenefitFeature, SponsorshipPackage, SponsorshipBenefit
from sponsors.models.enums import LogoPlacementChoices, PublisherChoices
class SponsorshipQuerySetTests(TestCase):
    """Tests for the custom Sponsorship queryset methods
    (visible_to, enabled, with_logo_placement, includes_benefit_feature)."""

    def setUp(self):
        self.user = baker.make(settings.AUTH_USER_MODEL)
        self.contact = baker.make('sponsors.SponsorContact', user=self.user)

    def test_visible_to_user(self):
        # Visible: submitted by the user or belonging to the user's sponsor,
        # in any non-rejected status.
        visible = [
            baker.make(Sponsorship, submited_by=self.user, status=Sponsorship.APPLIED),
            baker.make(Sponsorship, sponsor=self.contact.sponsor, status=Sponsorship.APPROVED),
            baker.make(Sponsorship, submited_by=self.user, status=Sponsorship.FINALIZED),
        ]
        baker.make(Sponsorship)  # should not be visible because it's from other sponsor
        baker.make(Sponsorship, submited_by=self.user, status=Sponsorship.REJECTED)  # don't list rejected
        qs = Sponsorship.objects.visible_to(self.user)
        self.assertEqual(len(visible), qs.count())
        for sp in visible:
            self.assertIn(sp, qs)
        self.assertEqual(list(qs), list(self.user.sponsorships))

    def test_enabled_sponsorships(self):
        # Sponsorships that are enabled must have:
        # - finalized status
        # - start date less than today
        # - end date greater than today
        today = date.today()
        two_days = timedelta(days=2)
        enabled = baker.make(
            Sponsorship,
            status=Sponsorship.FINALIZED,
            start_date=today - two_days,
            end_date=today + two_days,
        )
        # group of still disabled sponsorships: wrong status, not yet started,
        # and already expired, respectively
        baker.make(
            Sponsorship,
            status=Sponsorship.APPLIED,
            start_date=today - two_days,
            end_date=today + two_days
        )
        baker.make(
            Sponsorship,
            status=Sponsorship.FINALIZED,
            start_date=today + two_days,
            end_date=today + 2 * two_days
        )
        baker.make(
            Sponsorship,
            status=Sponsorship.FINALIZED,
            start_date=today - 2 * two_days,
            end_date=today - two_days
        )
        qs = Sponsorship.objects.enabled()
        self.assertEqual(1, qs.count())
        self.assertIn(enabled, qs)

    def test_filter_sponsorship_with_logo_placement_benefits(self):
        sponsorship_with_download_logo = baker.make_recipe('sponsors.tests.finalized_sponsorship')
        sponsorship_with_sponsors_logo = baker.make_recipe('sponsors.tests.finalized_sponsorship')
        simple_sponsorship = baker.make_recipe('sponsors.tests.finalized_sponsorship')
        download_logo_benefit = baker.make(SponsorBenefit, sponsorship=sponsorship_with_download_logo)
        baker.make_recipe('sponsors.tests.logo_at_download_feature', sponsor_benefit=download_logo_benefit)
        sponsors_logo_benefit = baker.make(SponsorBenefit, sponsorship=sponsorship_with_sponsors_logo)
        baker.make_recipe('sponsors.tests.logo_at_sponsors_feature', sponsor_benefit=sponsors_logo_benefit)
        # Benefit without a logo placement: its sponsorship must be excluded.
        regular_benefit = baker.make(SponsorBenefit, sponsorship=simple_sponsorship)
        # The assertNumQueries(1) blocks also guard against N+1 queries.
        with self.assertNumQueries(1):
            qs = list(Sponsorship.objects.with_logo_placement())
        self.assertEqual(2, len(qs))
        self.assertIn(sponsorship_with_download_logo, qs)
        self.assertIn(sponsorship_with_sponsors_logo, qs)
        with self.assertNumQueries(1):
            kwargs = {
                "logo_place": LogoPlacementChoices.DOWNLOAD_PAGE.value,
                "publisher": PublisherChoices.FOUNDATION.value,
            }
            qs = list(Sponsorship.objects.with_logo_placement(**kwargs))
        self.assertEqual(1, len(qs))
        self.assertIn(sponsorship_with_download_logo, qs)

    def test_filter_sponsorship_by_benefit_feature_type(self):
        sponsorship_feature_1 = baker.make_recipe('sponsors.tests.finalized_sponsorship')
        sponsorship_feature_2 = baker.make_recipe('sponsors.tests.finalized_sponsorship')
        baker.make(LogoPlacement, sponsor_benefit__sponsorship=sponsorship_feature_1)
        baker.make(TieredQuantity, sponsor_benefit__sponsorship=sponsorship_feature_2)
        with self.assertNumQueries(1):
            qs = list(Sponsorship.objects.includes_benefit_feature(LogoPlacement))
        self.assertEqual(1, len(qs))
        self.assertIn(sponsorship_feature_1, qs)
class BenefitFeatureQuerySet(TestCase):
    """Tests for the BenefitFeature queryset helpers."""

    def setUp(self):
        self.sponsorship = baker.make(Sponsorship)
        self.benefit = baker.make(SponsorBenefit, sponsorship=self.sponsorship)

    def test_filter_benefits_from_sponsorship(self):
        """from_sponsorship returns only features attached to that sponsorship."""
        feature_1 = baker.make(TieredQuantity, sponsor_benefit=self.benefit)
        feature_2 = baker.make(LogoPlacement, sponsor_benefit=self.benefit)
        baker.make(LogoPlacement)  # benefit from other sponsor benefit
        qs = BenefitFeature.objects.from_sponsorship(self.sponsorship)
        self.assertEqual(qs.count(), 2)
        self.assertIn(feature_1, qs)
        self.assertIn(feature_2, qs)

    def test_filter_only_for_required_assets(self):
        """required_assets returns only required text/image asset features."""
        baker.make(TieredQuantity)  # not an asset; must be excluded
        text_asset = baker.make(RequiredTextAsset)
        img_asset = baker.make(RequiredImgAsset)
        qs = BenefitFeature.objects.required_assets()
        self.assertEqual(qs.count(), 2)
        self.assertIn(text_asset, qs)
        # Fix: img_asset was created but never asserted against the queryset.
        self.assertIn(img_asset, qs)
class SponsorshipBenefitManagerTests(TestCase):
    """Tests for the add_ons / a_la_carte SponsorshipBenefit querysets."""

    def setUp(self):
        package = baker.make(SponsorshipPackage)
        self.regular_benefit = baker.make(SponsorshipBenefit)
        self.regular_benefit.packages.add(package)
        self.add_on = baker.make(SponsorshipBenefit)
        self.a_la_carte = baker.make(SponsorshipBenefit, a_la_carte=True)

    def test_add_ons_queryset(self):
        # Only the benefit with no package and a_la_carte=False is an add-on.
        add_ons = SponsorshipBenefit.objects.add_ons()
        self.assertEqual(1, add_ons.count())
        self.assertIn(self.add_on, add_ons)

    def test_a_la_carte_queryset(self):
        # Only the explicitly flagged benefit is a la carte.
        a_la_carte_qs = SponsorshipBenefit.objects.a_la_carte()
        self.assertEqual(1, a_la_carte_qs.count())
        self.assertIn(self.a_la_carte, a_la_carte_qs)
| {
"content_hash": "51840cc4a7a02ea379e7a85e2221a8f7",
"timestamp": "",
"source": "github",
"line_count": 157,
"max_line_length": 119,
"avg_line_length": 40.31210191082803,
"alnum_prop": 0.6713540843735187,
"repo_name": "manhhomienbienthuy/pythondotorg",
"id": "d1f46555d32aa679f58c74f3334034a4138a2c31",
"size": "6329",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "sponsors/tests/test_managers.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "7686"
},
{
"name": "HTML",
"bytes": "491673"
},
{
"name": "JavaScript",
"bytes": "20834"
},
{
"name": "PostScript",
"bytes": "19072"
},
{
"name": "Procfile",
"bytes": "105"
},
{
"name": "Python",
"bytes": "1075699"
},
{
"name": "Ruby",
"bytes": "1464"
},
{
"name": "SCSS",
"bytes": "197973"
}
],
"symlink_target": ""
} |
"""
This is a template for creating custom ColumnMapExpectations.
For detailed instructions on how to use it, please see:
https://docs.greatexpectations.io/docs/guides/expectations/creating_custom_expectations/how_to_create_custom_column_map_expectations
"""
import json
from typing import Optional
import blockcypher
import coinaddrvalidator
from great_expectations.core.expectation_configuration import ExpectationConfiguration
from great_expectations.exceptions import InvalidExpectationConfigurationError
from great_expectations.execution_engine import PandasExecutionEngine
from great_expectations.expectations.expectation import ColumnMapExpectation
from great_expectations.expectations.metrics import (
ColumnMapMetricProvider,
column_condition_partial,
)
def has_eth_address_positive_balance(addr: str) -> bool:
    """Return True iff *addr* is a valid Ethereum address with balance > 0.

    Validates the address format with coinaddrvalidator, then queries
    blockcypher for the total balance. Any failure (invalid address,
    network error, API error) is treated as "no positive balance".
    """
    try:
        # Fix: compare truthiness directly instead of `== True`.
        if not coinaddrvalidator.validate("eth", addr).valid:
            return False
        # Fix: return the comparison itself instead of if/else True/False.
        return blockcypher.get_total_balance(addr, "eth") > 0
    except Exception:
        # Best-effort check: network/validator failures count as False.
        return False
# This class defines a Metric to support your Expectation.
# For most ColumnMapExpectations, the main business logic for calculation will live in this class.
class ColumnValuesEthAddressPositiveBalance(ColumnMapMetricProvider):
    """Metric provider flagging values that are valid ETH addresses with positive balance."""

    # This is the id string that will be used to reference your metric.
    condition_metric_name = "column_values.valid_eth_address_positive_balance"

    # This method implements the core logic for the PandasExecutionEngine
    @column_condition_partial(engine=PandasExecutionEngine)
    def _pandas(cls, column, **kwargs):
        # Idiom fix: pass the predicate directly; the lambda wrapper was redundant.
        return column.apply(has_eth_address_positive_balance)
# This method defines the business logic for evaluating your metric when using a SqlAlchemyExecutionEngine
# @column_condition_partial(engine=SqlAlchemyExecutionEngine)
# def _sqlalchemy(cls, column, _dialect, **kwargs):
# raise NotImplementedError
# This method defines the business logic for evaluating your metric when using a SparkDFExecutionEngine
# @column_condition_partial(engine=SparkDFExecutionEngine)
# def _spark(cls, column, **kwargs):
# raise NotImplementedError
# This class defines the Expectation itself
class ExpectColumnValuesEthAddressPositiveBalance(ColumnMapExpectation):
    """Expect column values Ethereum address has got positive balance (>0)"""

    # These examples will be shown in the public gallery.
    # They will also be executed as unit tests for your Expectation.
    examples = [
        {
            "data": {
                # Well-formed ETH addresses; balance is checked live via blockcypher.
                "all_valid": [
                    "0xab3b229eb4bcff881275e7ea2f0fd24eeac8c83a",
                    "0xd964debbf2de954f7b3d8773f78456fe2ecc6352",
                    "0xbdd3742fda748a5365b6a2701e8f3fbccd76bcb4",
                    "0xd5e978560ec23cd92f8f6059e78f5f21bc59c158",
                ],
                # Bitcoin-style addresses: must fail the ETH validity check.
                "some_other": [
                    "1BoatSLRHtKNngkdXEeobR76b53LETtpyT",
                    "n2nzi7xDTrMVK9stGpbK3BtrpBCJfH7LRQ",
                    "3QJmV3qfvL9SuYo34YihAf3sRCW3qSinyC",
                    "bc1qxneu85dnhx33asv8da45x55qyeu44ek9h3vngxdsare",
                ],
            },
            "tests": [
                {
                    "title": "basic_positive_test",
                    "exact_match_out": False,
                    "include_in_gallery": True,
                    "in": {"column": "all_valid"},
                    "out": {
                        "success": True,
                    },
                },
                {
                    "title": "basic_negative_test",
                    "exact_match_out": False,
                    "include_in_gallery": True,
                    "in": {"column": "some_other", "mostly": 1},
                    "out": {
                        "success": False,
                    },
                },
            ],
        }
    ]

    # This is the id string of the Metric used by this Expectation.
    # For most Expectations, it will be the same as the `condition_metric_name` defined in your Metric class above.
    map_metric = "column_values.valid_eth_address_positive_balance"

    # This is a list of parameter names that can affect whether the Expectation evaluates to True or False
    success_keys = ("mostly",)

    # This dictionary contains default values for any parameters that should have default values
    default_kwarg_values = {}

    def validate_configuration(
        self, configuration: Optional[ExpectationConfiguration]
    ) -> None:
        """
        Validates that a configuration has been set, and sets a configuration if it has yet to be set. Ensures that
        necessary configuration arguments have been provided for the validation of the expectation.
        Args:
            configuration (OPTIONAL[ExpectationConfiguration]): \
                An optional Expectation Configuration entry that will be used to configure the expectation
        Returns:
            None. Raises InvalidExpectationConfigurationError if the config is not validated successfully
        """
        super().validate_configuration(configuration)
        if configuration is None:
            configuration = self.configuration
        # # Check other things in configuration.kwargs and raise Exceptions if needed
        # try:
        #     assert (
        #         ...
        #     ), "message"
        #     assert (
        #         ...
        #     ), "message"
        # except AssertionError as e:
        #     raise InvalidExpectationConfigurationError(str(e))
        # NOTE(review): annotated `-> None` and documented as returning None,
        # yet returns True here — confirm against ColumnMapExpectation's contract.
        return True

    # This object contains metadata for display in the public Gallery
    library_metadata = {
        "maturity": "experimental",
        "tags": [
            "hackathon-22",
            "experimental",
            "typed-entities",
        ],  # Tags for this Expectation in the Gallery
        "contributors": [  # Github handles for all contributors to this Expectation.
            "@szecsip",  # Don't forget to add your github handle here!
        ],
        "requirements": ["coinaddrvalidator", "blockcypher"],
    }
# Running this file as a script prints the expectation's diagnostic checklist.
if __name__ == "__main__":
    ExpectColumnValuesEthAddressPositiveBalance().print_diagnostic_checklist()
| {
"content_hash": "9c25c4b6a9dbc72bee814b31e05800f0",
"timestamp": "",
"source": "github",
"line_count": 162,
"max_line_length": 136,
"avg_line_length": 39.39506172839506,
"alnum_prop": 0.6313068003760577,
"repo_name": "great-expectations/great_expectations",
"id": "6070ee72218183eca49d140d88cd6aff554efaef",
"size": "6382",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "contrib/great_expectations_semantic_types_expectations/great_expectations_semantic_types_expectations/expectations/expect_column_values_eth_address_positive_balance.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "23771"
},
{
"name": "Dockerfile",
"bytes": "2388"
},
{
"name": "HTML",
"bytes": "27311"
},
{
"name": "JavaScript",
"bytes": "45960"
},
{
"name": "Jinja",
"bytes": "66650"
},
{
"name": "Jupyter Notebook",
"bytes": "816323"
},
{
"name": "Lua",
"bytes": "3489"
},
{
"name": "Makefile",
"bytes": "657"
},
{
"name": "Python",
"bytes": "15728777"
},
{
"name": "Shell",
"bytes": "2930"
}
],
"symlink_target": ""
} |
from jumpgate.common import error_handling
class ExtraSpecsFlavorV2(object):
    """Resource returning all extra specs for a given flavor id."""

    def __init__(self, app, flavors):
        self.app = app
        self.flavors = flavors

    def on_get(self, req, resp, tenant_id, flavor_id):
        """Return the extra specs for a particular flavor, or a 400 error
        when the flavor id is unknown."""
        wanted = str(flavor_id)
        match = next((f for f in self.flavors if f['id'] == wanted), None)
        if match is None:
            error_handling.bad_request(resp, message="Invalid Flavor ID "
                                       "requested.")
            return
        resp.status = 200
        resp.body = {'extra_specs': match['extra_specs']}
class ExtraSpecsFlavorKeyV2(object):
    """Resource returning a single extra-spec key for a given flavor id."""

    def __init__(self, app, flavors):
        self.app = app
        self.flavors = flavors

    def on_get(self, req, resp, tenant_id, flavor_id, key_id):
        """Return the requested key from the flavor's extra specs; 400 when
        the flavor id or the key is unknown."""
        wanted = str(flavor_id)
        for entry in self.flavors:
            if entry['id'] != wanted:
                continue
            specs = entry['extra_specs']
            if key_id in specs:
                resp.status = 200
                resp.body = {key_id: specs[key_id]}
            else:
                error_handling.bad_request(resp, message="Invalid Key ID "
                                           "requested")
            return
        error_handling.bad_request(resp, message="Invalid Flavor ID "
                                   "requested.")
| {
"content_hash": "8809d8643e43fabe3a16a257a00d7a57",
"timestamp": "",
"source": "github",
"line_count": 48,
"max_line_length": 78,
"avg_line_length": 34.5625,
"alnum_prop": 0.488245931283906,
"repo_name": "softlayer/jumpgate",
"id": "4cb2660d5e4d86aed85a97837ca0ccd637b3408c",
"size": "1659",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "jumpgate/compute/drivers/sl/extra_specs.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "341295"
},
{
"name": "Shell",
"bytes": "288"
}
],
"symlink_target": ""
} |
from salesking import api
from salesking.tests.base import SalesKingBaseTestCase
class SKApiTestCase(SalesKingBaseTestCase):
    """Integration tests for the basic-auth SalesKing API client."""

    # def test_access_token_fail(self):
    #     clnt = api.APIClient()
    #     token = clnt.request_access_token("dummy")
    #     msg = "response:%s" % token
    #     self.assertTrue(token != None, msg)

    def test_basic_auth_raw_sk_client_get_success(self):
        """A GET on /api/contacts succeeds and carries no error marker."""
        clnt = api.APIClient()
        url = "%s/api/contacts" % clnt.base_url
        response = clnt.request(url)
        # Fix: assertEquals is a deprecated alias of assertEqual.
        self.assertEqual(response.status_code, 200)
        msg = "response: %s" % response.text
        self.assertEqual(response.text.find("error"), -1, msg)
| {
"content_hash": "0417ee94258395347cc6409d197ca582",
"timestamp": "",
"source": "github",
"line_count": 20,
"max_line_length": 61,
"avg_line_length": 34.9,
"alnum_prop": 0.6217765042979942,
"repo_name": "salesking/salesking_python_sdk",
"id": "c05475caca71bcf1c4b0bdeea5cf346997c1c310",
"size": "746",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "salesking/tests/api.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "139100"
}
],
"symlink_target": ""
} |
"""Unit tests for oseoserver.utilities"""
import pytest
import mock
from mock import DEFAULT
from oseoserver import errors
from oseoserver.models import Order
from oseoserver import utilities
pytestmark = pytest.mark.unit
def test_get_generic_order_config_incorrect_order_type():
    """An unknown order type raises AttributeError."""
    order_type = "fake"
    # Fix: dropped unused `excinfo` alias and unused `config` assignment.
    with pytest.raises(AttributeError):
        utilities.get_generic_order_config(order_type)
@pytest.mark.parametrize(["order_type", "fake_config"], [
    (Order.PRODUCT_ORDER, "dummy product config"),
    (Order.MASSIVE_ORDER, "dummy massive config"),
    (Order.SUBSCRIPTION_ORDER, "dummy subscription config"),
    (Order.TASKING_ORDER, "dummy tasking config"),
])
def test_get_generic_order_config(order_type, fake_config):
    """Each order type delegates to the matching settings getter."""
    with mock.patch("oseoserver.utilities.settings",
                    autospec=True) as settings_mock:
        getter_name = "get_{}".format(order_type.value.lower())
        getattr(settings_mock, getter_name).return_value = fake_config
        assert utilities.get_generic_order_config(order_type) == fake_config
def test_validate_processing_option_no_choices():
    # A parsed option value is returned unchanged when the option is declared
    # for the order type and the item processor accepts it.
    fake_option_name = "dummy name"
    fake_parsed_value = "dummy value"
    order_type = Order.PRODUCT_ORDER
    with mock.patch.multiple("oseoserver.utilities",
                             get_order_configuration=DEFAULT,
                             get_generic_order_config=DEFAULT,
                             import_class=DEFAULT) as mock_util, \
            mock.patch("oseoserver.settings.get_processing_options",
                       autospec=True) as mock_get_options:
        # Collection config declares the option for product orders.
        mock_util["get_order_configuration"].return_value = {
            "product_order": {"options": [fake_option_name]}
        }
        mock_util["get_generic_order_config"].return_value = {
            "item_processor": "dummy"}
        # The mocked item processor echoes the parsed value back.
        mock_util["import_class"].return_value.parse_option.return_value = (
            fake_parsed_value)
        mock_get_options.return_value = [{"name": fake_option_name}]
        result = utilities.validate_processing_option(
            fake_option_name, fake_parsed_value, order_type, "dummy")
        assert result == fake_parsed_value
@pytest.mark.parametrize(["order_type", "expected_exception"], [
    (Order.PRODUCT_ORDER, errors.ProductOrderingNotSupportedError),
    (Order.MASSIVE_ORDER, errors.ProductOrderingNotSupportedError),
    (Order.SUBSCRIPTION_ORDER, errors.SubscriptionNotSupportedError),
    (Order.TASKING_ORDER, errors.FutureProductNotSupportedError)
])
def test_get_order_configuration_disabled(order_type, expected_exception):
    """The proper exceptions are raised when order types are disabled"""
    collection_name = "dummy collection"
    collection_config = {
        "name": collection_name,
        order_type.value.lower(): {"enabled": False},
    }
    settings_patcher = mock.patch("oseoserver.utilities.settings",
                                  autospec=True)
    with settings_patcher as settings_mock, pytest.raises(expected_exception):
        settings_mock.get_collections.return_value = [collection_config]
        utilities.get_order_configuration(order_type, collection_name)
@pytest.mark.parametrize("order_type", [
    Order.PRODUCT_ORDER,
    Order.MASSIVE_ORDER,
    Order.SUBSCRIPTION_ORDER,
    Order.TASKING_ORDER,
])
def test_get_order_configuration_enabled(order_type):
    """An enabled order type yields its collection's configuration."""
    collection_name = "dummy collection"
    collection_config = {
        "name": collection_name,
        order_type.value.lower(): {"enabled": True},
    }
    with mock.patch("oseoserver.utilities.settings",
                    autospec=True) as settings_mock:
        settings_mock.get_collections.return_value = [collection_config]
        assert utilities.get_order_configuration(
            order_type, collection_name) == collection_config
def test_get_option_configuration_invalid_option():
    """Unknown option names raise OseoServerError."""
    option_name = "fake_option"
    settings_patcher = mock.patch("oseoserver.utilities.settings",
                                  autospec=True)
    with settings_patcher as settings_mock, \
            pytest.raises(errors.OseoServerError):
        settings_mock.get_processing_options.return_value = []
        utilities.get_option_configuration(option_name)
def test_get_option_configuration_valid_option():
    """A declared option's configuration dict is returned."""
    option_name = "fake_option"
    option_config = {"name": option_name}
    with mock.patch("oseoserver.utilities.settings",
                    autospec=True) as settings_mock:
        settings_mock.get_processing_options.return_value = [option_config]
        assert utilities.get_option_configuration(option_name) == option_config
def test_validate_collection_id_invalid_id():
    """Unknown collection identifiers raise InvalidParameterValueError."""
    collection_id = "fake collection id"
    settings_patcher = mock.patch("oseoserver.utilities.settings",
                                  autospec=True)
    with settings_patcher as settings_mock, \
            pytest.raises(errors.InvalidParameterValueError):
        settings_mock.get_collections.return_value = []
        utilities.validate_collection_id(collection_id)
def test_validate_collection_id_valid_id():
    """A known collection identifier yields its configuration dict."""
    collection_id = "fake collection id"
    collection_config = {"collection_identifier": collection_id}
    with mock.patch("oseoserver.utilities.settings",
                    autospec=True) as settings_mock:
        settings_mock.get_collections.return_value = [collection_config]
        assert utilities.validate_collection_id(collection_id) == collection_config
| {
"content_hash": "9a7e9ca4a4f10329b3be9302e432037f",
"timestamp": "",
"source": "github",
"line_count": 134,
"max_line_length": 79,
"avg_line_length": 40.96268656716418,
"alnum_prop": 0.6675168518855894,
"repo_name": "pyoseo/django-oseoserver",
"id": "e9323cf83859895afe5b46366be2ad83a57ab591",
"size": "5489",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/unittests/test_utilities.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "5653"
},
{
"name": "Python",
"bytes": "282717"
},
{
"name": "Shell",
"bytes": "17370"
}
],
"symlink_target": ""
} |
class Proxy(object):
    """Base proxy holding a content payload shown to the user."""

    def __init__(self):
        # Subclasses set the actual content string.
        self.content = None

    def __str__(self):
        # Fix: the original used a comma instead of `%`, returning a tuple
        # ("tip to you: %s", content) instead of the formatted string.
        return "tip to you: %s" % self.content
class ChildProxy(Proxy):
    """Proxy exposing content suitable for both children and adults."""
    def __init__(self):
        Proxy.__init__(self)
        self.content = "resource show for child or adult"
class AdultProxy(Proxy):
    """Proxy exposing content restricted to adults only."""
    def __init__(self):
        Proxy.__init__(self)
        self.content = "resource only show for adult"
class RouteProxy:
    """Chooses the proxy matching a user type ('child' or 'adult')."""

    def __init__(self):
        self.to_proxy = None

    def choose_route(self, u_type):
        # Dispatch on user type; an unknown type leaves the previously
        # chosen proxy (or None) in place, matching the if/elif original.
        factory = {'child': ChildProxy, 'adult': AdultProxy}.get(u_type)
        if factory is not None:
            self.to_proxy = factory()
        return self.to_proxy
# Demo (Python 2 print statements): route a child and an adult request
# and print the resulting tips.
if __name__ == '__main__':
    route = RouteProxy()
    child_proxy = route.choose_route('child')
    print child_proxy.__str__()
    adult_proxy = route.choose_route('adult')
    print adult_proxy.__str__()
| {
"content_hash": "c4361707220a7e8d322f4be3d469bfaa",
"timestamp": "",
"source": "github",
"line_count": 38,
"max_line_length": 57,
"avg_line_length": 23.894736842105264,
"alnum_prop": 0.5627753303964758,
"repo_name": "BEUTIFULSKIN/python-design",
"id": "9f705eb0379adb4aec7065f2f9387b79f38bdaf9",
"size": "1244",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Proxy/yoke_proxy.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "55941"
}
],
"symlink_target": ""
} |
import jinja2
import os
import webapp2
# Jinja environment rooted at this file's directory; templates are
# resolved relative to it (e.g. 'templates/input_order.html').
jinja_environment = jinja2.Environment(loader=
    jinja2.FileSystemLoader(os.path.dirname(__file__)))
class MainHandler(webapp2.RequestHandler):
    """Renders the pizza order form (GET) and echoes the submitted order (POST)."""

    def get(self):
        page = jinja_environment.get_template('templates/input_order.html')
        self.response.write(page.render())

    def post(self):
        page = jinja_environment.get_template('templates/output_order.html')
        # Each form field maps to a '<field>_answer' template variable.
        # (Note: 'topings' spelling must match the form/template.)
        pizza_order = {
            field + '_answer': self.request.get(field)
            for field in ('crust', 'size', 'sauce', 'cheese', 'topings')
        }
        self.response.write(page.render(pizza_order))
# WSGI application routing '/' to MainHandler; debug=True shows tracebacks.
app = webapp2.WSGIApplication([
    ('/', MainHandler),
    ], debug=True)
| {
"content_hash": "3aaaa870b7c9aa9d2d0be1c1456e6df1",
"timestamp": "",
"source": "github",
"line_count": 29,
"max_line_length": 80,
"avg_line_length": 29.655172413793103,
"alnum_prop": 0.6593023255813953,
"repo_name": "luisibanez/cssi-appengine-templates-01",
"id": "656351fc23b158b3d2dec708de6191041c196853",
"size": "860",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/example12/main.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "119356"
},
{
"name": "Python",
"bytes": "7170"
}
],
"symlink_target": ""
} |
"""
Celery queued tasks for Helios
2010-08-01
ben@adida.net
"""
from celery.decorators import task
from models import *
from view_utils import render_template_raw
import signals
import copy
from django.conf import settings
@task()
def cast_vote_verify_and_store(cast_vote_id, status_update_message=None, **kwargs):
    """Verify a cast vote and store it if valid.

    On success, emits the ``vote_cast`` signal and optionally posts a status
    update on the voter's behalf; on failure, logs an error.
    """
    cast_vote = CastVote.objects.get(id=cast_vote_id)
    result = cast_vote.verify_and_store()

    voter = cast_vote.voter
    election = voter.election
    user = voter.get_user()

    if result:
        # send the signal
        signals.vote_cast.send(sender=election, election=election, user=user,
                               voter=voter, cast_vote=cast_vote)

        if status_update_message and user.can_update_status():
            # Fix: removed unused function-level import of get_election_url.
            user.update_status(status_update_message)
    else:
        logger = cast_vote_verify_and_store.get_logger(**kwargs)
        logger.error("Failed to verify and store %d" % cast_vote_id)
@task()
def voters_email(election_id, subject_template, body_template, extra_vars=None,
                 voter_constraints_include=None, voter_constraints_exclude=None):
    """Queue an email task for each (optionally filtered) voter of an election.

    voter_constraints_include are conditions on including voters
    voter_constraints_exclude are conditions on excluding voters
    """
    # Fix: avoid the mutable-default-argument pitfall (extra_vars={}).
    extra_vars = {} if extra_vars is None else extra_vars
    election = Election.objects.get(id=election_id)

    # select the right list of voters
    voters = election.voter_set.all()
    if voter_constraints_include:
        voters = voters.filter(**voter_constraints_include)
    if voter_constraints_exclude:
        voters = voters.exclude(**voter_constraints_exclude)

    for voter in voters:
        single_voter_email.delay(voter.uuid, subject_template, body_template, extra_vars)
@task()
def voters_notify(election_id, notification_template, extra_vars=None):
    """Queue a notification task for every voter in the election."""
    # Fix: avoid the mutable-default-argument pitfall (extra_vars={}).
    extra_vars = {} if extra_vars is None else extra_vars
    election = Election.objects.get(id=election_id)
    for voter in election.voter_set.all():
        single_voter_notify.delay(voter.uuid, notification_template, extra_vars)
@task()
def single_voter_email(voter_uuid, subject_template, body_template, extra_vars=None):
    """Render subject/body templates for one voter and send the message."""
    # Fix: avoid the mutable-default-argument pitfall (extra_vars={}).
    extra_vars = {} if extra_vars is None else extra_vars
    voter = Voter.objects.get(uuid=voter_uuid)

    # Copy so the caller's dict is never mutated.
    the_vars = copy.copy(extra_vars)
    the_vars['voter'] = voter

    subject = render_template_raw(None, subject_template, the_vars)
    body = render_template_raw(None, body_template, the_vars)

    voter.send_message(subject, body)
@task()
def single_voter_notify(voter_uuid, notification_template, extra_vars=None):
    """Render a notification template for one voter and deliver it."""
    # Fix: avoid the mutable-default-argument pitfall (extra_vars={}).
    extra_vars = {} if extra_vars is None else extra_vars
    voter = Voter.objects.get(uuid=voter_uuid)

    # Copy so the caller's dict is never mutated.
    the_vars = copy.copy(extra_vars)
    the_vars['voter'] = voter

    notification = render_template_raw(None, notification_template, the_vars)

    voter.send_notification(notification)
@task()
def election_compute_tally(election_id):
    """Compute the election's encrypted tally, e-mail the admin, and queue
    Helios' own (trustee) decryption when applicable."""
    election = Election.objects.get(id = election_id)
    election.compute_tally()
    election_notify_admin.delay(election_id = election_id,
                                subject = "encrypted tally computed",
                                body = """
The encrypted tally for election %s has been computed.
--
Helios
""" % election.name)
    if election.has_helios_trustee():
        tally_helios_decrypt.delay(election_id = election.id)
@task()
def tally_helios_decrypt(election_id):
    """Perform Helios' own trustee partial decryption of the tally and
    notify the election admin."""
    election = Election.objects.get(id = election_id)
    election.helios_trustee_decrypt()
    election_notify_admin.delay(election_id = election_id,
                                subject = 'Helios Decrypt',
                                body = """
Helios has decrypted its portion of the tally
for election %s.
--
Helios
""" % election.name)
@task()
def voter_file_process(voter_file_id):
    """Process an uploaded voter file (creating voters) and notify the admin."""
    voter_file = VoterFile.objects.get(id = voter_file_id)
    voter_file.process()
    election_notify_admin.delay(election_id = voter_file.election.id,
                                subject = 'voter file processed',
                                body = """
Your voter file upload for election %s
has been processed.
%s voters have been created.
--
Helios
""" % (voter_file.election.name, voter_file.num_voters))
@task()
def election_notify_admin(election_id, subject, body):
    """Send a message with the given subject and body to the election admin."""
    admin = Election.objects.get(id=election_id).admin
    admin.send_message(subject, body)
| {
"content_hash": "d0eb284c719a491a7033de0c1382723b",
"timestamp": "",
"source": "github",
"line_count": 137,
"max_line_length": 111,
"avg_line_length": 31.240875912408757,
"alnum_prop": 0.6619158878504673,
"repo_name": "pirati-cz/helios-server",
"id": "5f7bba8730120e732a48f7467314e05dcfd7b639",
"size": "4280",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "helios/tasks.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "15450"
},
{
"name": "Dockerfile",
"bytes": "607"
},
{
"name": "HTML",
"bytes": "163024"
},
{
"name": "Java",
"bytes": "2271"
},
{
"name": "JavaScript",
"bytes": "307727"
},
{
"name": "Python",
"bytes": "706949"
},
{
"name": "Shell",
"bytes": "602"
}
],
"symlink_target": ""
} |
from jira import JIRA
import sys
import argparse
import common
class JIRAOperator(object):
    """Thin wrapper around a JIRA connection for bug queries."""

    def __init__(self, server, username, password):
        # verify=False sidesteps requests.exceptions.SSLError
        # (certificate verify failed) on self-signed JIRA servers.
        options = {'server': server, 'verify': False}
        self.__jira = JIRA(options, basic_auth=(username, password))

    def search_issues(self, sql_str):
        """Run a raw JQL query and return the matching issues."""
        return self.__jira.search_issues(sql_str)

    def search_open_bugs_by_priority(self, project, priority, end_status="done"):
        """Return open bugs of the given priority not yet in *end_status*."""
        jql = "project in ({0}) and issuetype = Bug and status not in ({1}) and priority = {2}".format(
            project, end_status, priority)
        return self.search_issues(jql)
def parse_command_line(args):
    """
    Parse script arguments.
    :return: Parsed args for assignment
    """
    parser = argparse.ArgumentParser()
    # The three mandatory JIRA connection arguments share the same shape.
    for flag, help_text in (
            ("--jira_server", "The server url of jira"),
            ("--username", "the username of jira"),
            ("--password", "the password of jira")):
        parser.add_argument(flag,
                            required=True,
                            help=help_text,
                            action="store")
    parser.add_argument('--parameters-file',
                        help="The jenkins parameter file that will used for succeeding Jenkins job",
                        action='store',
                        default="downstream_parameters")
    return parser.parse_args(args)
def main():
    """Count open P1 RACKHD bugs and export the count for downstream jobs."""
    # parse arguments
    args = parse_command_line(sys.argv[1:])
    operator = JIRAOperator(args.jira_server, args.username, args.password)
    p1_bugs = operator.search_open_bugs_by_priority("RACKHD", "P1")
    report = {"P1_ISSUES_COUNT": len(p1_bugs)}
    # Java-properties file consumed by succeeding Jenkins pipeline steps.
    common.write_parameters(args.parameters_file, report)
# Script entry point.
if __name__ == "__main__":
    main()
    sys.exit(0)
| {
"content_hash": "63cdb8c3db5eaae56502133f7dcd6989",
"timestamp": "",
"source": "github",
"line_count": 60,
"max_line_length": 177,
"avg_line_length": 38.75,
"alnum_prop": 0.5987096774193549,
"repo_name": "srinia6/on-build-config",
"id": "8a7f2febbc24d15d5849d5875aa96c9bf92baf01",
"size": "2379",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "build-release-tools/application/jira_epic_report.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "Groovy",
"bytes": "64442"
},
{
"name": "Python",
"bytes": "258395"
},
{
"name": "Ruby",
"bytes": "6853"
},
{
"name": "Shell",
"bytes": "106975"
},
{
"name": "XSLT",
"bytes": "1843"
}
],
"symlink_target": ""
} |
import unittest
import arbor as A
from .. import fixtures
"""
tests for cable probe wrappers
"""
# Test recipe cc comprises one simple cable cell and mechanisms on it
# sufficient to test cable cell probe wrappers wrap correctly.
class cc_recipe(A.recipe):
    """Single-cable-cell recipe used to exercise every cable probe wrapper."""

    def __init__(self):
        A.recipe.__init__(self)
        # Morphology: a single cylindrical segment of radius 10.
        st = A.segment_tree()
        st.append(A.mnpos, (0, 0, 0, 10), (1, 0, 0, 10), 1)
        dec = A.decor()
        # Synapses and a current clamp at distinct locations so that probe
        # metadata can be matched against them in the tests below.
        dec.place('(location 0 0.08)', A.synapse("expsyn"), "syn0")
        dec.place('(location 0 0.09)', A.synapse("exp2syn"), "syn1")
        dec.place('(location 0 0.1)', A.iclamp(20.), "iclamp")
        dec.paint('(all)', A.density("hh"))
        self.cell = A.cable_cell(st, A.label_dict(), dec)
        self.props = A.neuron_cable_properties()
        self.props.catalogue = A.default_catalogue()

    def num_cells(self):
        # Exactly one cell, gid 0.
        return 1

    def cell_kind(self, gid):
        return A.cell_kind.cable

    def global_properties(self, kind):
        return self.props

    def probes(self, gid):
        # Use keyword arguments to check that the wrappers have actually declared keyword arguments correctly.
        # Place single-location probes at (location 0 0.01*j) where j is the index of the probe address in
        # the returned list.
        return [
            # probe id (0, 0)
            A.cable_probe_membrane_voltage(where='(location 0 0.00)'),
            # probe id (0, 1)
            A.cable_probe_membrane_voltage_cell(),
            # probe id (0, 2)
            A.cable_probe_axial_current(where='(location 0 0.02)'),
            # probe id (0, 3)
            A.cable_probe_total_ion_current_density(where='(location 0 0.03)'),
            # probe id (0, 4)
            A.cable_probe_total_ion_current_cell(),
            # probe id (0, 5)
            A.cable_probe_total_current_cell(),
            # probe id (0, 6)
            A.cable_probe_density_state(where='(location 0 0.06)', mechanism='hh', state='m'),
            # probe id (0, 7)
            A.cable_probe_density_state_cell(mechanism='hh', state='n'),
            # probe id (0, 8)
            A.cable_probe_point_state(target=0, mechanism='expsyn', state='g'),
            # probe id (0, 9)
            A.cable_probe_point_state_cell(mechanism='exp2syn', state='B'),
            # probe id (0, 10)
            A.cable_probe_ion_current_density(where='(location 0 0.10)', ion='na'),
            # probe id (0, 11)
            A.cable_probe_ion_current_cell(ion='na'),
            # probe id (0, 12)
            A.cable_probe_ion_int_concentration(where='(location 0 0.12)', ion='na'),
            # probe id (0, 13)
            A.cable_probe_ion_int_concentration_cell(ion='na'),
            # probe id (0, 14)
            A.cable_probe_ion_ext_concentration(where='(location 0 0.14)', ion='na'),
            # probe id (0, 15)
            A.cable_probe_ion_ext_concentration_cell(ion='na'),
            # probe id (0, 16) -- was mislabelled (0, 15) twice in the original
            A.cable_probe_stimulus_current_cell()
        ]

    def cell_description(self, gid):
        return self.cell
class TestCableProbes(unittest.TestCase):
    """Check the metadata reported for every probe kind declared by cc_recipe."""

    def test_probe_addr_metadata(self):
        """Metadata for each probe id matches the address declared in cc_recipe.probes."""
        recipe = cc_recipe()
        context = A.context()
        dd = A.partition_load_balance(recipe, context)
        sim = A.simulation(recipe, dd, context)

        # The whole cell is discretized as a single CV covering branch 0.
        all_cv_cables = [A.cable(0, 0, 1)]

        # Scalar probes report the single location they were placed at.
        expected_locations = {
            0: A.location(0, 0.0),
            2: A.location(0, 0.02),
            3: A.location(0, 0.03),
            6: A.location(0, 0.06),
            10: A.location(0, 0.10),
            12: A.location(0, 0.12),
            14: A.location(0, 0.14),
        }
        for index, expected in expected_locations.items():
            m = sim.probe_metadata((0, index))
            self.assertEqual(1, len(m))
            self.assertEqual(expected, m[0])

        # Whole-cell probes report the full list of CV cables.
        for index in (1, 4, 5, 7, 11, 13, 15, 16):
            m = sim.probe_metadata((0, index))
            self.assertEqual(1, len(m))
            self.assertEqual(all_cv_cables, m[0])

        # Point-mechanism state probe (0, 8): metadata describes the target synapse.
        m = sim.probe_metadata((0, 8))
        self.assertEqual(1, len(m))
        self.assertEqual(A.location(0, 0.08), m[0].location)
        self.assertEqual(1, m[0].multiplicity)
        self.assertEqual(0, m[0].target)

        # Cell-wide point state probe (0, 9): one metadata entry per matching target.
        m = sim.probe_metadata((0, 9))
        self.assertEqual(1, len(m))
        self.assertEqual(1, len(m[0]))
        self.assertEqual(A.location(0, 0.09), m[0][0].location)
        self.assertEqual(1, m[0][0].multiplicity)
        self.assertEqual(1, m[0][0].target)
| {
"content_hash": "f24fb2215bc072870beb1e77ccda6c02",
"timestamp": "",
"source": "github",
"line_count": 163,
"max_line_length": 110,
"avg_line_length": 35.214723926380366,
"alnum_prop": 0.5576655052264808,
"repo_name": "halfflat/nestmc-proto",
"id": "c9f0d15a197e53e70db74f2c210b8ae54c370066",
"size": "5765",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "python/test/unit/test_cable_probes.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "AMPL",
"bytes": "9796"
},
{
"name": "C++",
"bytes": "3223191"
},
{
"name": "CMake",
"bytes": "69102"
},
{
"name": "Cuda",
"bytes": "70752"
},
{
"name": "Julia",
"bytes": "15582"
},
{
"name": "Makefile",
"bytes": "577"
},
{
"name": "Python",
"bytes": "39436"
},
{
"name": "Shell",
"bytes": "2582"
}
],
"symlink_target": ""
} |
"""Tests for tensorflow.ops.nn_ops.Pad."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import errors
from tensorflow.python.framework import ops
from tensorflow.python.framework import test_util
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import gradient_checker_v2
from tensorflow.python.platform import test
class PadOpTest(test.TestCase):
  """Tests for array_ops.pad, validated against NumPy's np.pad reference."""

  def _npPad(self, inp, paddings, mode, constant_values=0):
    """Reference implementation: pad `inp` via np.pad with the given mode."""
    mode = mode.lower()
    if mode == "constant":
      return np.pad(inp, paddings, mode=mode, constant_values=constant_values)
    else:
      return np.pad(inp, paddings, mode=mode)

  def testNpPad(self):
    """Sanity-check the NumPy reference itself for all three modes."""
    self.assertAllEqual(
        np.array([[0, 0, 0, 0, 0, 0],
                  [0, 3, 3, 0, 0, 0],
                  [0, 4, 4, 0, 0, 0],
                  [0, 5, 5, 0, 0, 0],
                  [0, 0, 0, 0, 0, 0],
                  [0, 0, 0, 0, 0, 0]]),
        self._npPad(
            np.array([[3, 3], [4, 4], [5, 5]]),
            [[1, 2], [1, 3]],
            mode="constant"))
    self.assertAllEqual(
        np.array([[1, 1, 1, 1, 1, 1],
                  [1, 3, 3, 1, 1, 1],
                  [1, 4, 4, 1, 1, 1],
                  [1, 5, 5, 1, 1, 1],
                  [1, 1, 1, 1, 1, 1],
                  [1, 1, 1, 1, 1, 1]]),
        self._npPad(
            np.array([[3, 3], [4, 4], [5, 5]]),
            [[1, 2], [1, 3]],
            mode="constant", constant_values=1))
    self.assertAllEqual(
        np.array([[4, 3, 4, 9, 4, 3],
                  [1, 0, 1, 2, 1, 0],
                  [4, 3, 4, 9, 4, 3],
                  [1, 0, 1, 2, 1, 0]]),
        self._npPad(
            np.array([[0, 1, 2], [3, 4, 9]]),
            [[1, 1], [1, 2]],
            mode="reflect"))
    self.assertAllEqual(
        np.array([[0, 0, 1, 2, 2, 1],
                  [0, 0, 1, 2, 2, 1],
                  [3, 3, 4, 9, 9, 4],
                  [3, 3, 4, 9, 9, 4]]),
        self._npPad(
            np.array([[0, 1, 2], [3, 4, 9]]),
            [[1, 1], [1, 2]],
            mode="symmetric"))

  def _testPad(self, np_inputs, paddings, mode, constant_values):
    """Compare array_ops.pad against the NumPy reference for one case."""
    np_val = self._npPad(np_inputs, paddings, mode=mode,
                         constant_values=constant_values)
    with test_util.use_gpu():
      tf_val = array_ops.pad(np_inputs, paddings, mode=mode,
                             constant_values=constant_values)
      out = self.evaluate(tf_val)
    self.assertAllEqual(np_val, out)
    self.assertShapeEqual(np_val, tf_val)

  def _testGradient(self,
                    x,
                    a,
                    mode,
                    constant_values,
                    paddings_dtype=dtypes.int32):
    """Check the analytic gradient of pad against the numeric one."""
    def pad(x):
      return array_ops.pad(
          x,
          ops.convert_to_tensor(a, paddings_dtype),
          mode=mode,
          constant_values=constant_values)

    with self.cached_session():
      jacob_t, jacob_n = gradient_checker_v2.compute_gradient(pad, [x])
      self.assertAllClose(jacob_t, jacob_n, rtol=1e-5, atol=1e-5)

  def _testAll(self, np_inputs, paddings, constant_values):
    """Run forward (and, for float32, gradient) checks across all modes."""
    for mode in ("CONSTANT", "REFLECT", "SYMMETRIC", "reflect", "symmetric",
                 "constant"):
      # Zero-sized input is not allowed for REFLECT mode, but we still want
      # zero-sized input test cases for the other modes.
      if np_inputs.size or mode.upper() != "REFLECT":
        self._testPad(np_inputs, paddings, mode=mode,
                      constant_values=constant_values)
        if np_inputs.dtype == np.float32:
          self._testGradient(np_inputs, paddings, mode=mode,
                             constant_values=constant_values)

  def testInputDims(self):
    """Rank-6 input with rank-2 paddings of mismatched rank is rejected."""
    with test_util.use_gpu():
      with self.assertRaisesRegex((ValueError, errors.InvalidArgumentError),
                                  "Shape must be rank 1 but is rank 6|"
                                  "paddings must be the rank of inputs"):
        array_ops.pad(array_ops.reshape(
            [1, 2], shape=[1, 2, 1, 1, 1, 1]),
                      array_ops.reshape(
                          [1, 2], shape=[1, 2]))

  def testPaddingsDim(self):
    """Rank-1 paddings tensor is rejected (must be a matrix)."""
    with test_util.use_gpu():
      with self.assertRaisesRegex((ValueError, errors.InvalidArgumentError),
                                  "Shape must be rank 2 but is rank 1|"
                                  "paddings must be a matrix with 2 columns"):
        array_ops.pad(array_ops.reshape(
            [1, 2], shape=[1, 2]),
                      array_ops.reshape(
                          [1, 2], shape=[2]))

  def testPaddingsDim2(self):
    """Paddings matrix with one column (not two) is rejected."""
    with test_util.use_gpu():
      with self.assertRaisesRegex((ValueError, errors.InvalidArgumentError),
                                  "Dimension must be 2 but is 1|"
                                  "paddings must be a matrix with 2 columns"):
        array_ops.pad(array_ops.reshape(
            [1, 2], shape=[1, 2]),
                      array_ops.reshape(
                          [1, 2], shape=[2, 1]))

  def testPaddingsDim3(self):
    """Paddings rank not matching a rank-1 input is rejected."""
    with test_util.use_gpu():
      with self.assertRaisesRegex((ValueError, errors.InvalidArgumentError),
                                  "Shape must be rank 1 but is rank 2|"
                                  "paddings must be the rank of inputs"):
        array_ops.pad(array_ops.reshape(
            [1, 2], shape=[1, 2]),
                      array_ops.reshape(
                          [1, 2], shape=[1, 2]))

  def testPaddingsDim4(self):
    """Paddings with more rows than the input has dimensions is rejected."""
    with test_util.use_gpu():
      with self.assertRaisesRegex((ValueError, errors.InvalidArgumentError),
                                  "Shape must be rank 3 but is rank 2|"
                                  "paddings must be the rank of inputs"):
        array_ops.pad(array_ops.reshape(
            [1, 2], shape=[1, 2]),
                      array_ops.reshape(
                          [1, 2, 3, 4, 5, 6], shape=[3, 2]))

  def testPaddingsNonNegative(self):
    """A negative *begin* padding is rejected."""
    with test_util.use_gpu():
      with self.assertRaisesRegex((ValueError, errors.InvalidArgumentError),
                                  "must be non-negative"):
        array_ops.pad(constant_op.constant(
            [1], shape=[1]),
                      constant_op.constant(
                          [-1, 0], shape=[1, 2]))

  def testPaddingsNonNegative2(self):
    """A negative *end* padding is rejected.

    This was previously an exact duplicate of testPaddingsNonNegative
    ([-1, 0]); it now covers the end-padding entry instead.
    """
    with test_util.use_gpu():
      with self.assertRaisesRegex((ValueError, errors.InvalidArgumentError),
                                  "must be non-negative"):
        array_ops.pad(constant_op.constant(
            [1], shape=[1]),
                      constant_op.constant(
                          [0, -1], shape=[1, 2]))

  def testPaddingsMaximum(self):
    """REFLECT/SYMMETRIC padding larger than allowed by the input size fails."""
    with test_util.use_gpu():
      with self.assertRaises(Exception):
        # REFLECT requires paddings strictly less than the dimension size.
        array_ops.pad(constant_op.constant(
            [1], shape=[2]),
                      constant_op.constant(
                          [2, 0], shape=[1, 2]),
                      mode="REFLECT").eval()
      with self.assertRaises(Exception):
        # SYMMETRIC requires paddings no greater than the dimension size.
        array_ops.pad(constant_op.constant(
            [1], shape=[2]),
                      constant_op.constant(
                          [0, 3], shape=[1, 2]),
                      mode="SYMMETRIC").eval()

  def testInvalid(self):
    """Unknown pad modes raise a ValueError."""
    with self.cached_session():
      x = [[1, 2, 3], [4, 5, 6]]
      with self.assertRaisesRegex(ValueError, "Unknown padding mode"):
        self.evaluate(array_ops.pad(x, [[1, 0], [2, 1]], mode="weird"))

  def testPaddingTypes(self):
    """Both int32 and int64 paddings work for every mode (forward + gradient)."""
    paddings = [[1, 0], [2, 0]]
    inputs = np.random.rand(2, 5).astype(np.float32)
    for mode in ("CONSTANT", "REFLECT", "SYMMETRIC", "reflect", "symmetric",
                 "constant"):
      for paddings_dtype in [dtypes.int32, dtypes.int64]:
        np_val = self._npPad(inputs,
                             paddings,
                             mode=mode,
                             constant_values=0)
        with test_util.use_gpu():
          tf_val = array_ops.pad(
              inputs,
              constant_op.constant(paddings, paddings_dtype),
              mode=mode,
              constant_values=0)
          out = self.evaluate(tf_val)
        self.assertAllEqual(np_val, out)
        self.assertShapeEqual(np_val, tf_val)
        if mode.upper() != "REFLECT":
          with ops.Graph().as_default():
            self._testGradient(
                inputs,
                paddings,
                mode=mode,
                constant_values=0,
                paddings_dtype=paddings_dtype)

  def testIntTypes(self):
    """Padding integer tensors of several widths."""
    # TODO(touts): Figure out why the padding tests do not work on GPU
    # for int types and rank > 2.
    for t in [np.int8, np.uint8, np.int32, np.int64]:
      self._testAll(
          np.random.randint(-100, 100, (4, 4, 3)).astype(t),
          [[1, 0], [2, 3], [0, 2]], 0)
      self._testAll(
          np.random.randint(-100, 100, (4, 2, 1, 3)).astype(t),
          [[0, 0], [0, 0], [0, 0], [0, 0]], -123)

  def testFloatTypes(self):
    """Padding float tensors, including a zero-sized input."""
    for t in [np.float32, np.float64]:
      self._testAll(np.random.rand(2, 5).astype(t), [[1, 0], [2, 0]], 0.0)
      self._testAll(np.random.rand(2, 3, 4).astype(t),
                    [[0, 0], [0, 0], [0, 0]], -1234.0)
      self._testAll(np.random.rand(0, 3, 4).astype(t),
                    [[0, 0], [2, 1], [2, 3]], 0.0)

  def testComplexTypes(self):
    """Padding complex tensors, with a complex constant value."""
    for t in [np.complex64, np.complex128]:
      x = np.random.rand(2, 5).astype(t)
      self._testAll(x + 1j * x, [[1, 0], [2, 0]], 1234.0 - 1234.0j)
      x = np.random.rand(3, 2, 1, 1).astype(t)
      self._testAll(x + 1j * x, [[0, 0], [0, 0], [0, 0], [0, 0]], 0 + 0j)

  def testString(self):
    """String tensors pad correctly in every mode."""
    # Numpy does not support padding strings so we compare padding manually.
    x = ops.convert_to_tensor([["Hello", "World"],
                               ["Goodnight", "Moon"]])

    constant = array_ops.pad(x, [[1, 0], [0, 1]], mode="CONSTANT",
                             constant_values="PAD")
    reflect = array_ops.pad(x, [[1, 0], [0, 1]], mode="REFLECT",
                            constant_values="PAD")
    symmetric = array_ops.pad(x, [[1, 0], [0, 1]], mode="SYMMETRIC",
                              constant_values="PAD")
    with test_util.use_gpu():
      self.assertAllEqual(
          [[b"PAD", b"PAD", b"PAD"], [b"Hello", b"World", b"PAD"],
           [b"Goodnight", b"Moon", b"PAD"]], self.evaluate(constant))
      self.assertAllEqual([[b"Goodnight", b"Moon", b"Goodnight"],
                           [b"Hello", b"World", b"Hello"],
                           [b"Goodnight", b"Moon", b"Goodnight"]],
                          self.evaluate(reflect))
      self.assertAllEqual(
          [[b"Hello", b"World", b"World"], [b"Hello", b"World", b"World"],
           [b"Goodnight", b"Moon", b"Moon"]], self.evaluate(symmetric))

  def testShapeFunctionEdgeCases(self):
    """Static shape inference when input and/or paddings are unknown."""
    # Shape function requires placeholders and a graph
    with ops.Graph().as_default():
      # Unknown paddings shape.
      inp = constant_op.constant(0.0, shape=[4, 4, 4, 4])
      padded = array_ops.pad(inp, array_ops.placeholder(dtypes.int32))
      self.assertEqual([None, None, None, None], padded.get_shape().as_list())

      # Unknown input shape.
      inp = array_ops.placeholder(dtypes.float32)
      padded = array_ops.pad(inp, [[2, 2], [2, 2]])
      self.assertEqual([None, None], padded.get_shape().as_list())

      # Unknown input and paddings shape.
      inp = array_ops.placeholder(dtypes.float32)
      padded = array_ops.pad(inp, array_ops.placeholder(dtypes.int32))
      self.assertAllEqual(None, padded.get_shape().ndims)

  def testPartialShapeInformation(self):
    """Static shape inference with partially known paddings."""
    # Partial shapes requires placeholders and a graph
    with ops.Graph().as_default():
      unknown = array_ops.placeholder(dtypes.int32)

      # Known input shape, partial unknown padding (one dimension).
      inp = constant_op.constant(0.0, shape=[4, 4])
      padded = array_ops.pad(inp, [[1, 2], unknown])
      self.assertEqual([7, None], padded.get_shape().as_list())

      # Known input shape, partial unknown padding (begin).
      inp = constant_op.constant(0.0, shape=[4, 4])
      padded = array_ops.pad(inp, [[unknown, 0], [1, 2]])
      self.assertEqual([None, 7], padded.get_shape().as_list())

      # Known input shape, partial unknown padding (end).
      inp = constant_op.constant(0.0, shape=[4, 4])
      padded = array_ops.pad(inp, [[1, 2], [0, unknown]])
      self.assertEqual([7, None], padded.get_shape().as_list())

      # Unknown input shape, partial unknown padding (one dimension).
      padded = array_ops.pad(unknown, [[1, 2], unknown])
      self.assertEqual([None, None], padded.get_shape().as_list())

      # Unknown input shape (rank known), partial unknown padding (one dim).
      rank_known = array_ops.placeholder(dtypes.int32)
      rank_known.set_shape([None, None])
      padded = array_ops.pad(rank_known, [[1, 2], unknown])
      self.assertEqual([None, None], padded.get_shape().as_list())

      # Known input shape, partial unknown padding (begin), with constant begin.
      inp = constant_op.constant(0.0, shape=[4, 4])
      padded = array_ops.pad(
          inp, [[constant_op.constant(1, shape=[]), 2], [0, unknown]])
      self.assertEqual([7, None], padded.get_shape().as_list())

      # Known input shape, partial unknown padding (begin), with constant dim.
      inp = constant_op.constant(0.0, shape=[4, 4])
      padded = array_ops.pad(inp,
                             [constant_op.constant(1, shape=[2]), [0, unknown]])
      self.assertEqual([6, None], padded.get_shape().as_list())

      # Zero padding on a known dimension.
      inp = array_ops.placeholder(dtypes.int32, [None, None, 20])
      padded = array_ops.pad(inp, [[0, 0], [0, unknown], [0, 0]])
      self.assertEqual([None, None, 20], padded.get_shape().as_list())

  def testScalars(self):
    """A scalar with empty (0x2) paddings is returned unchanged."""
    paddings = np.zeros((0, 2), dtype=np.int32)
    inp = np.asarray(7)
    with test_util.use_gpu():
      tf_val = array_ops.pad(inp, paddings)
      out = self.evaluate(tf_val)
    self.assertAllEqual(inp, out)
    self.assertShapeEqual(inp, tf_val)

  def testPadTypes(self):
    """Scalar padding accepts both int32 and int64 paddings tensors."""
    for dtype in [dtypes.int32, dtypes.int64]:
      paddings = np.zeros((0, 2))
      inp = np.asarray(7)
      with self.cached_session(use_gpu=True):
        tf_val = array_ops.pad(inp, constant_op.constant(paddings, dtype=dtype))
        out = self.evaluate(tf_val)
      self.assertAllEqual(inp, out)
      self.assertShapeEqual(inp, tf_val)

  def testCollapseAdjacentNonPaddedDimensions(self):
    """Padded output decomposes into zero margins plus the original interior."""
    # pyformat: disable
    paddings_values = [[[0, 0], [0, 0], [0, 0], [0, 1]],
                       [[0, 0], [2, 3], [0, 0], [0, 0]],
                       [[0, 0], [0, 0], [0, 0], [0, 0]]]
    # pyformat: enable
    for paddings_value in paddings_values:
      for dtype in [dtypes.float32, dtypes.int32]:
        inp = constant_op.constant(1, shape=[8, 28, 28, 3], dtype=dtype)
        paddings = constant_op.constant(paddings_value, dtype=dtypes.int32)
        padded = array_ops.pad(inp, paddings)
        middle = array_ops.slice(padded, [row[0] for row in paddings_value],
                                 [dim.value for dim in inp.shape.dims])
        left = array_ops.slice(padded, [0, 0, 0, 0],
                               [row[0] for row in paddings_value])
        right = array_ops.slice(
            padded,
            [paddings_value[i][0] + inp.shape.dims[i].value for i in range(4)],
            [-1, -1, -1, -1])
        with self.cached_session(use_gpu=True):
          self.assertAllEqual(inp, self.evaluate(middle))
          self.assertAllEqual(
              np.zeros([row[0] for row in paddings_value]), self.evaluate(left))
          self.assertAllEqual(
              np.zeros([row[1] for row in paddings_value]),
              self.evaluate(right))
# Run the pad-op test suite when this file is executed directly.
if __name__ == "__main__":
  test.main()
| {
"content_hash": "aed7ef349714db7e007a2b6547d8aa22",
"timestamp": "",
"source": "github",
"line_count": 396,
"max_line_length": 80,
"avg_line_length": 40.64141414141414,
"alnum_prop": 0.5300733192494097,
"repo_name": "aldian/tensorflow",
"id": "30abf9a758cfe7f9929aba7a139c618e6c547ea3",
"size": "16783",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "tensorflow/python/kernel_tests/pad_op_test.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "8458"
},
{
"name": "C",
"bytes": "201402"
},
{
"name": "C++",
"bytes": "29667924"
},
{
"name": "CMake",
"bytes": "647100"
},
{
"name": "Go",
"bytes": "976514"
},
{
"name": "Java",
"bytes": "412117"
},
{
"name": "Jupyter Notebook",
"bytes": "1833675"
},
{
"name": "LLVM",
"bytes": "6536"
},
{
"name": "Makefile",
"bytes": "38128"
},
{
"name": "Objective-C",
"bytes": "7056"
},
{
"name": "Objective-C++",
"bytes": "63210"
},
{
"name": "Perl",
"bytes": "6715"
},
{
"name": "Protocol Buffer",
"bytes": "275733"
},
{
"name": "PureBasic",
"bytes": "24932"
},
{
"name": "Python",
"bytes": "26424665"
},
{
"name": "Ruby",
"bytes": "327"
},
{
"name": "Shell",
"bytes": "373109"
}
],
"symlink_target": ""
} |
import time
import unittest
import node
# Node ids for the three-node topology exercised by this certification test.
COMMISSIONER = 1
JOINER_ROUTER = 2
JOINER = 3
class Cert_8_2_02_JoinerRouter(unittest.TestCase):
    """Thread certification 8.2.2: commissioning through a joiner router."""

    def setUp(self):
        """Create three simulated nodes and apply their per-role configuration."""
        self.nodes = {i: node.Node(i) for i in range(1, 4)}

        commissioner = self.nodes[COMMISSIONER]
        commissioner.set_panid(0xface)
        commissioner.set_mode('rsdn')
        commissioner.set_masterkey('deadbeefdeadbeefdeadbeefdeadbeef')
        commissioner.enable_whitelist()
        commissioner.set_router_selection_jitter(1)

        # Both joining devices start on a network key different from the
        # commissioner's; a successful join replaces it.
        for joiner_id in (JOINER_ROUTER, JOINER):
            candidate = self.nodes[joiner_id]
            candidate.set_mode('rsdn')
            candidate.set_masterkey('00112233445566778899aabbccddeeff')
            candidate.enable_whitelist()
            candidate.set_router_selection_jitter(1)

    def tearDown(self):
        """Stop every node and drop the topology."""
        for n in list(self.nodes.values()):
            n.stop()
        del self.nodes

    def test(self):
        """Joiner router joins with the right PSKd; joiner fails with a wrong one."""
        commissioner = self.nodes[COMMISSIONER]
        joiner_router = self.nodes[JOINER_ROUTER]
        joiner = self.nodes[JOINER]

        # Bring up the commissioner as leader.
        commissioner.interface_up()
        commissioner.thread_start()
        time.sleep(5)
        self.assertEqual(commissioner.get_state(), 'leader')

        commissioner.commissioner_start()
        time.sleep(5)
        commissioner.commissioner_add_joiner(joiner_router.get_hashmacaddr(), 'openthread')
        commissioner.commissioner_add_joiner(joiner.get_hashmacaddr(), 'openthread2')
        time.sleep(5)

        # Joiner router joins with the correct credential and acquires the
        # commissioner's network key.
        commissioner.add_whitelist(joiner_router.get_hashmacaddr())
        joiner_router.add_whitelist(commissioner.get_addr64())
        joiner_router.interface_up()
        joiner_router.joiner_start('openthread')
        time.sleep(10)
        self.assertEqual(joiner_router.get_masterkey(), commissioner.get_masterkey())

        commissioner.add_whitelist(joiner_router.get_addr64())
        joiner_router.thread_start()
        time.sleep(5)
        self.assertEqual(joiner_router.get_state(), 'router')

        # Joiner attempts with an incorrect PSKd ('openthread2' reversed),
        # so it must not obtain the network key.
        joiner_router.add_whitelist(joiner.get_hashmacaddr())
        joiner.add_whitelist(joiner_router.get_addr64())
        joiner.interface_up()
        joiner.joiner_start('2daerhtnepo')
        time.sleep(10)
        self.assertNotEqual(joiner.get_masterkey(), commissioner.get_masterkey())
# Allow running this certification script directly.
if __name__ == '__main__':
    unittest.main()
| {
"content_hash": "142250ba377493008f614156c32ae68c",
"timestamp": "",
"source": "github",
"line_count": 72,
"max_line_length": 115,
"avg_line_length": 38.791666666666664,
"alnum_prop": 0.6727533118510562,
"repo_name": "aeliot/openthread",
"id": "2a00c5db4fd93224fb591b74a1e5518e3c4ff39b",
"size": "4397",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "tests/scripts/thread-cert/Cert_8_2_02_JoinerRouter.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "10128"
},
{
"name": "C",
"bytes": "566693"
},
{
"name": "C#",
"bytes": "18077"
},
{
"name": "C++",
"bytes": "3275555"
},
{
"name": "M4",
"bytes": "46356"
},
{
"name": "Makefile",
"bytes": "81017"
},
{
"name": "Python",
"bytes": "1036132"
},
{
"name": "Ruby",
"bytes": "3397"
},
{
"name": "Shell",
"bytes": "23039"
}
],
"symlink_target": ""
} |
from bottle import Bottle, request, response, abort
import json
import logging
import os
from audit import init_audit
from controller import Controller
from utils import Encoder
# Module-level wiring: logger, configuration, controller, audit decorator
# and the Bottle application object used by the route decorators below.
log = logging.getLogger("sphere11.api")
logging.basicConfig(level=logging.INFO)
# Config file path can be overridden through the SPHERE11_CONFIG env var.
config = json.load(open(os.environ.get('SPHERE11_CONFIG', 'config.json')))
controller = Controller(config)
audit = init_audit(config.get('log-group', 'sphere11'))
app = Bottle()
@app.route("/")
def index():
    """Service identification endpoint: name and API version."""
    return {"name": "sphere11", "version": "1.0"}
@app.route("/<account_id>/locks", method="GET")
@audit
def account_status(account_id):
    """Return every lock record for *account_id* as a JSON document."""
    resources = controller.db.iter_resources(account_id)
    response.content_type = "application/json"
    return json.dumps(resources, indent=2, cls=Encoder)
@app.route("/<account_id>/locks/<resource_id>/lock", method="POST")
@audit
def lock(account_id, resource_id):
    """Acquire a lock on *resource_id*; the JSON body must carry 'region'."""
    payload = request.json
    if not payload or 'region' not in payload:
        abort(400, "Missing required parameter %s" % 'region')
    return controller.lock(account_id, resource_id, payload['region'])
@app.route("/<account_id>/locks/<resource_id>", method="GET")
def info(account_id, resource_id):
    """Return lock details for a resource.

    Security groups (ids starting with 'sg-') additionally require a
    parent_id query parameter naming the owning resource.
    """
    params = request.query
    if resource_id.startswith('sg-') and 'parent_id' not in params:
        abort(400, "Missing required parameter parent_id")
    details = controller.info(
        account_id, resource_id, params.get('parent_id', resource_id))
    response.content_type = "application/json"
    return json.dumps(details, indent=2, cls=Encoder)
# this set to post to restrict permissions, perhaps another url space.
@app.route("/<account_id>/locks/delta", method="POST")
@audit
def delta(account_id):
    """Return the lock delta for an account; the JSON body must carry 'region'."""
    payload = request.json
    if not payload or 'region' not in payload:
        abort(400, "Missing required parameter %s" % 'region')
    account_delta = controller.get_account_delta(
        account_id, payload['region'], api_url())
    response.content_type = "application/json"
    return json.dumps(account_delta, indent=2, cls=Encoder)
@app.route("/<account_id>/locks/<resource_id>/unlock", method="POST")
@audit
def unlock(account_id, resource_id):
    """Release the lock held on *resource_id* for *account_id*."""
    return controller.unlock(account_id, resource_id)
def on_timer(event):
    """Scheduled-event entry point: process any pending lock operations."""
    return controller.process_pending()
def on_config_message(records):
    """Handle SNS config notifications.

    NOTE(review): the parsed message is discarded, so this only validates
    that each record's Message is well-formed JSON -- confirm whether the
    payload was meant to be acted upon.
    """
    for r in records:
        json.loads(r['Sns'].get('Message'))
def on_db_change(records):
    """DynamoDB-stream entry point; intentionally a no-op stub."""
    pass
def api_url():
    """Reconstruct the externally visible base URL of the current request."""
    parts = request.urlparts
    return "{}://{}{}".format(parts.scheme, parts.netloc, request.script_name)
@app.error(500)
def error(e):
    """Render internal errors as JSON instead of bottle's default HTML page."""
    response.content_type = "application/json"
    details = {
        "status": e.status,
        "url": repr(request.url),
        "exception": repr(e.exception),
        # "traceback": e.traceback and e.traceback.split('\n') or '',
        "body": repr(e.body),
    }
    return json.dumps(details, indent=2)
| {
"content_hash": "c79208f8f426a27ce6075ddbce6d2863",
"timestamp": "",
"source": "github",
"line_count": 102,
"max_line_length": 76,
"avg_line_length": 29.029411764705884,
"alnum_prop": 0.674096588990206,
"repo_name": "alfredgamulo/cloud-custodian",
"id": "215b7c838c56cbdfa76e8160b05bdff38cd4f402",
"size": "3040",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "tools/sandbox/c7n_sphere11/c7n_sphere11/app.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "2126"
},
{
"name": "Go",
"bytes": "146637"
},
{
"name": "HCL",
"bytes": "33977"
},
{
"name": "Jinja",
"bytes": "19775"
},
{
"name": "Makefile",
"bytes": "14242"
},
{
"name": "PowerShell",
"bytes": "1804"
},
{
"name": "Python",
"bytes": "6579430"
},
{
"name": "Shell",
"bytes": "15323"
},
{
"name": "Smarty",
"bytes": "359"
}
],
"symlink_target": ""
} |
from pyramid.view import view_config, view_defaults
from .models.folder import RootFolder, Folder
@view_defaults(renderer='json')
@view_defaults(renderer='json')
class MySite:
    """Traversal views: JSON describing the matched folder resource."""

    def __init__(self, context, request):
        self.context = context
        self.request = request

    @view_config(context=RootFolder)
    def root_view(self):
        """The root folder is flagged as such."""
        return {'id': self.context.id, 'is_root': True}

    @view_config(context=Folder)
    def folder_view(self):
        """Non-root folders report is_root=False."""
        return {'id': self.context.id, 'is_root': False}
| {
"content_hash": "63b66ba714a868ea1518fb831cfe93d8",
"timestamp": "",
"source": "github",
"line_count": 18,
"max_line_length": 54,
"avg_line_length": 26.88888888888889,
"alnum_prop": 0.6818181818181818,
"repo_name": "pauleveritt/pyramid_sqltraversal",
"id": "7b8c342dc724a1784503de07052858e3e69f97e5",
"size": "484",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "docs/simple_traversal/mysite/views.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "2934"
},
{
"name": "Python",
"bytes": "6963"
}
],
"symlink_target": ""
} |
import logging
import json
from threading import Event, Thread
from time import sleep
import pika
from pika.exceptions import AMQPError, ConnectionClosed
class StopThread(Exception):
    """Raised to signal that a consumer thread should stop."""
class QueueConsumerAsync(object):
"""This is an example consumer that will handle unexpected interactions
with RabbitMQ such as channel and connection closures.
If RabbitMQ closes the connection, it will reopen it. You should
look at the output, as there are limited reasons why the connection may
be closed, which usually are tied to permission related issues or
socket timeouts.
If the channel is closed, it will indicate a problem with one of the
commands that were issued and that should surface in the output as well.
"""
    def __init__(self, config, qdesc, callback, on_success, on_failure, logger_name):
        """Create a new consumer instance.

        (The original docstring documented a nonexistent ``amqp_url``
        parameter; the URL is built here from *config*.)

        :param dict config: broker settings; must provide 'user', 'password'
            and 'host' keys used to build the AMQP URL
        :param dict qdesc: queue description with 'name', 'durable' and
            'arguments' keys, forwarded to queue_declare
        :param callback: per-message handler -- stored for use by on_message;
            exact call contract defined there (TODO confirm)
        :param on_success: handler invoked on successful processing -- TODO confirm
        :param on_failure: handler invoked on failed processing -- TODO confirm
        :param str logger_name: name for this consumer's logger
        """
        self.exchange = 'sm' # default exchange ("") cannot be used here
        self.exchange_type = 'direct'
        self.routing_key = None
        self._qdesc = qdesc
        self._qname = self._qdesc['name']
        self._no_ack = False # messages get redelivered with no_ack=False
        self._heartbeat = 3 * 60 * 60 # 3h
        # Seconds to wait before reopening a dropped connection.
        self._reopen_timeout = 2
        self._callback = callback
        self._on_success = on_success
        self._on_failure = on_failure
        self.logger = logging.getLogger(logger_name)
        self._connection = None
        self._channel = None
        self._closing = False
        self._consumer_tag = None
        self._url = "amqp://{}:{}@{}:5672/%2F?heartbeat={}".format(config['user'], config['password'],
                                                                   config['host'], self._heartbeat)
    def connect(self):
        """Connect to RabbitMQ and return the connection handle.

        When the connection is established, on_connection_open is invoked
        by pika.

        :rtype: pika.SelectConnection
        """
        self.logger.info('Connecting to %s', self._url)
        # stop_ioloop_on_close=False keeps the ioloop alive so that
        # on_connection_closed can schedule a reconnect on the same loop.
        return pika.SelectConnection(pika.URLParameters(self._url),
                                     self.on_connection_open,
                                     stop_ioloop_on_close=False)
    def on_connection_open(self, unused_connection):
        """Called by pika once the connection is established: register the
        close callback, then open a channel.

        :type unused_connection: pika.SelectConnection
        """
        self.logger.info('Connection opened')
        self.add_on_connection_close_callback()
        self.open_channel()
    def add_on_connection_close_callback(self):
        """Register on_connection_closed to be invoked by pika when RabbitMQ
        closes the connection unexpectedly.
        """
        self.logger.info('Adding connection close callback')
        self._connection.add_on_close_callback(self.on_connection_closed)
    def on_connection_closed(self, connection, reply_code, reply_text):
        """Called by pika when the connection to RabbitMQ is closed.

        During deliberate shutdown (self._closing) the ioloop is stopped;
        otherwise a reconnect is scheduled after self._reopen_timeout seconds.

        :param pika.connection.Connection connection: the closed connection
        :param int reply_code: server-provided reply code, if given
        :param str reply_text: server-provided reply text, if given
        """
        self._channel = None
        if self._closing:
            self._connection.ioloop.stop()
        else:
            self.logger.warning('Connection closed, reopening in %s seconds: (%s) %s',
                                self._reopen_timeout, reply_code, reply_text)
            self._connection.add_timeout(self._reopen_timeout, self.reconnect)
    def reconnect(self):
        """Invoked by the IOLoop timer after an unexpected connection close
        (see on_connection_closed): replace the connection and restart its
        ioloop, unless a shutdown is in progress.
        """
        # This is the old connection IOLoop instance, stop its ioloop
        self._connection.ioloop.stop()
        if not self._closing:
            # Create a new connection
            self._connection = self.connect()
            # There is now a new connection, needs a new ioloop to run
            self._connection.ioloop.start()
    def open_channel(self):
        """Issue Channel.Open; pika calls on_channel_open when the channel is ready."""
        self.logger.info('Creating a new channel')
        self._connection.channel(on_open_callback=self.on_channel_open)
    def on_channel_open(self, channel):
        """Called by pika when the channel opens: store it, register the close
        callback, cap in-flight deliveries, and declare the exchange.

        :param pika.channel.Channel channel: the opened channel
        """
        self.logger.info('Channel opened')
        self._channel = channel
        self.add_on_channel_close_callback()
        # prefetch_count=1: no new delivery until the current one is acked.
        self._channel.basic_qos(prefetch_count=1)
        self.setup_exchange(self.exchange)
    def add_on_channel_close_callback(self):
        """Register on_channel_closed to be invoked by pika if RabbitMQ
        unexpectedly closes the channel.
        """
        self.logger.info('Adding channel close callback')
        self._channel.add_on_close_callback(self.on_channel_closed)
    def on_channel_closed(self, channel, reply_code, reply_text):
        """Invoked by pika when RabbitMQ unexpectedly closes the channel,
        usually because of a protocol violation (e.g. re-declaring an
        exchange or queue with different parameters); the connection is
        closed in response, shutting the object down.

        :param pika.channel.Channel channel: the closed channel
        :param int reply_code: numeric close reason
        :param str reply_text: textual close reason
        """
        # NOTE(review): '%i' assumes the channel object converts to its
        # integer channel number -- confirm against the pika version in use.
        self.logger.warning('Channel %i was closed: (%s) %s', channel, reply_code, reply_text)
        self._connection.close()
    def setup_exchange(self, exchange_name):
        """Issue Exchange.Declare; pika calls on_exchange_declareok on completion.

        :param str|unicode exchange_name: the name of the exchange to declare
        """
        self.logger.info('Declaring exchange %s', exchange_name)
        self._channel.exchange_declare(self.on_exchange_declareok, exchange_name, self.exchange_type)
    def on_exchange_declareok(self, unused_frame):
        """Invoked by pika when Exchange.Declare completes; proceed to the queue.

        :param pika.Frame.Method unused_frame: Exchange.DeclareOk response frame
        """
        self.logger.info('Exchange declared')
        self.setup_queue(self._qname)
    def setup_queue(self, queue_name):
        """Issue Queue.Declare using the durability/arguments from qdesc;
        pika calls on_queue_declareok on completion.

        :param str|unicode queue_name: the name of the queue to declare
        """
        self.logger.info('Declaring queue %s', queue_name)
        self._channel.queue_declare(self.on_queue_declareok, queue_name,
                                    durable=self._qdesc['durable'], arguments=self._qdesc['arguments'])
    def on_queue_declareok(self, method_frame):
        """Invoked by pika when Queue.Declare completes: bind queue and
        exchange with the routing key; on_bindok is invoked when done.

        :param pika.frame.Method method_frame: the Queue.DeclareOk frame
        """
        self.logger.info('Binding %s to %s with %s', self.exchange, self._qname, self.routing_key)
        self._channel.queue_bind(self.on_bindok, self._qname, self.exchange, self.routing_key)
    def on_bindok(self, unused_frame):
        """Invoked by pika when Queue.Bind completes; start consuming messages.

        :param pika.frame.Method unused_frame: the Queue.BindOk response frame
        """
        self.logger.info('Queue bound')
        self.start_consuming()
def start_consuming(self):
    """This method sets up the consumer by first calling
    add_on_cancel_callback so that the object is notified if RabbitMQ
    cancels the consumer. It then issues the Basic.Consume RPC command
    which returns the consumer tag that is used to uniquely identify the
    consumer with RabbitMQ. We keep the value to use it when we want to
    cancel consuming. The on_message method is passed in as a callback pika
    will invoke when a message is fully received.
    """
    self.logger.info('Issuing consumer related RPC commands')
    self.add_on_cancel_callback()
    self.logger.info(' [*] Waiting for messages...')
    # exclusive=True: only this consumer may consume from the queue; the
    # ack mode is taken from self._no_ack set at construction time.
    self._consumer_tag = self._channel.basic_consume(self.on_message, self._qname,
                                                     no_ack=self._no_ack, exclusive=True)
def add_on_cancel_callback(self):
    """Add a callback that will be invoked if RabbitMQ cancels the consumer
    for some reason. If RabbitMQ does cancel the consumer,
    on_consumer_cancelled will be invoked by pika.
    """
    self.logger.info('Adding consumer cancellation callback')
    self._channel.add_on_cancel_callback(self.on_consumer_cancelled)
def on_consumer_cancelled(self, method_frame):
    """Invoked by pika when RabbitMQ sends a Basic.Cancel for a consumer
    receiving messages.
    :param pika.frame.Method method_frame: The Basic.Cancel frame
    """
    self.logger.info('Consumer was cancelled remotely, shutting down: %r', method_frame)
    # Closing the channel triggers the normal shutdown chain.
    if self._channel:
        self._channel.close()
def on_message(self, unused_channel, basic_deliver, properties, body):
    """Invoked by pika when a message is delivered from RabbitMQ.

    The delivery is acknowledged up front, the body is decoded as UTF-8
    and parsed as JSON, and the resulting object is handed to the
    configured callback.  On success the parsed message goes to the
    success handler; on any failure the failure handler receives the
    parsed message when parsing succeeded, otherwise the raw body.

    :param pika.channel.Channel unused_channel: The channel object
    :param pika.Spec.Basic.Deliver: basic_deliver method
    :param pika.Spec.BasicProperties: properties
    :param str|byte body: The message body
    """
    msg = None
    try:
        # Ack first so a failing callback does not leave the delivery
        # unacknowledged on the broker.
        self.acknowledge_message(basic_deliver.delivery_tag)
        body = body.decode('utf-8')
        self.logger.info(' [v] Received message # %s from %s: %s', basic_deliver.delivery_tag, properties.app_id,
                         body)
        msg = json.loads(body)
        self._callback(msg)
    except BaseException:  # NOTE(review): also traps KeyboardInterrupt/SystemExit -- confirm intended
        self.logger.error(' [x] Failed: {}'.format(body), exc_info=True)
        # Bug fix: `msg or body` fell back to the raw body for *falsy*
        # parsed payloads such as {}; only fall back when parsing never
        # succeeded at all.
        self._on_failure(msg if msg is not None else body)
    else:
        self.logger.info(' [v] Succeeded: {}'.format(body))
        self._on_success(msg)
def acknowledge_message(self, delivery_tag):
    """Acknowledge the message delivery from RabbitMQ by sending a
    Basic.Ack RPC method for the delivery tag.
    :param int delivery_tag: The delivery tag from the Basic.Deliver frame
    """
    self.logger.info('Acknowledging message %s', delivery_tag)
    self._channel.basic_ack(delivery_tag)
def stop_consuming(self):
    """Tell RabbitMQ that you would like to stop consuming by sending the
    Basic.Cancel RPC command.
    """
    # Guard: the channel may never have been opened (or already torn down).
    if self._channel:
        self.logger.info('Sending a Basic.Cancel RPC command to RabbitMQ')
        self._channel.basic_cancel(self.on_cancelok, self._consumer_tag)
def on_cancelok(self, unused_frame):
    """This method is invoked by pika when RabbitMQ acknowledges the
    cancellation of a consumer. At this point we will close the channel.
    This will invoke the on_channel_closed method once the channel has been
    closed, which will in-turn close the connection.
    :param pika.frame.Method unused_frame: The Basic.CancelOk frame
    """
    self.logger.info('RabbitMQ acknowledged the cancellation of the consumer')
    self.close_channel()
def close_channel(self):
    """Call to close the channel with RabbitMQ cleanly by issuing the
    Channel.Close RPC command.
    """
    self.logger.info('Closing the channel')
    self._channel.close()
def run(self):
    """Run the example consumer by connecting to RabbitMQ and then
    starting the IOLoop to block and allow the SelectConnection to operate.
    """
    # Blocks here until the IOLoop is stopped (see stop()).
    self._connection = self.connect()
    self._connection.ioloop.start()
def stop(self):
    """Cleanly shutdown the connection to RabbitMQ by stopping the consumer
    with RabbitMQ. When RabbitMQ confirms the cancellation, on_cancelok
    will be invoked by pika, which will then close the channel and
    connection. The IOLoop is started again because this method is invoked
    when CTRL-C is pressed raising a KeyboardInterrupt exception. This
    exception stops the IOLoop which needs to be running for pika to
    communicate with RabbitMQ. All of the commands issued prior to starting
    the IOLoop will be buffered but not processed.
    """
    self.logger.info('Stopping')
    self._closing = True
    self.stop_consuming()
    # Restart the IOLoop so the Basic.Cancel round-trip can complete.
    self._connection.ioloop.start()
    self.logger.info(' [v] Stopped consuming')
def close_connection(self):
    """This method closes the connection to RabbitMQ."""
    self.logger.info('Closing connection')
    self._connection.close()
class QueueConsumer(Thread):
    """Blocking, polling RabbitMQ consumer running in its own thread.

    Messages are fetched with Basic.Get every ``poll_interval`` seconds,
    JSON-decoded and dispatched to ``callback``; ``on_success`` /
    ``on_failure`` are then notified with the outcome.  Call ``start()``
    to begin polling and ``stop()`` (followed by ``join()``) to shut down.
    """

    def __init__(self, config, qdesc, callback, on_success, on_failure, logger_name=None, poll_interval=1):
        """Create a new instance of the blocking consumer class.

        :param dict config: broker settings with 'user', 'password', 'host' keys
        :param dict qdesc: queue descriptor with 'name', 'durable', 'arguments' keys
        :param callback: invoked with each JSON-decoded message
        :param on_success: invoked with the message after a successful callback
        :param on_failure: invoked with the message (or raw body if parsing
            failed) after a failed callback
        :param logger_name: optional logger name (root logger when None)
        :param int poll_interval: seconds to sleep between polls of an empty queue
        """
        super().__init__()
        self._heartbeat = 3*60*60  # 3h
        self._qdesc = qdesc
        self._qname = self._qdesc['name']
        self._no_ack = True  # messages get redelivered with no_ack=False
        self._connection = None
        self._channel = None
        self._url = "amqp://{}:{}@{}:5672/%2F?heartbeat={}".format(config['user'], config['password'],
                                                                   config['host'], self._heartbeat)
        self._poll_interval = poll_interval
        self._stop_event = Event()
        self._callback = callback
        self._on_success = on_success
        self._on_failure = on_failure
        self.logger = logging.getLogger(logger_name)

    def on_message(self, method, properties, body):
        """Decode, parse and dispatch one delivered message.

        :param pika.Spec.Basic.Deliver: method
        :param pika.Spec.BasicProperties: properties
        :param str|byte body: The message body
        """
        msg = None
        try:
            body = body.decode('utf-8')
            self.logger.info(' [v] Received message # %s from %s: %s',
                             method.delivery_tag, properties.app_id, body)
            msg = json.loads(body)
            self._callback(msg)
        except BaseException:
            self.logger.error(' [x] Failed: {}'.format(body), exc_info=True)
            # Bug fix: `msg or body` passed the raw body for falsy parsed
            # payloads such as {}; only fall back when parsing never succeeded.
            self._on_failure(msg if msg is not None else body)
        else:
            self.logger.info(' [v] Succeeded: {}'.format(body))
            self._on_success(msg)

    def run(self):
        """ Use `start` method to kick off message polling """
        while True:
            try:
                self._poll()
            except AMQPError as e:
                # Broker went away: log and reconnect on the next iteration.
                self.logger.warning(' [x] Server disconnected: {}. Reconnecting...'.format(e))
            except StopThread:
                self.logger.info(' [x] Stop signal received. Stopping')
                break

    def _poll(self):
        """Connect, (re)declare the queue, then drain it with Basic.Get.

        Raises StopThread once stop() has been requested and the queue is
        empty, unwinding back into run().
        """
        self.logger.info('Connecting to %s', self._url)
        self._connection = pika.BlockingConnection(pika.URLParameters(self._url))
        self._channel = self._connection.channel()
        self._channel.queue_declare(queue=self._qname, durable=self._qdesc['durable'],
                                    arguments=self._qdesc['arguments'])
        self.logger.info(' [*] Waiting for messages...')
        while True:
            method, properties, body = self._channel.basic_get(queue=self._qname, no_ack=self._no_ack)
            if body is not None:
                self.on_message(method, properties, body)
            else:
                self.logger.debug('No messages in "{}" queue'.format(self._qname))
                if self.stopped():
                    raise StopThread()
                else:
                    sleep(self._poll_interval)

    def stop(self):
        """ After calling `stop`, method `join` must be called"""
        self._stop_event.set()

    def stopped(self):
        """Return True once stop() has been requested."""
        return self._stop_event.is_set()
class QueuePublisher(object):
    """Publishes messages to (or deletes) a single RabbitMQ queue, opening a
    short-lived blocking connection per operation."""

    def __init__(self, config, qdesc, logger=None):
        """:param dict config: broker settings with 'user', 'password', 'host' keys
        :param dict qdesc: queue descriptor with 'name', 'durable', 'arguments' keys
        :param logger: optional logger (root logger when None)
        """
        creds = pika.PlainCredentials(config['user'], config['password'])
        self.qdesc = qdesc
        self.qname = qdesc['name']
        # heartbeat_interval=0 disables heartbeats; connections are short-lived.
        self.conn_params = pika.ConnectionParameters(host=config['host'], credentials=creds, heartbeat_interval=0)
        self.conn = None
        self.logger = logger if logger else logging.getLogger()

    def __str__(self):
        return '<QueuePublisher:{}>'.format(self.qname)

    def delete_queue(self):
        """Delete the queue, logging (not raising) on broker errors."""
        try:
            self.conn = pika.BlockingConnection(self.conn_params)
            ch = self.conn.channel()
            ch.queue_delete(self.qname)
        except AMQPError as e:
            self.logger.error('Queue delete failed: %s - %s', self.qname, e)
        finally:
            # Bug fix: if BlockingConnection() itself failed, self.conn is
            # still None and an unguarded close() raised AttributeError.
            if self.conn:
                self.conn.close()

    def publish(self, msg, priority=0):
        """Publish *msg* as persistent JSON to the queue.

        :param msg: JSON-serialisable message payload
        :param int priority: message priority (relies on the queue's
            x-max-priority argument when set)
        """
        try:
            self.conn = pika.BlockingConnection(self.conn_params)
            ch = self.conn.channel()
            ch.queue_declare(queue=self.qname, durable=self.qdesc['durable'], arguments=self.qdesc['arguments'])
            ch.basic_publish(exchange='',
                             routing_key=self.qname,
                             body=json.dumps(msg),
                             properties=pika.BasicProperties(
                                 delivery_mode=2,  # make message persistent
                                 priority=priority
                             ))
            self.logger.info(" [v] Sent {} to {}".format(json.dumps(msg), self.qname))
        except AMQPError as e:
            self.logger.error('Failed to publish a message: %s - %s', msg, e)
        finally:
            if self.conn:
                self.conn.close()
# Queue descriptor for annotation jobs; x-max-priority enables per-message
# priorities (used via QueuePublisher.publish(priority=...)).
SM_ANNOTATE = {
    'name': 'sm_annotate',
    'durable': True,
    'arguments': {
        'x-max-priority': 3
    }
}
# Queue descriptor for dataset status updates; no extra queue arguments.
SM_DS_STATUS = {
    'name': 'sm_dataset_status',
    'durable': True,
    'arguments': None
}
| {
"content_hash": "e07436ea3eb08a58fe582869d961567a",
"timestamp": "",
"source": "github",
"line_count": 505,
"max_line_length": 117,
"avg_line_length": 40.27920792079208,
"alnum_prop": 0.6235190010323977,
"repo_name": "SpatialMetabolomics/SM_distributed",
"id": "6a0bc49267c781aa6fee31bbfa42408b0c3dd82e",
"size": "20341",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "sm/engine/queue.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "1841"
},
{
"name": "HTML",
"bytes": "13584"
},
{
"name": "JavaScript",
"bytes": "33219"
},
{
"name": "Python",
"bytes": "189667"
},
{
"name": "Shell",
"bytes": "3940"
}
],
"symlink_target": ""
} |
"""
Virtual Machine Class
"""
### INCLUDES ###
import os
import sys
import commands
import glob
import filecmp
from py_knife import file_system
from py_knife.decorators import multiple_attempts
from default_settings import LOG_TS_FORMAT, MUTLIPLE_TAPE_SYSTEM
### FUNCTIONS ###
def execute_backup(settings):
    """ Backup Machines

    Build a VirtualMachine for every entry under settings['vms_path'] and
    back each one up in turn.
    """
    vm_paths = glob.glob(os.path.join(settings['vms_path'], '*'))
    machines = [VirtualMachine(settings, vm_path) for vm_path in vm_paths]
    for machine in machines:
        machine.backup()
### CLASSES ###
class VirtualMachine(object):
    """ Virtual Machine class

    Wraps one VMWare virtual machine directory and implements a
    suspend -> copy-to-tape -> resume backup workflow, driving VMWare
    through the external ``vmrun`` command line tool (Python 2 code).
    """
    def __init__(self, settings, vm_path):
        # settings: dict expected to provide 'vmrun_path', 'vms_path',
        # 'tape_path' and '_backup_ts' (optionally 'log_ts_format') --
        # TODO confirm the full schema against the caller.
        self.settings = settings
        self.path = vm_path
        self.name = os.path.basename(vm_path)
        self.vmware = None            # .vmx path when the VM is running, set by state()
        self.base_backup_path = None  # backup folder on the selected tape
        self.vm_backup_path = None    # <base_backup_path>/<vm name>

    ## Some generic internal methods ##
    def _print(self, message):
        """Print *message* prefixed with a time stamp."""
        # Time Stamp Options 1 and 2
        if 'log_ts_format' in self.settings:
            print file_system.create_time_stamp(self.settings['log_ts_format']), str(message)
        else:
            print file_system.create_time_stamp(LOG_TS_FORMAT), str(message)

    def _exit(self, message):
        """Log *message*, resume the VM if it was running, and abort the script."""
        self._print(message)
        self.resume()
        sys.exit()

    ## VMWare Communication Methods ##
    # Internal #
    def _fetch_vmware(self):
        """ Fetches vmware path if machine is currently running """
        # `vmrun list` prints a 'Total running VMs:' header followed by one
        # .vmx path per running VM; a missing header means vmrun_path is wrong.
        vm_list = commands.getoutput(self.settings['vmrun_path'] + ' list').split('\n')
        if 'Total running VMs:' in vm_list[0]:
            self._print(vm_list.pop(0))
        else:
            self._exit('VMWare vmrun location is incorrect, please provide it manually via command prompt. '
                       'Use -h option for help!')
        # Map VM directory name -> .vmx path for every running machine.
        vm_dict = {}
        for vm_path in vm_list:
            vm_name = os.path.basename(os.path.dirname(vm_path))
            vm_dict[vm_name] = vm_path
        vmware_path = None
        if self.name in vm_dict.keys():
            vmware_path = vm_dict[self.name]
        # None when this VM is not currently running.
        return vmware_path

    @multiple_attempts
    def _suspend(self, **kwargs):
        """ Suspending Virtual Machine (inner function) """
        # NOTE(review): the multiple_attempts decorator presumably retries
        # until kwargs['success'] is True and supplies 'total_attempts' --
        # confirm in py_knife.decorators.
        kwargs['success'] = bool(self._fetch_vmware() is None)
        if not kwargs['success']:
            total_attempts = str(kwargs['total_attempts'])
            self._print('Suspending virtual machine... (attempt #' + total_attempts + ')')
            os.system(self.settings['vmrun_path'] + ' suspend "' + self.vmware + '" soft')
            self._print('Suspend of virtual machine is completed! (attempt #' + total_attempts + ')')
            # Success means the VM no longer shows up as running.
            kwargs['success'] = bool(self._fetch_vmware() is None)
        return kwargs

    @multiple_attempts
    def _resume(self, **kwargs):
        """ Resuming Virtual Machine (inner function) """
        kwargs['success'] = bool(self._fetch_vmware() is not None)
        if not kwargs['success']:
            total_attempts = str(kwargs['total_attempts'])
            self._print('Resuming virtual machine... (attempt #' + total_attempts + ')')
            os.system(self.settings['vmrun_path'] + ' start "' + self.vmware + '" nogui')
            self._print('Resume of virtual machine is completed! (attempt #' + total_attempts + ')')
            # Success means the VM shows up as running again.
            kwargs['success'] = bool(self._fetch_vmware() is not None)
        return kwargs

    # External #
    # Note: User has to fetch state of the machine before using suspend or resume
    def state(self):
        """ Tells if Virtual Machine is currently running """
        self.vmware = self._fetch_vmware()
        return bool(self.vmware)

    def suspend(self):
        """ Suspending Virtual Machine """
        if self.vmware:
            self._suspend()

    def resume(self):
        """ Resuming Virtual Machine """
        if self.vmware:
            self._resume()

    ## Tape Methods ##
    def _space_available(self, tape):
        """ Reads available space on particular tape """
        space_available = file_system.get_free_space(tape)
        tape_name = str(os.path.basename(tape))
        self._print("Space available on tape '" + tape_name + "': " + file_system.print_memory_size(space_available))
        return space_available

    def _fetch_base_path(self, space_needed):
        """ Figuring out what tape to use and generating path for the future backup location """
        tape_to_use = None
        # With a multiple-tape system each entry under tape_path is a tape;
        # otherwise tape_path itself is the single tape.
        if MUTLIPLE_TAPE_SYSTEM:
            tape_list = glob.glob(os.path.join(self.settings['tape_path'], '*'))
        else:
            tape_list = [self.settings['tape_path']]
        for tape in tape_list:
            # Figure out how much space we have on this tape
            space_available = self._space_available(tape)
            # Is it enough space?
            if space_available >= space_needed:
                self._print('Space available: ' + file_system.print_memory_size(space_available))
                tape_to_use = tape
                break
        else:
            # for/else: no tape had enough room -> abort the whole run.
            self._exit('Tapes are full or inaccessible! Please unmount tape drive, reload tapes,'
                       ' format all of them and remount tape drive!')
        vm_base_name = os.path.basename(self.settings['vms_path'])
        vm_backup_name = vm_base_name + self.settings['_backup_ts']
        return os.path.join(tape_to_use, vm_backup_name)

    @multiple_attempts
    def _creating_backup_folder(self, **kwargs):
        """ Creating backup folder on tape """
        kwargs['success'] = kwargs['output'] = os.path.isdir(self.vm_backup_path)
        if not kwargs['success']:
            total_attempts = str(kwargs['total_attempts'])
            try:
                self._print('Creating backup folder... (attempt #' + total_attempts + ')')
                file_system.make_dir(self.vm_backup_path)
            except OSError as e:
                self._print('Could not create folder "' + self.vm_backup_path +
                            '" due to an OS error ({0}): {1}'.format(e.errno, e.strerror))
            except IOError as e:
                self._print('Could not create folder "' + self.vm_backup_path +
                            '" due to an IO error ({0}): {1}'.format(e.errno, e.strerror))
            except:
                # Bare except: log any other failure and let the decorator retry.
                self._print('Could not create folder "' + self.vm_backup_path + '" due to an error: ' +
                            str(sys.exc_info()[0]))
            else:
                self._print('Backup folder is successfully created!')
            kwargs['success'] = kwargs['output'] = os.path.isdir(self.vm_backup_path)
        return kwargs

    @multiple_attempts
    def _backup(self, **kwargs):
        """ Backup (internal function)"""
        kwargs['success'] = False
        total_attempts = str(kwargs['total_attempts'])
        try:
            self._print('Starting Backup... (attempt #' + total_attempts + ')')
            file_system.copy_dir(self.path, self.vm_backup_path)
        except OSError as e:
            self._print('Could not backup "' + self.name +
                        '" virtual machine due to an OS error ({0}): {1}'.format(e.errno, e.strerror))
        except IOError as e:
            self._print('Could not backup "' + self.name +
                        '" virtual machine due to an IO error ({0}): {1}'.format(e.errno, e.strerror))
        except:
            self._print('Could not backup "' + self.name + '" virtual machine due to an error: ' +
                        str(sys.exc_info()[0]))
        else:
            self._print('Backup Completed!')
            kwargs['success'] = True
        return kwargs

    def backup_needed(self):
        """ Determine if backup needed or not """
        # Walk the tape(s) looking for a directory whose name contains this
        # VM's name and whose *.vmx file compares equal to a local one.  The
        # repeated for/else + break ladders unwind the four nested loops as
        # soon as a match is found.
        vmx_match = False
        vmx_files = glob.glob(os.path.join(self.path, '*.vmx'))
        if len(vmx_files) > 0:
            for dir_path, dir_names, file_names in os.walk(self.settings['tape_path']):
                for dir_name in dir_names:
                    if self.name in dir_name:
                        vm_path = os.path.join(dir_path, dir_name)
                        # Compare *.vmx files (size and access date)
                        _vmx_files = glob.glob(os.path.join(vm_path, '*.vmx'))
                        for vmx_file in vmx_files:
                            for _vmx_file in _vmx_files:
                                # Compare files
                                vmx_match = filecmp.cmp(_vmx_file, vmx_file, True)
                                if vmx_match:
                                    self._print("Virtual Machine '" + self.name + "' have been backed up already!")
                                    self._print('Backup Path: ' + str(vm_path))
                                    break
                            else:
                                continue
                            break
                        else:
                            continue
                        break
                else:
                    continue
                break
            else:
                self._print("Virtual Machine '" + self.name + "' have not been backed up yet!")
        else:
            # Nothing to compare -> treat as "already backed up" to skip it.
            vmx_match = True
            self._print("Virtual Machine '" + self.name + "' does not have any '*.vmx' files! Skipping backup!")
        return not vmx_match

    ## VMWare Backup Method ##
    def backup(self):
        """ Execute backup of this virtual machine """
        # Print some basic info about this Virtual Machine
        self._print('VM Name: ' + self.name)
        self._print('VM Path: ' + self.path)
        # Figure out if Virtual Machine is currently running
        vm_state = self.state()
        self._print('VM Running: ' + str(vm_state))
        if self.backup_needed():
            # Suspend Virtual Machine (if needed)
            self.suspend()
            # Figure out how much space this Virtual Machine is taking up
            space_needed = file_system.get_size(self.path)
            self._print('Space needed: ' + file_system.print_memory_size(space_needed))
            # Figure out what tape we will use to back up this Virtual Machine
            self.base_backup_path = self._fetch_base_path(space_needed)
            self.vm_backup_path = os.path.join(self.base_backup_path, self.name)
            self._print('BackUp Location: ' + str(self.vm_backup_path))
            # Creating backup folder
            backup_folder_created = self._creating_backup_folder()
            if backup_folder_created:
                # Backup this Virtual Machine
                # TODO: Add some compression options and functions
                self._backup()
        # Resume Virtual Machine (if needed)
        self.resume()
| {
"content_hash": "09da4671c106259314337f0893160901",
"timestamp": "",
"source": "github",
"line_count": 287,
"max_line_length": 117,
"avg_line_length": 37.67595818815331,
"alnum_prop": 0.5487838712660686,
"repo_name": "Barmaley13/VMWare-Backup",
"id": "329b913216c0b6698581dc8a17bbccdce5181160",
"size": "10813",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "vmware_backup/virtual_machine.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "6480"
},
{
"name": "Makefile",
"bytes": "6797"
},
{
"name": "Python",
"bytes": "37421"
}
],
"symlink_target": ""
} |
import json
import os
import sys
if sys.version_info < (3, 0):
from unittest2 import TestCase
else:
from unittest import TestCase
import requests
import github3
try:
from unittest import mock
except ImportError:
import mock
from io import BytesIO
from requests.structures import CaseInsensitiveDict
is_py3 = sys.version_info > (3, 0)
def load(name):
    """Parse the JSON fixture *name* and return the resulting object."""
    with path(name) as fixture:
        return json.load(fixture)
def path(name, mode='r'):
    """Open the fixture file *name* under tests/json/ and return the file object."""
    filename = 'tests/json/{0}'.format(name)
    return open(filename, mode)
class BaseCase(TestCase):
    """Shared scaffolding for github3 unit tests.

    Patches ``requests.Session.request``, provides helpers to stage canned
    responses from JSON fixtures, and asserts on the HTTP calls the library
    issues.
    """

    github_url = 'https://api.github.com/'

    def setUp(self):
        # Fresh client per test; keep a handle on its underlying session.
        self.g = github3.GitHub()
        self.session = self.g._session
        if os.environ.get('GH_AUTH'):
            self.g.login(token=os.environ['GH_AUTH'])
        # Expected (method, url) args and request kwargs for mock_assertions().
        self.args = ()
        self.conf = {'allow_redirects': True}
        # Intercept every HTTP request issued through requests.
        self.mock = mock.patch.object(requests.sessions.Session, 'request')
        self.request = self.mock.start()

    def tearDown(self):
        self.mock.stop()

    def login(self):
        """Log the test client in with dummy basic-auth credentials."""
        self.g.login('user', 'password')

    def mock_assertions(self):
        """Assert the mocked request was called with self.args / self.conf,
        then reset the mock (restoring self.conf) for the next staged call."""
        assert self.request.called is True
        conf = self.conf.copy()
        args, kwargs = self.request.call_args
        assert self.args == args
        if 'data' in self.conf:
            if isinstance(self.conf['data'], dict):
                # Compare dict payloads key-by-key: JSON key order varies.
                for k, v in list(self.conf['data'].items()):
                    s = json.dumps({k: v})[1:-1]
                    assert s in kwargs['data']
            else:
                assert self.conf['data'] == kwargs['data']
            del self.conf['data']
        for k in self.conf:
            assert k in kwargs
            assert self.conf[k] == kwargs[k]
        self.request.reset_mock()
        self.conf = conf

    def response(self, path_name, status_code=200, enc='utf-8',
                 _iter=False, **headers):
        """Stage a canned requests.Response (built from the JSON fixture
        *path_name*, if given) as the mocked request's return value."""
        r = requests.Response()
        r.status_code = status_code
        r.encoding = enc
        if path_name:
            with path(path_name) as f:
                content = f.read().strip()
            if _iter:
                # Wrap the single fixture object in a JSON array.
                content = '[{0}]'.format(content)
                r.raw = RequestsBytesIO(content.encode())
            elif is_py3:
                r.raw = RequestsBytesIO(content.encode())
            else:
                r.raw = RequestsBytesIO(content)
        else:
            r.raw = RequestsBytesIO()
        if headers:
            r.headers = CaseInsensitiveDict(headers)
        self.request.return_value = r

    def delete(self, url):
        """Expect a DELETE to *url* with no extra request kwargs."""
        self.args = ('DELETE', url)
        self.conf = {}

    def get(self, url):
        self.args = ('GET', url)

    def patch(self, url):
        self.args = ('PATCH', url)

    def post(self, url):
        self.args = ('POST', url)

    def put(self, url):
        self.args = ('PUT', url)

    def not_called(self):
        """Assert the mocked request was never issued."""
        assert self.request.called is False

    def assertGitHubErrorRaised(self, func, *args, **kwargs):
        """Assert that calling func(*args, **kwargs) raises GitHubError.

        Bug fix: the callable and its arguments are now passed to
        assertRaises; previously func was invoked *before* assertRaises,
        so its exception propagated instead of being asserted.
        """
        return self.assertRaises(github3.GitHubError, func, *args, **kwargs)
class RequestsBytesIO(BytesIO):
    """BytesIO variant tolerating the extra arguments requests passes to
    raw.read() (e.g. decode_content)."""

    def read(self, chunk_size=-1, *args, **kwargs):
        # Bug fix: chunk_size was a required positional parameter, so a plain
        # read() call failed; default to -1 (read all) like BytesIO.read.
        return super(RequestsBytesIO, self).read(chunk_size)
| {
"content_hash": "e92bef7e5a6080144f47008945701185",
"timestamp": "",
"source": "github",
"line_count": 127,
"max_line_length": 76,
"avg_line_length": 25.291338582677167,
"alnum_prop": 0.5616438356164384,
"repo_name": "adrianmoisey/github3.py",
"id": "3525fe01529c8ed1087418abd69ae9e69be422cc",
"size": "3212",
"binary": false,
"copies": "3",
"ref": "refs/heads/develop",
"path": "tests/utils.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [],
"symlink_target": ""
} |
from CIM14.CPSM.Equipment.Core.PowerSystemResource import PowerSystemResource
class ConnectivityNodeContainer(PowerSystemResource):
    """A base class for all objects that may contain ConnectivityNodes or TopologicalNodes.
    """

    def __init__(self, ConnectivityNodes=None, *args, **kw_args):
        """Initialises a new 'ConnectivityNodeContainer' instance.
        @param ConnectivityNodes: Connectivity nodes contained by this container.
        """
        # Backing list for the bidirectional one-to-many association; assign
        # through the property so node back-references stay consistent.
        self._ConnectivityNodes = []
        self.ConnectivityNodes = [] if ConnectivityNodes is None else ConnectivityNodes

        super(ConnectivityNodeContainer, self).__init__(*args, **kw_args)

    # Reflection metadata used by the CIM framework (serialisation etc.).
    _attrs = []
    _attr_types = {}
    _defaults = {}
    _enums = {}
    _refs = ["ConnectivityNodes"]
    _many_refs = ["ConnectivityNodes"]

    def getConnectivityNodes(self):
        """Connectivity nodes contained by this container.
        """
        return self._ConnectivityNodes

    def setConnectivityNodes(self, value):
        # Detach current nodes via their public property (which updates this
        # list), then attach the new ones by writing the private back-reference
        # directly to avoid mutating `value` while iterating over it.
        for x in self._ConnectivityNodes:
            x.ConnectivityNodeContainer = None
        for y in value:
            y._ConnectivityNodeContainer = self
        self._ConnectivityNodes = value

    ConnectivityNodes = property(getConnectivityNodes, setConnectivityNodes)

    def addConnectivityNodes(self, *ConnectivityNodes):
        # Each node's ConnectivityNodeContainer property setter is expected to
        # append it to this container's list -- TODO confirm in ConnectivityNode.
        for obj in ConnectivityNodes:
            obj.ConnectivityNodeContainer = self

    def removeConnectivityNodes(self, *ConnectivityNodes):
        for obj in ConnectivityNodes:
            obj.ConnectivityNodeContainer = None
| {
"content_hash": "79bdd82fbe898e50dd5f401efecd2799",
"timestamp": "",
"source": "github",
"line_count": 45,
"max_line_length": 91,
"avg_line_length": 34.888888888888886,
"alnum_prop": 0.6840764331210191,
"repo_name": "rwl/PyCIM",
"id": "04e020bf60bf2caebc279635474f00b9d0e4614e",
"size": "2670",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "CIM14/CPSM/Equipment/Core/ConnectivityNodeContainer.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "7420564"
}
],
"symlink_target": ""
} |
import unittest
import simulation
def state(visitors_per_bucket, baseline_conversions, treatment_conversions):
    """Build an ExperimentState from per-bucket visitor and conversion counts."""
    baseline_failures = visitors_per_bucket - baseline_conversions
    treatment_failures = visitors_per_bucket - treatment_conversions
    return simulation.ExperimentState(
        baseline_conversions,
        baseline_failures,
        treatment_conversions,
        treatment_failures,
    )
class ChisqDecisionTest(unittest.TestCase):
    """Tests for simulation.ChisqDecision: sample-size computation and the
    keep-running / baseline / treatment decision rule."""

    def test_sample_size_calculation(self):
        # test values from http://www.stat.ubc.ca/~rollin/stats/ssize/b2.html
        self.assertEqual(
            14751,
            simulation.ChisqDecision(0.95, 0.1).necessary_sample_size_per_bucket(0.1),
        )
        self.assertEqual(
            9780,
            simulation.ChisqDecision(0.85, 0.1).necessary_sample_size_per_bucket(0.1),
        )
        self.assertEqual(
            2507,
            simulation.ChisqDecision(0.95, 0.25).necessary_sample_size_per_bucket(0.1),
        )
        self.assertEqual(
            6510,
            simulation.ChisqDecision(0.95, 0.1).necessary_sample_size_per_bucket(0.2),
        )

    def test_decision(self):
        baseline_rate = 0.5
        chisq_decision = simulation.ChisqDecision(0.95, 0.1)
        # sanity checks
        self.assertEqual('keep running', chisq_decision.decision(state(20, 7, 10), baseline_rate))
        self.assertEqual(
            'baseline',
            chisq_decision.decision(state(10000, 5000, 5000), baseline_rate),
        )
        self.assertEqual(
            'baseline',
            chisq_decision.decision(state(10000, 6000, 4000), baseline_rate),
        )
        self.assertEqual(
            'treatment',
            chisq_decision.decision(state(10000, 4000, 6000), baseline_rate),
        )
        # some close calls, using Chi-squared values from
        # http://www.graphpad.com/quickcalcs/contingency1.cfm
        self.assertEqual(
            'baseline',
            chisq_decision.decision(state(10000, 5000, 5100), baseline_rate),
        )
        self.assertEqual(
            'treatment',
            chisq_decision.decision(state(10000, 5000, 5150), baseline_rate),
        )
class BayesianDecisionTest(unittest.TestCase):
    """Tests for simulation.BayesianDecision: posterior probability that the
    treatment beats the baseline, and the expected loss of choosing it."""

    def setUp(self):
        self.decision = simulation.BayesianDecision(0.01)

    def test_posterior_probability_treatment_is_better(self):
        # sanity checks
        self.assertAlmostEqual(
            1,
            self.decision.posterior_probability_treatment_is_better(state(1000, 1, 999)),
        )
        self.assertAlmostEqual(
            0,
            self.decision.posterior_probability_treatment_is_better(state(1000, 999, 1)),
        )
        self.assertAlmostEqual(
            0.5,
            self.decision.posterior_probability_treatment_is_better(state(100, 50, 50)),
        )
        self.assertGreater(
            self.decision.posterior_probability_treatment_is_better(state(100, 50, 51)),
            0.5,
        )
        self.assertLess(
            self.decision.posterior_probability_treatment_is_better(state(100, 50, 49)),
            0.5,
        )
        # some less obvious ones which might be wrong (generated using my own implementation), but
        # useful for catching unintended changes at least
        self.assertAlmostEqual(
            0.92318343,
            self.decision.posterior_probability_treatment_is_better(state(1000, 100, 120)),
        )
        self.assertAlmostEqual(
            0.22343071,
            self.decision.posterior_probability_treatment_is_better(state(1000, 100, 90)),
        )

    def test_expected_loss_from_choosing_treatment(self):
        # sanity checks
        self.assertAlmostEqual(
            0.9,
            self.decision.expected_loss_from_choosing_treatment(state(1000, 950, 50)),
            places=2,
        )
        self.assertAlmostEqual(
            0,
            self.decision.expected_loss_from_choosing_treatment(state(1000, 1, 999)),
        )
        # some values from Chris Stucchio's numerical integration code
        # https://gist.github.com/stucchio/9090456
        # see stucchio.py in this repository
        self.assertAlmostEqual(
            0.017,
            self.decision.expected_loss_from_choosing_treatment(state(100, 10, 10)),
            places=3,
        )
        self.assertAlmostEqual(
            0.0005,
            self.decision.expected_loss_from_choosing_treatment(state(100, 10, 20)),
            places=4,
        )
        self.assertAlmostEqual(
            0.1,
            self.decision.expected_loss_from_choosing_treatment(state(100, 20, 10)),
            places=1,
        )
# Allow running this test module directly: `python simulation_test.py`.
if __name__ == '__main__':
    unittest.main()
| {
"content_hash": "f25aadc332d233a3e764637e4706fedf",
"timestamp": "",
"source": "github",
"line_count": 133,
"max_line_length": 98,
"avg_line_length": 34.88721804511278,
"alnum_prop": 0.596551724137931,
"repo_name": "gostevehoward/absimulation",
"id": "fa8fbf759b7d512fcb1a656a8f4d7b74dc2d6c33",
"size": "4663",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "simulation_test.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "25049"
},
{
"name": "R",
"bytes": "5807"
}
],
"symlink_target": ""
} |
from __future__ import annotations
# PostgreSQL SQLSTATE error codes recognised by the pgcon layer.
# Class 0A — Feature Not Supported
ERROR_FEATURE_NOT_SUPPORTED = '0A000'
# Class 21 — Cardinality Violation
ERROR_CARDINALITY_VIOLATION = '21000'
# Class 22 — Data Exception
ERROR_DATA_EXCEPTION = '22000'
ERROR_NUMERIC_VALUE_OUT_OF_RANGE = '22003'
ERROR_INVALID_DATETIME_FORMAT = '22007'
ERROR_DATETIME_FIELD_OVERFLOW = '22008'
ERROR_DIVISION_BY_ZERO = '22012'
ERROR_INTERVAL_FIELD_OVERFLOW = '22015'
ERROR_INVALID_PARAMETER_VALUE = '22023'
ERROR_INVALID_TEXT_REPRESENTATION = '22P02'
ERROR_INVALID_REGULAR_EXPRESSION = '2201B'
ERROR_INVALID_ROW_COUNT_IN_LIMIT_CLAUSE = '2201W'
ERROR_INVALID_ROW_COUNT_IN_RESULT_OFFSET_CLAUSE = '2201X'
# Class 23 — Integrity Constraint Violation
ERROR_INTEGRITY_CONSTRAINT_VIOLATION = '23000'
ERROR_RESTRICT_VIOLATION = '23001'
ERROR_NOT_NULL_VIOLATION = '23502'
ERROR_FOREIGN_KEY_VIOLATION = '23503'
ERROR_UNIQUE_VIOLATION = '23505'
ERROR_CHECK_VIOLATION = '23514'
ERROR_EXCLUSION_VIOLATION = '23P01'
# Class 25 - Invalid Transaction State
ERROR_IDLE_IN_TRANSACTION_TIMEOUT = '25P03'
ERROR_READ_ONLY_SQL_TRANSACTION = '25006'
# Class 28 — Invalid Authorization Specification
ERROR_INVALID_AUTHORIZATION_SPECIFICATION = '28000'
ERROR_INVALID_PASSWORD = '28P01'
# Class 3D — Invalid Catalog Name
ERROR_INVALID_CATALOG_NAME = '3D000'
# Class 40 — Transaction Rollback
ERROR_SERIALIZATION_FAILURE = '40001'
ERROR_DEADLOCK_DETECTED = '40P01'
# Class 42 — Syntax Error or Access Rule Violation
ERROR_WRONG_OBJECT_TYPE = '42809'
ERROR_INSUFFICIENT_PRIVILEGE = '42501'
ERROR_DUPLICATE_DATABASE = '42P04'
# Classes 54/55/57 — resource limits and operator intervention
ERROR_PROGRAM_LIMIT_EXCEEDED = '54000'
ERROR_OBJECT_IN_USE = '55006'
ERROR_QUERY_CANCELLED = '57014'
ERROR_CANNOT_CONNECT_NOW = '57P03'
# Class 08 — Connection Exception
ERROR_CONNECTION_CLIENT_CANNOT_CONNECT = '08001'
ERROR_CONNECTION_DOES_NOT_EXIST = '08003'
ERROR_CONNECTION_REJECTION = '08004'
ERROR_CONNECTION_FAILURE = '08006'
# Codes mapped to BackendConnectionError (see error_class_map below).
CONNECTION_ERROR_CODES = [
    ERROR_CANNOT_CONNECT_NOW,
    ERROR_CONNECTION_CLIENT_CANNOT_CONNECT,
    ERROR_CONNECTION_DOES_NOT_EXIST,
    ERROR_CONNECTION_REJECTION,
    ERROR_CONNECTION_FAILURE,
]
class BackendError(Exception):
    """Error reported by the PostgreSQL backend.

    Carries the raw protocol error-response fields keyed by their
    single-letter field codes ('C' = SQLSTATE code, 'M' = message).
    """

    def __init__(self, *, fields: dict[str, str]) -> None:
        # Prefer the server-supplied message, falling back to the SQLSTATE
        # code.  The fallback is computed lazily: the previous eager
        # .get() default raised KeyError on a missing 'C' field even when
        # a message was present.
        msg = fields.get('M')
        if msg is None:
            msg = f'error code {fields["C"]}'
        self.fields = fields
        super().__init__(msg)

    def code_is(self, code: str) -> bool:
        """Return True if this error's SQLSTATE code equals *code*."""
        return self.fields["C"] == code

    def get_field(self, field: str) -> str | None:
        """Return the raw protocol field *field*, or None when absent."""
        return self.fields.get(field)
def get_error_class(fields: dict[str, str]) -> type[BackendError]:
    """Map a backend error's SQLSTATE code to its specific exception class,
    defaulting to BackendError for unmapped codes."""
    sqlstate = fields["C"]
    return error_class_map.get(sqlstate, BackendError)
class BackendQueryCancelledError(BackendError):
    """Raised when the backend reports query cancellation (SQLSTATE 57014)."""
    pass


class BackendConnectionError(BackendError):
    """Raised for connection-related SQLSTATE codes (class 08 and 57P03)."""
    pass


class BackendPrivilegeError(BackendError):
    """Raised when the backend reports insufficient privilege (SQLSTATE 42501)."""
    pass


class BackendCatalogNameError(BackendError):
    """Raised for an invalid catalog (database) name (SQLSTATE 3D000)."""
    pass
# Dispatch table used by get_error_class(): SQLSTATE code -> exception class.
error_class_map = {
    ERROR_CANNOT_CONNECT_NOW: BackendConnectionError,
    ERROR_CONNECTION_CLIENT_CANNOT_CONNECT: BackendConnectionError,
    ERROR_CONNECTION_DOES_NOT_EXIST: BackendConnectionError,
    ERROR_CONNECTION_REJECTION: BackendConnectionError,
    ERROR_CONNECTION_FAILURE: BackendConnectionError,
    ERROR_INSUFFICIENT_PRIVILEGE: BackendPrivilegeError,
    ERROR_QUERY_CANCELLED: BackendQueryCancelledError,
    ERROR_INVALID_CATALOG_NAME: BackendCatalogNameError,
}
| {
"content_hash": "661a999d19e4538cc2d683904d419683",
"timestamp": "",
"source": "github",
"line_count": 109,
"max_line_length": 67,
"avg_line_length": 28.110091743119266,
"alnum_prop": 0.7402088772845953,
"repo_name": "edgedb/edgedb",
"id": "c1dadd885c0a92cbbaaa83932c497bb38d6f87fa",
"size": "3749",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "edb/server/pgcon/errors.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Cython",
"bytes": "372837"
},
{
"name": "JavaScript",
"bytes": "7481"
},
{
"name": "Makefile",
"bytes": "1159"
},
{
"name": "Python",
"bytes": "9860929"
},
{
"name": "Rust",
"bytes": "238373"
}
],
"symlink_target": ""
} |
from __future__ import print_function
import getpass
import json
import os
import argparse
import yaml
from elasticsearch.client import Elasticsearch
def main():
    """Create a new ElastAlert metadata index and optionally copy
    documents over from an existing index.

    Connection settings are read from ``config.yaml`` (parent directory
    first, then cwd) when present; otherwise from command-line flags or
    interactive prompts.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('--host', help='Elasticsearch host')
    parser.add_argument('--port', type=int, help='Elasticsearch port')
    parser.add_argument('--url-prefix', help='Elasticsearch URL prefix')
    parser.add_argument('--no-auth', action='store_const', const=True, help='Suppress prompt for basic auth')
    parser.add_argument('--ssl', action='store_true', default=None, help='Use SSL')
    parser.add_argument('--no-ssl', dest='ssl', action='store_false', help='Do not use SSL')
    parser.add_argument('--index', help='Index name to create')
    parser.add_argument('--old-index', help='Old index name to copy')
    args = parser.parse_args()

    # Prefer a config.yaml in the parent directory, then the cwd.
    if os.path.isfile('../config.yaml'):
        filename = '../config.yaml'
    elif os.path.isfile('config.yaml'):
        filename = 'config.yaml'
    else:
        filename = ''

    username = None
    password = None
    use_ssl = None
    url_prefix = None
    http_auth = None

    if filename:
        with open(filename) as config_file:
            # Fix: use safe_load -- yaml.load without an explicit Loader
            # can execute arbitrary YAML tags and is deprecated.
            data = yaml.safe_load(config_file)
            host = data.get('es_host')
            port = data.get('es_port')
            username = data.get('es_username')
            password = data.get('es_password')
            url_prefix = data.get('es_url_prefix', '')
            use_ssl = data.get('use_ssl')
    else:
        # No config file: fall back to CLI flags, prompting for anything missing.
        host = args.host if args.host else raw_input('Enter elasticsearch host: ')
        port = args.port if args.port else int(raw_input('Enter elasticsearch port: '))
        use_ssl = (args.ssl if args.ssl is not None
                   else raw_input('Use SSL? t/f: ').lower() in ('t', 'true'))
        if args.no_auth is None:
            username = raw_input('Enter optional basic-auth username: ')
            password = getpass.getpass('Enter optional basic-auth password: ')
        url_prefix = (args.url_prefix if args.url_prefix is not None
                      else raw_input('Enter optional Elasticsearch URL prefix: '))

    if username and password:
        http_auth = username + ':' + password

    es = Elasticsearch(host=host, port=port, use_ssl=use_ssl, http_auth=http_auth, url_prefix=url_prefix)

    # Mappings for the ElastAlert metadata document types.
    silence_mapping = {'silence': {'properties': {'rule_name': {'index': 'not_analyzed', 'type': 'string'},
                                                  'until': {'type': 'date', 'format': 'dateOptionalTime'}}}}
    ess_mapping = {'elastalert_status': {'properties': {'rule_name': {'index': 'not_analyzed', 'type': 'string'},
                                                        '@timestamp': {'format': 'dateOptionalTime', 'type': 'date'}}}}
    es_mapping = {'elastalert': {'properties': {'rule_name': {'index': 'not_analyzed', 'type': 'string'},
                                                'match_body': {'enabled': False, 'type': 'object'},
                                                'aggregate_id': {'index': 'not_analyzed', 'type': 'string'}}}}
    error_mapping = {'elastalert_error': {'properties': {'data': {'type': 'object', 'enabled': False}}}}

    index = args.index if args.index is not None else raw_input('New index name? (Default elastalert_status) ')
    if not index:
        index = 'elastalert_status'
    old_index = (args.old_index if args.old_index is not None
                 else raw_input('Name of existing index to copy? (Default None) '))

    res = None
    if old_index:
        print('Downloading existing data...')
        res = es.search(index=old_index, body={}, size=500000)
        print('Got %s documents' % (len(res['hits']['hits'])))

    es.indices.create(index)
    es.indices.put_mapping(index=index, doc_type='elastalert', body=es_mapping)
    es.indices.put_mapping(index=index, doc_type='elastalert_status', body=ess_mapping)
    es.indices.put_mapping(index=index, doc_type='silence', body=silence_mapping)
    es.indices.put_mapping(index=index, doc_type='elastalert_error', body=error_mapping)
    print('New index %s created' % (index))

    if res:
        # Re-upload the old documents into the new index via the bulk API.
        bulk = ''.join(['%s\n%s\n' % (json.dumps({'create': {'_type': doc['_type'], '_index': index}}),
                                      json.dumps(doc['_source'])) for doc in res['hits']['hits']])
        print('Uploading data...')
        es.bulk(body=bulk, index=index)

    print('Done!')
# Script entry point.
if __name__ == '__main__':
    main()
| {
"content_hash": "043a803a3c6b1400ae6af121a4d154f4",
"timestamp": "",
"source": "github",
"line_count": 100,
"max_line_length": 119,
"avg_line_length": 44.86,
"alnum_prop": 0.5889433794025858,
"repo_name": "megancarney/elastalert",
"id": "913c7a7b89474ab3f2716ac2918b219c395578ff",
"size": "4532",
"binary": false,
"copies": "6",
"ref": "refs/heads/master",
"path": "elastalert/create_index.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Makefile",
"bytes": "365"
},
{
"name": "Python",
"bytes": "252072"
}
],
"symlink_target": ""
} |
"""
init.py
Starting script to run NetPyNE-based RxD model.
Usage:
python init.py # Run simulation, optionally plot a raster
MPI usage:
mpiexec -n 4 nrniv -python -mpi init.py
"""
from netpyne import sim
from netParams import netParams
from cfg import cfg
# --------------------------------
# Instantiate network
# --------------------------------
sim.initialize(netParams, cfg)  # create network object and set cfg and net params
sim.net.createPops()  # instantiate network populations
sim.net.createCells()  # instantiate network cells based on defined populations
sim.net.connectCells()  # create connections between cells based on params
sim.net.addStims()  # add external stimulation to cells (IClamps etc)
sim.net.addRxD()  # add reaction-diffusion (RxD)
sim.setupRecording()  # setup variables to record for each cell (spikes, V traces, etc)
# Run the simulation, then produce the configured analyses/plots.
sim.simulate()
sim.analyze()
| {
"content_hash": "307eb75eafa5154e6828738c4e30d03a",
"timestamp": "",
"source": "github",
"line_count": 28,
"max_line_length": 98,
"avg_line_length": 35.17857142857143,
"alnum_prop": 0.6345177664974619,
"repo_name": "Neurosim-lab/netpyne",
"id": "ba996eddf32c143a3ac2dfc95396376b6d0947c0",
"size": "985",
"binary": false,
"copies": "1",
"ref": "refs/heads/development",
"path": "examples/rxd_net/init.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "AMPL",
"bytes": "25324"
},
{
"name": "Jupyter Notebook",
"bytes": "2588467"
},
{
"name": "Python",
"bytes": "1802020"
},
{
"name": "Shell",
"bytes": "915"
}
],
"symlink_target": ""
} |
from ._monitor import TMonitor, TqdmSynchronisationWarning
from ._tqdm_pandas import tqdm_pandas
from .cli import main # TODO: remove in v5.0.0
from .gui import tqdm as tqdm_gui # TODO: remove in v5.0.0
from .gui import trange as tgrange # TODO: remove in v5.0.0
from .std import (
TqdmDeprecationWarning, TqdmExperimentalWarning, TqdmKeyError, TqdmMonitorWarning,
TqdmTypeError, TqdmWarning, tqdm, trange)
from .version import __version__
# Public API of the package; tqdm_notebook/tnrange are deprecated shims
# retained for backwards compatibility until v5.0.0.
__all__ = ['tqdm', 'tqdm_gui', 'trange', 'tgrange', 'tqdm_pandas',
           'tqdm_notebook', 'tnrange', 'main', 'TMonitor',
           'TqdmTypeError', 'TqdmKeyError',
           'TqdmWarning', 'TqdmDeprecationWarning',
           'TqdmExperimentalWarning',
           'TqdmMonitorWarning', 'TqdmSynchronisationWarning',
           '__version__']
def tqdm_notebook(*args, **kwargs):  # pragma: no cover
    """Deprecated alias; see tqdm.notebook.tqdm for full documentation"""
    from warnings import warn

    from .notebook import tqdm as notebook_tqdm
    deprecation_msg = ("This function will be removed in tqdm==5.0.0\n"
                       "Please use `tqdm.notebook.tqdm` instead of `tqdm.tqdm_notebook`")
    warn(deprecation_msg, TqdmDeprecationWarning, stacklevel=2)
    return notebook_tqdm(*args, **kwargs)
def tnrange(*args, **kwargs):  # pragma: no cover
    """
    A shortcut for `tqdm.notebook.tqdm(xrange(*args), **kwargs)`.
    On Python3+, `range` is used instead of `xrange`.
    """
    from warnings import warn

    from .notebook import trange as notebook_trange
    deprecation_msg = "Please use `tqdm.notebook.trange` instead of `tqdm.tnrange`"
    warn(deprecation_msg, TqdmDeprecationWarning, stacklevel=2)
    return notebook_trange(*args, **kwargs)
| {
"content_hash": "56463043f079d5eb795f19d52e586f9a",
"timestamp": "",
"source": "github",
"line_count": 41,
"max_line_length": 86,
"avg_line_length": 39.97560975609756,
"alnum_prop": 0.6741915802318487,
"repo_name": "kayhayen/Nuitka",
"id": "a021d16e9a54eb18384d2c678675eca917753848",
"size": "1639",
"binary": false,
"copies": "5",
"ref": "refs/heads/develop",
"path": "nuitka/build/inline_copy/tqdm/tqdm/__init__.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "1868"
},
{
"name": "C",
"bytes": "617681"
},
{
"name": "C++",
"bytes": "149777"
},
{
"name": "Python",
"bytes": "6603718"
},
{
"name": "Shell",
"bytes": "1088"
}
],
"symlink_target": ""
} |
import unittest
from wsd.algorithm import LinkDetector
from wsd.tests.database.databasemocks import *
class LinkDetectorTest(unittest.TestCase):
    """Tests for LinkDetector.detect_links using mocked term statistics.

    The WorkViewMock's `occurrences` map controls how often each phrase
    has been seen and how often it appeared as a link.
    """
    def setUp(self):
        self._view = WorkViewMock()
    def _detect_links(self, text):
        """Wrap *text* in a minimal article dict, run detection, return it."""
        # Fix: removed the unused local `links = []` from the original.
        article = {
            'id': 1,
            'title': 'myArticle',
            'text': text,
            'links': []
        }
        detector = LinkDetector(self._view)
        detector.detect_links(article)
        return article
    def test_single_link(self):
        self._view.occurrences['term'] = { 'occurrences': 7, 'as_link': 3 }
        self._view.occurrences['another'] = { 'occurrences': 100, 'as_link': 0 }
        article = self._detect_links('Here is another term.')
        self.assertEqual(article['text'], 'Here is another [[term]].')
        self.assertEqual(len(article['links']), 1)
        self.assertEqual(article['links'][0], { 'target_article_id': None, 'target_article_name': None, 'phrase': 'term' })
    def test_multiple_links(self):
        self._view.occurrences['term'] = { 'occurrences': 7, 'as_link': 3 }
        self._view.occurrences['is another'] = { 'occurrences': 100, 'as_link': 3 }
        article = self._detect_links('Here is another term.')
        self.assertEqual(article['text'], 'Here [[is another]] [[term]].')
        self.assertEqual(len(article['links']), 2)
        self.assertEqual(article['links'][0], { 'target_article_id': None, 'target_article_name': None, 'phrase': 'is another' })
        self.assertEqual(article['links'][1], { 'target_article_id': None, 'target_article_name': None, 'phrase': 'term' })
    def test_threshold(self):
        # A phrase rarely used as a link must not be linked.
        self._view.occurrences['term'] = { 'occurrences': 100000, 'as_link': 2 }
        article = self._detect_links('Here is another term.')
        self.assertEqual(article['text'], 'Here is another term.')
        self.assertEqual(len(article['links']), 0)
    def test_encapsulated_link(self):
        # A longer phrase containing a shorter linkable phrase wins.
        self._view.occurrences['term'] = { 'occurrences': 7, 'as_link': 3 }
        self._view.occurrences['encapsulated term'] = { 'occurrences': 10, 'as_link': 10 }
        article = self._detect_links('Here is another encapsulated term.')
        self.assertEqual(article['text'], 'Here is another [[encapsulated term]].')
        self.assertEqual(len(article['links']), 1)
        self.assertEqual(article['links'][0], { 'target_article_id': None, 'target_article_name': None, 'phrase': 'encapsulated term' })
"content_hash": "82a52de374e1ae0ab3077eff3f44bbe3",
"timestamp": "",
"source": "github",
"line_count": 54,
"max_line_length": 136,
"avg_line_length": 46.129629629629626,
"alnum_prop": 0.6057808109193095,
"repo_name": "plaufer/wikiwsd",
"id": "4561f03f406666df6a9b5ace6fbd254241b75711",
"size": "2491",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "wsd/tests/algorithm/linkdetectortest.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "195255"
}
],
"symlink_target": ""
} |
import sys
import atrshmlog

# Attach to the shared-memory log; result holds the attach return value.
result = atrshmlog.attach()

version = atrshmlog.get_version()
minorversion = atrshmlog.get_minor_version()
patchversion = atrshmlog.get_patch_version()

# NOTE(review): all three labels read 'version :'; the second and third
# presumably should say 'minorversion'/'patchversion' -- confirm against
# any expected-output comparison before changing, as this is a test script.
print('version : ' + str(version) + ' : ')
print('version : ' + str(minorversion) + ' : ')
print('version : ' + str(patchversion) + ' : ')
print (' ')
exit(0)
# end of test
| {
"content_hash": "81bc33ad6667c37600497e35a71d8d2a",
"timestamp": "",
"source": "github",
"line_count": 22,
"max_line_length": 47,
"avg_line_length": 16.5,
"alnum_prop": 0.6556473829201102,
"repo_name": "atrsoftgmbh/atrshmlog",
"id": "5f9adac5033d1e53624488ee10f44f55af63a3b6",
"size": "493",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "python/src/tests/t_version.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "Assembly",
"bytes": "1262"
},
{
"name": "Batchfile",
"bytes": "15450"
},
{
"name": "C",
"bytes": "5425692"
},
{
"name": "C++",
"bytes": "183835"
},
{
"name": "CSS",
"bytes": "50850"
},
{
"name": "HTML",
"bytes": "451865"
},
{
"name": "Java",
"bytes": "896522"
},
{
"name": "JavaScript",
"bytes": "16280"
},
{
"name": "POV-Ray SDL",
"bytes": "1092"
},
{
"name": "Perl",
"bytes": "222533"
},
{
"name": "Python",
"bytes": "35540"
},
{
"name": "Roff",
"bytes": "350790"
},
{
"name": "Ruby",
"bytes": "33603"
},
{
"name": "Shell",
"bytes": "989385"
},
{
"name": "Tcl",
"bytes": "1071"
}
],
"symlink_target": ""
} |
#
# localDocker.py - Implements the Tango VMMS interface to run Tango jobs in
# docker containers. In this context, VMs are docker containers.
#
import random
import subprocess
import re
import time
import logging
import threading
import os
import sys
import shutil
import config
from tangoObjects import TangoMachine
def timeout(command, time_out=1):
    """timeout - Run a unix command with a timeout. Return -1 on
    timeout, otherwise return the return value from the command, which
    is typically 0 for success, 1-255 for failure.

    command -- argv list for the child process.
    time_out -- wall-clock limit in seconds, polled at
        config.Config.TIMER_POLL_INTERVAL granularity.
    """
    # Launch the command with all output discarded.  Fix: use
    # subprocess.DEVNULL instead of open("/dev/null", "w"), which leaked
    # a file handle on every call (the file object was never closed).
    p = subprocess.Popen(
        command, stdout=subprocess.DEVNULL, stderr=subprocess.STDOUT
    )

    # Wait for the command to complete
    t = 0.0
    while t < time_out and p.poll() is None:
        time.sleep(config.Config.TIMER_POLL_INTERVAL)
        t += config.Config.TIMER_POLL_INTERVAL

    # Determine why the while loop terminated
    if p.poll() is None:
        # Still running at the deadline: hard-kill and report timeout.
        try:
            os.kill(p.pid, 9)
        except OSError:
            pass
        returncode = -1
    else:
        returncode = p.poll()
    return returncode
def timeoutWithReturnStatus(command, time_out, returnValue=0):
    """timeoutWithReturnStatus - Run a Unix command with a timeout,
    until the expected value is returned by the command; On timeout,
    return last error code obtained from the command.

    command -- argv list for the child process.
    time_out -- overall deadline in seconds.
    returnValue -- exit status considered successful.
    """
    # Fix: use subprocess.DEVNULL instead of an unclosed open("/dev/null").
    p = subprocess.Popen(
        command, stdout=subprocess.DEVNULL, stderr=subprocess.STDOUT
    )
    # Fix: 'ret' was unbound (NameError) when time_out <= 0; initialize it
    # so the trailing return is always valid.
    ret = None
    t = 0.0
    while t < time_out:
        ret = p.poll()
        if ret is None:
            # Command still running: wait one poll interval.
            time.sleep(config.Config.TIMER_POLL_INTERVAL)
            t += config.Config.TIMER_POLL_INTERVAL
        elif ret == returnValue:
            return ret
        else:
            # Unexpected exit status: relaunch the command and keep
            # polling until the deadline expires.
            p = subprocess.Popen(
                command, stdout=subprocess.DEVNULL, stderr=subprocess.STDOUT
            )
    return ret
#
# User defined exceptions
#
class LocalDocker(object):
    """Tango VMMS implementation backed by local docker containers.

    A "VM" here is a docker container; job files are exchanged through a
    per-instance host directory mounted into the container at /home/mount.
    """
    def __init__(self):
        """Checks if the machine is ready to run docker containers.
        Initialize boot2docker if running on OS X.
        """
        try:
            self.log = logging.getLogger("LocalDocker")
            # Check import docker constants are defined in config
            if len(config.Config.DOCKER_VOLUME_PATH) == 0:
                raise Exception("DOCKER_VOLUME_PATH not defined in config.")
        except Exception as e:
            self.log.error(str(e))
            exit(1)
    def instanceName(self, id, name):
        """instanceName - Constructs a VM instance name. Always use
        this function when you need a VM instance name. Never generate
        instance names manually.
        """
        return "%s-%s-%s" % (config.Config.PREFIX, id, name)
    def getVolumePath(self, instanceName):
        # Host-side directory that backs the container's mounted volume.
        volumePath = config.Config.DOCKER_VOLUME_PATH
        # Last empty string to cause trailing '/'
        volumePath = os.path.join(volumePath, instanceName, "")
        return volumePath
    def getDockerVolumePath(self, dockerPath, instanceName):
        # Like getVolumePath, but rooted at an explicit docker host path
        # (used when Tango itself runs inside a container).
        # Last empty string to cause trailing '/'
        volumePath = os.path.join(dockerPath, instanceName, "")
        return volumePath
    def domainName(self, vm):
        """Returns the domain name that is stored in the vm
        instance.
        """
        return vm.domain_name
    #
    # VMMS API functions
    #
    def initializeVM(self, vm):
        """initializeVM - Nothing to do for initializeVM"""
        return vm
    def waitVM(self, vm, max_secs):
        """waitVM - Nothing to do for waitVM"""
        return
    def copyIn(self, vm, inputFiles):
        """copyIn - Create a directory to be mounted as a volume
        for the docker containers. Copy input files to this directory.
        """
        instanceName = self.instanceName(vm.id, vm.image)
        volumePath = self.getVolumePath(instanceName)
        # Create a fresh volume
        os.makedirs(volumePath)
        for file in inputFiles:
            # Create output directory if it does not exist
            os.makedirs(os.path.dirname(volumePath), exist_ok=True)
            shutil.copy(file.localFile, volumePath + file.destFile)
            self.log.debug(
                "Copied in file %s to %s" % (file.localFile, volumePath + file.destFile)
            )
        return 0
    def runJob(self, vm, runTimeout, maxOutputFileSize):
        """runJob - Run a docker container by doing the follows:
        - mount directory corresponding to this job to /home/autolab
        in the container
        - run autodriver with corresponding ulimits and timeout as
        autolab user
        """
        instanceName = self.instanceName(vm.id, vm.image)
        volumePath = self.getVolumePath(instanceName)
        # When Tango runs inside docker, translate to the host's path.
        if os.getenv("DOCKER_TANGO_HOST_VOLUME_PATH"):
            volumePath = self.getDockerVolumePath(
                os.getenv("DOCKER_TANGO_HOST_VOLUME_PATH"), instanceName
            )
        args = ["docker", "run", "--name", instanceName, "-v"]
        args = args + ["%s:%s" % (volumePath, "/home/mount")]
        args = args + [vm.image]
        args = args + ["sh", "-c"]
        # Run the grader as the unprivileged 'autolab' user with ulimits.
        autodriverCmd = (
            "autodriver -u %d -f %d -t %d -o %d autolab > output/feedback 2>&1"
            % (
                config.Config.VM_ULIMIT_USER_PROC,
                config.Config.VM_ULIMIT_FILE_SIZE,
                runTimeout,
                config.Config.MAX_OUTPUT_FILE_SIZE,
            )
        )
        args = args + [
            'cp -r mount/* autolab/; su autolab -c "%s"; \
            cp output/feedback mount/feedback'
            % autodriverCmd
        ]
        self.log.debug("Running job: %s" % str(args))
        # Allow twice the job timeout for container startup/teardown.
        ret = timeout(args, runTimeout * 2)
        self.log.debug("runJob returning %d" % ret)
        return ret
    def copyOut(self, vm, destFile):
        """copyOut - Copy the autograder feedback from container to
        destFile on the Tango host. Then, destroy that container.
        Containers are never reused.
        """
        instanceName = self.instanceName(vm.id, vm.image)
        volumePath = self.getVolumePath(instanceName)
        shutil.move(volumePath + "feedback", destFile)
        self.log.debug("Copied feedback file to %s" % destFile)
        self.destroyVM(vm)
        return 0
    def destroyVM(self, vm):
        """destroyVM - Delete the docker container."""
        instanceName = self.instanceName(vm.id, vm.image)
        volumePath = self.getVolumePath("")
        # Do a hard kill on corresponding docker container.
        # Return status does not matter.
        timeout(["docker", "rm", "-f", instanceName], config.Config.DOCKER_RM_TIMEOUT)
        # Destroy corresponding volume if it exists.
        if instanceName in os.listdir(volumePath):
            shutil.rmtree(volumePath + instanceName)
            self.log.debug("Deleted volume %s" % instanceName)
        return
    def safeDestroyVM(self, vm):
        """safeDestroyVM - Delete the docker container and make
        sure it is removed.
        """
        start_time = time.time()
        while self.existsVM(vm):
            if time.time() - start_time > config.Config.DESTROY_SECS:
                self.log.error("Failed to safely destroy container %s" % vm.name)
                return
            self.destroyVM(vm)
        return
    def getVMs(self):
        """getVMs - Executes and parses `docker ps`. This function
        is a lot of parsing and can break easily.
        """
        # Get all volumes of docker containers
        machines = []
        volumePath = self.getVolumePath("")
        for volume in os.listdir(volumePath):
            if re.match("%s-" % config.Config.PREFIX, volume):
                machine = TangoMachine()
                machine.vmms = "localDocker"
                machine.name = volume
                # Volume names follow PREFIX-id-image (see instanceName).
                volume_l = volume.split("-")
                machine.id = volume_l[1]
                machine.image = volume_l[2]
                machines.append(machine)
        return machines
    def existsVM(self, vm):
        """existsVM - Executes `docker inspect CONTAINER`, which returns
        a non-zero status upon not finding a container.
        """
        # NOTE(review): builds the name from vm.name while copyIn/runJob/
        # destroyVM use vm.image -- confirm these agree for live VMs.
        instanceName = self.instanceName(vm.id, vm.name)
        ret = timeout(["docker", "inspect", instanceName])
        return ret == 0
    def getImages(self):
        """getImages - Executes `docker images` and returns a list of
        images that can be used to boot a docker container with. This
        function is a lot of parsing and so can break easily.
        """
        result = set()
        # NOTE(review): 'cmd' is unused; the command string is passed
        # literally to check_output below.
        cmd = "docker images"
        o = subprocess.check_output("docker images", shell=True).decode("utf-8")
        o_l = o.split("\n")
        # Drop the trailing empty line and the header row.
        o_l.pop()
        o_l.reverse()
        o_l.pop()
        for row in o_l:
            row_l = row.split(" ")
            # Strip any registry prefix from the repository name.
            result.add(re.sub(r".*/([^/]*)", r"\1", row_l[0]))
        return list(result)
| {
"content_hash": "e42ba039667ac5dfcfd196d713edcd18",
"timestamp": "",
"source": "github",
"line_count": 264,
"max_line_length": 88,
"avg_line_length": 34.06818181818182,
"alnum_prop": 0.589615299088281,
"repo_name": "autolab/Tango",
"id": "e2bc6e1ba2bcc7269365e3f2cf6353185b7bb009",
"size": "8994",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "vmms/localDocker.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "16426"
},
{
"name": "Dockerfile",
"bytes": "2480"
},
{
"name": "Makefile",
"bytes": "604"
},
{
"name": "Python",
"bytes": "181372"
},
{
"name": "Shell",
"bytes": "4806"
}
],
"symlink_target": ""
} |
"""Urls which need to be loaded at root level."""
from django.conf.urls import *
# Admin-only URL for fetching the options of a configurable product.
adminpatterns = patterns('',
    (r'^admin/product/configurableproduct/(?P<id>\d+)/getoptions/',
        'product.views.get_configurable_product_options', {},
        'satchmo_admin_configurableproduct'),
)
# Inventory management views (old-style Django string view references,
# resolved against the 'product.views.adminviews' module prefix).
adminpatterns += patterns('product.views.adminviews',
    (r'^admin/inventory/edit/$',
        'edit_inventory', {}, 'satchmo_admin_edit_inventory'),
    (r'^inventory/export/$',
        'export_products', {}, 'satchmo_admin_product_export'),
    (r'^inventory/import/$',
        'import_products', {}, 'satchmo_admin_product_import'),
#    (r'^inventory/report/$',
#        'product_active_report', {}, 'satchmo_admin_product_report'),
    (r'^admin/(?P<product_id>\d+)/variations/$',
        'variation_manager', {}, 'satchmo_admin_variation_manager'),
    (r'^admin/variations/$',
        'variation_list', {}, 'satchmo_admin_variation_list'),
)
| {
"content_hash": "9d234d223e56601aabaf1f08db4936f5",
"timestamp": "",
"source": "github",
"line_count": 23,
"max_line_length": 71,
"avg_line_length": 41.21739130434783,
"alnum_prop": 0.6244725738396625,
"repo_name": "russellmayhew/satchmo",
"id": "f68082d05934768eaba017d198865e3e6f44591a",
"size": "948",
"binary": false,
"copies": "6",
"ref": "refs/heads/master",
"path": "satchmo/apps/product/urls/base.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "26551"
},
{
"name": "JavaScript",
"bytes": "73019"
},
{
"name": "Makefile",
"bytes": "2272"
},
{
"name": "Python",
"bytes": "1908369"
}
],
"symlink_target": ""
} |
from google.appengine.ext import db
class Post(db.Model):
_use_memcache = True
_use_cache = True
stub = db.StringProperty(indexed=True)
title = db.StringProperty(indexed=True)
body = db.TextProperty()
publish_date = db.DateProperty(auto_now_add=True)
created = db.DateTimeProperty(auto_now_add=True)
created_by = db.UserProperty(auto_current_user_add=True)
modified = db.DateTimeProperty(auto_now=True)
modified_by = db.UserProperty(auto_current_user=True)
@classmethod
def find_by_properties(cls, **kwargs):
return cls.find_all_by_properties(**kwargs).get()
@classmethod
def find_all_by_properties(cls, **kwargs):
query = cls.all()
for name, value in kwargs.items():
query = query.filter('{prop_name} = '.format(prop_name=name), value)
return query
| {
"content_hash": "7e0ff435e84e732a60a2031fe5a2874f",
"timestamp": "",
"source": "github",
"line_count": 27,
"max_line_length": 80,
"avg_line_length": 31.703703703703702,
"alnum_prop": 0.6647196261682243,
"repo_name": "iandouglas/flask-gae-skeleton",
"id": "01df2d988b3a1aff384caaaebd879e04a6a1df03",
"size": "872",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "application/models/post.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "5264"
},
{
"name": "HTML",
"bytes": "7302"
},
{
"name": "Python",
"bytes": "18336"
}
],
"symlink_target": ""
} |
"""Structural variant detection with GRIDSS
The Genomic Rearrangement IDentification Software Suite
https://github.com/PapenfussLab/gridss
"""
import os
import toolz as tz
from bcbio import utils
from bcbio.distributed.transaction import file_transaction
from bcbio.pipeline import datadict as dd
from bcbio.pipeline import config_utils
from bcbio.provenance import do
from bcbio.structural import shared as sshared
from bcbio.variation import effects, vcfutils
def run(items, background=None):
    """Perform detection of structural variations with GRIDSS.

    (Docstring previously said "Manta" -- this module runs GRIDSS.)
    For tumor/normal pairs the tumor is the input and the normal the
    background; otherwise inputs/background come from find_case_control.
    The GRIDSS VCF, snpEff-annotated when possible, is appended to each
    sample's "sv" list.
    """
    paired = vcfutils.get_paired(items)
    if paired:
        inputs = [paired.tumor_data]
        background = [paired.normal_data] if paired.normal_bam else []
    else:
        assert not background
        inputs, background = sshared.find_case_control(items)
    work_dir = _sv_workdir(inputs[0])
    variant_file = _run_gridss(inputs, background, work_dir)
    out = []
    for data in items:
        sample_file = variant_file
        if "sv" not in data:
            data["sv"] = []
        # Fall back to the unannotated VCF if snpEff annotation returns None.
        effects_vcf, _ = effects.add_to_vcf(sample_file, data, "snpeff")
        data["sv"].append({"variantcaller": "gridss",
                           "vrn_file": effects_vcf or sample_file})
        out.append(data)
    return out
def _run_gridss(inputs, background, work_dir):
    """Run GRIDSS CallVariants over the case and control BAMs.

    Returns the bgzipped/tabix-indexed VCF path; skips the run when the
    output file (plain or gzipped) already exists.
    """
    out_file = os.path.join(work_dir, "%s-gridss.sv.vcf" % (dd.get_batch(inputs[0]) or
                                                            dd.get_sample_name(inputs[0])))
    if not utils.file_exists(out_file) and not utils.file_exists(out_file + ".gz"):
        with file_transaction(inputs[0], out_file) as tx_out_file:
            # htsjdk system properties: create indices and use async I/O.
            htsjdk_opts = ["-Dsamjdk.create_index=true", "-Dsamjdk.use_async_io_read_samtools=true",
                           "-Dsamjdk.use_async_io_write_samtools=true", "-Dsamjdk.use_async_io_write_tribble=true"]
            cores = dd.get_cores(inputs[0])
            resources = config_utils.get_resources("gridss", inputs[0]["config"])
            jvm_opts = resources.get("jvm_opts", ["-Xms750m", "-Xmx4g"])
            # Scale heap with core count, then steer clear of the 32-48Gb
            # range GRIDSS advises against (see _finalize_memory).
            jvm_opts = config_utils.adjust_opts(jvm_opts, {"algorithm": {"memory_adjust":
                                                                         {"direction": "increase",
                                                                          "magnitude": cores}}})
            jvm_opts = _finalize_memory(jvm_opts)
            tx_ref_file = _setup_reference_files(inputs[0], os.path.dirname(tx_out_file))
            blacklist_bed = sshared.prepare_exclude_file(inputs + background, out_file)
            cmd = ["gridss"] + jvm_opts + htsjdk_opts + ["gridss.CallVariants"] + \
                  ["THREADS=%s" % cores,
                   "TMP_DIR=%s" % os.path.dirname(tx_out_file), "WORKING_DIR=%s" % os.path.dirname(tx_out_file),
                   "OUTPUT=%s" % tx_out_file,
                   "ASSEMBLY=%s" % tx_out_file.replace(".sv.vcf", ".gridss.assembly.bam"),
                   "REFERENCE_SEQUENCE=%s" % tx_ref_file, "BLACKLIST=%s" % blacklist_bed]
            # One INPUT/INPUT_LABEL pair per case and control BAM.
            for data in inputs + background:
                cmd += ["INPUT=%s" % dd.get_align_bam(data), "INPUT_LABEL=%s" % dd.get_sample_name(data)]
            exports = utils.local_path_export()
            cmd = exports + " ".join(cmd)
            do.run(cmd, "GRIDSS SV analysis")
    return vcfutils.bgzip_and_index(out_file, inputs[0]["config"])
def _finalize_memory(jvm_opts):
"""GRIDSS does not recommend setting memory between 32 and 48Gb.
https://github.com/PapenfussLab/gridss#memory-usage
"""
avoid_min = 32
avoid_max = 48
out_opts = []
for opt in jvm_opts:
if opt.startswith("-Xmx"):
spec = opt[4:]
val = int(spec[:-1])
mod = spec[-1]
if mod.upper() == "M":
adjust = 1024
min_val = avoid_min * 1024
max_val = avoid_max * 1024
else:
adjust = 1
min_val, max_val = avoid_min, avoid_max
if val >= min_val and val < max_val:
val = min_val - adjust
opt = "%s%s%s" % (opt[:4], val, mod)
out_opts.append(opt)
return out_opts
def _setup_reference_files(data, tx_out_dir):
    """Create a reference directory with fasta and bwa indices.
    GRIDSS requires all files in a single directory, so setup with symlinks.
    This needs bwa aligner indices available, which we ensure with `get_aligner_with_aliases`
    during YAML sample setup.

    Returns the path of the symlinked fasta inside the new directory.
    """
    aligner = dd.get_aligner(data) or "bwa"
    out_dir = utils.safe_makedir(os.path.join(tx_out_dir, aligner))
    ref_fasta = dd.get_ref_file(data)
    # Fasta, its .fai index and the sequence dictionary, plus all aligner
    # index files, are linked side by side into out_dir.
    ref_files = ["%s%s" % (utils.splitext_plus(ref_fasta)[0], ext) for ext in [".fa", ".fa.fai", ".dict"]]
    for orig_file in ref_files + tz.get_in(("reference", aligner, "indexes"), data):
        utils.symlink_plus(orig_file, os.path.join(out_dir, os.path.basename(orig_file)))
    return os.path.join(out_dir, os.path.basename(ref_fasta))
def _sv_workdir(data):
    """Return the per-sample working directory for GRIDSS output."""
    work_base = data["dirs"]["work"]
    sample = dd.get_sample_name(data)
    return os.path.join(work_base, "structural", sample, "gridss")
| {
"content_hash": "8e4aafc3491f1ae5338b0338b7b7c041",
"timestamp": "",
"source": "github",
"line_count": 113,
"max_line_length": 115,
"avg_line_length": 45.796460176991154,
"alnum_prop": 0.5816425120772947,
"repo_name": "vladsaveliev/bcbio-nextgen",
"id": "7ae8044827985679a576bdcaf22b2ba30d5f7727",
"size": "5175",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "bcbio/structural/gridss.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Dockerfile",
"bytes": "3620"
},
{
"name": "Lua",
"bytes": "7695"
},
{
"name": "Python",
"bytes": "2544841"
},
{
"name": "Ruby",
"bytes": "624"
},
{
"name": "Shell",
"bytes": "16730"
}
],
"symlink_target": ""
} |
"""Retrieves urls that have content keywords related to a given website.
Tags: TargetingIdeaService.get
"""
__author__ = ('api.kwinter@gmail.com (Kevin Winter)'
'Joseph DiLallo')
from googleads import adwords
PAGE_SIZE = 100
def main(client):
  """Page through placement ideas related to a hard-coded URL and print them.

  client: an initialized adwords.AdWordsClient.
  """
  # Initialize appropriate service.
  targeting_idea_service = client.GetService(
      'TargetingIdeaService', version='v201309')

  # Construct selector object and retrieve related placements.
  offset = 0
  url = 'http://mars.google.com'
  selector = {
      'searchParameters': [{
          'xsi_type': 'RelatedToUrlSearchParameter',
          'urls': [url],
          'includeSubUrls': 'false'
      }],
      'ideaType': 'PLACEMENT',
      'requestType': 'IDEAS',
      'requestedAttributeTypes': ['SAMPLE_URL'],
      'paging': {
          'startIndex': str(offset),
          'numberResults': str(PAGE_SIZE)
      }
  }
  more_pages = True
  while more_pages:
    page = targeting_idea_service.get(selector)

    # Display results.
    if 'entries' in page:
      for result in page['entries']:
        result = result['data'][0]['value']
        print 'Related content keywords were found at \'%s\' url.' % result
      print
      print ('Total urls found with content keywords related to keywords at '
             '\'%s\': %s' % (url, page['totalNumEntries']))
    else:
      print 'No content keywords were found at \'%s\'.' % url
    # Advance the paging window and stop once all entries are consumed.
    offset += PAGE_SIZE
    selector['paging']['startIndex'] = str(offset)
    more_pages = offset < int(page['totalNumEntries'])
# Script entry point.
if __name__ == '__main__':
  # Initialize client object.
  adwords_client = adwords.AdWordsClient.LoadFromStorage()
  main(adwords_client)
| {
"content_hash": "dbce4c4778d01f58e2771a7a259eb092",
"timestamp": "",
"source": "github",
"line_count": 60,
"max_line_length": 77,
"avg_line_length": 28.016666666666666,
"alnum_prop": 0.6234384295062463,
"repo_name": "jdilallo/jdilallo-test",
"id": "a40aac4cddea4ebf0cab773e3d274b2bb2050380",
"size": "2299",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "examples/adwords/v201309/optimization/get_placement_ideas.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "722738"
}
],
"symlink_target": ""
} |
import logging
import random
from collections import namedtuple
import gevent
from gevent.event import AsyncResult
from ethereum import slogging
from raiden.tasks import (
StartMediatedTransferTask,
MediateTransferTask,
EndMediatedTransferTask,
ExchangeTask,
)
from raiden.utils import pex, sha3
log = slogging.get_logger(__name__)  # pylint: disable=invalid-name

# A pending asset exchange between two nodes.
Exchange = namedtuple('Exchange', (
    'identifier',
    'from_asset',
    'from_amount',
    'from_nodeaddress',  # the node's address of the owner of the `from_asset`
    'to_asset',
    'to_amount',
    'to_nodeaddress',  # the node's address of the owner of the `to_asset`
))
# Lookup key for pending exchanges: the offered asset and amount.
ExchangeKey = namedtuple('ExchangeKey', (
    'from_asset',
    'from_amount',
))
class UnknownAddress(Exception):
    """Exception for a message from an unknown address."""
    pass
class TransferWhenClosed(Exception):
    """Exception for a transfer attempted against a closed channel."""
    pass
class UnknownAssetAddress(Exception):
    # Formats the offending asset address (via pex) into the message.
    def __init__(self, address):
        Exception.__init__(
            self,
            'Message with unknown asset address {} received'.format(pex(address))
        )
class TransferManager(object):
    """ Manages all transfers done through this node. """
    def __init__(self, assetmanager):
        self.assetmanager = assetmanager
        self.transfertasks = dict()  # hashlock -> in-flight transfer task
        self.exchanges = dict()  #: mapping for pending exchanges
        self.endtask_transfer_mapping = dict()  # task -> transfer, consumed on completion
        self.on_task_completed_callbacks = list()
        self.on_result_callbacks = list()
# TODO: Move registration to raiden_service.py:Raiden. This is used to
# dispatch messages by hashlock and to expose callbacks to applications
# built on top of raiden, since hashlocks can be shared among assets this
# should be moved to an upper layer.
    def register_task_for_hashlock(self, task, hashlock):
        """ Register the task to receive messages based on hashlock.
        Registration is required otherwise the task won't receive any messages
        from the protocol, un-registering is done by the `on_hashlock_result`
        function.
        Note:
            Messages are dispatched solely on the hashlock value (being part of
            the message, eg. SecretRequest, or calculated from the message
            content, eg. RevealSecret), this means the sender needs to be
            checked for the received messages.
        """
        # One task per hashlock; a re-registration silently replaces the old task.
        self.transfertasks[hashlock] = task
def on_hashlock_result(self, hashlock, success):
""" Set the result for a transfer based on hashlock.
This function will also call the registered callbacks and de-register
the task.
"""
task = self.transfertasks[hashlock]
del self.transfertasks[hashlock]
callbacks_to_remove = list()
for callback in self.on_task_completed_callbacks:
result = callback(task, success)
if result is True:
callbacks_to_remove.append(callback)
for callback in callbacks_to_remove:
self.on_task_completed_callbacks.remove(callback)
if task in self.endtask_transfer_mapping:
if task in self.endtask_transfer_mapping:
transfer = self.endtask_transfer_mapping[task]
for callback in self.on_result_callbacks:
gevent.spawn(
callback(
transfer.asset,
transfer.recipient,
transfer.initiator,
transfer.transferred_amount,
hashlock
)
)
del self.endtask_transfer_mapping[task]
def register_callback_for_result(self, callback):
self.on_result_callbacks.append(callback)
def create_default_identifier(self, target):
"""
The default message identifier value is the first 8 bytes of the sha3 of:
- Our Address
- Our target address
- The asset address
- A random 8 byte number for uniqueness
"""
hash_ = sha3("{}{}{}{}".format(
self.assetmanager.raiden.address,
target,
self.assetmanager.asset_address,
random.randint(0, 18446744073709551614L)
))
return int(hash_[0:8].encode('hex'), 16)
def transfer_async(self, amount, target, identifier=None, callback=None):
""" Transfer `amount` between this node and `target`.
This method will start an asyncronous transfer, the transfer might fail
or succeed depending on a couple of factors:
- Existence of a path that can be used, through the usage of direct
or intermediary channels.
- Network speed, making the transfer suficiently fast so it doesn't
timeout.
"""
# Create a default identifier value
if identifier is None:
identifier = self.create_default_identifier(target)
direct_channel = self.assetmanager.partneraddress_channel.get(target)
if direct_channel:
async_result = self._direct_or_mediated_transfer(
amount,
identifier,
direct_channel,
callback,
)
return async_result
else:
async_result = self._mediated_transfer(
amount,
identifier,
target,
callback,
)
return async_result
def _direct_or_mediated_transfer(self, amount, identifier, direct_channel, callback):
""" Check the direct channel and if possible use it, otherwise start a
mediated transfer.
"""
if not direct_channel.isopen:
log.info(
'DIRECT CHANNEL %s > %s is closed',
pex(direct_channel.our_state.address),
pex(direct_channel.partner_state.address),
)
async_result = self._mediated_transfer(
amount,
identifier,
direct_channel.partner_state.address,
callback,
)
return async_result
elif amount > direct_channel.distributable:
log.info(
'DIRECT CHANNEL %s > %s doesnt have enough funds [%s]',
pex(direct_channel.our_state.address),
pex(direct_channel.partner_state.address),
amount,
)
async_result = self._mediated_transfer(
amount,
identifier,
direct_channel.partner_state.address,
callback,
)
return async_result
else:
direct_transfer = direct_channel.create_directtransfer(amount, identifier)
self.assetmanager.raiden.sign(direct_transfer)
direct_channel.register_transfer(direct_transfer)
if callback:
direct_channel.on_task_completed_callbacks.append(callback)
async_result = self.assetmanager.raiden.protocol.send_async(
direct_channel.partner_state.address,
direct_transfer,
)
return async_result
def _mediated_transfer(self, amount, identifier, target, callback):
asunc_result = AsyncResult()
task = StartMediatedTransferTask(
self.assetmanager.raiden,
self.assetmanager.asset_address,
amount,
identifier,
target,
asunc_result,
)
task.start()
if callback:
self.on_task_completed_callbacks.append(callback)
return asunc_result
def on_mediatedtransfer_message(self, transfer):
if transfer.sender not in self.assetmanager.partneraddress_channel:
if log.isEnabledFor(logging.WARN):
log.warn(
'Received mediated transfer message from unknown channel.'
'Sender: %s',
pex(transfer.sender),
)
raise UnknownAddress
raiden = self.assetmanager.raiden
asset_address = self.assetmanager.asset_address
channel = self.assetmanager.get_channel_by_partner_address(transfer.sender)
if not channel.isopen:
if log.isEnabledFor(logging.WARN):
log.warn(
'Received mediated transfer message from %s after channel closing',
pex(transfer.sender),
)
raise TransferWhenClosed
channel.register_transfer(transfer) # raises if the transfer is invalid
exchange_key = ExchangeKey(transfer.asset, transfer.lock.amount)
if exchange_key in self.exchanges:
exchange = self.exchanges[exchange_key]
if log.isEnabledFor(logging.DEBUG):
log.debug(
'EXCHANGE TRANSFER RECEIVED node:%s %s > %s hashlock:%s'
' from_asset:%s from_amount:%s to_asset:%s to_amount:%s [%s]',
pex(self.assetmanager.raiden.address),
pex(transfer.sender),
pex(self.assetmanager.raiden.address),
pex(transfer.lock.hashlock),
pex(exchange.from_asset),
exchange.from_amount,
pex(exchange.to_asset),
exchange.to_amount,
repr(transfer),
)
exchange_task = ExchangeTask(
raiden,
from_mediated_transfer=transfer,
to_asset=exchange.to_asset,
to_amount=exchange.to_amount,
target=exchange.from_nodeaddress,
)
exchange_task.start()
elif transfer.target == self.assetmanager.raiden.address:
if log.isEnabledFor(logging.DEBUG):
log.debug(
'MEDIATED TRANSFER RECEIVED node:%s %s > %s hashlock:%s [%s]',
pex(self.assetmanager.raiden.address),
pex(transfer.sender),
pex(self.assetmanager.raiden.address),
pex(transfer.lock.hashlock),
repr(transfer),
)
try:
self.assetmanager.raiden.message_for_task(
transfer,
transfer.lock.hashlock
)
except UnknownAddress:
# assumes that the registered task(s) tooks care of the message
# (used for exchanges)
secret_request_task = EndMediatedTransferTask(
raiden,
asset_address,
transfer,
)
secret_request_task.start()
else:
if log.isEnabledFor(logging.DEBUG):
log.debug(
'TRANSFER TO BE MEDIATED RECEIVED node:%s %s > %s hashlock:%s [%s]',
pex(self.assetmanager.raiden.address),
pex(transfer.sender),
pex(self.assetmanager.raiden.address),
pex(transfer.lock.hashlock),
repr(transfer),
)
transfer_task = MediateTransferTask(
raiden,
asset_address,
transfer,
0, # TODO: calculate the fee
)
transfer_task.start()
def on_directtransfer_message(self, transfer):
if transfer.sender not in self.assetmanager.partneraddress_channel:
if log.isEnabledFor(logging.WARN):
log.warn(
'Received direct transfer message from unknown sender %s',
pex(transfer.sender),
)
raise UnknownAddress
channel = self.assetmanager.partneraddress_channel[transfer.sender]
if not channel.isopen:
if log.isEnabledFor(logging.WARN):
log.warn(
'Received direct transfer message from %s after channel closing',
pex(transfer.sender),
)
raise TransferWhenClosed
channel.register_transfer(transfer)
def on_exchangerequest_message(self, message):
# if matches any own order
# if signed for me and fee:
# broadcast
pass
| {
"content_hash": "062d8ceaf067cde999483da0c9333db6",
"timestamp": "",
"source": "github",
"line_count": 357,
"max_line_length": 89,
"avg_line_length": 34.91596638655462,
"alnum_prop": 0.5604492579221821,
"repo_name": "tomaaron/raiden",
"id": "9f29dbf9ad860f06cc6e40ed2f41133872813a8e",
"size": "12489",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "raiden/transfermanager.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "3099"
},
{
"name": "JavaScript",
"bytes": "5202"
},
{
"name": "Makefile",
"bytes": "1978"
},
{
"name": "Python",
"bytes": "672885"
},
{
"name": "Shell",
"bytes": "4384"
}
],
"symlink_target": ""
} |
from graphviz import Digraph
import yaml
def isa(o, key):
    """Return o[key] when *key* is present (its truthiness decides), else False."""
    if key not in o:
        return False
    return o[key]
def add_node(g, name, **attrs):
    """Register *name* as a node on graph *g*, labelled with its own name."""
    g.node(name, label=name, **attrs)
def eprops(n):
    """Extract edge properties.

    A bare string is a node name with no extra attributes; a dict carries
    the node name under 'node' plus edge attributes (the dict is mutated:
    'node' is popped out of it).
    """
    if isinstance(n, dict):
        name = n.pop('node')
        return name, n
    if isinstance(n, basestring):
        return n, {}
    assert False, n
def add_edge(g, out, in_):
    """Connect *out* -> *in_* on graph *g*, forwarding per-endpoint attrs."""
    src, src_attrs = eprops(out)
    dst, dst_attrs = eprops(in_)
    attrs = src_attrs or dst_attrs
    # drop any 'style' key so it is not forwarded to g.edge
    attrs.pop('style', {})
    g.edge(src, dst, **attrs)
def build(desc, g):
    """Function to take a descriptor and populate a graph."""
    # First pass: create all nodes (plain strings, subgraphs, style entries,
    # and named node dicts) so the second pass can wire edges between any of
    # them regardless of declaration order.
    for node in desc:
        if isinstance(node, basestring):
            add_node(g, node)
        elif isinstance(node, list):
            pass
        elif isa(node, 'subgraph'):
            sg = Digraph()
            build(node['nodes'], sg)
            g.subgraph(sg)
        elif isa(node, 'style'):
            # e.g. {'style': {'node': {...}}} updates g.node_attr, etc.
            for k, v in node['style'].items():
                getattr(g, '%s_attr' % k).update(**v)
        else:
            # NOTE: pop mutates the descriptor; 'attrs' is consumed here.
            add_node(g, node['name'], **node.pop('attrs', {}))
    # Second pass: add the edges declared on named nodes.
    for node in desc:
        if isinstance(node, dict) and 'name' in node:
            for e in node.get('out', []):
                add_edge(g, node['name'], e)
            for e in node.get('in', []):
                add_edge(g, e, node['name'])
| {
"content_hash": "0e02db855ac8039cd6528c1a18b69bce",
"timestamp": "",
"source": "github",
"line_count": 51,
"max_line_length": 62,
"avg_line_length": 26.431372549019606,
"alnum_prop": 0.5148367952522255,
"repo_name": "ninowalker/ymlgraph",
"id": "f48b867c6dca9054cc4efea548d5d9b7d5ae71bb",
"size": "1348",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "ymlgraph/transformer.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Makefile",
"bytes": "133"
},
{
"name": "Python",
"bytes": "4902"
}
],
"symlink_target": ""
} |
import os
import sys
from collections import OrderedDict
import yaml
from packaging.specifiers import SpecifierSet, InvalidSpecifier
from packaging.version import Version, InvalidVersion
from hokusai import CWD
from hokusai.lib.constants import YAML_HEADER
from hokusai.lib.exceptions import HokusaiError
from hokusai.version import VERSION
# Path of the user-level config file (~/.hokusai/config.yml); falls back to
# the filesystem root when $HOME is unset.
HOKUSAI_GLOBAL_CONFIG_FILE = os.path.join(os.environ.get('HOME', '/'), '.hokusai', 'config.yml')
class HokusaiGlobalConfig(object):
    """Read-only accessor for the user-level hokusai config file."""

    def is_present(self):
        """Return True when the global config file exists on disk."""
        return os.path.isfile(HOKUSAI_GLOBAL_CONFIG_FILE)

    def get(self, key, default=None, use_env=False, _type=str):
        """Return the configured value for *key*, or *default* when unset."""
        value = self._config_value_for(key, _type)
        return default if value is None else value

    def _config_value_for(self, key, _type):
        """Load the config file and extract *key*; None when file or key is missing.

        Raises HokusaiError when the stored value is not of *_type*.
        """
        try:
            with open(HOKUSAI_GLOBAL_CONFIG_FILE, 'r') as config_file:
                config_struct = yaml.safe_load(config_file.read())
            try:
                val = config_struct[key]
            except KeyError:
                return None
            if isinstance(val, _type):
                return val
            raise HokusaiError("Config key %s is not of %s" % (key, _type))
        except IOError:
            # No config file present at all.
            return None

    @property
    def kubectl_version(self):
        """Configured 'kubectl-version', if any."""
        return self.get('kubectl-version')

    @property
    def kubectl_config_file(self):
        """Configured 'kubectl-config-file', if any."""
        return self.get('kubectl-config-file')
# Module-level singleton used by the rest of hokusai.
global_config = HokusaiGlobalConfig()
| {
"content_hash": "0663b93681edf7403cb5bceaf8eae15d",
"timestamp": "",
"source": "github",
"line_count": 54,
"max_line_length": 96,
"avg_line_length": 25.666666666666668,
"alnum_prop": 0.6940836940836941,
"repo_name": "izakp/hokusai",
"id": "93e3bf10688d6b6e75e97f6a6e5ae59516604a19",
"size": "1386",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "hokusai/lib/global_config.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Dockerfile",
"bytes": "599"
},
{
"name": "Makefile",
"bytes": "3484"
},
{
"name": "Python",
"bytes": "134404"
}
],
"symlink_target": ""
} |
"""Tests for the attention layer."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from absl.testing import parameterized
import numpy as np
from tensorflow.python import keras
from tensorflow.python.keras import keras_parameterized
from tensorflow.python.keras.layers import multi_head_attention
from tensorflow.python.platform import test
# This decorator runs the test in V1, V2-Eager, and V2-Functional mode. It
# guarantees forward compatibility of this code for the V2 switchover.
@keras_parameterized.run_all_keras_modes
class MultiHeadAttentionTest(keras_parameterized.TestCase):
  """Covers masked and unmasked attention, attention scores, high-rank
  inputs, custom initializers, and dropout behavior of MultiHeadAttention."""

  @parameterized.named_parameters(
      ("key_value_same_proj", None, None, [40, 80]),
      ("key_value_different_proj", 32, 60, [40, 60]),
  )
  def test_non_masked_attention(self, value_dim, output_shape, output_dims):
    """Test that the attention layer can be created without a mask tensor."""
    test_layer = multi_head_attention.MultiHeadAttention(
        num_heads=12,
        key_dim=64,
        value_dim=value_dim,
        output_shape=output_shape)
    # Create a 3-dimensional input (the first dimension is implicit).
    query = keras.Input(shape=(40, 80))
    value = keras.Input(shape=(20, 80))
    output = test_layer(query=query, value=value)
    self.assertEqual(output.shape.as_list(), [None] + output_dims)

  def test_non_masked_self_attention(self):
    """Test with one input (self-attention) and no mask tensor."""
    test_layer = multi_head_attention.MultiHeadAttention(
        num_heads=12, key_dim=64)
    # Create a 3-dimensional input (the first dimension is implicit).
    query = keras.Input(shape=(40, 80))
    output = test_layer(query, query)
    self.assertEqual(output.shape.as_list(), [None, 40, 80])

  def test_attention_scores(self):
    """Test attention outputs with coefficients."""
    test_layer = multi_head_attention.MultiHeadAttention(
        num_heads=12, key_dim=64)
    # Create a 3-dimensional input (the first dimension is implicit).
    query = keras.Input(shape=(40, 80))
    output, coef = test_layer(query, query, return_attention_scores=True)
    self.assertEqual(output.shape.as_list(), [None, 40, 80])
    # Scores shape: (batch, heads, query_len, key_len).
    self.assertEqual(coef.shape.as_list(), [None, 12, 40, 40])

  def test_attention_scores_with_values(self):
    """Test attention outputs with coefficients."""
    test_layer = multi_head_attention.MultiHeadAttention(
        num_heads=12, key_dim=64)
    # Create a 3-dimensional input (the first dimension is implicit).
    query = keras.Input(shape=(40, 80))
    value = keras.Input(shape=(60, 80))
    output, coef = test_layer(query, value, return_attention_scores=True)
    self.assertEqual(output.shape.as_list(), [None, 40, 80])
    self.assertEqual(coef.shape.as_list(), [None, 12, 40, 60])

  @parameterized.named_parameters(("with_bias", True), ("no_bias", False))
  def test_masked_attention(self, use_bias):
    """Test with a mask tensor."""
    test_layer = multi_head_attention.MultiHeadAttention(
        num_heads=2, key_dim=2, use_bias=use_bias)
    # Create a 3-dimensional input (the first dimension is implicit).
    batch_size = 3
    query = keras.Input(shape=(4, 8))
    value = keras.Input(shape=(2, 8))
    mask_tensor = keras.Input(shape=(4, 2))
    output = test_layer(query=query, value=value, attention_mask=mask_tensor)
    # Create a model containing the test layer.
    model = keras.Model([query, value, mask_tensor], output)
    # Generate data for the input (non-mask) tensors.
    from_data = 10 * np.random.random_sample((batch_size, 4, 8))
    to_data = 10 * np.random.random_sample((batch_size, 2, 8))
    # Invoke the data with a random set of mask data. This should mask at least
    # one element.
    mask_data = np.random.randint(2, size=(batch_size, 4, 2))
    masked_output_data = model.predict([from_data, to_data, mask_data])
    # Invoke the same data, but with a null mask (where no elements are masked).
    null_mask_data = np.ones((batch_size, 4, 2))
    unmasked_output_data = model.predict([from_data, to_data, null_mask_data])
    # Because one data is masked and one is not, the outputs should not be the
    # same.
    self.assertNotAllClose(masked_output_data, unmasked_output_data)
    # Tests the layer with three inputs: Q, K, V.
    key = keras.Input(shape=(2, 8))
    output = test_layer(query, value=value, key=key, attention_mask=mask_tensor)
    model = keras.Model([query, value, key, mask_tensor], output)
    masked_output_data = model.predict([from_data, to_data, to_data, mask_data])
    unmasked_output_data = model.predict(
        [from_data, to_data, to_data, null_mask_data])
    # Because one data is masked and one is not, the outputs should not be the
    # same.
    self.assertNotAllClose(masked_output_data, unmasked_output_data)
    # With bias each internal Dense layer carries kernel + bias variables;
    # without bias, kernel only.
    if use_bias:
      self.assertLen(test_layer._query_dense.trainable_variables, 2)
      self.assertLen(test_layer._output_dense.trainable_variables, 2)
    else:
      self.assertLen(test_layer._query_dense.trainable_variables, 1)
      self.assertLen(test_layer._output_dense.trainable_variables, 1)

  def test_initializer(self):
    """Test with a specified initializer."""
    test_layer = multi_head_attention.MultiHeadAttention(
        num_heads=12,
        key_dim=64,
        kernel_initializer=keras.initializers.TruncatedNormal(stddev=0.02))
    # Create a 3-dimensional input (the first dimension is implicit).
    query = keras.Input(shape=(40, 80))
    output = test_layer(query, query)
    self.assertEqual(output.shape.as_list(), [None, 40, 80])

  def test_masked_attention_with_scores(self):
    """Test with a mask tensor."""
    test_layer = multi_head_attention.MultiHeadAttention(
        num_heads=2, key_dim=2)
    # Create a 3-dimensional input (the first dimension is implicit).
    batch_size = 3
    query = keras.Input(shape=(4, 8))
    value = keras.Input(shape=(2, 8))
    mask_tensor = keras.Input(shape=(4, 2))
    output = test_layer(query=query, value=value, attention_mask=mask_tensor)
    # Create a model containing the test layer.
    model = keras.Model([query, value, mask_tensor], output)
    # Generate data for the input (non-mask) tensors.
    from_data = 10 * np.random.random_sample((batch_size, 4, 8))
    to_data = 10 * np.random.random_sample((batch_size, 2, 8))
    # Invoke the data with a random set of mask data. This should mask at least
    # one element.
    mask_data = np.random.randint(2, size=(batch_size, 4, 2))
    masked_output_data = model.predict([from_data, to_data, mask_data])
    # Invoke the same data, but with a null mask (where no elements are masked).
    null_mask_data = np.ones((batch_size, 4, 2))
    unmasked_output_data = model.predict([from_data, to_data, null_mask_data])
    # Because one data is masked and one is not, the outputs should not be the
    # same.
    self.assertNotAllClose(masked_output_data, unmasked_output_data)
    # Create a model containing attention scores.
    output, scores = test_layer(
        query=query, value=value, attention_mask=mask_tensor,
        return_attention_scores=True)
    model = keras.Model([query, value, mask_tensor], [output, scores])
    masked_output_data_score, masked_score = model.predict(
        [from_data, to_data, mask_data])
    unmasked_output_data_score, unmasked_score = model.predict(
        [from_data, to_data, null_mask_data])
    self.assertNotAllClose(masked_output_data_score, unmasked_output_data_score)
    # The score-returning model must produce the same outputs as the plain one.
    self.assertAllClose(masked_output_data, masked_output_data_score)
    self.assertAllClose(unmasked_output_data, unmasked_output_data_score)
    self.assertNotAllClose(masked_score, unmasked_score)

  @parameterized.named_parameters(
      ("4d_inputs_1freebatch_mask2", [3, 4], [3, 2], [4, 2],
       (2,)), ("4d_inputs_1freebatch_mask3", [3, 4], [3, 2], [3, 4, 2], (2,)),
      ("4d_inputs_1freebatch_mask4", [3, 4], [3, 2], [3, 2, 4, 2],
       (2,)), ("4D_inputs_2D_attention", [3, 4], [3, 2], [3, 4, 3, 2], (1, 2)),
      ("5D_inputs_2D_attention", [5, 3, 4], [5, 3, 2], [3, 4, 3, 2], (2, 3)),
      ("5D_inputs_2D_attention_fullmask", [5, 3, 4], [5, 3, 2], [5, 3, 4, 3, 2],
       (2, 3)))
  def test_high_dim_attention(self, q_dims, v_dims, mask_dims, attention_axes):
    """Test with a mask tensor."""
    test_layer = multi_head_attention.MultiHeadAttention(
        num_heads=2, key_dim=2, attention_axes=attention_axes)
    batch_size, hidden_size = 3, 8
    # Generate data for the input (non-mask) tensors.
    query_shape = [batch_size] + q_dims + [hidden_size]
    value_shape = [batch_size] + v_dims + [hidden_size]
    mask_shape = [batch_size] + mask_dims
    query = 10 * np.random.random_sample(query_shape)
    value = 10 * np.random.random_sample(value_shape)
    # Invoke the data with a random set of mask data. This should mask at least
    # one element.
    mask_data = np.random.randint(2, size=mask_shape).astype("bool")
    # Invoke the same data, but with a null mask (where no elements are masked).
    null_mask_data = np.ones(mask_shape)
    # Because one data is masked and one is not, the outputs should not be the
    # same.
    query_tensor = keras.Input(query_shape[1:], name="query")
    value_tensor = keras.Input(value_shape[1:], name="value")
    mask_tensor = keras.Input(mask_shape[1:], name="mask")
    output = test_layer(query=query_tensor, value=value_tensor,
                        attention_mask=mask_tensor)
    model = keras.Model([query_tensor, value_tensor, mask_tensor], output)
    self.assertNotAllClose(
        model.predict([query, value, mask_data]),
        model.predict([query, value, null_mask_data]))

  def test_dropout(self):
    """Dropout should perturb outputs only in training mode."""
    test_layer = multi_head_attention.MultiHeadAttention(
        num_heads=2, key_dim=2, dropout=0.5)
    # Generate data for the input (non-mask) tensors.
    from_data = keras.backend.ones(shape=(32, 4, 8))
    to_data = keras.backend.ones(shape=(32, 2, 8))
    # NOTE(review): positional call — presumably (query, value, key,
    # attention_mask, return_attention_scores, training); confirm against the
    # layer's call signature.
    train_out = test_layer(from_data, to_data, None, None, None, True)
    test_out = test_layer(from_data, to_data, None, None, None, False)
    # Output should be close when not in training mode,
    # and should not be close when enabling dropout in training mode.
    self.assertNotAllClose(
        keras.backend.eval(train_out),
        keras.backend.eval(test_out))
class SubclassAttention(multi_head_attention.MultiHeadAttention):
  """Attention subclass that stubs out the attention computation.

  The overridden hooks make the "attention" output simply the value tensor,
  so tests can exercise the surrounding projection plumbing in isolation.
  """

  def _build_attention(self, qkv_rank):
    # No attention machinery is needed for the passthrough implementation.
    pass

  def _compute_attention(self,
                         query_tensor,
                         key_tensor,
                         value_tensor,
                         attention_mask=None,
                         training=None):
    # Pass values straight through; no attention scores are produced.
    return value_tensor, None
@keras_parameterized.run_all_keras_modes
class AttentionSubclassTest(keras_parameterized.TestCase):
  """Checks that MultiHeadAttention subclasses plug into Keras correctly."""

  def test_initializer(self):
    """The passthrough subclass preserves the query's output shape."""
    layer = SubclassAttention(num_heads=12, key_dim=64)
    # Create a 3-dimensional input (the first dimension is implicit).
    inputs = keras.Input(shape=(40, 80))
    result = layer(inputs, inputs)
    self.assertEqual(result.shape.as_list(), [None, 40, 80])
# Run the test suite when executed directly.
if __name__ == "__main__":
  test.main()
| {
"content_hash": "9740c401e7dd5807506b1e56d260a257",
"timestamp": "",
"source": "github",
"line_count": 258,
"max_line_length": 80,
"avg_line_length": 43.56201550387597,
"alnum_prop": 0.6683868671589999,
"repo_name": "aam-at/tensorflow",
"id": "4c957b8973bffdd20d63b25c5cf7936ff2699ad7",
"size": "11928",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "tensorflow/python/keras/layers/multi_head_attention_test.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Assembly",
"bytes": "3568"
},
{
"name": "Batchfile",
"bytes": "16049"
},
{
"name": "C",
"bytes": "784149"
},
{
"name": "C#",
"bytes": "8446"
},
{
"name": "C++",
"bytes": "69481042"
},
{
"name": "CMake",
"bytes": "204596"
},
{
"name": "Dockerfile",
"bytes": "73667"
},
{
"name": "Go",
"bytes": "1670128"
},
{
"name": "HTML",
"bytes": "4680118"
},
{
"name": "Java",
"bytes": "844222"
},
{
"name": "Jupyter Notebook",
"bytes": "1665601"
},
{
"name": "LLVM",
"bytes": "6536"
},
{
"name": "Makefile",
"bytes": "101287"
},
{
"name": "Objective-C",
"bytes": "104023"
},
{
"name": "Objective-C++",
"bytes": "182460"
},
{
"name": "PHP",
"bytes": "17733"
},
{
"name": "Pascal",
"bytes": "3407"
},
{
"name": "Perl",
"bytes": "7536"
},
{
"name": "Python",
"bytes": "49451363"
},
{
"name": "RobotFramework",
"bytes": "891"
},
{
"name": "Ruby",
"bytes": "4697"
},
{
"name": "Shell",
"bytes": "495434"
},
{
"name": "Smarty",
"bytes": "27495"
},
{
"name": "Swift",
"bytes": "56155"
},
{
"name": "TSQL",
"bytes": "921"
}
],
"symlink_target": ""
} |
"""Module for Slack Tests."""
| {
"content_hash": "0b2b38ec31205d961b012408765623ff",
"timestamp": "",
"source": "github",
"line_count": 1,
"max_line_length": 29,
"avg_line_length": 30,
"alnum_prop": 0.6333333333333333,
"repo_name": "jacobtomlinson/opsdroid",
"id": "b44787274f440b508768ed2b68df648a0e714b5f",
"size": "30",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "opsdroid/connector/slack/tests/__init__.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "1755"
},
{
"name": "Jinja",
"bytes": "2320"
},
{
"name": "Jupyter Notebook",
"bytes": "848"
},
{
"name": "Python",
"bytes": "1178799"
}
],
"symlink_target": ""
} |
from .sub_resource import SubResource
class Subnet(SubResource):
    """Subnet in a virtual network resource.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    :param id: Resource ID.
    :type id: str
    :param address_prefix: The address prefix for the subnet.
    :type address_prefix: str
    :param network_security_group: The reference of the NetworkSecurityGroup
     resource.
    :type network_security_group: :class:`NetworkSecurityGroup
     <azure.mgmt.network.v2016_12_01.models.NetworkSecurityGroup>`
    :param route_table: The reference of the RouteTable resource.
    :type route_table: :class:`RouteTable
     <azure.mgmt.network.v2016_12_01.models.RouteTable>`
    :ivar ip_configurations: Gets an array of references to the network
     interface IP configurations using subnet.
    :vartype ip_configurations: list of :class:`IPConfiguration
     <azure.mgmt.network.v2016_12_01.models.IPConfiguration>`
    :param resource_navigation_links: Gets an array of references to the
     external resources using subnet.
    :type resource_navigation_links: list of :class:`ResourceNavigationLink
     <azure.mgmt.network.v2016_12_01.models.ResourceNavigationLink>`
    :param provisioning_state: The provisioning state of the resource.
    :type provisioning_state: str
    :param name: The name of the resource that is unique within a resource
     group. This name can be used to access the resource.
    :type name: str
    :param etag: A unique read-only string that changes whenever the resource
     is updated.
    :type etag: str
    """

    # Server-populated attributes; marked readonly so client-set values are
    # not serialized into requests.
    _validation = {
        'ip_configurations': {'readonly': True},
    }

    # NOTE(review): python attribute -> wire key/type map — presumably consumed
    # by the msrest serializer, as in other Azure SDK models; do not rename
    # keys without regenerating.
    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'address_prefix': {'key': 'properties.addressPrefix', 'type': 'str'},
        'network_security_group': {'key': 'properties.networkSecurityGroup', 'type': 'NetworkSecurityGroup'},
        'route_table': {'key': 'properties.routeTable', 'type': 'RouteTable'},
        'ip_configurations': {'key': 'properties.ipConfigurations', 'type': '[IPConfiguration]'},
        'resource_navigation_links': {'key': 'properties.resourceNavigationLinks', 'type': '[ResourceNavigationLink]'},
        'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'etag': {'key': 'etag', 'type': 'str'},
    }

    def __init__(self, id=None, address_prefix=None, network_security_group=None, route_table=None, resource_navigation_links=None, provisioning_state=None, name=None, etag=None):
        super(Subnet, self).__init__(id=id)
        self.address_prefix = address_prefix
        self.network_security_group = network_security_group
        self.route_table = route_table
        # Read-only on the server (see _validation); always initialized to None.
        self.ip_configurations = None
        self.resource_navigation_links = resource_navigation_links
        self.provisioning_state = provisioning_state
        self.name = name
        self.etag = etag
| {
"content_hash": "08a4bc89c56747883012555ca3295eda",
"timestamp": "",
"source": "github",
"line_count": 64,
"max_line_length": 179,
"avg_line_length": 47.0625,
"alnum_prop": 0.6819389110225763,
"repo_name": "v-iam/azure-sdk-for-python",
"id": "2aa53c9eae2baf04b76e201dbab2995db9c3f4b8",
"size": "3486",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "azure-mgmt-network/azure/mgmt/network/v2016_12_01/models/subnet.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "19856874"
}
],
"symlink_target": ""
} |
"""Checks import position rule"""
# pylint: disable=unused-import,relative-import,ungrouped-imports,import-error,no-name-in-module,relative-beyond-top-level,unused-variable
def method1():
    """Method 1"""
    # NOTE(review): function-scope import kept on purpose — this file is a
    # pylint functional-test fixture for wrong-import-position; confirm the
    # expected-messages file before changing anything here.
    import x
| {
"content_hash": "a506d660a5885ccf7001d833c6515f18",
"timestamp": "",
"source": "github",
"line_count": 5,
"max_line_length": 138,
"avg_line_length": 44,
"alnum_prop": 0.7454545454545455,
"repo_name": "arju88nair/projectCulminate",
"id": "3f1174f9dc2e661e287694fef80de49d49e7c62a",
"size": "220",
"binary": false,
"copies": "7",
"ref": "refs/heads/master",
"path": "venv/lib/python3.5/site-packages/pylint/test/functional/wrong_import_position4.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "365921"
},
{
"name": "C++",
"bytes": "237910"
},
{
"name": "CSS",
"bytes": "6270"
},
{
"name": "JavaScript",
"bytes": "6264"
},
{
"name": "Makefile",
"bytes": "90112"
},
{
"name": "Python",
"bytes": "15199371"
},
{
"name": "Shell",
"bytes": "17795"
}
],
"symlink_target": ""
} |
from application import server

# NOTE(review): debug mode combined with binding to all interfaces (0.0.0.0)
# exposes the interactive debugger to the network — acceptable for a local
# dev container, unsafe in production; confirm deployment context.
server.debug = True
server.run(host='0.0.0.0', port=5000)
"content_hash": "e006e53cb884eacb34bcf44a2e62022d",
"timestamp": "",
"source": "github",
"line_count": 4,
"max_line_length": 37,
"avg_line_length": 22.25,
"alnum_prop": 0.7528089887640449,
"repo_name": "Markcial/alembic",
"id": "ce2ee3336d0d9d221edc27091cabe339c043da92",
"size": "89",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "application/run.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "2240"
},
{
"name": "HTML",
"bytes": "4119"
},
{
"name": "JavaScript",
"bytes": "4676"
},
{
"name": "Python",
"bytes": "7782"
},
{
"name": "Shell",
"bytes": "609"
}
],
"symlink_target": ""
} |
"""So that we can modularize our application, we will use this as our
our master file for application endpoints"""
from .modules.todo import views
| {
"content_hash": "3ff93c1fd694de71fa53135a7f2742e0",
"timestamp": "",
"source": "github",
"line_count": 3,
"max_line_length": 69,
"avg_line_length": 49,
"alnum_prop": 0.782312925170068,
"repo_name": "configuresystems/restful-api-with-flask",
"id": "87d4f9c826f08464fc765d8905f55854046c868e",
"size": "147",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "app/views.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "12306"
},
{
"name": "Shell",
"bytes": "3690"
},
{
"name": "VimL",
"bytes": "34373"
}
],
"symlink_target": ""
} |
import time

import numpy as np
import pyximport

# pyximport must be installed *before* importing the compiled module so the
# .pyx source can be built on demand with the numpy headers available.
pyximport.install(setup_args={"include_dirs": np.get_include()})

import clodius.fast

x = np.array(range(2 ** 16))

# Time the aggregation itself: take t2 *after* the call (the original read
# both timestamps before doing any work, so the printed delta was ~0).
t1 = time.time()
result = clodius.fast.aggregate(x, 8)
t2 = time.time()

print(result)
print("t2:", t2 - t1)
| {
"content_hash": "57e2acdb5a6e4fccc97d8d05d7a0d94f",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 64,
"avg_line_length": 19.76923076923077,
"alnum_prop": 0.6964980544747081,
"repo_name": "hms-dbmi/clodius",
"id": "3469199947ffbe66153e683b2ab10b31a1fde781",
"size": "257",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "scripts/temp.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Jupyter Notebook",
"bytes": "146177"
},
{
"name": "Makefile",
"bytes": "293"
},
{
"name": "Python",
"bytes": "232375"
},
{
"name": "Shell",
"bytes": "489"
}
],
"symlink_target": ""
} |
"""
A connection to the VMware vCenter platform.
"""
import re
from oslo_log import log as logging
from oslo_utils import excutils
from oslo_utils import versionutils as v_utils
from oslo_vmware import api
from oslo_vmware import exceptions as vexc
from oslo_vmware import pbm
from oslo_vmware import vim
from oslo_vmware import vim_util
from nova.compute import task_states
import nova.conf
from nova import exception
from nova.i18n import _, _LI, _LE, _LW
from nova.virt import driver
from nova.virt.vmwareapi import constants
from nova.virt.vmwareapi import error_util
from nova.virt.vmwareapi import host
from nova.virt.vmwareapi import vim_util as nova_vim_util
from nova.virt.vmwareapi import vm_util
from nova.virt.vmwareapi import vmops
from nova.virt.vmwareapi import volumeops
LOG = logging.getLogger(__name__)

CONF = nova.conf.CONF

# NOTE(review): presumably the delay (seconds) between retries of failed
# vSphere API calls — named only; usage is not visible in this chunk.
TIME_BETWEEN_API_CALL_RETRIES = 1.0
class VMwareVCDriver(driver.ComputeDriver):
    """The VC host connection object."""

    capabilities = {
        "has_imagecache": True,
        "supports_recreate": False,
        "supports_migrate_to_same_host": True,
        "supports_attach_interface": True
    }

    # Legacy nodename is of the form: <mo id>(<cluster name>)
    # e.g. domain-26(TestCluster)
    # We assume <mo id> consists of alphanumeric, _ and -.
    # We assume cluster name is everything between the first ( and the last ).
    # We pull out <mo id> for re-use.
    # NOTE: raw string so \w and \( reach the regex engine verbatim; the
    # previous non-raw literal relied on Python's deprecated handling of
    # invalid escape sequences (DeprecationWarning since 3.6).
    LEGACY_NODENAME = re.compile(r'([\w-]+)\(.+\)')

    # The vCenter driver includes API that acts on ESX hosts or groups
    # of ESX hosts in clusters or non-cluster logical-groupings.
    #
    # vCenter is not a hypervisor itself, it works with multiple
    # hypervisor host machines and their guests. This fact can
    # subtly alter how vSphere and OpenStack interoperate.

    def __init__(self, virtapi, scheme="https"):
        """Establish the vCenter session and build the ops helpers.

        Raises when the mandatory connection options are missing, when
        datastore_regex does not compile, or when the configured cluster
        cannot be found in vCenter.
        """
        super(VMwareVCDriver, self).__init__(virtapi)

        if (CONF.vmware.host_ip is None or
            CONF.vmware.host_username is None or
            CONF.vmware.host_password is None):
            raise Exception(_("Must specify host_ip, host_username and "
                              "host_password to use vmwareapi.VMwareVCDriver"))

        self._datastore_regex = None
        if CONF.vmware.datastore_regex:
            try:
                self._datastore_regex = re.compile(CONF.vmware.datastore_regex)
            except re.error:
                raise exception.InvalidInput(reason=
                    _("Invalid Regular Expression %s")
                    % CONF.vmware.datastore_regex)

        self._session = VMwareAPISession(scheme=scheme)

        self._check_min_version()

        # Update the PBM location if necessary
        if CONF.vmware.pbm_enabled:
            self._update_pbm_location()

        self._validate_configuration()
        self._cluster_name = CONF.vmware.cluster_name
        self._cluster_ref = vm_util.get_cluster_ref_by_name(self._session,
                                                            self._cluster_name)
        if self._cluster_ref is None:
            raise exception.NotFound(_("The specified cluster '%s' was not "
                                       "found in vCenter")
                                     % self._cluster_name)
        self._vcenter_uuid = self._get_vcenter_uuid()
        self._nodename = self._create_nodename(self._cluster_ref.value)
        self._volumeops = volumeops.VMwareVolumeOps(self._session,
                                                    self._cluster_ref)
        self._vmops = vmops.VMwareVMOps(self._session,
                                        virtapi,
                                        self._volumeops,
                                        self._cluster_ref,
                                        datastore_regex=self._datastore_regex)
        self._vc_state = host.VCState(self._session,
                                      self._nodename,
                                      self._cluster_ref,
                                      self._datastore_regex)

        # Register the OpenStack extension
        self._register_openstack_extension()

    def _check_min_version(self):
        """Fail when vCenter is older than the supported minimum and warn
        when it is below the next (deprecation) threshold.
        """
        min_version = v_utils.convert_version_to_int(constants.MIN_VC_VERSION)
        next_min_ver = v_utils.convert_version_to_int(
            constants.NEXT_MIN_VC_VERSION)
        vc_version = vim_util.get_vc_version(self._session)
        LOG.info(_LI("VMware vCenter version: %s"), vc_version)
        if v_utils.convert_version_to_int(vc_version) < min_version:
            raise exception.NovaException(
                _('Detected vCenter version %(version)s. Nova requires VMware '
                  'vCenter version %(min_version)s or greater.') % {
                      'version': vc_version,
                      'min_version': constants.MIN_VC_VERSION})
        elif v_utils.convert_version_to_int(vc_version) < next_min_ver:
            LOG.warning(_LW('Running Nova with a VMware vCenter version less '
                            'than %(version)s is deprecated. The required '
                            'minimum version of vCenter will be raised to '
                            '%(version)s in the 16.0.0 release.'),
                        {'version': constants.NEXT_MIN_VC_VERSION})

    @property
    def need_legacy_block_device_info(self):
        # This driver consumes the new-style block device info format.
        return False

    def _update_pbm_location(self):
        """Point the session at the PBM WSDL: explicit config wins,
        otherwise derive it from the detected vCenter version.
        """
        if CONF.vmware.pbm_wsdl_location:
            pbm_wsdl_loc = CONF.vmware.pbm_wsdl_location
        else:
            version = vim_util.get_vc_version(self._session)
            pbm_wsdl_loc = pbm.get_pbm_wsdl_location(version)
        self._session.pbm_wsdl_loc_set(pbm_wsdl_loc)

    def _validate_configuration(self):
        """Sanity-check the PBM-related configuration options."""
        if CONF.vmware.pbm_enabled:
            if not CONF.vmware.pbm_default_policy:
                raise error_util.PbmDefaultPolicyUnspecified()
            if not pbm.get_profile_id_by_name(
                    self._session,
                    CONF.vmware.pbm_default_policy):
                raise error_util.PbmDefaultPolicyDoesNotExist()
            if CONF.vmware.datastore_regex:
                LOG.warning(_LW(
                    "datastore_regex is ignored when PBM is enabled"))
                self._datastore_regex = None

    def init_host(self, host):
        # Re-create the vCenter session if it has not been established yet.
        vim = self._session.vim
        if vim is None:
            self._session._create_session()

    def cleanup_host(self, host):
        # Terminate the vCenter session on compute-service shutdown.
        self._session.logout()

    def _register_openstack_extension(self):
        # Register an 'OpenStack' extension in vCenter
        LOG.debug('Registering extension %s with vCenter',
                  constants.EXTENSION_KEY)
        os_extension = self._session._call_method(vim_util, 'find_extension',
                                                  constants.EXTENSION_KEY)
        if os_extension is None:
            LOG.debug('Extension does not exist. Registering type %s.',
                      constants.EXTENSION_TYPE_INSTANCE)
            self._session._call_method(vim_util, 'register_extension',
                                       constants.EXTENSION_KEY,
                                       constants.EXTENSION_TYPE_INSTANCE)

    def cleanup(self, context, instance, network_info, block_device_info=None,
                destroy_disks=True, migrate_data=None, destroy_vifs=True):
        """Cleanup after instance being destroyed by Hypervisor."""
        pass

    def resume_state_on_host_boot(self, context, instance, network_info,
                                  block_device_info=None):
        """resume guest state when a host is booted."""
        # Check if the instance is running already and avoid doing
        # anything if it is.
        state = vm_util.get_vm_state(self._session, instance)
        ignored_states = ['poweredon', 'suspended']
        if state.lower() in ignored_states:
            return
        # Instance is not up and could be in an unknown state.
        # Be as absolute as possible about getting it back into
        # a known and running state.
        self.reboot(context, instance, network_info, 'hard',
                    block_device_info)

    def list_instance_uuids(self):
        """List VM instance UUIDs."""
        return self._vmops.list_instances()

    def list_instances(self):
        """List VM instances from the single compute node."""
        return self._vmops.list_instances()

    def migrate_disk_and_power_off(self, context, instance, dest,
                                   flavor, network_info,
                                   block_device_info=None,
                                   timeout=0, retry_interval=0):
        """Transfers the disk of a running instance in multiple phases, turning
        off the instance before the end.
        """
        # TODO(PhilDay): Add support for timeout (clean shutdown)
        return self._vmops.migrate_disk_and_power_off(context, instance,
                                                      dest, flavor)

    def confirm_migration(self, context, migration, instance, network_info):
        """Confirms a resize, destroying the source VM."""
        self._vmops.confirm_migration(migration, instance, network_info)

    def finish_revert_migration(self, context, instance, network_info,
                                block_device_info=None, power_on=True):
        """Finish reverting a resize, powering back on the instance."""
        self._vmops.finish_revert_migration(context, instance, network_info,
                                            block_device_info, power_on)

    def finish_migration(self, context, migration, instance, disk_info,
                         network_info, image_meta, resize_instance,
                         block_device_info=None, power_on=True):
        """Completes a resize, turning on the migrated instance."""
        self._vmops.finish_migration(context, migration, instance, disk_info,
                                     network_info, image_meta, resize_instance,
                                     block_device_info, power_on)

    def get_instance_disk_info(self, instance, block_device_info=None):
        # Not implemented for this driver.
        pass

    def get_vnc_console(self, context, instance):
        """Return link to instance's VNC console using vCenter logic."""
        # vCenter does not actually run the VNC service
        # itself. You must talk to the VNC host underneath vCenter.
        return self._vmops.get_vnc_console(instance)

    def get_mks_console(self, context, instance):
        """Return connection info for the instance's MKS console."""
        return self._vmops.get_mks_console(instance)

    def _get_vcenter_uuid(self):
        """Retrieves the vCenter UUID."""
        about = self._session._call_method(nova_vim_util, 'get_about_info')
        return about.instanceUuid

    def _create_nodename(self, mo_id):
        """Return a nodename which uniquely describes a cluster.

        The name will be of the form:
        <mo id>.<vcenter uuid>
        e.g.
        domain-26.9d51f082-58a4-4449-beed-6fd205a5726b
        """
        return '%s.%s' % (mo_id, self._vcenter_uuid)

    def _get_available_resources(self, host_stats):
        """Map raw host stats to the resource dict nova-compute expects."""
        return {'vcpus': host_stats['vcpus'],
                'memory_mb': host_stats['host_memory_total'],
                'local_gb': host_stats['disk_total'],
                'vcpus_used': 0,
                'memory_mb_used': host_stats['host_memory_total'] -
                                  host_stats['host_memory_free'],
                'local_gb_used': host_stats['disk_used'],
                'hypervisor_type': host_stats['hypervisor_type'],
                'hypervisor_version': host_stats['hypervisor_version'],
                'hypervisor_hostname': host_stats['hypervisor_hostname'],
                # The VMWare driver manages multiple hosts, so there are
                # likely many different CPU models in use. As such it is
                # impossible to provide any meaningful info on the CPU
                # model of the "host"
                'cpu_info': None,
                'supported_instances': host_stats['supported_instances'],
                'numa_topology': None,
                }

    def get_available_resource(self, nodename):
        """Retrieve resource info.

        This method is called when nova-compute launches, and
        as part of a periodic task.

        :returns: dictionary describing resources
        """
        host_stats = self._vc_state.get_host_stats(refresh=True)
        stats_dict = self._get_available_resources(host_stats)
        return stats_dict

    def get_available_nodes(self, refresh=False):
        """Returns nodenames of all nodes managed by the compute service.

        This driver supports only one compute node.
        """
        return [self._nodename]

    def spawn(self, context, instance, image_meta, injected_files,
              admin_password, network_info=None, block_device_info=None):
        """Create VM instance."""
        self._vmops.spawn(context, instance, image_meta, injected_files,
                          admin_password, network_info, block_device_info)

    def attach_volume(self, context, connection_info, instance, mountpoint,
                      disk_bus=None, device_type=None, encryption=None):
        """Attach volume storage to VM instance."""
        return self._volumeops.attach_volume(connection_info, instance)

    def detach_volume(self, connection_info, instance, mountpoint,
                      encryption=None):
        """Detach volume storage to VM instance."""
        return self._volumeops.detach_volume(connection_info, instance)

    def get_volume_connector(self, instance):
        """Return volume connector information."""
        return self._volumeops.get_volume_connector(instance)

    def get_host_ip_addr(self):
        """Returns the IP address of the vCenter host."""
        return CONF.vmware.host_ip

    def snapshot(self, context, instance, image_id, update_task_state):
        """Create snapshot from a running VM instance."""
        self._vmops.snapshot(context, instance, image_id, update_task_state)

    def reboot(self, context, instance, network_info, reboot_type,
               block_device_info=None, bad_volumes_callback=None):
        """Reboot VM instance."""
        self._vmops.reboot(instance, network_info, reboot_type)

    def _detach_instance_volumes(self, instance, block_device_info):
        """Power off the instance and detach all its attached volumes."""
        # We need to detach attached volumes
        block_device_mapping = driver.block_device_info_get_mapping(
            block_device_info)
        if block_device_mapping:
            # Certain disk types, for example 'IDE' do not support hot
            # plugging. Hence we need to power off the instance and update
            # the instance state.
            self._vmops.power_off(instance)
            for disk in block_device_mapping:
                connection_info = disk['connection_info']
                try:
                    self.detach_volume(connection_info, instance,
                                       disk.get('device_name'))
                except exception.DiskNotFound:
                    LOG.warning(_LW('The volume %s does not exist!'),
                                disk.get('device_name'),
                                instance=instance)
                except Exception as e:
                    with excutils.save_and_reraise_exception():
                        LOG.error(_LE("Failed to detach %(device_name)s. "
                                      "Exception: %(exc)s"),
                                  {'device_name': disk.get('device_name'),
                                   'exc': e},
                                  instance=instance)

    def destroy(self, context, instance, network_info, block_device_info=None,
                destroy_disks=True, migrate_data=None):
        """Destroy VM instance."""
        # Destroy gets triggered when Resource Claim in resource_tracker
        # is not successful. When resource claim is not successful,
        # node is not set in instance. Perform destroy only if node is set
        if not instance.node:
            return

        # A resize uses the same instance on the VC. We do not delete that
        # VM in the event of a revert
        if instance.task_state == task_states.RESIZE_REVERTING:
            return

        # We need to detach attached volumes
        if block_device_info is not None:
            try:
                self._detach_instance_volumes(instance, block_device_info)
            except vexc.ManagedObjectNotFoundException:
                LOG.warning(_LW('Instance does not exists. Proceeding to '
                                'delete instance properties on datastore'),
                            instance=instance)
        self._vmops.destroy(instance, destroy_disks)

    def pause(self, instance):
        """Pause VM instance."""
        self._vmops.pause(instance)

    def unpause(self, instance):
        """Unpause paused VM instance."""
        self._vmops.unpause(instance)

    def suspend(self, context, instance):
        """Suspend the specified instance."""
        self._vmops.suspend(instance)

    def resume(self, context, instance, network_info, block_device_info=None):
        """Resume the suspended VM instance."""
        self._vmops.resume(instance)

    def rescue(self, context, instance, network_info, image_meta,
               rescue_password):
        """Rescue the specified instance."""
        self._vmops.rescue(context, instance, network_info, image_meta)

    def unrescue(self, instance, network_info):
        """Unrescue the specified instance."""
        self._vmops.unrescue(instance)

    def power_off(self, instance, timeout=0, retry_interval=0):
        """Power off the specified instance."""
        # TODO(PhilDay): Add support for timeout (clean shutdown)
        self._vmops.power_off(instance)

    def power_on(self, context, instance, network_info,
                 block_device_info=None):
        """Power on the specified instance."""
        self._vmops.power_on(instance)

    def poll_rebooting_instances(self, timeout, instances):
        """Poll for rebooting instances."""
        self._vmops.poll_rebooting_instances(timeout, instances)

    def get_info(self, instance):
        """Return info about the VM instance."""
        return self._vmops.get_info(instance)

    def get_diagnostics(self, instance):
        """Return data about VM diagnostics."""
        return self._vmops.get_diagnostics(instance)

    def get_instance_diagnostics(self, instance):
        """Return data about VM diagnostics."""
        return self._vmops.get_instance_diagnostics(instance)

    def host_power_action(self, action):
        """Host operations not supported by VC driver.

        This needs to override the ESX driver implementation.
        """
        raise NotImplementedError()

    def host_maintenance_mode(self, host, mode):
        """Host operations not supported by VC driver.

        This needs to override the ESX driver implementation.
        """
        raise NotImplementedError()

    def set_host_enabled(self, enabled):
        """Host operations not supported by VC driver.

        This needs to override the ESX driver implementation.
        """
        raise NotImplementedError()

    def get_host_uptime(self):
        """Host uptime operation not supported by VC driver."""
        msg = _("Multiple hosts may be managed by the VMWare "
                "vCenter driver; therefore we do not return "
                "uptime for just one host.")
        raise NotImplementedError(msg)

    def inject_network_info(self, instance, nw_info):
        """inject network info for specified instance."""
        self._vmops.inject_network_info(instance, nw_info)

    def manage_image_cache(self, context, all_instances):
        """Manage the local cache of images."""
        self._vmops.manage_image_cache(context, all_instances)

    def instance_exists(self, instance):
        """Efficient override of base instance_exists method."""
        return self._vmops.instance_exists(instance)

    def attach_interface(self, context, instance, image_meta, vif):
        """Attach an interface to the instance."""
        self._vmops.attach_interface(context, instance, image_meta, vif)

    def detach_interface(self, context, instance, vif):
        """Detach an interface from the instance."""
        self._vmops.detach_interface(context, instance, vif)
class VMwareAPISession(api.VMwareAPISession):
    """Thin wrapper around oslo.vmware's session.

    Holds the authenticated connection to the VC/ESX host and funnels
    every API call through it.
    """

    def __init__(self, host_ip=CONF.vmware.host_ip,
                 host_port=CONF.vmware.host_port,
                 username=CONF.vmware.host_username,
                 password=CONF.vmware.host_password,
                 retry_count=CONF.vmware.api_retry_count,
                 scheme="https",
                 cacert=CONF.vmware.ca_file,
                 insecure=CONF.vmware.insecure):
        """Open the session immediately using the [vmware] config defaults."""
        super(VMwareAPISession, self).__init__(
            host=host_ip,
            port=host_port,
            server_username=username,
            server_password=password,
            api_retry_count=retry_count,
            scheme=scheme,
            cacert=cacert,
            insecure=insecure,
            task_poll_interval=CONF.vmware.task_poll_interval,
            create_session=True)

    def _is_vim_object(self, module):
        """Check if the module is a VIM Object instance."""
        return isinstance(module, vim.Vim)

    def _call_method(self, module, method, *args, **kwargs):
        """Invoke ``method`` on ``module`` with the given arguments.

        Non-VIM modules additionally receive the Vim handle as their
        first positional argument.
        """
        if self._is_vim_object(module):
            return self.invoke_api(module, method, *args, **kwargs)
        return self.invoke_api(module, method, self.vim, *args, **kwargs)

    def _wait_for_task(self, task_ref):
        """Block until ``task_ref`` completes and return its result."""
        return self.wait_for_task(task_ref)
| {
"content_hash": "bdeaa10b3b6b9a6399c6d067839fd19d",
"timestamp": "",
"source": "github",
"line_count": 528,
"max_line_length": 79,
"avg_line_length": 42.02651515151515,
"alnum_prop": 0.5918431726002704,
"repo_name": "hanlind/nova",
"id": "4ddcbc3d2738f35945c28f9e9c7e3a964cffb229",
"size": "22942",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "nova/virt/vmwareapi/driver.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "PHP",
"bytes": "3325"
},
{
"name": "Python",
"bytes": "18681206"
},
{
"name": "Shell",
"bytes": "32127"
},
{
"name": "Smarty",
"bytes": "306159"
}
],
"symlink_target": ""
} |
"""This module contains the user-facing API for AutoGraph."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import functools
from enum import Enum
from tensorflow.python.autograph.core import config
from tensorflow.python.autograph.core import converter
from tensorflow.python.autograph.impl import conversion
from tensorflow.python.autograph.operators import py_builtins
from tensorflow.python.autograph.pyct import compiler
from tensorflow.python.autograph.pyct import inspect_utils
from tensorflow.python.autograph.utils import py_func
from tensorflow.python.util import tf_decorator
from tensorflow.python.util import tf_inspect
# TODO(mdan): Properly document the type hints.
# TODO(mdan): Reduce the type hint information to (module, type).
# (currently we require (module + class name, type))
# TODO(mdan): This should behave like to_graph (e.g. convert statically).
def convert(recursive=False, verbose=False):
  """Decorator that compiles a function to use TensorFlow ops.

  The decorator is dynamic: the target is recompiled on every call, so the
  parameter values (and their types) are known at conversion time, and calls
  with different argument types are each handled correctly.

  Args:
    recursive: bool, whether to recursively convert any functions or classes
      that the converted function may use.
    verbose: bool, whether to output the compiled code in the logs.

  Returns:
    Callable, a decorator that converts the given function into an equivalent
    function that uses TensorFlow ops.
  """

  def decorator(f):
    """Decorator implementation."""

    @functools.wraps(f)
    def wrapper(*args, **kwargs):
      opts = converter.ConversionOptions(
          recursive=recursive,
          verbose=verbose,
          force_conversion=True,
      )
      return converted_call(f, None, opts, *args, **kwargs)

    wrapper = tf_decorator.make_decorator(f, wrapper)

    # Sometimes the decorator is just desugared, making it impossible to
    # detect. This attribute makes detection easier.
    setattr(wrapper, '__pyct_is_compile_decorator', True)
    return wrapper

  return decorator
class RunMode(Enum):
  """Specifies the way a converted function or method should be executed in TF.

  The enum values have the following semantics:

  * GRAPH: Call this function directly, as-is. This is suitable for functions
    that were already designed for TF graphs and contain ops.
  * PY_FUNC: Wrap this function into a py_func op. This is suitable for code
    that will only run correctly in Python, for example code that renders
    to the display, reads keyboard input, etc.
  """
  GRAPH = 1
  PY_FUNC = 2
def do_not_convert(run_as=RunMode.GRAPH, return_dtypes=None):
  """Decorator that suppresses the conversion of a function.

  See also: docs/pyfunc_dtypes.md

  Args:
    run_as: RunMode, specifies how to use the function in TensorFlow.
    return_dtypes: Optional[Iterable[ Union[tf.DType,
      utils.py_func.MatchDType]]], the return data types of the converted
      function, if run_as is RunMode.PY_FUNC. Ignored otherwise. May be set to
      None if the function has no return values.

  Returns:
    Callable, a decorator that wraps the original function.
  """

  def decorator(f):
    """Decorator implementation."""

    @functools.wraps(f)
    def graph_wrapper(*args, **kwargs):
      return f(*args, **kwargs)

    @functools.wraps(f)
    def py_func_wrapper(*args, **kwargs):
      if kwargs:
        raise NotImplementedError('RunMode.PY_FUNC does not yet support kwargs')
      # TODO(mdan): Add support for kwargs.
      return py_func.wrap_py_func(
          f, return_dtypes, args, kwargs, use_dummy_return=not return_dtypes)

    # Reject unknown modes up front, then pick the matching wrapper.
    if run_as not in (RunMode.GRAPH, RunMode.PY_FUNC):
      raise ValueError('unknown value for run_as: %s' % run_as)
    wrapper = graph_wrapper if run_as == RunMode.GRAPH else py_func_wrapper

    # Sometimes the decorator is just desugared, making it impossible to
    # detect. This attribute makes detection easier.
    setattr(wrapper, '__pyct_is_compile_decorator', True)
    return wrapper

  return decorator
# TODO(mdan): Move to a private, undocumented module.
def converted_call(f, owner, options, *args, **kwargs):
"""Compiles a function call inline. For internal use only."""
if owner is not None:
if not isinstance(f, str):
raise ValueError(
'When owner is specified, the function name must be specified as'
' a string: {}'.format(f))
# Special case when the owner is a 'super' object. In that case lookups of
# dynamic attributes won't work. See
# inspect_utils.SuperWrapperForDynamicAttrs.
if isinstance(owner, super):
owner = inspect_utils.SuperWrapperForDynamicAttrs(owner)
f = getattr(owner, f)
# TODO(mdan): This needs cleanup.
# In particular, we may want to avoid renaming functions altogether.
if not options.force_conversion and conversion.is_whitelisted_for_graph(f):
return f(*args, **kwargs)
unknown_arg_value = object() # Sentinel for arguments of unknown value
if inspect_utils.isbuiltin(f):
return py_builtins.overload_of(f)(*args, **kwargs)
if tf_inspect.isfunction(f) or tf_inspect.ismethod(f):
# Regular functions
target_entity = f
arg_map_target = f
f_class = inspect_utils.getmethodclass(f)
if f_class is not None:
# If this is a method call, it may or may not include self.
#
# Example when self is included:
# converted_call(to_graph(foo.bar), foo)
#
# Example when self is not included:
# super(...).foo(args)
#
if owner is not None and (not args or args[0] is not owner):
effective_args = (owner,) + args
else:
effective_args = args
partial_types = (f_class,)
else:
effective_args = args
partial_types = ()
elif tf_inspect.isclass(f):
# Constructors
target_entity = f
arg_map_target = f.__init__
effective_args = args
partial_types = ()
elif hasattr(f, '__call__') and hasattr(f, '__class__'):
# Callable objects
target_entity = f.__call__
arg_map_target = f.__call__
effective_args = (f,) + args
partial_types = (f.__class__,)
else:
NotImplementedError('unknown callable type "%s"' % type(f))
arg_values = tf_inspect.getcallargs(arg_map_target, *args, **kwargs)
arg_types = {}
for name, arg in arg_values.items():
if arg is unknown_arg_value:
continue
arg_class = arg.__class__
arg_types[name] = (arg_class.__name__, arg_class)
# When called from within a decorator, this is the only indication that
# the function is a method - it appears that the decorator is applied
# before the method is bound.
if not partial_types:
if 'self' in arg_values:
if tf_inspect.isclass(arg_values['self'].__class__):
partial_types = (arg_values['self'].__class__,)
elif 'cls' in arg_values:
if tf_inspect.isclass(arg_values['cls']):
partial_types = (arg_values['cls'],)
converted_f = to_graph(
target_entity,
recursive=options.recursive,
verbose=options.verbose,
arg_values=arg_values,
arg_types=arg_types,
partial_types=partial_types,
strip_decorators=options.strip_decorators)
return converted_f(*effective_args, **kwargs)
# TODO(mdan): Rename: to_ops?
# TODO(mdan): Look into overloading as function and decorator, like tfe.defun?
# TODO(mdan): Remove partial_types.
def to_graph(e,
             recursive=True,
             verbose=False,
             arg_values=None,
             arg_types=None,
             partial_types=None,
             strip_decorators=None):
  """Converts a Python entity into equivalent code that uses TensorFlow ops.

  Supported Python entities include:
    * functions
    * classes

  Classes are converted by converting all their methods into a new class.

  Args:
    e: Union[Callable, Type], the Python entity to convert.
    recursive: bool, whether to recursively convert any functions that the
      converted function may call.
    verbose: bool, whether to output the compiled code in the logs.
    arg_values: Optional[Dict[Text, Any]], value hints for symbols including
      function arguments.
    arg_types: Optional[Dict[Text, Type]], type hints for symbols including
      function arguments.
    partial_types: Set[Type], reserved for internal use.
    strip_decorators: Tuple[Callable], same as
      ConversionOptions.strip_decorators.

  Returns:
    Union[Callable, Type], the converted entity, which is the same kind as e
    (that is, a function is e is a function, a class if e is a class, etc.) but
    its code has been converted to use TF ops.

  Raises:
    ValueError: If the entity could not be converted.
  """
  if strip_decorators is None:
    strip_decorators = ()
  # The AutoGraph decorators themselves must never be converted.
  strip_decorators += (convert, do_not_convert, converted_call)

  program_ctx = converter.ProgramContext(
      options=converter.ConversionOptions(
          recursive=recursive,
          verbose=verbose,
          strip_decorators=strip_decorators),
      partial_types=partial_types,
      autograph_module=tf_inspect.getmodule(to_graph),
      uncompiled_modules=config.DEFAULT_UNCOMPILED_MODULES)
  _, name, namespace = conversion.entity_to_graph(e, program_ctx, arg_values,
                                                  arg_types)

  # Dependencies are emitted in reverse conversion order so that every
  # definition precedes its first use in the generated module.
  nodes = []
  for dep in reversed(program_ctx.conversion_order):
    nodes.extend(program_ctx.dependency_cache[dep])

  compiled_module, compiled_src = compiler.ast_to_object(
      nodes,
      source_prefix=program_ctx.required_imports,
      include_source_map=True)

  # The compiled code should see everything the entry entity saw.
  # TODO(mdan): This might not work well if the call tree spans modules?
  for key, val in namespace.items():
    # Avoid overwriting entities that have been transformed.
    if key not in compiled_module.__dict__:
      compiled_module.__dict__[key] = val
  compiled = getattr(compiled_module, name)

  # Need this so the source_mapping attribute is available for the context
  # manager to access for runtime errors.
  #
  # Note that compiler.ast_to_object attaches the source map 'ag_source_map__'
  # symbol to the compiled module.
  # TODO(mdan): Record this statically in the generated code.
  # TODO(mdan): Rename this attribute to 'autograph_info__'
  source_map_attribute_name = 'ag_source_map'
  if getattr(compiled, source_map_attribute_name, None) is not None:
    raise ValueError('cannot convert %s because is has an attribute '
                     '"%s", which is reserved for AutoGraph.' %
                     (compiled, source_map_attribute_name))
  setattr(compiled, source_map_attribute_name,
          compiled_module.__dict__['ag_source_map__'])

  return compiled
def to_code(e,
            recursive=True,
            arg_values=None,
            arg_types=None,
            partial_types=None,
            indentation='  '):
  """Returns the equivalent code that uses TensorFlow ops.

  Also see: `to_graph`, `convert`

  Args:
    e: Union[Callable, Type], the Python entity to convert.
    recursive: bool, whether to recursively convert any functions that the
      converted function may call.
    arg_values: Optional[Dict[Text, Any]], value hints for symbols including
      function arguments.
    arg_types: Optional[Dict[Text, Type]], type hints for symbols including
      function arguments.
    partial_types: Set[Type], reserved for internal use.
    indentation: Text, when to use for each level of indentation.

  Returns:
    Text, the converted code.
  """
  program_ctx = converter.ProgramContext(
      options=converter.ConversionOptions(
          recursive=recursive,
          strip_decorators=(convert, do_not_convert, converted_call)),
      partial_types=partial_types,
      autograph_module=tf_inspect.getmodule(to_graph),
      uncompiled_modules=config.DEFAULT_UNCOMPILED_MODULES)
  conversion.entity_to_graph(e, program_ctx, arg_values, arg_types)

  # Render each converted dependency, last-converted first, so that every
  # definition appears before its first use.
  pieces = []
  for dep in reversed(program_ctx.conversion_order):
    pieces.append(
        compiler.ast_to_source(program_ctx.dependency_cache[dep], indentation))
  code = '\n'.join(pieces)

  return program_ctx.required_imports + '\n\n' + code
| {
"content_hash": "83e282ef0591f617089bb567557c0dd7",
"timestamp": "",
"source": "github",
"line_count": 352,
"max_line_length": 80,
"avg_line_length": 35.20738636363637,
"alnum_prop": 0.6826434277414669,
"repo_name": "girving/tensorflow",
"id": "b3f056965c6c225426323bda7c9f918e180acfcd",
"size": "13082",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tensorflow/python/autograph/impl/api.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Assembly",
"bytes": "3325"
},
{
"name": "Batchfile",
"bytes": "10132"
},
{
"name": "C",
"bytes": "343258"
},
{
"name": "C#",
"bytes": "8446"
},
{
"name": "C++",
"bytes": "50036869"
},
{
"name": "CMake",
"bytes": "196127"
},
{
"name": "Dockerfile",
"bytes": "36386"
},
{
"name": "Go",
"bytes": "1254086"
},
{
"name": "HTML",
"bytes": "4681865"
},
{
"name": "Java",
"bytes": "867313"
},
{
"name": "Jupyter Notebook",
"bytes": "2604735"
},
{
"name": "LLVM",
"bytes": "6536"
},
{
"name": "Makefile",
"bytes": "58787"
},
{
"name": "Objective-C",
"bytes": "15650"
},
{
"name": "Objective-C++",
"bytes": "99243"
},
{
"name": "PHP",
"bytes": "1357"
},
{
"name": "Perl",
"bytes": "7536"
},
{
"name": "PureBasic",
"bytes": "25356"
},
{
"name": "Python",
"bytes": "42041620"
},
{
"name": "Ruby",
"bytes": "553"
},
{
"name": "Shell",
"bytes": "477299"
},
{
"name": "Smarty",
"bytes": "6976"
}
],
"symlink_target": ""
} |
import json,os,twitter, dropbox, gzip
from datetime import datetime
from tweepy import Stream
from tweepy import OAuthHandler
from tweepy.streaming import StreamListener
from pprint import pprint
from optparse import OptionParser
from progress.bar import Bar
"""
Example call:
python twitter_fetch.py --t keys.json --k "heart attack_palpitations" --o "../data/twitter" --m 1000000
"""
#--Load input from command line
op = OptionParser()
op.add_option('--t', dest='keys', type='str', help='Path of key files')
op.add_option('--k', dest='keywords',type='str',help='Path of keywords')
op.add_option('--o',type="str", dest="outpath")
op.add_option('--m',type='int',dest="MAX_NUMBER_OF_TWEETS",default=100)
op.print_help()
opts,args = op.parse_args()
if len(args) > 0:
op.error('This script only takes arguments preceded by command line options.')
if not opts.outpath:
opts.corpus = os.getcwd()
print 'No output path specified. Using current working directory.'
search_terms = opts.keywords.split('_')
if not os.path.exists(opts.outpath):
os.makedirs(opts.outpath)
for search_term in search_terms:
if not os.path.isdir(os.path.join(opts.outpath,search_term)):
os.makedirs(os.path.join(opts.outpath,search_term))
if not opts.keys:
opts.keys = json.load(open('../../data/keys.json','rb'))
print 'No access token specified. Searching for default tokens'
else:
opts.keys = json.load(open(opts.keys,'rb'))
client= dropbox.client.DropboxClient(opts.keys['dropbox']['access_token'])
class listener(StreamListener):
    """tweepy StreamListener that appends each incoming tweet to an
    hourly text file and stops the stream after MAX_NUMBER_OF_TWEETS.
    """

    def __init__(self, api=None, path=None, outname='output',
                 MAX_NUMBER_OF_TWEETS=100, TWEETS_PER_FILE=10,
                 progress_bar=None):
        # Stored for StreamListener compatibility; not read anywhere in this
        # class -- TODO confirm it can be dropped.
        self.api = api
        # Output directory for the tweet files (used by on_data).
        self.path = path
        self.count = 0
        self.outname = outname
        self.progress_bar = progress_bar
        self.MAX_NUMBER_OF_TWEETS = MAX_NUMBER_OF_TWEETS
        # NOTE(review): TWEETS_PER_FILE is stored but never used by on_data;
        # files are actually rotated hourly via the filename timestamp.
        self.TWEETS_PER_FILE = TWEETS_PER_FILE

    def on_data(self, data):
        # Called by tweepy with the raw JSON payload for each tweet.
        all_data = json.loads(data)
        # Keep only words that are pure ASCII (drops emoji/non-Latin text).
        tweet_text = ' '.join(word for word in all_data["text"].split() if all(ord(ch)<128 for ch in word))
        tweet_id = all_data["id"]
        # One file per hour: <outname>_YYYY-MM-DD-HH.txt
        filename = os.path.join(self.path,'%s_%s.txt'%(self.outname,datetime.now().strftime('%Y-%m-%d-%H')))
        with open(filename,"a") as fid: #This open and closes the same file a lot of times. Hack for now.
            print>>fid, ' %s | %s'%(tweet_text,tweet_id)
        self.count += 1
        if self.progress_bar:
            self.progress_bar.next()
        # Returning True keeps the stream open; False shuts it down.
        if self.count < self.MAX_NUMBER_OF_TWEETS:
            return True
        else:
            if self.progress_bar:
                self.progress_bar.finish()
            return False

    def on_error(self, status):
        # Ignore stream errors and keep listening.
        return True #I believe this functions like pass in a try-except blocks

    def on_timeout(self):
        return True # Don't kill the stream
auth = OAuthHandler(opts.keys['twitter']['consumer_key'], opts.keys['twitter']['consumer_secret'])
auth.set_access_token(opts.keys['twitter']['access_token'],opts.keys['twitter']['access_token_secret'])
TWEETS_PER_FILE = 10000
'''
bar = Bar('Acquiring control tweets', max=opts.MAX_NUMBER_OF_TWEETS)
control_stream = twitter.TwitterStream(
auth=twitter.OAuth(opts.keys['twitter']['access_token'], opts.keys['twitter']['access_token_secret'],
opts.keys['twitter']['consumer_key'], opts.keys['twitter']['consumer_secret']), timeout=False, heartbeat_timeout=1000000)
iterator = control_stream.statuses.sample()
counter = 0
for tweet in iterator:
filename = os.path.join(control_path,'control_%d'%(counter/TWEETS_PER_FILE))
with gzip.open(filename,'a') as fid:
print>>fid,tweet
counter += 1
bar.next()
if counter > opts.MAX_NUMBER_OF_TWEETS:
break
bar.finish()
'''
for search_term in search_terms:
bar = Bar('Acquiring tweets mentioning %s'%search_term, max=opts.MAX_NUMBER_OF_TWEETS)
try:
caseStream = Stream(auth, listener(path=os.path.join(opts.outpath,search_term),
outname=search_term, MAX_NUMBER_OF_TWEETS=opts.MAX_NUMBER_OF_TWEETS,TWEETS_PER_FILE=TWEETS_PER_FILE,
progress_bar = bar))
caseStream.filter(track=search_terms)
except Exception as e:
print e
| {
"content_hash": "8b691e79ce0d50f5adee1d09f7750659",
"timestamp": "",
"source": "github",
"line_count": 120,
"max_line_length": 129,
"avg_line_length": 36.7,
"alnum_prop": 0.6546321525885559,
"repo_name": "nik7273/computational-medical-knowledge",
"id": "418776bef03460f134c698e48be068afb0813e9e",
"size": "4404",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "twitter_fetch.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Gnuplot",
"bytes": "1102"
},
{
"name": "Python",
"bytes": "47287"
},
{
"name": "Shell",
"bytes": "1391"
}
],
"symlink_target": ""
} |
# Prefer the in-tree (parent directory) copy of CherryPy over any
# installed version when running this test module.
from cherrypy.test import test
test.prefer_parent_path()
def setup_server():
    """Build the CherryPy tree used by the WSGI-graft tests.

    Mounts a regular page handler at / and grafts three WSGI apps under
    /hosted/: a plain environ-dumping app, a reversing middleware around a
    CherryPy Application, and an app that yields empty strings.
    """
    import os
    curdir = os.path.join(os.getcwd(), os.path.dirname(__file__))

    import cherrypy

    def test_app(environ, start_response):
        # Plain WSGI app: greeting lines followed by the environ, sorted
        # by key so the output is deterministic.
        status = '200 OK'
        response_headers = [('Content-type', 'text/plain')]
        start_response(status, response_headers)
        output = ['Hello, world!\n',
                  'This is a wsgi app running within CherryPy!\n\n']
        keys = environ.keys()
        keys.sort()
        for k in keys:
            output.append('%s: %s\n' % (k, environ[k]))
        return output

    def test_empty_string_app(environ, start_response):
        # WSGI app whose result iterable contains empty strings; the
        # server must tolerate them when assembling the body.
        status = '200 OK'
        response_headers = [('Content-type', 'text/plain')]
        start_response(status, response_headers)
        return ['Hello', '', ' ', '', 'world']

    class WSGIResponse(object):
        # Minimal iterator wrapper over a WSGI app result that forwards
        # close() to the underlying result if it defines one.
        def __init__(self, appresults):
            self.appresults = appresults
            self.iter = iter(appresults)

        def __iter__(self):
            return self

        def next(self):
            return self.iter.next()

        def close(self):
            if hasattr(self.appresults, "close"):
                self.appresults.close()

    class ReversingMiddleware(object):
        # WSGI middleware that reverses each chunk of the wrapped app's output.
        def __init__(self, app):
            self.app = app

        def __call__(self, environ, start_response):
            # BUG FIX: this used to call the closure variable `app` (the
            # Application created at the bottom of setup_server), which
            # only worked because that happened to be the very object this
            # middleware wraps. Call the wrapped app explicitly.
            results = self.app(environ, start_response)

            class Reverser(WSGIResponse):
                def next(this):
                    line = list(this.iter.next())
                    line.reverse()
                    return "".join(line)

            return Reverser(results)

    class Root:
        def index(self):
            return "I'm a regular CherryPy page handler!"
        index.exposed = True

    cherrypy.config.update({'environment': 'test_suite'})

    cherrypy.tree.mount(Root())
    cherrypy.tree.graft(test_app, '/hosted/app1')
    cherrypy.tree.graft(test_empty_string_app, '/hosted/app3')

    # Set script_name explicitly to None to signal CP that it should
    # be pulled from the WSGI environ each time.
    app = cherrypy.Application(Root(), script_name=None)
    cherrypy.tree.graft(ReversingMiddleware(app), '/hosted/app2')
from cherrypy.test import helper
class WSGIGraftTests(helper.CPWebCase):
    """Functional tests for WSGI apps grafted into the CherryPy tree."""

    # Expected prefix of the plain WSGI app's body (the environ dump follows).
    wsgi_output = '''Hello, world!
This is a wsgi app running within CherryPy!'''

    def test_01_standard_app(self):
        # The normal page handler at / must be unaffected by grafting.
        self.getPage("/")
        self.assertBody("I'm a regular CherryPy page handler!")

    def test_04_pure_wsgi(self):
        import cherrypy
        if not cherrypy.server.using_wsgi:
            print "skipped (not using WSGI)...",
            return
        self.getPage("/hosted/app1")
        self.assertHeader("Content-Type", "text/plain")
        self.assertInBody(self.wsgi_output)

    def test_05_wrapped_cp_app(self):
        # The ReversingMiddleware-wrapped app must return the page
        # handler's body with every line reversed.
        import cherrypy
        if not cherrypy.server.using_wsgi:
            print "skipped (not using WSGI)...",
            return
        self.getPage("/hosted/app2/")
        body = list("I'm a regular CherryPy page handler!")
        body.reverse()
        body = "".join(body)
        self.assertInBody(body)

    def test_06_empty_string_app(self):
        # Empty strings in the WSGI iterable must not corrupt the body.
        import cherrypy
        if not cherrypy.server.using_wsgi:
            print "skipped (not using WSGI)...",
            return
        self.getPage("/hosted/app3")
        self.assertHeader("Content-Type", "text/plain")
        self.assertInBody('Hello world')
if __name__ == '__main__':
    # Allow running this test module directly (outside the full suite).
    setup_server()
    helper.testmain()
| {
"content_hash": "9dab3ae0e431f8cc98a53924c17fac3c",
"timestamp": "",
"source": "github",
"line_count": 122,
"max_line_length": 68,
"avg_line_length": 30.278688524590162,
"alnum_prop": 0.5644288034650785,
"repo_name": "VHAINNOVATIONS/DmD",
"id": "66baed412ac44fbf65b3fd2306d3fddd31dc53be",
"size": "3694",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "scrubber/MIST_2_0_4/src/CherryPy-3.1.2/cherrypy/test/test_wsgiapps.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "258262"
},
{
"name": "HTML",
"bytes": "3057541"
},
{
"name": "Java",
"bytes": "363296"
},
{
"name": "JavaScript",
"bytes": "8682388"
},
{
"name": "Perl",
"bytes": "294110"
},
{
"name": "Perl6",
"bytes": "14166"
},
{
"name": "Prolog",
"bytes": "782419"
},
{
"name": "Python",
"bytes": "3569206"
},
{
"name": "Shell",
"bytes": "6422"
},
{
"name": "XS",
"bytes": "120883"
}
],
"symlink_target": ""
} |
from cms.sitemaps import CMSSitemap
from django.conf import settings
from django.conf.urls.i18n import i18n_patterns
from django.conf.urls.static import static
from django.contrib import admin
from django.contrib.sitemaps.views import sitemap
from django.urls import include, path
admin.autodiscover()

# Non-localized URLs: only the sitemap lives outside i18n_patterns.
urlpatterns = [
    path("sitemap.xml", sitemap, {"sitemaps": {"cmspages": CMSSitemap}}),
]

# Language-prefixed URLs: the admin plus the django CMS catch-all.
urlpatterns += i18n_patterns(path("admin/", admin.site.urls), path("", include("cms.urls")))

# This is only needed when using runserver.
if settings.DEBUG:
    urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
    urlpatterns += static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
| {
"content_hash": "167816d773e44e0aed5732333a214110",
"timestamp": "",
"source": "github",
"line_count": 21,
"max_line_length": 92,
"avg_line_length": 34.23809523809524,
"alnum_prop": 0.7649513212795549,
"repo_name": "nephila/djangocms-installer",
"id": "8d6432e68aade6867d5016a631a9ea9183768e5e",
"size": "719",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "djangocms_installer/config/urls_i18n.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "HTML",
"bytes": "7122"
},
{
"name": "Python",
"bytes": "160394"
}
],
"symlink_target": ""
} |
from typing import Typing
from errors import AbstractClassError
from errors import NominalTypeInitError
class PydytyType(object):
    """Abstract base class for all pydyty types.

    Subclasses must implement __str__ and __eq__ (__ne__ is derived).
    Every type optionally carries a location object in `loc`, merged via
    add_loc() as more observations come in.
    """

    def __init__(self, loc=None, **kwargs):
        self.loc = loc

    def __str__(self):
        raise AbstractClassError()

    def __eq__(self, other):
        raise AbstractClassError()

    def __ne__(self, other):
        return not self.__eq__(other)

    def add_loc(self, loc):
        # First observation adopts the location; later ones are merged
        # into the existing location object.
        if self.loc is None:
            self.loc = loc
        else:
            self.loc.add_loc(loc)
class TopType(PydytyType):
    """The universal supertype.  Not meant for real programs: it stands in
    for methods/properties whose type has not been inferred yet."""

    def __str__(self):
        return '<<Top>>'

    def __eq__(self, other):
        # All TopType instances are interchangeable.
        return isinstance(other, TopType)
class BottomType(PydytyType):
    """The bottom type.  Not meant for real programs: it stands in for
    methods/properties whose type has not been inferred yet."""

    def __str__(self):
        return '<<Bottom>>'

    def __eq__(self, other):
        # All BottomType instances are interchangeable.
        return isinstance(other, BottomType)
class CompositeType(PydytyType):
    """Abstract type made up of one or more member types."""

    def __init__(self, types, **kwargs):
        super(CompositeType, self).__init__(**kwargs)
        self.types = types

    def add_type(self, _type):
        """ Adds another type to the list of possible types. """
        self.types.append(_type)
        self.add_loc(_type.loc)
        return self

    def __str__(self):
        raise AbstractClassError()

    def __eq__(self, other):
        # Equal iff same concrete class and member types match pairwise,
        # in order.
        if not isinstance(other, self.__class__):
            return False
        if len(self.types) != len(other.types):
            return False
        for mine, theirs in zip(self.types, other.types):
            if mine != theirs:
                return False
        return True
class UnionType(CompositeType):
    """A union: a value has *one of* the member types.  To keep the typing
    simple, this is never used with method types."""

    def __str__(self):
        parts = ['(%s)' % str(t) for t in self.types]
        return ' or '.join(parts)
class IntersectionType(CompositeType):
    """An intersection: *all* member types must hold.  Because this is a
    complicated kind of type, it is applied only to methods."""

    def __str__(self):
        return ' and '.join('(%s)' % str(t) for t in self.types)
class MethodType(PydytyType):
    """ Represents a method (function) type. In type theory, there is
    actually no difference. One is bound and the other is unbound. Since
    Python keeps track of this by having 'self' or 'cls' in the signature,
    that's how we are going to do as well.
    """

    def __init__(self, arg_types=None, kwarg_types=None, ret_type=None, **kwargs):
        """
        :param arg_types: list of positional-argument types (defaults to []).
        :param kwarg_types: dict mapping keyword name -> type (defaults to {}).
        :param ret_type: the return type.
        """
        super(MethodType, self).__init__(**kwargs)
        # BUG FIX: the defaults used to be the mutable literals [] and {},
        # which are shared across every call -- mutating one default-built
        # MethodType's containers would leak into all the others.
        self.arg_types = [] if arg_types is None else arg_types
        self.kwarg_types = {} if kwarg_types is None else kwarg_types
        self.ret_type = ret_type

    @classmethod
    def create_empty(cls, num_of_args, kwarg_keys, **kwargs):
        """ Creates an empty method type using top types for args and kwargs
        and a bottom type for the return type. Used as an initial type for
        inferring methods in a class."""
        # (First parameter renamed self -> cls: this is a classmethod.)
        arg_types = [TopType(**kwargs) for i in range(0, num_of_args)]
        kwargs_types = {}
        for key in kwarg_keys:
            kwargs_types[key] = TopType(**kwargs)
        ret_type = BottomType(**kwargs)
        return MethodType(arg_types, kwargs_types, ret_type, **kwargs)

    def __str__(self):
        # Renders as "(arg, ..., kw:type, ...) -> ret"; kwargs are sorted
        # by name so the output is deterministic.
        args = [str(arg_type) for arg_type in self.arg_types]
        if args:
            args = ', '.join(args)
        sorted_keys = sorted(self.kwarg_types)
        kwargs = ['%s:%s' % (k, self.kwarg_types[k]) for k in sorted_keys]
        if kwargs:
            kwargs = ', '.join(kwargs)
        if args and kwargs:
            all_args = '%s, %s' % (args, kwargs)
        elif args:
            all_args = args
        elif kwargs:
            all_args = kwargs
        else:
            all_args = ''
        return ('(%s) -> %s' % (all_args, self.ret_type))

    def __eq__(self, other):
        # BUG FIX: the original read other.arg_types before confirming that
        # other is a MethodType, so comparing against an unrelated object
        # raised AttributeError instead of returning False (every sibling
        # class guards the isinstance check first).
        # NOTE(review): ret_type is not part of the comparison -- confirm
        # whether that is intentional before changing it.
        result = isinstance(other, self.__class__)
        if result:
            if len(self.arg_types) != len(other.arg_types):
                result = False
        if result:
            if len(self.kwarg_types) != len(other.kwarg_types):
                result = False
        if result:
            for i, t in enumerate(self.arg_types):
                if t != other.arg_types[i]:
                    result = False
                    break
        if result:
            sorted_keys = sorted(self.kwarg_types)
            for k in sorted_keys:
                if self.kwarg_types[k] != other.kwarg_types.get(k):
                    result = False
                    break
        return result
class NominalType(PydytyType):
    """A type identified purely by a class name."""

    def __init__(self, name_or_obj, is_object=False, **kwargs):
        """ For convenience, we allow either name of the nominal type or an
        object from which you'd like to retrive type information."""
        super(NominalType, self).__init__(**kwargs)
        if is_object:
            # Derive the type name from a live object.
            if hasattr(name_or_obj, '__pydyty__'):  # object wrapper
                # NOTE(review): the guard checks __pydyty__ but the read is
                # __pydyty_obj__ -- presumably wrappers carry both; confirm.
                self.name = name_or_obj.__pydyty_obj__.__class__.__name__
            elif hasattr(name_or_obj, '__class__'):
                self.name = name_or_obj.__class__.__name__
            else:
                self.name = type(name_or_obj).__name__
        elif not isinstance(name_or_obj, basestring):
            raise NominalTypeInitError()
        else:
            self.name = name_or_obj

    def __str__(self):
        return self.name

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.name == other.name
class ObjectType(PydytyType):
    """ Represents a structural type. It only represents a single layer
    without a meta layer. In other words, class level type information is
    not part of an object type. Use ClassType. Or use InferredNominalType to
    do both at the same time."""

    def __init__(self, attrs=None, **kwargs):
        super(ObjectType, self).__init__(**kwargs)
        # attrs maps attribute name -> type; `attrs or {}` also swaps any
        # falsy (e.g. empty) dict for a fresh one, so callers passing a
        # shared default never alias this instance's dict.
        self.attrs = attrs or {} # NOTE: Python bug??

    def add_attr(self, name, attr_type):
        """ Adds a method to the object type. If there exists an entry
        with the same name, then we do subtyping comparisons to see if we
        can consolidate. If not, we create an intersection type."""
        if name in self.attrs:
            exist_attr_type = self.attrs[name]
            if isinstance(exist_attr_type, IntersectionType):
                # Already an intersection: just append one more member.
                exist_attr_type.add_type(attr_type)
            elif Typing.is_subtype(attr_type, exist_attr_type):
                # New type is at least as specific: narrow the entry.
                self.attrs[name] = attr_type
            elif Typing.is_subtype(exist_attr_type, attr_type):
                # Existing entry is already at least as specific: keep it.
                pass
            else:
                # Incomparable types: record both via an intersection.
                attr_type = IntersectionType([exist_attr_type, attr_type])
                self.attrs[name] = attr_type
        else:
            self.attrs[name] = attr_type

    def add_empty_method(self, name, num_of_args, kwarg_keys):
        """ Adds an empty method type to the list. This WILL overwrite the
        existing type for the method if there exists one already."""
        attr_type = MethodType.create_empty(num_of_args, kwarg_keys)
        self.attrs[name] = attr_type

    def __str__(self):
        # Deterministic rendering: attributes sorted by name.
        types = sorted(self.attrs)
        tlist = ['%s: %s' % (n, self.attrs[n]) for n in types]
        return '[%s]' % ', '.join(tlist) if tlist else '[]'

    def __eq__(self, other):
        result = isinstance(other, self.__class__)
        if result:
            if len(self.attrs) != len(other.attrs):
                result = False
        if result:
            for n, t in self.attrs.iteritems():  # Python 2 dict API
                other_method = other.attrs.get(n, None)
                if (other_method is None) or t != other.attrs.get(n, None):
                    result = False
                    break
        return result
class FusionType(NominalType, ObjectType):
    """ Similar to ObjectType except it has a name. For instance A[foo, bar]
    can be understood as an object with foo and bar methods like A."""

    def __init__(self, name_or_obj, attrs=None,
                 is_object=False, **kwargs):
        """
        :param name_or_obj: type name, or (with is_object=True) an object
            to derive the name from.
        :param attrs: optional dict mapping attribute name -> type.
        """
        # BUG FIX (hygiene): `attrs` used to default to the mutable literal
        # {}.  Behavior is unchanged -- ObjectType.__init__ replaces any
        # falsy attrs with a fresh dict -- but a shared mutable default is
        # an accident waiting to happen.
        # NOTE(review): both bases are initialized explicitly rather than
        # via super(); NominalType.__init__ also walks the MRO into
        # ObjectType.__init__, so ObjectType is initialized twice.  Left
        # as-is to preserve behavior -- confirm before restructuring.
        NominalType.__init__(self, name_or_obj, is_object)
        ObjectType.__init__(self, attrs=attrs, **kwargs)

    def __str__(self):
        # Deterministic rendering: "Name[attr: type, ...]" sorted by name.
        types = sorted(self.attrs)
        tlist = ['%s: %s' % (n, self.attrs[n]) for n in types]
        return '%s[%s]' % (self.name, ', '.join(tlist))

    def __eq__(self, other):
        result = isinstance(other, self.__class__)
        if result:
            result = self.name == other.name
        if result:
            if len(self.attrs) != len(other.attrs):
                result = False
        if result:
            for n, t in self.attrs.iteritems():  # Python 2 dict API
                if t != other.attrs.get(n, None):
                    result = False
                    break
        return result
| {
"content_hash": "9e6eb733b2bd6d1c8b504090b119075d",
"timestamp": "",
"source": "github",
"line_count": 269,
"max_line_length": 78,
"avg_line_length": 34.561338289962826,
"alnum_prop": 0.5645907281918898,
"repo_name": "davidan42/pydyty",
"id": "90bebb0f3e7a8ff9085b01e5b59c7b9c27b6c49d",
"size": "9314",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pydyty/types.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "38267"
}
],
"symlink_target": ""
} |
"""
====================================================================
Extract epochs for multiple conditions, save evoked response to disk
====================================================================
This script shows how to read the epochs for multiple conditions from
a raw file given a list of events. The epochs are averaged to produce
evoked data and then saved to disk.
"""
# Authors: Alexandre Gramfort <alexandre.gramfort@telecom-paristech.fr>
# Eric Larson <larson.eric.d@gmail.com>
# Denis Engemann <denis.engemann@gmail.com>
#
# License: BSD (3-clause)
import mne
from mne import io
from mne.datasets import sample
from mne.epochs import combine_event_ids
import matplotlib.pyplot as plt

print(__doc__)

data_path = sample.data_path()

###############################################################################
# Set parameters
raw_fname = data_path + '/MEG/sample/sample_audvis_filt-0-40_raw.fif'
event_fname = data_path + '/MEG/sample/sample_audvis_filt-0-40_raw-eve.fif'
# Event IDs of the four experimental conditions in the sample dataset.
event_ids = {'AudL': 1, 'AudR': 2, 'VisL': 3, 'VisR': 4}
tmin = -0.2  # epoch start, seconds relative to each event
tmax = 0.5   # epoch end, seconds relative to each event

# Setup for reading the raw data
raw = io.Raw(raw_fname)
events = mne.read_events(event_fname)

# Set up pick list: EEG + STI 014 - bad channels (modify to your needs)
include = []  # or stim channels ['STI 014']
raw.info['bads'] += ['EEG 053']  # bads + 1 more

# pick EEG channels
picks = mne.pick_types(raw.info, meg=False, eeg=True, stim=False, eog=True,
                       include=include, exclude='bads')
# Read epochs; trials whose EEG/EOG amplitudes exceed the reject
# thresholds (in volts) are dropped as artifacts.
epochs = mne.Epochs(raw, events, event_ids, tmin, tmax, picks=picks,
                    baseline=(None, 0), reject=dict(eeg=80e-6, eog=150e-6))
# Let's equalize the trial counts in each condition
epochs.equalize_event_counts(['AudL', 'AudR', 'VisL', 'VisR'], copy=False)
# Now let's combine some conditions: left/right merged per modality.
combine_event_ids(epochs, ['AudL', 'AudR'], {'Auditory': 12}, copy=False)
combine_event_ids(epochs, ['VisL', 'VisR'], {'Visual': 34}, copy=False)
# average epochs and get Evoked datasets
evokeds = [epochs[cond].average() for cond in ['Auditory', 'Visual']]
# save evoked data to disk
mne.write_evokeds('sample_auditory_and_visual_eeg-ave.fif', evokeds)

###############################################################################
# View evoked response: one subplot per condition.
plt.clf()
ax = plt.subplot(2, 1, 1)
evokeds[0].plot(axes=ax)
plt.title('EEG evoked potential, auditory trials')
plt.ylabel('Potential (uV)')
ax = plt.subplot(2, 1, 2)
evokeds[1].plot(axes=ax)
plt.title('EEG evoked potential, visual trials')
plt.ylabel('Potential (uV)')
plt.show()
| {
"content_hash": "025320ba7d5259f41cc84e99ad4c0020",
"timestamp": "",
"source": "github",
"line_count": 72,
"max_line_length": 79,
"avg_line_length": 35.90277777777778,
"alnum_prop": 0.6135396518375241,
"repo_name": "agramfort/mne-python",
"id": "4102390320d70bdddda487a43f021ec74389e950",
"size": "2585",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "examples/plot_from_raw_to_multiple_epochs_to_evoked.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "PowerShell",
"bytes": "2986"
},
{
"name": "Python",
"bytes": "3751581"
},
{
"name": "Shell",
"bytes": "4011"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
from django.db import models
from django.db.migrations.operations.base import Operation
from django.db.migrations.state import ModelState
from django.db.models.options import normalize_together
from django.utils import six
from django.utils.functional import cached_property
from .fields import (
AddField, AlterField, FieldOperation, RemoveField, RenameField,
)
def _check_for_duplicates(arg_name, objs):
used_vals = set()
for val in objs:
if val in used_vals:
raise ValueError(
"Found duplicate value %s in CreateModel %s argument." % (val, arg_name)
)
used_vals.add(val)
class ModelOperation(Operation):
    """Common base for migration operations acting on one model (by name)."""

    def __init__(self, name):
        self.name = name

    @cached_property
    def name_lower(self):
        # Cached: the lower-cased name is used in every comparison.
        return self.name.lower()

    def references_model(self, name, app_label=None):
        return name.lower() == self.name_lower

    def reduce(self, operation, in_between, app_label=None):
        reduced = super(ModelOperation, self).reduce(operation, in_between, app_label=app_label)
        if reduced:
            return reduced
        # Operations that never mention this model can be optimized through.
        return not operation.references_model(self.name, app_label)
class CreateModel(ModelOperation):
    """
    Create a model's table.
    """

    # Tells the migration writer to expand these kwargs over several lines.
    serialization_expand_args = ['fields', 'options', 'managers']

    def __init__(self, name, fields, options=None, bases=None, managers=None):
        self.fields = fields
        self.options = options or {}
        self.bases = bases or (models.Model,)
        self.managers = managers or []
        super(CreateModel, self).__init__(name)
        # Sanity-check that there are no duplicated field names, bases, or
        # manager names
        _check_for_duplicates('fields', (name for name, _ in self.fields))
        _check_for_duplicates('bases', (
            base._meta.label_lower if hasattr(base, '_meta') else
            base.lower() if isinstance(base, six.string_types) else base
            for base in self.bases
        ))
        _check_for_duplicates('managers', (name for name, _ in self.managers))

    def deconstruct(self):
        # (class name, args, kwargs) for serialization into a migration
        # file; default-valued kwargs are omitted to keep the file short.
        kwargs = {
            'name': self.name,
            'fields': self.fields,
        }
        if self.options:
            kwargs['options'] = self.options
        if self.bases and self.bases != (models.Model,):
            kwargs['bases'] = self.bases
        if self.managers and self.managers != [('objects', models.Manager())]:
            kwargs['managers'] = self.managers
        return (
            self.__class__.__name__,
            [],
            kwargs
        )

    def state_forwards(self, app_label, state):
        # Register the new model in the in-memory project state.
        state.add_model(ModelState(
            app_label,
            self.name,
            list(self.fields),
            dict(self.options),
            tuple(self.bases),
            list(self.managers),
        ))

    def database_forwards(self, app_label, schema_editor, from_state, to_state):
        model = to_state.apps.get_model(app_label, self.name)
        if self.allow_migrate_model(schema_editor.connection.alias, model):
            schema_editor.create_model(model)

    def database_backwards(self, app_label, schema_editor, from_state, to_state):
        # Reverse of creation is deletion.
        model = from_state.apps.get_model(app_label, self.name)
        if self.allow_migrate_model(schema_editor.connection.alias, model):
            schema_editor.delete_model(model)

    def describe(self):
        return "Create %smodel %s" % ("proxy " if self.options.get("proxy", False) else "", self.name)

    def references_model(self, name, app_label=None):
        # True when this operation mentions `name`: as the created model
        # itself, as a string base class, or as the target of a FK/M2M on
        # one of its fields.
        strings_to_check = [self.name]
        # Check we didn't inherit from the model
        for base in self.bases:
            if isinstance(base, six.string_types):
                strings_to_check.append(base.split(".")[-1])
        # Check we have no FKs/M2Ms with it
        for fname, field in self.fields:
            if field.remote_field:
                if isinstance(field.remote_field.model, six.string_types):
                    strings_to_check.append(field.remote_field.model.split(".")[-1])
        # Now go over all the strings and compare them
        for string in strings_to_check:
            if string.lower() == name.lower():
                return True
        return False

    def model_to_key(self, model):
        """
        Take either a model class or an "app_label.ModelName" string
        and return (app_label, object_name).
        """
        if isinstance(model, six.string_types):
            return model.split(".", 1)
        else:
            return model._meta.app_label, model._meta.object_name

    def reduce(self, operation, in_between, app_label=None):
        # Fold a later operation on this model into the CreateModel itself
        # so the migration optimizer can collapse steps.
        if (isinstance(operation, DeleteModel) and
                self.name_lower == operation.name_lower and
                not self.options.get("proxy", False)):
            # create + delete cancel out entirely.
            return []
        elif isinstance(operation, RenameModel) and self.name_lower == operation.old_name_lower:
            # create + rename becomes a create under the new name.
            return [
                CreateModel(
                    operation.new_name,
                    fields=self.fields,
                    options=self.options,
                    bases=self.bases,
                    managers=self.managers,
                ),
            ]
        elif isinstance(operation, FieldOperation) and self.name_lower == operation.model_name_lower:
            if isinstance(operation, AddField):
                # Don't allow optimizations of FKs through models they reference
                if hasattr(operation.field, "remote_field") and operation.field.remote_field:
                    for between in in_between:
                        # Check that it doesn't point to the model
                        app_label, object_name = self.model_to_key(operation.field.remote_field.model)
                        if between.references_model(object_name, app_label):
                            # False means "cannot reduce past this point"
                            # (as opposed to an empty list, which would
                            # mean the operations cancel out).
                            return False
                        # Check that it's not through the model
                        if getattr(operation.field.remote_field, "through", None):
                            app_label, object_name = self.model_to_key(operation.field.remote_field.through)
                            if between.references_model(object_name, app_label):
                                return False
                # Absorb the added field into the field list.
                return [
                    CreateModel(
                        self.name,
                        fields=self.fields + [(operation.name, operation.field)],
                        options=self.options,
                        bases=self.bases,
                        managers=self.managers,
                    ),
                ]
            elif isinstance(operation, AlterField):
                # Swap in the altered field definition in place.
                return [
                    CreateModel(
                        self.name,
                        fields=[
                            (n, operation.field if n == operation.name else v)
                            for n, v in self.fields
                        ],
                        options=self.options,
                        bases=self.bases,
                        managers=self.managers,
                    ),
                ]
            elif isinstance(operation, RemoveField):
                # Drop the removed field from the field list.
                return [
                    CreateModel(
                        self.name,
                        fields=[
                            (n, v)
                            for n, v in self.fields
                            if n.lower() != operation.name_lower
                        ],
                        options=self.options,
                        bases=self.bases,
                        managers=self.managers,
                    ),
                ]
            elif isinstance(operation, RenameField):
                # Rename the field in place.
                return [
                    CreateModel(
                        self.name,
                        fields=[
                            (operation.new_name if n == operation.old_name else n, v)
                            for n, v in self.fields
                        ],
                        options=self.options,
                        bases=self.bases,
                        managers=self.managers,
                    ),
                ]
        return super(CreateModel, self).reduce(operation, in_between, app_label=app_label)
class DeleteModel(ModelOperation):
    """
    Drops a model's table.
    """

    def deconstruct(self):
        # (class name, args, kwargs) for serialization into a migration file.
        return (self.__class__.__name__, [], {'name': self.name})

    def state_forwards(self, app_label, state):
        state.remove_model(app_label, self.name_lower)

    def database_forwards(self, app_label, schema_editor, from_state, to_state):
        model = from_state.apps.get_model(app_label, self.name)
        if self.allow_migrate_model(schema_editor.connection.alias, model):
            schema_editor.delete_model(model)

    def database_backwards(self, app_label, schema_editor, from_state, to_state):
        # Reverse of deletion is creation, using the model as defined in
        # the state being rolled back to.
        model = to_state.apps.get_model(app_label, self.name)
        if self.allow_migrate_model(schema_editor.connection.alias, model):
            schema_editor.create_model(model)

    def describe(self):
        return "Delete model %s" % (self.name,)
class RenameModel(ModelOperation):
    """
    Renames a model.
    """

    def __init__(self, old_name, new_name):
        self.old_name = old_name
        self.new_name = new_name
        super(RenameModel, self).__init__(old_name)

    @cached_property
    def old_name_lower(self):
        return self.old_name.lower()

    @cached_property
    def new_name_lower(self):
        return self.new_name.lower()

    def deconstruct(self):
        kwargs = {
            'old_name': self.old_name,
            'new_name': self.new_name,
        }
        return (
            self.__class__.__name__,
            [],
            kwargs
        )

    def state_forwards(self, app_label, state):
        # Rename the model in the in-memory state, then repoint every
        # relation (FK/M2M, and M2M "through") that referenced it.
        apps = state.apps
        model = apps.get_model(app_label, self.old_name)
        model._meta.apps = apps
        # Get all of the related objects we need to repoint
        all_related_objects = (
            f for f in model._meta.get_fields(include_hidden=True)
            if f.auto_created and not f.concrete and (not f.hidden or f.many_to_many)
        )
        # Rename the model
        state.models[app_label, self.new_name_lower] = state.models[app_label, self.old_name_lower]
        state.models[app_label, self.new_name_lower].name = self.new_name
        state.remove_model(app_label, self.old_name_lower)
        # Repoint the FKs and M2Ms pointing to us
        for related_object in all_related_objects:
            if related_object.model is not model:
                # The model being renamed does not participate in this relation
                # directly. Rather, a superclass does.
                continue
            # Use the new related key for self referential related objects.
            if related_object.related_model == model:
                related_key = (app_label, self.new_name_lower)
            else:
                related_key = (
                    related_object.related_model._meta.app_label,
                    related_object.related_model._meta.model_name,
                )
            new_fields = []
            for name, field in state.models[related_key].fields:
                if name == related_object.field.name:
                    # Clone before mutating: field objects are shared.
                    field = field.clone()
                    field.remote_field.model = "%s.%s" % (app_label, self.new_name)
                new_fields.append((name, field))
            state.models[related_key].fields = new_fields
            state.reload_model(*related_key)
        # Repoint M2Ms with through pointing to us
        related_models = {
            f.remote_field.model for f in model._meta.fields
            if getattr(f.remote_field, 'model', None)
        }
        model_name = '%s.%s' % (app_label, self.old_name)
        for related_model in related_models:
            if related_model == model:
                related_key = (app_label, self.new_name_lower)
            else:
                related_key = (related_model._meta.app_label, related_model._meta.model_name)
            new_fields = []
            changed = False
            for name, field in state.models[related_key].fields:
                if field.is_relation and field.many_to_many and field.remote_field.through == model_name:
                    field = field.clone()
                    field.remote_field.through = '%s.%s' % (app_label, self.new_name)
                    changed = True
                new_fields.append((name, field))
            if changed:
                state.models[related_key].fields = new_fields
                state.reload_model(*related_key)
        state.reload_model(app_label, self.new_name_lower)

    def database_forwards(self, app_label, schema_editor, from_state, to_state):
        new_model = to_state.apps.get_model(app_label, self.new_name)
        if self.allow_migrate_model(schema_editor.connection.alias, new_model):
            old_model = from_state.apps.get_model(app_label, self.old_name)
            # Move the main table
            schema_editor.alter_db_table(
                new_model,
                old_model._meta.db_table,
                new_model._meta.db_table,
            )
            # Alter the fields pointing to us
            for related_object in old_model._meta.related_objects:
                if related_object.related_model == old_model:
                    # Self-referential relation: use the renamed model.
                    model = new_model
                    related_key = (app_label, self.new_name_lower)
                else:
                    model = related_object.related_model
                    related_key = (
                        related_object.related_model._meta.app_label,
                        related_object.related_model._meta.model_name,
                    )
                to_field = to_state.apps.get_model(
                    *related_key
                )._meta.get_field(related_object.field.name)
                schema_editor.alter_field(
                    model,
                    related_object.field,
                    to_field,
                )
            # Rename M2M fields whose name is based on this model's name.
            fields = zip(old_model._meta.local_many_to_many, new_model._meta.local_many_to_many)
            for (old_field, new_field) in fields:
                # Skip self-referential fields as these are renamed above.
                if new_field.model == new_field.related_model or not new_field.remote_field.through._meta.auto_created:
                    continue
                # Rename the M2M table that's based on this model's name.
                old_m2m_model = old_field.remote_field.through
                new_m2m_model = new_field.remote_field.through
                schema_editor.alter_db_table(
                    new_m2m_model,
                    old_m2m_model._meta.db_table,
                    new_m2m_model._meta.db_table,
                )
                # Rename the column in the M2M table that's based on this
                # model's name.
                schema_editor.alter_field(
                    new_m2m_model,
                    old_m2m_model._meta.get_field(old_model._meta.model_name),
                    new_m2m_model._meta.get_field(new_model._meta.model_name),
                )

    def database_backwards(self, app_label, schema_editor, from_state, to_state):
        # Run forwards with old/new names swapped, then restore them.
        # (The cached *_lower values are swapped too, keeping them in sync.)
        self.new_name_lower, self.old_name_lower = self.old_name_lower, self.new_name_lower
        self.new_name, self.old_name = self.old_name, self.new_name

        self.database_forwards(app_label, schema_editor, from_state, to_state)

        self.new_name_lower, self.old_name_lower = self.old_name_lower, self.new_name_lower
        self.new_name, self.old_name = self.old_name, self.new_name

    def references_model(self, name, app_label=None):
        # This operation mentions both the old and the new name.
        return (
            name.lower() == self.old_name_lower or
            name.lower() == self.new_name_lower
        )

    def describe(self):
        return "Rename model %s to %s" % (self.old_name, self.new_name)

    def reduce(self, operation, in_between, app_label=None):
        # rename A->B followed by rename B->C collapses to rename A->C.
        if (isinstance(operation, RenameModel) and
                self.new_name_lower == operation.old_name_lower):
            return [
                RenameModel(
                    self.old_name,
                    operation.new_name,
                ),
            ]
        # Skip `ModelOperation.reduce` as we want to run `references_model`
        # against self.new_name.
        return (
            super(ModelOperation, self).reduce(operation, in_between, app_label=app_label) or
            not operation.references_model(self.new_name, app_label)
        )
class AlterModelTable(ModelOperation):
    """
    Renames a model's table
    """

    def __init__(self, name, table):
        self.table = table
        super(AlterModelTable, self).__init__(name)

    def deconstruct(self):
        kwargs = {
            'name': self.name,
            'table': self.table,
        }
        return (
            self.__class__.__name__,
            [],
            kwargs
        )

    def state_forwards(self, app_label, state):
        state.models[app_label, self.name_lower].options["db_table"] = self.table
        state.reload_model(app_label, self.name_lower)

    def database_forwards(self, app_label, schema_editor, from_state, to_state):
        new_model = to_state.apps.get_model(app_label, self.name)
        if self.allow_migrate_model(schema_editor.connection.alias, new_model):
            old_model = from_state.apps.get_model(app_label, self.name)
            schema_editor.alter_db_table(
                new_model,
                old_model._meta.db_table,
                new_model._meta.db_table,
            )
            # Rename M2M fields whose name is based on this model's db_table
            for (old_field, new_field) in zip(old_model._meta.local_many_to_many, new_model._meta.local_many_to_many):
                if new_field.remote_field.through._meta.auto_created:
                    schema_editor.alter_db_table(
                        new_field.remote_field.through,
                        old_field.remote_field.through._meta.db_table,
                        new_field.remote_field.through._meta.db_table,
                    )

    def database_backwards(self, app_label, schema_editor, from_state, to_state):
        # Symmetric operation: the caller swaps from_state/to_state, so
        # backwards is the same table-alter run in the other direction.
        return self.database_forwards(app_label, schema_editor, from_state, to_state)

    def describe(self):
        return "Rename table for %s to %s" % (self.name, self.table)

    def reduce(self, operation, in_between, app_label=None):
        # A later table rename or model deletion supersedes this one.
        if isinstance(operation, (AlterModelTable, DeleteModel)) and self.name_lower == operation.name_lower:
            return [operation]
        return super(AlterModelTable, self).reduce(operation, in_between, app_label=app_label)
class ModelOptionOperation(ModelOperation):
    """Base for operations that only touch a model's Meta options."""
    def reduce(self, operation, in_between, app_label=None):
        # A later operation of the same kind (or a DeleteModel) on the same
        # model makes this one redundant; keep only the later operation.
        same_model = self.name_lower == operation.name_lower
        if same_model and isinstance(operation, (self.__class__, DeleteModel)):
            return [operation]
        return super(ModelOptionOperation, self).reduce(operation, in_between, app_label=app_label)
class FieldRelatedOptionOperation(ModelOptionOperation):
    """Model-option operation that may be reordered past unrelated field ops."""
    def reduce(self, operation, in_between, app_label=None):
        # A field operation on the same model that this option does not
        # reference can safely run first; emit it ahead of ourselves.
        if isinstance(operation, FieldOperation):
            if (self.name_lower == operation.model_name_lower and
                    not self.references_field(operation.model_name, operation.name)):
                return [operation, self]
        return super(FieldRelatedOptionOperation, self).reduce(operation, in_between, app_label=app_label)
class AlterUniqueTogether(FieldRelatedOptionOperation):
    """
    Changes the value of unique_together to the target one.
    Input value of unique_together must be a set of tuples.
    """
    option_name = "unique_together"
    def __init__(self, name, unique_together):
        # Normalize a single tuple / list-of-lists input into a set of tuples.
        unique_together = normalize_together(unique_together)
        self.unique_together = set(tuple(cons) for cons in unique_together)
        super(AlterUniqueTogether, self).__init__(name)
    def deconstruct(self):
        # (class name, args, kwargs) for the migration writer.
        kwargs = {
            'name': self.name,
            'unique_together': self.unique_together,
        }
        return (
            self.__class__.__name__,
            [],
            kwargs
        )
    def state_forwards(self, app_label, state):
        # Record the new option value on the in-memory model state.
        model_state = state.models[app_label, self.name_lower]
        model_state.options[self.option_name] = self.unique_together
        state.reload_model(app_label, self.name_lower)
    def database_forwards(self, app_label, schema_editor, from_state, to_state):
        new_model = to_state.apps.get_model(app_label, self.name)
        if self.allow_migrate_model(schema_editor.connection.alias, new_model):
            old_model = from_state.apps.get_model(app_label, self.name)
            # The schema editor diffs old vs new and adds/drops constraints.
            schema_editor.alter_unique_together(
                new_model,
                getattr(old_model._meta, self.option_name, set()),
                getattr(new_model._meta, self.option_name, set()),
            )
    def database_backwards(self, app_label, schema_editor, from_state, to_state):
        # Symmetric operation: forwards with swapped states undoes the change.
        return self.database_forwards(app_label, schema_editor, from_state, to_state)
    def references_field(self, model_name, name, app_label=None):
        # An empty option is treated as referencing every field, so the op is
        # never reordered past field operations on the same model.
        return (
            self.references_model(model_name, app_label) and
            (
                not self.unique_together or
                any((name in together) for together in self.unique_together)
            )
        )
    def describe(self):
        # len(... or '') yields 0 when the option is empty or falsy.
        return "Alter %s for %s (%s constraint(s))" % (self.option_name, self.name, len(self.unique_together or ''))
class AlterIndexTogether(FieldRelatedOptionOperation):
    """
    Changes the value of index_together to the target one.
    Input value of index_together must be a set of tuples.
    """
    option_name = "index_together"
    def __init__(self, name, index_together):
        # Normalize a single tuple / list-of-lists input into a set of tuples.
        index_together = normalize_together(index_together)
        self.index_together = set(tuple(cons) for cons in index_together)
        super(AlterIndexTogether, self).__init__(name)
    def deconstruct(self):
        # (class name, args, kwargs) for the migration writer.
        kwargs = {
            'name': self.name,
            'index_together': self.index_together,
        }
        return (
            self.__class__.__name__,
            [],
            kwargs
        )
    def state_forwards(self, app_label, state):
        # Record the new option value on the in-memory model state.
        model_state = state.models[app_label, self.name_lower]
        model_state.options[self.option_name] = self.index_together
        state.reload_model(app_label, self.name_lower)
    def database_forwards(self, app_label, schema_editor, from_state, to_state):
        new_model = to_state.apps.get_model(app_label, self.name)
        if self.allow_migrate_model(schema_editor.connection.alias, new_model):
            old_model = from_state.apps.get_model(app_label, self.name)
            # The schema editor diffs old vs new and creates/drops indexes.
            schema_editor.alter_index_together(
                new_model,
                getattr(old_model._meta, self.option_name, set()),
                getattr(new_model._meta, self.option_name, set()),
            )
    def database_backwards(self, app_label, schema_editor, from_state, to_state):
        # Symmetric operation: forwards with swapped states undoes the change.
        return self.database_forwards(app_label, schema_editor, from_state, to_state)
    def references_field(self, model_name, name, app_label=None):
        # An empty option is treated as referencing every field, so the op is
        # never reordered past field operations on the same model.
        return (
            self.references_model(model_name, app_label) and
            (
                not self.index_together or
                any((name in together) for together in self.index_together)
            )
        )
    def describe(self):
        # len(... or '') yields 0 when the option is empty or falsy.
        return "Alter %s for %s (%s constraint(s))" % (self.option_name, self.name, len(self.index_together or ''))
class AlterOrderWithRespectTo(FieldRelatedOptionOperation):
    """
    Represents a change with the order_with_respect_to option.
    """
    def __init__(self, name, order_with_respect_to):
        # Name of the field to order by, or None to remove the option.
        self.order_with_respect_to = order_with_respect_to
        super(AlterOrderWithRespectTo, self).__init__(name)
    def deconstruct(self):
        # (class name, args, kwargs) for the migration writer.
        kwargs = {
            'name': self.name,
            'order_with_respect_to': self.order_with_respect_to,
        }
        return (
            self.__class__.__name__,
            [],
            kwargs
        )
    def state_forwards(self, app_label, state):
        # Record the new option value on the in-memory model state.
        model_state = state.models[app_label, self.name_lower]
        model_state.options['order_with_respect_to'] = self.order_with_respect_to
        state.reload_model(app_label, self.name_lower)
    def database_forwards(self, app_label, schema_editor, from_state, to_state):
        to_model = to_state.apps.get_model(app_label, self.name)
        if self.allow_migrate_model(schema_editor.connection.alias, to_model):
            from_model = from_state.apps.get_model(app_label, self.name)
            # Remove a field if we need to
            if from_model._meta.order_with_respect_to and not to_model._meta.order_with_respect_to:
                schema_editor.remove_field(from_model, from_model._meta.get_field("_order"))
            # Add a field if we need to (altering the column is untouched as
            # it's likely a rename)
            elif to_model._meta.order_with_respect_to and not from_model._meta.order_with_respect_to:
                field = to_model._meta.get_field("_order")
                if not field.has_default():
                    # Existing rows need a value for the new NOT NULL column.
                    field.default = 0
                schema_editor.add_field(
                    from_model,
                    field,
                )
    def database_backwards(self, app_label, schema_editor, from_state, to_state):
        # Symmetric operation: forwards with swapped states undoes the change.
        self.database_forwards(app_label, schema_editor, from_state, to_state)
    def references_field(self, model_name, name, app_label=None):
        # A cleared option (None) is treated as referencing every field.
        return (
            self.references_model(model_name, app_label) and
            (
                self.order_with_respect_to is None or
                name == self.order_with_respect_to
            )
        )
    def describe(self):
        return "Set order_with_respect_to on %s to %s" % (self.name, self.order_with_respect_to)
class AlterModelOptions(ModelOptionOperation):
    """
    Sets new model options that don't directly affect the database schema
    (like verbose_name, permissions, ordering). Python code in migrations
    may still need them.
    """
    # Model options we want to compare and preserve in an AlterModelOptions op
    ALTER_OPTION_KEYS = [
        "base_manager_name",
        "default_manager_name",
        "get_latest_by",
        "managed",
        "ordering",
        "permissions",
        "default_permissions",
        "select_on_save",
        "verbose_name",
        "verbose_name_plural",
    ]
    def __init__(self, name, options):
        # ``options`` is a dict of Meta options to set; keys from
        # ALTER_OPTION_KEYS that are absent here get *removed* from state.
        self.options = options
        super(AlterModelOptions, self).__init__(name)
    def deconstruct(self):
        # (class name, args, kwargs) for the migration writer.
        kwargs = {
            'name': self.name,
            'options': self.options,
        }
        return (
            self.__class__.__name__,
            [],
            kwargs
        )
    def state_forwards(self, app_label, state):
        model_state = state.models[app_label, self.name_lower]
        # Copy before mutating so other references to the dict are unaffected.
        model_state.options = dict(model_state.options)
        model_state.options.update(self.options)
        # Options omitted from self.options are treated as reset to default.
        for key in self.ALTER_OPTION_KEYS:
            if key not in self.options and key in model_state.options:
                del model_state.options[key]
        state.reload_model(app_label, self.name_lower)
    def database_forwards(self, app_label, schema_editor, from_state, to_state):
        # Pure Python-level options: nothing to change in the database.
        pass
    def database_backwards(self, app_label, schema_editor, from_state, to_state):
        pass
    def describe(self):
        return "Change Meta options on %s" % (self.name, )
class AlterModelManagers(ModelOptionOperation):
    """Alters the model's managers."""
    serialization_expand_args = ['managers']

    def __init__(self, name, managers):
        self.managers = managers
        super(AlterModelManagers, self).__init__(name)

    def deconstruct(self):
        # Managers are passed positionally; no keyword arguments needed.
        return (self.__class__.__name__, [self.name, self.managers], {})

    def state_forwards(self, app_label, state):
        # Replace the managers list on the in-memory model state.
        model_state = state.models[app_label, self.name_lower]
        model_state.managers = list(self.managers)
        state.reload_model(app_label, self.name_lower)

    def database_forwards(self, app_label, schema_editor, from_state, to_state):
        # Managers are a Python-level concept; no schema change is required.
        pass

    def database_backwards(self, app_label, schema_editor, from_state, to_state):
        pass

    def describe(self):
        return "Change managers on %s" % (self.name, )
| {
"content_hash": "dda951083ef39517246f77fb5318422d",
"timestamp": "",
"source": "github",
"line_count": 742,
"max_line_length": 119,
"avg_line_length": 39.118598382749326,
"alnum_prop": 0.5672155998070695,
"repo_name": "hosseinmh/Django_learning",
"id": "061d6927736fe65b624a03471372c8f22a1ce359",
"size": "29026",
"binary": false,
"copies": "10",
"ref": "refs/heads/master",
"path": "djmod/.venv/lib/python3.5/site-packages/django/db/migrations/operations/models.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "45820"
},
{
"name": "HTML",
"bytes": "65962"
},
{
"name": "JavaScript",
"bytes": "126104"
},
{
"name": "Python",
"bytes": "7605266"
},
{
"name": "Shell",
"bytes": "3314"
}
],
"symlink_target": ""
} |
from datetime import datetime, time
from lxml import etree
import xmltodict
class ResourceElement(object):
    """
    The Resource Element of a Control (fr-form-resources).

    Attribute access is forwarded to the control's resource element mapping;
    unknown attributes (or a missing resource) resolve to None instead of
    raising AttributeError.
    """
    def __init__(self, control):
        self.control = control

    def __getattr__(self, name):
        resource = self.control._resource
        if resource and hasattr(resource, 'element'):
            return resource.element.get(name, None)
        return None
class Control(object):
    """Base class for form controls.

    Binds a control to its builder, bind and XML element, then resolves
    parent, resource, model instance and default/raw values through the
    ``set_*`` template methods. Subclasses must implement the methods that
    raise NotImplementedError below. The ordering of the set_* calls in
    __init__ matters: later steps read attributes set by earlier ones.
    """
    def __init__(self, builder, bind, element):
        self._builder = builder
        self._bind = bind
        self._element = element
        self._context = None
        self._parent = None
        self.set_parent()
        # XXX Maybe set_refs is obsolete by following
        self._resource = None
        self.set_resource()
        # model_instance is like raw default_value.
        # Still called model_instance, because of FB terminology.
        self._model_instance = None
        self.set_model_instance()
        self.default_raw_value = None
        self.set_default_raw_value()
        self.default_value = None
        self.set_default_value()
        self._resource_element = ResourceElement(self)
        # Attributes via Element (which get these dynamically)
        self.label = None
        self.hint = None
        self.alert = None
        if self._resource:
            self.label = self._resource.element.get('label', None)
            self.hint = self._resource.element.get('hint', None)
            self.alert = self._resource.element.get('alert', None)
        self._raw_value = None
        self.set_raw_value()
        self.init()
    def init(self):
        """ This method is called after :meth:`~.__init__`; subclass hook."""
        pass
    def add_context(self, context):
        # Attach an external context object after construction.
        self._context = context
    def set_parent(self):
        # Resolve the parent control via the builder's registry, if present.
        if self._bind.parent and self._bind.parent.name in self._builder.controls:
            self._parent = self._builder.controls[self._bind.parent.name]
    def set_model_instance(self):
        if not self._bind.parent:
            return
        # TODO namespace prefix Error
        # query = "//xf:model/xf:instance/form/%s/%s" % (
        query = "//form/%s/%s" % (
            self._bind.parent.name,
            self._bind.name
        )
        # Only the first XPath match is used; assumes unique node names.
        res = self._builder.xml_root.xpath(query)
        if len(res) > 0:
            self._model_instance = res[0]
    def set_resource(self):
        # Resource lookup is keyed by bind name in the builder's resource map.
        if self._bind.name in self._builder.resource:
            self._resource = self._builder.resource[self._bind.name]
    def init_runner_form_attrs(self, runner_element):
        raise NotImplementedError
    def set_default_raw_value(self):
        raise NotImplementedError
    def set_default_value(self):
        raise NotImplementedError
    def set_raw_value(self):
        raise NotImplementedError
    def encode(self, value):
        """
        By the self.datatype (handler):
        >> self.datatype.encode(value)
        """
        raise NotImplementedError
    def decode(self, element):
        """
        By the self.datatype (handler):
        >> self.datatype.decode(element)
        """
        raise NotImplementedError
class StringControl(Control):
    """Control holding a plain string value; text passes through unchanged."""
    def init_runner_form_attrs(self, runner_element):
        self.value = self.decode(runner_element)
        self.raw_value = runner_element.text

    def set_default_raw_value(self):
        self.default_raw_value = getattr(self._model_instance, 'text', None)

    def set_default_value(self):
        self.default_value = self.decode(self._model_instance)

    def set_raw_value(self):
        self._raw_value = self._element.text

    def decode(self, element):
        # A missing element or empty text node decodes to None.
        if element is None:
            return None
        return getattr(element, 'text', None)

    def encode(self, value):
        return value
class DateControl(Control):
    """Control holding an ISO date (YYYY-MM-DD) value."""
    def init_runner_form_attrs(self, runner_element):
        self.value = self.decode(runner_element)
        self.raw_value = runner_element.text
    def set_default_raw_value(self):
        self.default_raw_value = getattr(self._model_instance, 'text', None)
    def set_default_value(self):
        self.default_value = self.decode(self._model_instance)
    def set_raw_value(self):
        self._raw_value = self._element.text
    def decode(self, element):
        """Parse the element text as a date; return a flagged string on failure."""
        if element is None or not hasattr(element, 'text') or element.text is None:
            return None
        else:
            try:
                return datetime.strptime(element.text, '%Y-%m-%d').date()
            # Was a bare `except:`, which also swallowed KeyboardInterrupt /
            # SystemExit; only tolerate actual parse failures.
            except (ValueError, TypeError):
                return "%s (!)" % element.text
    def encode(self, value):
        return datetime.strftime(value, '%Y-%m-%d')
class TimeControl(Control):
    """Control holding a time-of-day (HH:MM:SS) value."""
    def init_runner_form_attrs(self, runner_element):
        self.value = self.decode(runner_element)
        self.raw_value = runner_element.text
    def set_default_raw_value(self):
        self.default_raw_value = getattr(self._model_instance, 'text', None)
    def set_default_value(self):
        self.default_value = self.decode(self._model_instance)
    def set_raw_value(self):
        self._raw_value = self._element.text
    def decode(self, element):
        """Parse the element text as a time; return a flagged string on failure."""
        if element is None or not hasattr(element, 'text') or element.text is None:
            return None
        else:
            try:
                return datetime.strptime(element.text, '%H:%M:%S').time()
            # Was a bare `except:`, which also swallowed KeyboardInterrupt /
            # SystemExit; only tolerate actual parse failures.
            except (ValueError, TypeError):
                return "%s (!)" % element.text
    def encode(self, value):
        # NOTE(review): unbound-call style; assumes ``value`` is a
        # datetime.time instance -- confirm callers never pass datetimes.
        return time.strftime(value, '%H:%M:%S')
class DateTimeControl(Control):
    """Control holding an ISO datetime (YYYY-MM-DDTHH:MM:SS) value."""
    def init_runner_form_attrs(self, runner_element):
        self.value = self.decode(runner_element)
        self.raw_value = runner_element.text
    def set_default_raw_value(self):
        self.default_raw_value = getattr(self._model_instance, 'text', None)
    def set_default_value(self):
        self.default_value = self.decode(self._model_instance)
    def set_raw_value(self):
        self._raw_value = self._element.text
    def decode(self, element):
        """Parse the element text as a datetime; return a flagged string on failure."""
        if element is None or not hasattr(element, 'text') or element.text is None:
            return None
        else:
            try:
                return datetime.strptime(element.text, '%Y-%m-%dT%H:%M:%S')
            # Was a bare `except:`, which also swallowed KeyboardInterrupt /
            # SystemExit; only tolerate actual parse failures.
            except (ValueError, TypeError):
                return "%s (!)" % element.text
    def encode(self, value):
        return datetime.strftime(value, '%Y-%m-%dT%H:%M:%S')
class BooleanControl(Control):
    """Control holding a boolean encoded as the strings 'true'/'false'."""
    def init_runner_form_attrs(self, runner_element):
        self.choice_value = self.decode(runner_element)
        # TODO translations
        self.choice_label = 'Yes' if self.choice_value else 'No'
        self.choice = {self.choice_label: self.choice_value}
        self.raw_value = runner_element.text

    def set_default_raw_value(self):
        self.default_raw_value = getattr(self._model_instance, 'text', None)

    def set_default_value(self):
        self.default_value = self.decode(self._model_instance)

    def set_raw_value(self):
        self._raw_value = self._element.text

    def decode(self, element):
        # Anything other than the exact strings 'true'/'false' yields None.
        text = getattr(element, 'text', None) if element is not None else None
        if text == 'true':
            return True
        if text == 'false':
            return False
        return None

    def encode(self, value):
        # TODO isinstance(value, bool) validate?
        return 'true' if value else 'false'
class Select1Control(StringControl):
    """Single-choice selection control.

    Resolves the stored value to its display label via the resource's
    ``item`` list and exposes ``choice`` as a one-entry label->value dict.
    """
    def init_runner_form_attrs(self, runner_element):
        self.choice_value = self.decode(runner_element)
        self.choice_label = None
        # NOTE(review): ResourceElement.__getattr__ returns None instead of
        # raising, so this hasattr() guard is likely always True -- confirm
        # the intended guard was on self._resource itself.
        if not hasattr(self._resource_element, 'element'):
            return
        for item in self._resource.element['item']:
            # XXX Seems a buggy assumption. Things like 'label'.
            if isinstance(item, basestring):
                continue
            elif item['value'] == self.choice_value:
                self.choice_label = item['label']
        # choice_label stays None when no item matched the stored value.
        self.choice = {self.choice_label: self.choice_value}
        self.raw_value = runner_element.text
    def set_raw_value(self):
        self._raw_value = self._element.text
class OpenSelect1Control(Select1Control):
    """Select1 variant that also accepts values outside the item list."""
    def init_runner_form_attrs(self, runner_element):
        super(OpenSelect1Control, self).init_runner_form_attrs(runner_element)
        if self.choice_label is not None:
            return
        # Free-entry value: the raw value doubles as its own label.
        self.choice_label = self.choice_value
        self.choice = {self.choice_label: self.choice_value}
class SelectControl(StringControl):
    """Multi-choice selection control; values are space-separated in the XML."""
    def init_runner_form_attrs(self, runner_element):
        self.raw_value = runner_element.text
        self.choices_values = self.decode(runner_element)
        self.choices_labels = []
        self.choices = {}
        if not self.choices_values:
            return
        for item in self._resource.element['item']:
            label = None
            value = None
            # NOTE(review): when item is a plain string, at most one of
            # label/value is set and only a 'value' string can ever match
            # below -- confirm this branch is reachable with real data.
            if isinstance(item, basestring):
                if item == 'label':
                    label = item
                if item == 'value':
                    value = item
            else:
                label = item['label']
                value = item['value']
            if value in self.choices_values:
                self.choices_labels.append(label)
                self.choices[label] = value
    def decode(self, element):
        # Unlike the other controls, a missing node decodes to an empty
        # list rather than None.
        if element is None or not hasattr(element, 'text') or element.text is None:
            return []
        else:
            return element.text.split(' ')
    def encode(self, value):
        return ' '.join(value)
class AnyUriControl(Control):
    """Control holding a URI (e.g. an uploaded attachment reference)."""
    def init_runner_form_attrs(self, runner_element):
        self.raw_value = runner_element.text
        decoded = self.decode(runner_element)
        self.uri = decoded['uri']
        self.value = decoded['value']
        # if decoded.get('element', False) and decoded.get('element').get('@filename', False):
        #     self.filename = decoded.get('element', None).get('@filename')
    def set_default_raw_value(self):
        # Unlike text controls, the raw default is the XML node itself.
        self.default_raw_value = self._model_instance
    def set_default_value(self):
        if self._model_instance is not None:
            self.default_value = self.decode(self._model_instance)
    def set_raw_value(self):
        self._raw_value = self._element.text
    def decode(self, element):
        # TODO: Quick and dirty, I don't like it! (Because of deadline).
        # This needs to be revised!
        # Returns a dict with 'uri'/'value' (both the node text) and, when
        # the element serializes under the bind name, its xmltodict form.
        if element is None or not hasattr(element, 'text') or element.text is None:
            res = {'uri': None, 'value': None, 'element': None}
        else:
            res = {'uri': element.text, 'value': element.text}
            element_dict = xmltodict.parse(etree.tostring(element, encoding='UTF-8'))
            if self._bind.name in element_dict:
                res['element'] = element_dict[self._bind.name]
            # NOTE(review): 'element' key is absent (not None) when the bind
            # name does not match -- confirm callers handle both shapes.
        return res
    def encode(self, value):
        return value
class ImageAnnotationControl(Control):
    """Control holding an annotated image (image plus annotation layers)."""
    def init_runner_form_attrs(self, runner_element):
        self.raw_value = runner_element.text
        decoded = self.decode(runner_element)
        if decoded:
            # Assumes child tags named 'image' and 'annotation' exist; a
            # KeyError here means the runner XML is malformed.
            self.image = decoded['image']['image']
            self.annotation = decoded['annotation']['annotation']
    def set_default_raw_value(self):
        # self.default_raw_value = getattr(self._model_instance, 'text', None)
        self.default_raw_value = self._model_instance
    def set_default_value(self):
        if self._model_instance is not None:
            self.default_value = self.decode(self._model_instance)
    def set_raw_value(self):
        self._raw_value = self._element.text
    def decode(self, element):
        # Map each child tag to its xmltodict representation.
        res = {}
        if element is None:
            return res
        for el in element.getchildren():
            res[el.tag] = xmltodict.parse(etree.tostring(el, encoding='UTF-8'))
        return res
    def encode(self, value):
        return value
class DecimalControl(Control):
    """Control holding a numeric value, decoded as float or int.

    NOTE(review): decode() reads 'digits-after-decimal' from ``self._element``
    (the builder-side element), not from the ``element`` argument being
    decoded -- confirm this asymmetry is intentional.
    """
    def init_runner_form_attrs(self, runner_element):
        self.value = self.decode(runner_element)
        self.raw_value = runner_element.text
    def set_default_raw_value(self):
        self.default_raw_value = getattr(self._model_instance, 'text', None)
    def set_default_value(self):
        self.default_value = self.decode(self._model_instance)
    def set_raw_value(self):
        self._raw_value = self._element.text
    def decode(self, element):
        if element is None or not hasattr(element, 'text') or element.text is None:
            return None
        else:
            # precision > 0 means a float is expected; otherwise an int.
            # Default precision of 1 makes float the common case.
            precision = int(self._element.get('digits-after-decimal', 1))
            if precision > 0 and hasattr(element, 'text'):
                return float(element.text)
            elif hasattr(element, 'text'):
                return int(element.text)
    def encode(self, value):
        return str(value)
class EmailControl(StringControl):
    """Control for e-mail values; behaves exactly like StringControl."""
    pass
| {
"content_hash": "75f5c7508c3df494b318d3e4d99f9c5b",
"timestamp": "",
"source": "github",
"line_count": 445,
"max_line_length": 94,
"avg_line_length": 29.476404494382024,
"alnum_prop": 0.5959441945566821,
"repo_name": "bobslee/orbeon-xml-api",
"id": "befce1611866875c0c4018768064228eb31a1b72",
"size": "13246",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "orbeon_xml_api/controls.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "108136"
}
],
"symlink_target": ""
} |
"""
Copyright (c) 2015 RasDom by Aeondave
See the file 'LICENSE' for copying permission
"""
import SocketServer, time
from lib import settings
ACK = "\x06"
SIZE = 1024
class ThreadedTCPRequestHandler(SocketServer.BaseRequestHandler):
    """Per-connection handler implementing the RasDom telnet-style admin menu.

    Protocol: banner -> password check -> menu loop ('1' list devices,
    '2' select device, '3' run a device function, 'e'/'0' quit).
    NOTE(review): the password is received and compared in plaintext over
    the socket -- acceptable only on a trusted network.
    """
    def handle(self):
        print "Client connected with ", self.client_address
        # Banner is sent one character at a time with a delay, presumably
        # for a teletype effect on the client.
        x = ['W','e','l','c','o','m','e', ' ','t','o',' ','R','a','s','D','o','m',' ','i','n','t','e','r','f','a','c','e\r\n']
        for i in x:
            self.request.send(i)
            time.sleep(0.1)
        self.request.send("Insert Password\r\n")
        password = self.request.recv(SIZE)
        self.request.send(ACK)
        if password == settings.PASSWORD:
            # Loop reads one command per iteration; 'e' at the prompt exits.
            while self.request.recv(SIZE) != "e":
                data = None
                self.request.send("\n\n---------------------\r\n")
                self.request.send("Listening for commands\r\n")
                self.request.send("1: Connected devices list\r\n")
                self.request.send("2: Select device\r\n")
                self.request.send("3: Device functions\r\n")
                self.request.send("e: Close connection\r\n")
                self.request.send("---------------------\r\n\n")
                data = self.request.recv(SIZE)
                self.request.send(ACK)
                self.request.send("\n")
                if data == "1":
                    # List every loaded component with its index and note.
                    self.request.send(str(len(self.server.get_componentlist()))+" Components loaded\r\n")
                    c=0
                    for i in self.server.get_componentlist():
                        self.request.send(str(str(c)+": "+i.get_note()+"\r\n"))
                        c=c+1
                if data == "2":
                    if self.server.get_current_component() is None:
                        current_component = "None"
                    else:
                        current_component = self.server.get_current_component()
                    self.request.send("Selected component: "+current_component+"\r\n")
                    self.request.send("Select component id form connected devices\r\n")
                    # NOTE(review): double recv -- the first read likely
                    # discards an echoed ACK/newline from the client; confirm
                    # against the client implementation.
                    component = self.request.recv(SIZE)
                    component = self.request.recv(SIZE)
                    self.request.send(ACK)
                    self.request.send("\n")
                    if component is not None:
                        self.server.set_current_component(str(component))
                if data == "3":
                    if self.server.get_current_component() is not None:
                        components = self.server.get_componentlist()
                        current_component = self.server.get_current_component()
                        component = components[int(current_component)]
                        self.request.send("Device selected: "+current_component+ " "+ component.get_note()+"\n\r")
                        self.request.send("Select id function:\n\r")
                        c = 0
                        for i in component.get_functions():
                            self.request.send(str(c)+": "+i+"\n\r")
                            c = c+1
                        # NOTE(review): same double-recv pattern as above.
                        function = self.request.recv(SIZE)
                        function = self.request.recv(SIZE)
                        self.request.send(ACK)
                        self.request.send("\n")
                        if function is not None:
                            self.server.set_current_function(str(function))
                            # Only prompt for a pin when the component
                            # exposes more than one.
                            if len(component.get_pins_name())>1:
                                self.request.send("Select id object to " +str(component.get_functions()[int(function)])+ "\n\r")
                                c = 0
                                for i in component.get_pins_name():
                                    self.request.send(str(c)+": "+str(i)+"\n\r")
                                    c = c+1
                                pin = self.request.recv(SIZE)
                                pin = self.request.recv(SIZE)
                                self.request.send(ACK)
                                self.request.send("\n")
                            else:
                                pin = 0
                            # Map the logical pin index to the real pin number.
                            rpin = component.get_pins()[int(pin)]
                            component.launch_function(str(self.server.get_current_function()), int(rpin))
                            self.request.send("Function Launched\n\r")
                    else:
                        self.request.send("No divice selected\n\r")
                if data == "0":
                    break
        else:
            print "Password Wrong"
        self.request.close()
class ThreadedTCPServer(SocketServer.ThreadingMixIn, SocketServer.TCPServer):
    """Threaded TCP server holding shared admin-interface state.

    Stores the component list plus the currently selected component and
    function, shared across handler threads (no locking -- assumed safe for
    this single-admin use case).
    """
    # Allow quick restarts without 'address already in use' errors.
    allow_reuse_address = True
    def set_componentlist(self, ComponentList):
        self.ComponentList = ComponentList
    def get_componentlist(self):
        return self.ComponentList
    def set_current_component(self, current_component):
        self.current_component = current_component
    def get_current_component(self):
        return self.current_component
    def set_current_function(self, current_function):
        # Stored as a string; handlers convert back with int() where needed.
        self.current_function = str(current_function)
def get_current_function(self):
return self.current_function | {
"content_hash": "f94d649cd383f6355d5f8cfeaa60498b",
"timestamp": "",
"source": "github",
"line_count": 129,
"max_line_length": 126,
"avg_line_length": 42.78294573643411,
"alnum_prop": 0.4660264540677659,
"repo_name": "AeonDave/RasDom",
"id": "ed3bc9c29821faa4133b7a26e79eb78cb961c60a",
"size": "5542",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "classes/Socket.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "11838"
}
],
"symlink_target": ""
} |
from __future__ import annotations
import bz2
from functools import wraps
import gzip
import io
import socket
import tarfile
from typing import (
TYPE_CHECKING,
Any,
Callable,
)
import zipfile
from pandas._typing import (
FilePath,
ReadPickleBuffer,
)
from pandas.compat import get_lzma_file
from pandas.compat._optional import import_optional_dependency
import pandas as pd
from pandas._testing._random import rands
from pandas._testing.contexts import ensure_clean
from pandas.io.common import urlopen
if TYPE_CHECKING:
from pandas import (
DataFrame,
Series,
)
# skip tests on exceptions with these messages
# Substrings of exception messages that identify a failure as network-related;
# matching is case-insensitive (see the network decorator below).
_network_error_messages = (
    # 'urlopen error timed out',
    # 'timeout: timed out',
    # 'socket.timeout: timed out',
    "timed out",
    "Server Hangup",
    "HTTP Error 503: Service Unavailable",
    "502: Proxy Error",
    "HTTP Error 502: internal error",
    "HTTP Error 502",
    "HTTP Error 503",
    "HTTP Error 403",
    "HTTP Error 400",
    "Temporary failure in name resolution",
    "Name or service not known",
    "Connection refused",
    "certificate verify",
)
# or this e.errno/e.reason.errno
_network_errno_vals = (
    101, # Network is unreachable
    111, # Connection refused
    110, # Connection timed out
    104, # Connection reset Error
    54, # Connection reset by peer
    60, # urllib.error.URLError: [Errno 60] Connection timed out
)
# Both of the above shouldn't mask real issues such as 404's
# or refused connections (changed DNS).
# But some tests (test_data yahoo) contact incredibly flakey
# servers.
# and conditionally raise on exception types in _get_default_network_errors
def _get_default_network_errors():
# Lazy import for http.client & urllib.error
# because it imports many things from the stdlib
import http.client
import urllib.error
return (
OSError,
http.client.HTTPException,
TimeoutError,
urllib.error.URLError,
socket.timeout,
)
def optional_args(decorator):
    """
    allows a decorator to take optional positional and keyword arguments.
    Assumes that taking a single, callable, positional argument means that
    it is decorating a function, i.e. something like this::
        @my_decorator
        def function(): pass
    Calls decorator with decorator(f, *args, **kwargs)
    """
    @wraps(decorator)
    def wrapper(*args, **kwargs):
        # Bare usage: @decorator applied directly to a function.
        if len(args) == 1 and not kwargs and callable(args[0]):
            return decorator(args[0])
        # Parameterized usage: @decorator(...) must return the real decorator.
        return lambda f: decorator(f, *args, **kwargs)

    return wrapper
# error: Untyped decorator makes function "network" untyped
@optional_args  # type: ignore[misc]
def network(
    t,
    url: str = "https://www.google.com",
    raise_on_error: bool = False,
    check_before_test: bool = False,
    error_classes=None,
    skip_errnos=_network_errno_vals,
    _skip_on_messages=_network_error_messages,
):
    """
    Label a test as requiring network connection and, if an error is
    encountered, only raise if it does not find a network connection.
    In comparison to ``network``, this assumes an added contract to your test:
    you must assert that, under normal conditions, your test will ONLY fail if
    it does not have network connectivity.
    You can call this in 3 ways: as a standard decorator, with keyword
    arguments, or with a positional argument that is the url to check.
    Parameters
    ----------
    t : callable
        The test requiring network connectivity.
    url : path
        The url to test via ``pandas.io.common.urlopen`` to check
        for connectivity. Defaults to 'https://www.google.com'.
    raise_on_error : bool
        If True, never catches errors.
    check_before_test : bool
        If True, checks connectivity before running the test case.
    error_classes : tuple or Exception
        error classes to ignore. If not in ``error_classes``, raises the error.
        defaults to OSError. Be careful about changing the error classes here.
    skip_errnos : iterable of int
        Any exception that has .errno or .reason.errno set to one
        of these values will be skipped with an appropriate
        message.
    _skip_on_messages: iterable of string
        any exception e for which one of the strings is
        a substring of str(e) will be skipped with an appropriate
        message. Intended to suppress errors where an errno isn't available.
    Notes
    -----
    * ``raise_on_error`` supersedes ``check_before_test``
    Returns
    -------
    t : callable
        The decorated test ``t``, with checks for connectivity errors.
    Example
    -------
    Tests decorated with @network will fail if it's possible to make a network
    connection to another URL (defaults to google.com)::
      >>> from pandas import _testing as tm
      >>> @tm.network
      ... def test_network():
      ...     with pd.io.common.urlopen("rabbit://bonanza.com"):
      ...         pass
      >>> test_network()  # doctest: +SKIP
      Traceback
         ...
      URLError: <urlopen error unknown url type: rabbit>
      You can specify alternative URLs::
        >>> @tm.network("https://www.yahoo.com")
        ... def test_something_with_yahoo():
        ...     raise OSError("Failure Message")
        >>> test_something_with_yahoo()  # doctest: +SKIP
        Traceback (most recent call last):
            ...
        OSError: Failure Message
    If you set check_before_test, it will check the url first and not run the
    test on failure::
        >>> @tm.network("failing://url.blaher", check_before_test=True)
        ... def test_something():
        ...     print("I ran!")
        ...     raise ValueError("Failure")
        >>> test_something()  # doctest: +SKIP
        Traceback (most recent call last):
            ...
    Errors not related to networking will always be raised.
    """
    import pytest

    if error_classes is None:
        error_classes = _get_default_network_errors()
    # Mark the wrapped test so pytest collection can find network tests.
    t.network = True

    @wraps(t)
    def wrapper(*args, **kwargs):
        if (
            check_before_test
            and not raise_on_error
            and not can_connect(url, error_classes)
        ):
            pytest.skip(
                f"May not have network connectivity because cannot connect to {url}"
            )
        try:
            return t(*args, **kwargs)
        except Exception as err:  # pylint: disable=broad-except
            errno = getattr(err, "errno", None)
            # NOTE(review): this checks hasattr on *errno* (an int or None),
            # which is always False -- it likely was meant to be
            # hasattr(err, "reason"); confirm against upstream before fixing.
            if not errno and hasattr(errno, "reason"):
                # error: "Exception" has no attribute "reason"
                errno = getattr(err.reason, "errno", None)  # type: ignore[attr-defined]
            if errno in skip_errnos:
                pytest.skip(f"Skipping test due to known errno and error {err}")
            e_str = str(err)
            if any(m.lower() in e_str.lower() for m in _skip_on_messages):
                pytest.skip(
                    f"Skipping test because exception message is known and error {err}"
                )
            # Unknown, non-network exceptions always propagate.
            if not isinstance(err, error_classes) or raise_on_error:
                raise
            pytest.skip(f"Skipping test due to lack of connectivity and error {err}")

    return wrapper
def can_connect(url, error_classes=None) -> bool:
    """
    Try to connect to the given url. True if succeeds, False if OSError
    raised
    Parameters
    ----------
    url : basestring
        The URL to try to connect to
    Returns
    -------
    connectable : bool
        Return True if no OSError (unable to connect) or URLError (bad url) was
        raised
    """
    if error_classes is None:
        error_classes = _get_default_network_errors()
    try:
        # Timeout just in case rate-limiting is applied
        with urlopen(url, timeout=20) as response:
            reachable = response.status == 200
    except error_classes:
        return False
    return reachable
# ------------------------------------------------------------------
# File-IO
def round_trip_pickle(
    obj: Any, path: FilePath | ReadPickleBuffer | None = None
) -> DataFrame | Series:
    """
    Pickle an object and then read it again.
    Parameters
    ----------
    obj : any object
        The object to pickle and then re-read.
    path : str, path object or file-like object, default None
        The path where the pickled object is written and then read.
    Returns
    -------
    pandas object
        The original object that was pickled and then re-read.
    """
    # Fall back to a randomized scratch filename when no path is given.
    target = path if path is not None else f"__{rands(10)}__.pickle"
    with ensure_clean(target) as temp_path:
        pd.to_pickle(obj, temp_path)
        return pd.read_pickle(temp_path)
def round_trip_pathlib(writer, reader, path: str | None = None):
    """
    Write an object to file specified by a pathlib.Path and read it back
    Parameters
    ----------
    writer : callable bound to pandas object
        IO writing function (e.g. DataFrame.to_csv )
    reader : callable
        IO reading function (e.g. pd.read_csv )
    path : str, default None
        The path where the object is written and then read.
    Returns
    -------
    pandas object
        The original object that was serialized and then re-read.
    """
    import pytest

    Path = pytest.importorskip("pathlib").Path
    target = "___pathlib___" if path is None else path
    with ensure_clean(target) as clean_path:
        p = Path(clean_path)
        writer(p)
        return reader(p)
def round_trip_localpath(writer, reader, path: str | None = None):
    """
    Serialize via *writer* to a py.path LocalPath and read it back with *reader*.

    Parameters
    ----------
    writer : callable bound to pandas object
        IO writing function (e.g. DataFrame.to_csv).
    reader : callable
        IO reading function (e.g. pd.read_csv).
    path : str, default None
        The path where the object is written and then read.

    Returns
    -------
    pandas object
        The object that was serialized and then re-read.
    """
    import pytest

    LocalPath = pytest.importorskip("py.path").local
    target = "___localpath___" if path is None else path
    with ensure_clean(target) as clean_path:
        writer(LocalPath(clean_path))
        result = reader(LocalPath(clean_path))
    return result
def write_to_compressed(compression, path, data, dest: str = "test"):
    """
    Write data to a compressed file.

    Parameters
    ----------
    compression : {'gzip', 'bz2', 'zip', 'tar', 'xz', 'zstd'}
        The compression type to use.
    path : str
        The file path to write the data.
    data : str
        The data to write.
    dest : str, default "test"
        The destination file (for ZIP and tar only).

    Raises
    ------
    ValueError : An invalid compression value was passed in.
    """
    args: tuple[Any, ...] = (data,)
    mode = "wb"
    method = "write"
    compress_method: Callable
    if compression == "zip":
        compress_method = zipfile.ZipFile
        mode = "w"
        args = (dest, data)
        method = "writestr"
    elif compression == "tar":
        compress_method = tarfile.TarFile
        mode = "w"
        # Build the in-archive member by hand: TarFile.addfile needs a
        # TarInfo plus a file object positioned at the payload.
        # (Renamed from `file`/`bytes`, which shadowed builtins.)
        tar_info = tarfile.TarInfo(name=dest)
        payload = io.BytesIO(data)
        tar_info.size = len(data)
        args = (tar_info, payload)
        method = "addfile"
    elif compression == "gzip":
        compress_method = gzip.GzipFile
    elif compression == "bz2":
        compress_method = bz2.BZ2File
    elif compression == "zstd":
        compress_method = import_optional_dependency("zstandard").open
    elif compression == "xz":
        compress_method = get_lzma_file()
    else:
        raise ValueError(f"Unrecognized compression type: {compression}")
    with compress_method(path, mode=mode) as f:
        getattr(f, method)(*args)
# ------------------------------------------------------------------
# Plotting
def close(fignum=None) -> None:
    """Close the matplotlib figure *fignum*, or every open figure when None."""
    from matplotlib.pyplot import (
        close as _close,
        get_fignums,
    )

    if fignum is not None:
        _close(fignum)
    else:
        for open_fignum in get_fignums():
            _close(open_fignum)
| {
"content_hash": "919d1a3ae86dcce8297b350825514f43",
"timestamp": "",
"source": "github",
"line_count": 435,
"max_line_length": 88,
"avg_line_length": 28.427586206896553,
"alnum_prop": 0.6050460941290635,
"repo_name": "pandas-dev/pandas",
"id": "527e8c1d0d0908d708f37c09186ef0dc2ad6926a",
"size": "12366",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "pandas/_testing/_io.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "512"
},
{
"name": "C",
"bytes": "366145"
},
{
"name": "CSS",
"bytes": "1800"
},
{
"name": "Cython",
"bytes": "1186787"
},
{
"name": "Dockerfile",
"bytes": "1411"
},
{
"name": "HTML",
"bytes": "456531"
},
{
"name": "Python",
"bytes": "18778786"
},
{
"name": "Shell",
"bytes": "10369"
},
{
"name": "Smarty",
"bytes": "8486"
},
{
"name": "XSLT",
"bytes": "1196"
}
],
"symlink_target": ""
} |
from PySide import QtCore, QtGui
class Ui_create_asset(object):
    """pyuic-style generated UI class for the modal "create asset" dialog.

    Builds a two-row grid (asset name line edit, editable asset-type
    combo box) above an Ok/Cancel button box, and wires the buttons to
    the dialog's accept/reject slots.
    """
    def setupUi(self, create_asset):
        # Configure and populate *create_asset* (the QDialog instance).
        create_asset.setObjectName("create_asset")
        create_asset.setWindowModality(QtCore.Qt.ApplicationModal)
        create_asset.resize(326, 115)
        self.verticalLayout = QtGui.QVBoxLayout(create_asset)
        self.verticalLayout.setObjectName("verticalLayout")
        # Grid holding the two label/input rows.
        self.gridLayout = QtGui.QGridLayout()
        self.gridLayout.setObjectName("gridLayout")
        self.label_2 = QtGui.QLabel(create_asset)
        self.label_2.setObjectName("label_2")
        self.gridLayout.addWidget(self.label_2, 1, 0, 1, 1)
        self.label = QtGui.QLabel(create_asset)
        self.label.setObjectName("label")
        self.gridLayout.addWidget(self.label, 0, 0, 1, 1)
        self.asset_name_lineEdit = QtGui.QLineEdit(create_asset)
        self.asset_name_lineEdit.setObjectName("asset_name_lineEdit")
        self.gridLayout.addWidget(self.asset_name_lineEdit, 0, 1, 1, 1)
        self.horizontalLayout = QtGui.QHBoxLayout()
        self.horizontalLayout.setObjectName("horizontalLayout")
        # Editable combo so the user can type a new asset type.
        self.asset_types_comboBox = QtGui.QComboBox(create_asset)
        self.asset_types_comboBox.setEditable(True)
        self.asset_types_comboBox.setObjectName("asset_types_comboBox")
        self.horizontalLayout.addWidget(self.asset_types_comboBox)
        self.gridLayout.addLayout(self.horizontalLayout, 1, 1, 1, 1)
        self.verticalLayout.addLayout(self.gridLayout)
        self.buttonBox = QtGui.QDialogButtonBox(create_asset)
        self.buttonBox.setOrientation(QtCore.Qt.Horizontal)
        self.buttonBox.setStandardButtons(QtGui.QDialogButtonBox.Cancel|QtGui.QDialogButtonBox.Ok)
        self.buttonBox.setObjectName("buttonBox")
        self.verticalLayout.addWidget(self.buttonBox)
        self.retranslateUi(create_asset)
        # Old-style signal/slot connections (PySide/Qt4 string signals).
        QtCore.QObject.connect(self.buttonBox, QtCore.SIGNAL("rejected()"), create_asset.reject)
        QtCore.QObject.connect(self.buttonBox, QtCore.SIGNAL("accepted()"), create_asset.accept)
        QtCore.QMetaObject.connectSlotsByName(create_asset)
    def retranslateUi(self, create_asset):
        # Apply (translatable) user-visible strings to the widgets.
        create_asset.setWindowTitle(QtGui.QApplication.translate("create_asset", "Create Asset", None, QtGui.QApplication.UnicodeUTF8))
        self.label_2.setText(QtGui.QApplication.translate("create_asset", "Asset Type", None, QtGui.QApplication.UnicodeUTF8))
        self.label.setText(QtGui.QApplication.translate("create_asset", "Asset Name", None, QtGui.QApplication.UnicodeUTF8))
| {
"content_hash": "7329ecababa69b557f0902f6d86af256",
"timestamp": "",
"source": "github",
"line_count": 44,
"max_line_length": 135,
"avg_line_length": 57.72727272727273,
"alnum_prop": 0.7188976377952756,
"repo_name": "jonntd/oyprojectmanager",
"id": "143cac017ce31c9ba93ae28aba36c3e51c051cc9",
"size": "2866",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "oyProjectManager/ui/create_asset_dialog_UI_pyside.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Perl",
"bytes": "171"
},
{
"name": "Python",
"bytes": "1091868"
}
],
"symlink_target": ""
} |
import os;
import sys;
import traceback;
#####################################################################
## Create MQ QCF at cluster level
#####################################################################
def createConnectionFactories(clusterName, connectionFactoryName, jndiName, queueMgrName, transportType, queueMgrHostname, queueMgrSvrConnectionChannel, port):
    """Create a WebSphere MQ queue connection factory (QCF) at cluster scope.

    Runs under wsadmin/Jython: AdminConfig and AdminTask are globals
    injected by the wsadmin shell. Saves the configuration after creation.
    """
    cftype = 'QCF'
    clusterid = AdminConfig.getid('/ServerCluster:' + clusterName + '/')
    # Single parameter string for createWMQConnectionFactory; the original
    # had this literal broken across two lines (unterminated string).
    AdminTask.createWMQConnectionFactory(clusterid, [
        "-name " + connectionFactoryName +
        " -jndiName " + jndiName +
        " -qmgrName " + queueMgrName +
        " -wmqTransportType " + transportType +
        " -qmgrHostname " + queueMgrHostname +
        " -qmgrPortNumber " + port +
        " -qmgrSvrconnChannel " + queueMgrSvrConnectionChannel +
        " -type " + cftype])
    # Parenthesized single-argument print works in both Jython 2 and Python 3.
    print("\n Saving Configuration \n")
    AdminConfig.save()
    # Fixed "/n" typo in the original message.
    print("\n connection factory created \n")
    return
#####################################################################
## Main
#####################################################################
# NOTE(review): under wsadmin, sys.argv[0] is the first script argument
# (the shell strips the script name) — hence indices 0..7 for 8 args.
if len(sys.argv) != 8:
    print("This script requires ClusterName, Connection Factory Name, JNDI Name, QMGR name, binding type, hostname, channel and port")
    sys.exit(1)
else:
    clusterName = sys.argv[0]
    connectionFactoryName = sys.argv[1]
    jndiName = sys.argv[2]
    queueMgrName = sys.argv[3]
    transportType = sys.argv[4]
    queueMgrHostname = sys.argv[5]
    queueMgrSvrConnectionChannel = sys.argv[6]
    port = sys.argv[7]
    createConnectionFactories(clusterName, connectionFactoryName, jndiName, queueMgrName, transportType, queueMgrHostname, queueMgrSvrConnectionChannel,
                              port)
| {
"content_hash": "115c11eb3b90699be3964552dd50753e",
"timestamp": "",
"source": "github",
"line_count": 38,
"max_line_length": 157,
"avg_line_length": 45.05263157894737,
"alnum_prop": 0.594626168224299,
"repo_name": "muthu-s/chef-repo",
"id": "796a8b0bd5da1d4fbc065fca22c6c555b0fd9c32",
"size": "1712",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "cookbooks/wsi/files/createqcf.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ApacheConf",
"bytes": "30563"
},
{
"name": "HTML",
"bytes": "70605"
},
{
"name": "Python",
"bytes": "48000"
},
{
"name": "Ruby",
"bytes": "35159"
},
{
"name": "Shell",
"bytes": "33038"
}
],
"symlink_target": ""
} |
from collections import defaultdict
from nose.tools import eq_, ok_
from ycmd.completers import completer_utils as cu
def FiletypeTriggerDictFromSpec_Works_test():
  # 'goo,moo' in the spec fans out to both filetypes in the result.
  expected = defaultdict( set, {
    'foo': { cu._PrepareTrigger( 'zoo' ), cu._PrepareTrigger( 'bar' ) },
    'goo': { cu._PrepareTrigger( 'moo' ) },
    'moo': { cu._PrepareTrigger( 'moo' ) },
    'qux': { cu._PrepareTrigger( 'q' ) }
  } )
  spec = {
    'foo': [ 'zoo', 'bar' ],
    'goo,moo': [ 'moo' ],
    'qux': [ 'q' ]
  }
  eq_( expected, cu._FiletypeTriggerDictFromSpec( spec ) )
def FiletypeDictUnion_Works_test():
  # Union merges per-filetype trigger sets from both dicts.
  lhs = defaultdict( set, {
    'foo': { 'zoo', 'bar' },
    'goo': { 'moo' },
    'qux': { 'q' }
  } )
  rhs = defaultdict( set, {
    'foo': { 'maa' },
    'bla': { 'boo' },
    'qux': { 'q' }
  } )
  expected = defaultdict( set, {
    'foo': { 'zoo', 'bar', 'maa' },
    'goo': { 'moo' },
    'bla': { 'boo' },
    'qux': { 'q' }
  } )
  eq_( expected, cu._FiletypeDictUnion( lhs, rhs ) )
def MatchesSemanticTrigger_Basic_test():
  # Only the column immediately after the '.' (index 4) should match.
  for col in ( 0, 1, 2, 3, 5, 6, 7 ):
    ok_( not cu._MatchesSemanticTrigger( 'foo.bar', col, ['.'] ) )
  ok_( cu._MatchesSemanticTrigger( 'foo.bar', 4, ['.'] ) )
def MatchesSemanticTrigger_JustTrigger_test():
  ok_( cu._MatchesSemanticTrigger( '.', 1, ['.'] ) )
  ok_( not cu._MatchesSemanticTrigger( '.', 0, ['.'] ) )
def MatchesSemanticTrigger_TriggerBetweenWords_test():
  ok_( cu._MatchesSemanticTrigger( 'foo . bar', 5, ['.'] ) )
def MatchesSemanticTrigger_BadInput_test():
  # Out-of-range columns, empty lines and empty trigger lists never match.
  for line, col in ( ( 'foo.bar', 10 ),
                     ( 'foo.bar', -1 ),
                     ( '', -1 ),
                     ( '', 0 ),
                     ( '', 1 ) ):
    ok_( not cu._MatchesSemanticTrigger( line, col, ['.'] ) )
  ok_( not cu._MatchesSemanticTrigger( 'foo.bar', 4, [] ) )
def MatchesSemanticTrigger_TriggerIsWrong_test():
  ok_( not cu._MatchesSemanticTrigger( 'foo.bar', 4, [':'] ) )
def MatchesSemanticTrigger_LongerTrigger_test():
  ok_( cu._MatchesSemanticTrigger( 'foo::bar', 5, ['::'] ) )
  ok_( not cu._MatchesSemanticTrigger( 'foo::bar', 4, ['::'] ) )
def MatchesSemanticTrigger_OneTriggerMatches_test():
  ok_( cu._MatchesSemanticTrigger( 'foo::bar', 5, [ '.', ';', '::' ] ) )
def MatchesSemanticTrigger_RegexTrigger_test():
  # Regex triggers use the 're!' prefix understood by _PrepareTrigger.
  ok_( cu._MatchesSemanticTrigger(
      'foo.bar', 4, [ cu._PrepareTrigger( r're!\w+\.' ) ] ) )
  ok_( not cu._MatchesSemanticTrigger(
      'foo . bar', 5, [ cu._PrepareTrigger( r're!\w+\.' ) ] ) )
def PreparedTriggers_Basic_test():
  default_triggers = cu.PreparedTriggers()
  ok_( default_triggers.MatchesForFiletype( 'foo.bar', 4, 'c' ) )
  ok_( default_triggers.MatchesForFiletype( 'foo->bar', 5, 'cpp' ) )
def PreparedTriggers_OnlySomeFiletypesSelected_test():
  # Restricting the filetype set disables triggers for other filetypes.
  c_only = cu.PreparedTriggers( filetype_set = set( 'c' ) )
  ok_( c_only.MatchesForFiletype( 'foo.bar', 4, 'c' ) )
  ok_( not c_only.MatchesForFiletype( 'foo->bar', 5, 'cpp' ) )
def PreparedTriggers_UserTriggers_test():
  user_triggers = cu.PreparedTriggers( user_trigger_map = { 'c': ['->'] } )
  ok_( user_triggers.MatchesForFiletype( 'foo->bar', 5, 'c' ) )
def PreparedTriggers_VimTriggerIgnoresConcatOperator_test():
  # Vim's '.' trigger must not fire on the ' . ' concatenation operator.
  vim_triggers = cu.PreparedTriggers()
  ok_( vim_triggers.MatchesForFiletype( 'foo.bar', 4, 'vim' ) )
  for col in ( 4, 5, 6 ):
    ok_( not vim_triggers.MatchesForFiletype( 'foo . bar', col, 'vim' ) )
def PreparedTriggers_ObjectiveC_test():
  objc_triggers = cu.PreparedTriggers()
  ok_( objc_triggers.MatchesForFiletype( '[foo ', 5, 'objc' ) )
  ok_( not objc_triggers.MatchesForFiletype( '[foo', 4, 'objc' ) )
  ok_( not objc_triggers.MatchesForFiletype( '[3foo ', 6, 'objc' ) )
| {
"content_hash": "9d119e96299ee55810942398002f157e",
"timestamp": "",
"source": "github",
"line_count": 122,
"max_line_length": 80,
"avg_line_length": 35.704918032786885,
"alnum_prop": 0.568870523415978,
"repo_name": "korbenzhang/vim-ycm-win",
"id": "82ff5828d4f71075b64c02c97929fd9a77ef051e",
"size": "5094",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "third_party/ycmd/ycmd/completers/completer_utils_test.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "93345"
},
{
"name": "VimL",
"bytes": "30287"
}
],
"symlink_target": ""
} |
# Sphinx build configuration for the PyAMF documentation.
import sys, os, time
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute.
sys.path.insert(0, os.path.abspath('..'))
sys.path.append(os.path.abspath('.'))
sys.path.append(os.path.abspath('html'))
# -- General configuration -----------------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.intersphinx']
# Paths that contain templates, relative to this directory.
templates_path = ['html']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8'
# The master toctree document.
#master_doc = 'index'
# create content template for the homepage
from util import rst2html, copy_file
readme = rst2html('../README.txt', 'html/intro.html')
# NOTE(review): the rst2html result above is immediately overwritten here —
# confirm whether the first assignment's return value is ever needed.
readme = copy_file('../CHANGES.txt', 'changelog.rst')
# Location of the PyAMF source root folder.
import pyamf
# General substitutions.
project = 'PyAMF'
url = 'http://pyamf.org'
description = 'AMF for Python'
copyright = "Copyright © 2007-%s The <a href='%s'>%s</a> Project. All rights reserved." % (
    time.strftime('%Y'), url, project)
# We look for the __init__.py file in the current PyAMF source tree
# and replace the values accordingly.
#
# The full version, including alpha/beta/rc tags.
version = str(pyamf.version)
# The short X.Y version.
# NOTE(review): slicing to 3 characters assumes single-digit major/minor
# components (e.g. '0.6'); a '0.10.x' version would be truncated wrongly.
release = version[:3]
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
today_fmt = '%B %d, %Y'
# List of documents that shouldn't be included in the build.
#unused_docs = []
# A list of directory paths, relative to the source directory, that are to
# be recursively excluded from the search for source files, that is, their
# subdirectories won’t be searched too.
exclude_trees = ['_build', 'tutorials/examples']
# If true, '()' will be appended to :func: etc. cross-reference text.
add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'trac'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The style sheet to use for HTML and HTML Help pages. A file of that name
# must exist either in Sphinx' static/ path, or in one of the custom paths
# given in html_static_path.
html_style = 'default.css'
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
html_title = '%s - %s' % (project, description)
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['html/static']
# The name of an image file (.ico) that is the favicon of the docs.
html_favicon = 'pyamf.ico'
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
html_sidebars = {
    'toc': 'sidebartoc.html'
}
# Additional templates that should be rendered to pages, maps page names to
# template names.
html_additional_pages = {
    'index': 'indexcontent.html',
    'tutorials/index': 'tutorials.html',
}
# Content template for the index page, filename relative to this file.
html_index = 'indexcontent.html'
# If false, no module index is generated.
html_use_modindex = True
# If true, the reST sources are included in the HTML build as _sources/<name>.
html_copy_source = False
# Output an OpenSearch description file.
html_use_opensearch = 'http://docs.pyamf.org'
# Output file base name for HTML help builder.
htmlhelp_basename = 'pyamf' + release.replace('.', '')
# Split the index
html_split_index = True
# -- Options for LaTeX output --------------------------------------------------
# The paper size ('letter' or 'a4').
#latex_paper_size = 'letter'
# The font size ('10pt', '11pt' or '12pt').
#latex_font_size = '10pt'
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
    ('index', 'PyAMF.tex', html_title,
     copyright, 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# Additional stuff for the LaTeX preamble.
#latex_preamble = ''
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_use_modindex = True
# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {'http://docs.python.org/': None}
| {
"content_hash": "d5daaaec9615116a929b6bda702282b0",
"timestamp": "",
"source": "github",
"line_count": 177,
"max_line_length": 96,
"avg_line_length": 32.07909604519774,
"alnum_prop": 0.700951039098274,
"repo_name": "cardmagic/PyAMF",
"id": "31278d69f13a591741ee2c562277a1d4cc160719",
"size": "6100",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "doc/conf.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "ActionScript",
"bytes": "87097"
},
{
"name": "C",
"bytes": "635399"
},
{
"name": "Java",
"bytes": "374"
},
{
"name": "Python",
"bytes": "955083"
}
],
"symlink_target": ""
} |
from django.db import models
import base64
class Session(models.Model):
    """A recorded browsing session, identified by an opaque client token."""
    # Opaque session token sent by the client (indexed for lookups).
    session = models.CharField(max_length=200,db_index=True)
    # Creation timestamp, set automatically on insert.
    date = models.DateTimeField(auto_now_add=True, db_index=True)
    # URL of the document the session was recorded on.
    doc_url = models.CharField(max_length=1024)
    # Raw browser / user-agent string.
    browser = models.CharField(max_length=1024)
    def count_events(self):
        # Number of Event rows linked to this session.
        return Event.objects.filter(session = self.id).count()
    def count_people(self):
        # Number of distinct session tokens recorded on the same document URL.
        return Session.objects.filter(doc_url = self.doc_url).values('session').distinct().count()
class Metadata(models.Model):
    """Arbitrary key/value metadata attached to a Session."""
    session = models.ForeignKey(Session,db_index=True)
    key = models.CharField(max_length=200,db_index=True)
    value = models.CharField(max_length=1024)
class Event(models.Model):
    """One captured interaction event belonging to a Session."""
    session = models.ForeignKey(Session,db_index=True)
    # Client-supplied timestamp — presumably epoch milliseconds; TODO confirm
    # against the recording client.
    ts = models.BigIntegerField(db_index=True)
    # Analytics-style event fields (category/action/label/value).
    category = models.CharField(max_length=200, db_index=True)
    action = models.TextField()
    label = models.TextField()
    value = models.TextField()
| {
"content_hash": "05f9d6529bbde1e874ef9f7229d35837",
"timestamp": "",
"source": "github",
"line_count": 28,
"max_line_length": 98,
"avg_line_length": 35.464285714285715,
"alnum_prop": 0.7139979859013091,
"repo_name": "INRIA/intertrace",
"id": "a36cf96512876e01477fdb7fdef6dfdf648638ce",
"size": "993",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "recordtrace/models.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "1040"
},
{
"name": "HTML",
"bytes": "56171"
},
{
"name": "JavaScript",
"bytes": "58302"
},
{
"name": "Python",
"bytes": "27884"
},
{
"name": "Shell",
"bytes": "332"
}
],
"symlink_target": ""
} |
from setuptools import setup
# Package metadata for keywrapper (PEP 8 keyword-argument spacing).
setup(
    name='keywrapper',
    version='0.1.0',
    description='Extremely simple key-value storage wrapper',
    url='https://github.com/lbosque/keywrapper',
    author='Luis Bosque',
    author_email='luisico@gmai.com',
    license='BSD-3',
    packages=['keywrapper'],
    install_requires=['redis'],
    zip_safe=True,
)
| {
"content_hash": "bcca22a65743509b5c213d1fd0717a62",
"timestamp": "",
"source": "github",
"line_count": 14,
"max_line_length": 65,
"avg_line_length": 28.357142857142858,
"alnum_prop": 0.5969773299748111,
"repo_name": "luisbosque/keywrapper",
"id": "e1b6c9adbe1a4552584d7bad498a14e5437328f2",
"size": "397",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "setup.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "4232"
}
],
"symlink_target": ""
} |
import re
import sys
import smtplib
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
def create_addresses(from_addr, to_addr, cc=None, bcc=None):
    """Build a MIME multipart message with address headers.

    Parameters
    ----------
    from_addr, to_addr : str
        Sender and recipient addresses.
    cc, bcc : str or list of str, optional
        Extra recipients; lists are joined into a comma-separated header.
        (Defaults changed from mutable ``[]`` to ``None`` — same observable
        behavior, avoids the shared-mutable-default pitfall.)

    Returns
    -------
    str
        The message flattened with ``as_string()`` (headers only, no body).
    """
    msg = MIMEMultipart()
    msg['From'] = from_addr
    msg['To'] = to_addr
    if cc:
        # Header values must be strings; join list input instead of
        # assigning the list object directly.
        msg['CC'] = ', '.join(cc) if isinstance(cc, list) else cc
    if bcc:
        msg['BCC'] = ', '.join(bcc) if isinstance(bcc, list) else bcc
    return msg.as_string()
def check_email_valid(email):
    """Return True if *email* loosely looks like an address (x@y.z).

    Uses identity comparison (`is not None`) instead of `!= None`.
    """
    return re.match(r"[^@]+@[^@]+\.[^@]+", email) is not None
def add_subject_to_email(msg, subject):
    """Set the Subject header on *msg* and return it.

    Fixed the Python-2-only ``except Exception, e`` syntax. On failure
    the error is printed and None is returned (the module's existing
    best-effort style — NOTE(review): callers should check for None).
    """
    try:
        msg['Subject'] = subject
        return msg
    except Exception as e:
        print(str(e))
# only plain text
def add_text_to_email(msg, text):
    """Attach *text* as a text/plain MIME part and return *msg*.

    Fixed the Python-2-only ``except Exception, e`` syntax; errors are
    printed and None returned, matching the module's best-effort style.
    """
    try:
        msg.attach(MIMEText(text, 'plain'))
        return msg
    except Exception as e:
        print(str(e))
def add_html_to_email(msg, html):
    """Attach *html* as a text/html MIME part and return *msg*.

    Fixes in this revision: missing ':' after the def (SyntaxError),
    ``MIMETEXT`` typo (NameError — the imported name is MIMEText), and
    the Python-2-only ``except Exception, e`` syntax.
    """
    try:
        msg.attach(MIMEText(html, 'html'))
        return msg
    except Exception as e:
        print(str(e))
def send_mail(to_addr, cc, bcc, msg, username, password, host, port):
    """Send *msg* over SMTP-over-SSL, authenticating as *username*.

    Fixes: missing ':' after the def (SyntaxError) and the Python-2-only
    ``except Exception, e`` syntax. Errors are printed, not raised,
    matching the module's best-effort style.

    NOTE(review): *msg* must be a message object exposing ``as_string()``,
    but ``create_addresses`` in this module returns a plain string —
    confirm what callers actually pass.
    """
    try:
        s = smtplib.SMTP_SSL(host, port)
        s.ehlo()
        s.login(username, password)
        # Envelope sender is the login user; recipients are passed through
        # as given (cc/bcc may themselves be lists — TODO confirm flattening).
        s.sendmail(username, [to_addr, cc, bcc], msg.as_string())
        s.quit()
    except Exception as e:
        print(str(e))
| {
"content_hash": "0ca755018b5433179ddbd9b67bc32d03",
"timestamp": "",
"source": "github",
"line_count": 56,
"max_line_length": 61,
"avg_line_length": 21.375,
"alnum_prop": 0.5931495405179615,
"repo_name": "slamice/emailout",
"id": "807847fe40b299a78e85ab332e8a96b119f4138a",
"size": "1219",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "emailer.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "1953"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
from django.db.models.signals import post_save, pre_save
from django.test import TestCase
from .models import Account, Employee, Person, Profile, ProxyEmployee
class UpdateOnlyFieldsTests(TestCase):
    """Tests for ``Model.save(update_fields=...)``.

    Covers: which columns are written, interaction with ``defer()`` /
    ``only()``, multi-table and proxy-model inheritance, foreign keys,
    m2m rejection, pre/post-save signal payloads, argument validation,
    and the exact number of queries issued.
    """
    def test_update_fields_basic(self):
        # Only 'name' is saved; the in-memory gender change is discarded.
        s = Person.objects.create(name='Sara', gender='F')
        self.assertEqual(s.gender, 'F')
        s.gender = 'M'
        s.name = 'Ian'
        s.save(update_fields=['name'])
        s = Person.objects.get(pk=s.pk)
        self.assertEqual(s.gender, 'F')
        self.assertEqual(s.name, 'Ian')
    def test_update_fields_deferred(self):
        # Saving a deferred instance writes the loaded + assigned fields
        # in a single query.
        s = Person.objects.create(name='Sara', gender='F', pid=22)
        self.assertEqual(s.gender, 'F')
        s1 = Person.objects.defer("gender", "pid").get(pk=s.pk)
        s1.name = "Emily"
        s1.gender = "M"
        with self.assertNumQueries(1):
            s1.save()
        s2 = Person.objects.get(pk=s1.pk)
        self.assertEqual(s2.name, "Emily")
        self.assertEqual(s2.gender, "M")
    def test_update_fields_only_1(self):
        s = Person.objects.create(name='Sara', gender='F')
        self.assertEqual(s.gender, 'F')
        s1 = Person.objects.only('name').get(pk=s.pk)
        s1.name = "Emily"
        s1.gender = "M"
        with self.assertNumQueries(1):
            s1.save()
        s2 = Person.objects.get(pk=s1.pk)
        self.assertEqual(s2.name, "Emily")
        self.assertEqual(s2.gender, "M")
    def test_update_fields_only_2(self):
        # Saving a field that was deferred costs an extra query to load it.
        s = Person.objects.create(name='Sara', gender='F', pid=22)
        self.assertEqual(s.gender, 'F')
        s1 = Person.objects.only('name').get(pk=s.pk)
        s1.name = "Emily"
        s1.gender = "M"
        with self.assertNumQueries(2):
            s1.save(update_fields=['pid'])
        s2 = Person.objects.get(pk=s1.pk)
        self.assertEqual(s2.name, "Sara")
        self.assertEqual(s2.gender, "F")
    def test_update_fields_only_repeated(self):
        s = Person.objects.create(name='Sara', gender='F')
        self.assertEqual(s.gender, 'F')
        s1 = Person.objects.only('name').get(pk=s.pk)
        s1.gender = 'M'
        with self.assertNumQueries(1):
            s1.save()
        # Test that the deferred class does not remember that gender was
        # set, instead the instance should remember this.
        s1 = Person.objects.only('name').get(pk=s.pk)
        with self.assertNumQueries(1):
            s1.save()
    def test_update_fields_inheritance_defer(self):
        profile_boss = Profile.objects.create(name='Boss', salary=3000)
        e1 = Employee.objects.create(name='Sara', gender='F',
            employee_num=1, profile=profile_boss)
        e1 = Employee.objects.only('name').get(pk=e1.pk)
        e1.name = 'Linda'
        with self.assertNumQueries(1):
            e1.save()
        self.assertEqual(Employee.objects.get(pk=e1.pk).name,
                         'Linda')
    def test_update_fields_fk_defer(self):
        # Assigning either the FK object or the raw *_id attribute counts
        # as an update of the FK column.
        profile_boss = Profile.objects.create(name='Boss', salary=3000)
        profile_receptionist = Profile.objects.create(name='Receptionist', salary=1000)
        e1 = Employee.objects.create(name='Sara', gender='F',
            employee_num=1, profile=profile_boss)
        e1 = Employee.objects.only('profile').get(pk=e1.pk)
        e1.profile = profile_receptionist
        with self.assertNumQueries(1):
            e1.save()
        self.assertEqual(Employee.objects.get(pk=e1.pk).profile, profile_receptionist)
        e1.profile_id = profile_boss.pk
        with self.assertNumQueries(1):
            e1.save()
        self.assertEqual(Employee.objects.get(pk=e1.pk).profile, profile_boss)
    def test_select_related_only_interaction(self):
        profile_boss = Profile.objects.create(name='Boss', salary=3000)
        e1 = Employee.objects.create(name='Sara', gender='F',
            employee_num=1, profile=profile_boss)
        e1 = Employee.objects.only('profile__salary').select_related('profile').get(pk=e1.pk)
        profile_boss.name = 'Clerk'
        profile_boss.salary = 1000
        profile_boss.save()
        # The loaded salary of 3000 gets saved, the name of 'Clerk' isn't
        # overwritten.
        with self.assertNumQueries(1):
            e1.profile.save()
        reloaded_profile = Profile.objects.get(pk=profile_boss.pk)
        self.assertEqual(reloaded_profile.name, profile_boss.name)
        self.assertEqual(reloaded_profile.salary, 3000)
    def test_update_fields_m2m(self):
        # m2m fields cannot appear in update_fields.
        profile_boss = Profile.objects.create(name='Boss', salary=3000)
        e1 = Employee.objects.create(name='Sara', gender='F',
            employee_num=1, profile=profile_boss)
        a1 = Account.objects.create(num=1)
        a2 = Account.objects.create(num=2)
        e1.accounts.set([a1, a2])
        with self.assertRaises(ValueError):
            e1.save(update_fields=['accounts'])
    def test_update_fields_inheritance(self):
        profile_boss = Profile.objects.create(name='Boss', salary=3000)
        profile_receptionist = Profile.objects.create(name='Receptionist', salary=1000)
        e1 = Employee.objects.create(name='Sara', gender='F',
            employee_num=1, profile=profile_boss)
        e1.name = 'Ian'
        e1.gender = 'M'
        e1.save(update_fields=['name'])
        e2 = Employee.objects.get(pk=e1.pk)
        self.assertEqual(e2.name, 'Ian')
        self.assertEqual(e2.gender, 'F')
        self.assertEqual(e2.profile, profile_boss)
        e2.profile = profile_receptionist
        e2.name = 'Sara'
        e2.save(update_fields=['profile'])
        e3 = Employee.objects.get(pk=e1.pk)
        self.assertEqual(e3.name, 'Ian')
        self.assertEqual(e3.profile, profile_receptionist)
        with self.assertNumQueries(1):
            e3.profile = profile_boss
            e3.save(update_fields=['profile_id'])
        e4 = Employee.objects.get(pk=e3.pk)
        self.assertEqual(e4.profile, profile_boss)
        self.assertEqual(e4.profile_id, profile_boss.pk)
    def test_update_fields_inheritance_with_proxy_model(self):
        profile_boss = Profile.objects.create(name='Boss', salary=3000)
        profile_receptionist = Profile.objects.create(name='Receptionist', salary=1000)
        e1 = ProxyEmployee.objects.create(name='Sara', gender='F',
            employee_num=1, profile=profile_boss)
        e1.name = 'Ian'
        e1.gender = 'M'
        e1.save(update_fields=['name'])
        e2 = ProxyEmployee.objects.get(pk=e1.pk)
        self.assertEqual(e2.name, 'Ian')
        self.assertEqual(e2.gender, 'F')
        self.assertEqual(e2.profile, profile_boss)
        e2.profile = profile_receptionist
        e2.name = 'Sara'
        e2.save(update_fields=['profile'])
        e3 = ProxyEmployee.objects.get(pk=e1.pk)
        self.assertEqual(e3.name, 'Ian')
        self.assertEqual(e3.profile, profile_receptionist)
    def test_update_fields_signals(self):
        # pre/post-save receivers see the update_fields kwarg.
        p = Person.objects.create(name='Sara', gender='F')
        pre_save_data = []
        def pre_save_receiver(**kwargs):
            pre_save_data.append(kwargs['update_fields'])
        pre_save.connect(pre_save_receiver)
        post_save_data = []
        def post_save_receiver(**kwargs):
            post_save_data.append(kwargs['update_fields'])
        post_save.connect(post_save_receiver)
        p.save(update_fields=['name'])
        self.assertEqual(len(pre_save_data), 1)
        self.assertEqual(len(pre_save_data[0]), 1)
        self.assertIn('name', pre_save_data[0])
        self.assertEqual(len(post_save_data), 1)
        self.assertEqual(len(post_save_data[0]), 1)
        self.assertIn('name', post_save_data[0])
        pre_save.disconnect(pre_save_receiver)
        post_save.disconnect(post_save_receiver)
    def test_update_fields_incorrect_params(self):
        # Unknown field names and non-iterable arguments are rejected.
        s = Person.objects.create(name='Sara', gender='F')
        with self.assertRaises(ValueError):
            s.save(update_fields=['first_name'])
        with self.assertRaises(ValueError):
            s.save(update_fields="name")
    def test_empty_update_fields(self):
        s = Person.objects.create(name='Sara', gender='F')
        pre_save_data = []
        def pre_save_receiver(**kwargs):
            pre_save_data.append(kwargs['update_fields'])
        pre_save.connect(pre_save_receiver)
        post_save_data = []
        def post_save_receiver(**kwargs):
            post_save_data.append(kwargs['update_fields'])
        post_save.connect(post_save_receiver)
        # Save is skipped.
        with self.assertNumQueries(0):
            s.save(update_fields=[])
        # Signals were skipped, too...
        self.assertEqual(len(pre_save_data), 0)
        self.assertEqual(len(post_save_data), 0)
        pre_save.disconnect(pre_save_receiver)
        post_save.disconnect(post_save_receiver)
    def test_num_queries_inheritance(self):
        # With multi-table inheritance, only the table(s) owning the
        # updated fields are touched.
        s = Employee.objects.create(name='Sara', gender='F')
        s.employee_num = 1
        s.name = 'Emily'
        with self.assertNumQueries(1):
            s.save(update_fields=['employee_num'])
        s = Employee.objects.get(pk=s.pk)
        self.assertEqual(s.employee_num, 1)
        self.assertEqual(s.name, 'Sara')
        s.employee_num = 2
        s.name = 'Emily'
        with self.assertNumQueries(1):
            s.save(update_fields=['name'])
        s = Employee.objects.get(pk=s.pk)
        self.assertEqual(s.name, 'Emily')
        self.assertEqual(s.employee_num, 1)
        # A little sanity check that we actually did updates...
        self.assertEqual(Employee.objects.count(), 1)
        self.assertEqual(Person.objects.count(), 1)
        with self.assertNumQueries(2):
            s.save(update_fields=['name', 'employee_num'])
| {
"content_hash": "c8e5f4f0a503d5940472c6054af4ad05",
"timestamp": "",
"source": "github",
"line_count": 264,
"max_line_length": 93,
"avg_line_length": 38.04924242424242,
"alnum_prop": 0.594126431060229,
"repo_name": "yephper/django",
"id": "2afaa8c1271151c951dc125c46ca3ab45cfd8d83",
"size": "10045",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/update_only_fields/tests.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "ASP",
"bytes": "1538"
},
{
"name": "CSS",
"bytes": "1697381"
},
{
"name": "HTML",
"bytes": "390772"
},
{
"name": "Java",
"bytes": "588"
},
{
"name": "JavaScript",
"bytes": "3172126"
},
{
"name": "Makefile",
"bytes": "134"
},
{
"name": "PHP",
"bytes": "19336"
},
{
"name": "Python",
"bytes": "13365273"
},
{
"name": "Shell",
"bytes": "837"
},
{
"name": "Smarty",
"bytes": "133"
}
],
"symlink_target": ""
} |
# Smoke test for SqlHelper: initialise the proxy database and insert
# one hand-written sample record.
from db.SqlHelper import SqlHelper
from util.exception import Con_DB_Fail

try:
    helper = SqlHelper()
    helper.init_db()
except Exception:
    # Surface any connection/initialisation problem as Con_DB_Fail.
    raise Con_DB_Fail

sample = {
    'ip': '192.168.1.1',
    'port': int('80'),
    'type': 0,
    'protocol': 0,
    'country': u'中国',
    'area': u'四川',
    'speed': 0,
}
helper.insert(sample)
"content_hash": "9c4463962414a81174ea238dae6e894a",
"timestamp": "",
"source": "github",
"line_count": 11,
"max_line_length": 119,
"avg_line_length": 28.727272727272727,
"alnum_prop": 0.6582278481012658,
"repo_name": "KFPA/ScrapyNews",
"id": "4b5285d2667b0b49e7fcbea3abeba421f6795d38",
"size": "339",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "IPProxyPool-master/test/testsql.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "70"
},
{
"name": "Python",
"bytes": "126460"
}
],
"symlink_target": ""
} |
__author__ = 'main'
from sqlalchemy import create_engine, Column, Integer, String, Enum, ForeignKey
from sqlalchemy.orm import scoped_session, sessionmaker, backref, relationship
from sqlalchemy.ext.declarative import declarative_base
from server.config import svrcfg
# Database wiring shared by the server package.
engine = create_engine(svrcfg['SQLALCHEMY_DATABASE_URI'])
# Session factory: explicit commit and flush are required by callers.
Session = sessionmaker(autocommit=False,
                       autoflush=False,
                       bind=engine)
# Scoped (per-thread, by SQLAlchemy default) session registry; import
# `session` and use it directly.
session = scoped_session(Session)
# Declarative base that the model classes below inherit from.
Base = declarative_base()
class User(Base):
    """Account record; role is either 'admin' or 'common'."""
    __tablename__ = 'user'

    # Login name doubles as the primary key.
    id = Column(String(20), primary_key=True)
    # NOTE(review): password appears to be stored as plain text — confirm.
    password = Column(String(20))
    role = Column(Enum('admin', 'common'))
    # Despite the name, this relationship yields Silo objects (not ids);
    # it also adds a "user" backref on Silo.
    silo_id = relationship("Silo", backref=backref("user", lazy="joined"))

    def __init__(self, id, password, role):
        self.id = id
        self.password = password
        self.role = role

    def to_json(self):
        # Minimal JSON rendering; the password is intentionally omitted.
        return '{"id": "%s", "type": "User"}' % (self.id)

    def __repr__(self):
        return "<User(id='%s')>" % (self.id)
class Silo(Base):
    """A silo owned by one user, holding a collection of DNS records."""
    __tablename__ = 'silo'

    id = Column(String(20), primary_key=True)
    dnsrecords = relationship("DnsRecord", backref=backref("silo", lazy="joined"))  # automatically populates the property
    user_id = Column(String(20), ForeignKey("user.id"))

    def __init__(self, id, user_id):
        self.id = id
        self.user_id = user_id

    def to_json(self):
        # Embeds every child record's JSON, separated by ", " + CRLF.
        return '{"id": "%s", "dnsrecords": [\r\n%s\r\n]}' % (self.id, ', \r\n'.join([record.to_json() for record in self.dnsrecords]))

    def __repr__(self):
        return self.to_json()
        # return "<Silo (id='%s', dnsrecords='%s')>" % (self.id, self.dnsrecords)
class DnsRecord(Base):
    """A single DNS record (hostname -> ip) belonging to a silo."""
    __tablename__ = 'dnsrecord'

    # Composite primary key: a hostname is unique within its silo.
    silo_id = Column(String(20), ForeignKey('silo.id'), primary_key=True)
    hostname = Column(String(20), primary_key=True)
    ip = Column(String(20))

    def __init__(self, silo_id, hostname, ip):
        self.silo_id = silo_id
        self.hostname = hostname
        self.ip = ip

    def to_json(self):
        return '{"hostname": "%s", "ip": "%s"}' % (self.hostname, self.ip)

    def __repr__(self):
        return self.to_json()
        # return "<DnsRecord(name='%s', value='%s')>" % (self.hostname, self.ip)
def clear_schema():
    """Drop every table declared on Base (destructive — data is lost)."""
    Base.metadata.drop_all(engine)
def init_schema():
    """Create all tables declared on Base."""
    Base.metadata.create_all(engine)
| {
"content_hash": "78b1e1ffe0c628c184cd9866987b555f",
"timestamp": "",
"source": "github",
"line_count": 84,
"max_line_length": 134,
"avg_line_length": 28.392857142857142,
"alnum_prop": 0.6054507337526206,
"repo_name": "x1angli/DDNS-chiasma",
"id": "fdca4b857a93e6cd0855cd3833c67f3a1994fad4",
"size": "2385",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "server/models.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "13875"
}
],
"symlink_target": ""
} |
def gendata(apikey, nthreads):
    """Build a dashboard table of Dell warranty end dates per salt minion.

    apikey   -- Dell Support API key.
    nthreads -- worker-thread count; defaults to 8 when None.

    Returns a dict with 'ncol', 'headers' and 'data'
    (hostname -> 'YYYY-MM-DD' warranty end date, sorted by hostname).
    """
    import sys
    import concurrent.futures
    import salt.client
    import salt.runner
    import collections
    import warnings
    import json
    import requests
    from datetime import datetime
    warnings.simplefilter("ignore", DeprecationWarning)
    result = {}
    toreturn = {}
    if nthreads is None:
        nthreads = 8

    def checkdellwarranty(apikey, serialnumber):
        # Query Dell's asset-warranty API and return the latest
        # entitlement end date, reformatted as YYYY-MM-DD.
        baseurl = 'https://api.dell.com/support/assetinfo/v4/getassetwarranty/'
        httpresp = requests.get(baseurl+serialnumber+'?apikey='+apikey)
        enddates = []
        for entitlement in json.loads(httpresp.text)['AssetWarrantyResponse'][0]['AssetEntitlementData']:
            enddates.append(entitlement['EndDate'])
        return datetime.strptime(max(enddates), '%Y-%m-%dT%H:%M:%S').strftime('%Y-%m-%d')

    def getwarrantyenddate(server):
        # Thread worker: read the minion's grains and, for Dell hardware,
        # record its warranty end date in the shared `result` dict.
        c = salt.client.LocalClient()
        manufacturer = c.cmd(server, 'grains.items')[server]['manufacturer']
        if manufacturer == "Dell Inc.":
            serialnumber = c.cmd(server, 'grains.items')[server]['serialnumber']
            result[server] = checkdellwarranty(apikey, serialnumber)

    # NOTE(review): salt.config is used below but only salt.client /
    # salt.runner are imported — relies on it being imported transitively.
    opts = salt.config.master_config('/etc/salt/master')
    opts['quiet'] = True
    r = salt.runner.RunnerClient(opts)
    mup = r.cmd('manage.up')
    # Exiting the `with` block waits for all submitted futures. The future
    # results are never retrieved, so worker exceptions are silently
    # dropped (failed hosts simply don't appear in the output).
    with concurrent.futures.ThreadPoolExecutor(nthreads) as executor:
        future_to_upd = dict((executor.submit(getwarrantyenddate, server), server) for server in mup)
    toreturn['ncol'] = 2
    toreturn['headers'] = ['Hostname', 'Warranty Expiration']
    toreturn['data'] = collections.OrderedDict(sorted(result.items()))
    return toreturn
| {
"content_hash": "aa3d1d62982a374282743b0dda5295c3",
"timestamp": "",
"source": "github",
"line_count": 41,
"max_line_length": 105,
"avg_line_length": 41.048780487804876,
"alnum_prop": 0.6702317290552585,
"repo_name": "webedia-dev/pepperboard",
"id": "ce2cc159db7db622424f86bb325f4b95773f5763",
"size": "1731",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pepperboard/dashboards/dellwarranty.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "27891"
}
],
"symlink_target": ""
} |
from django.conf.urls.defaults import *
# Route table for the reviewclone app.
# NOTE(review): string view paths and django.conf.urls.defaults are
# pre-Django-1.6 idioms; this file targets an old Django release.
urlpatterns = patterns('',
    url(r'new/(?P<item_id>[\d]+)/$', 'reviewclone.views.create_review',
        name='create_review'),
    url(r'relations/$', 'reviewclone.views.relations_list',
        name='relations'),
    url(r'relations/new/$', 'reviewclone.views.create_relation',
        name='create_relation'),
    url(r'relations/delete/$', 'reviewclone.views.delete_relation',
        name='delete_relation'),
    url(r'clones/$', 'reviewclone.views.similar_list', name='similar_list'),
    url(r'movies/$', 'reviewclone.views.items_list', name='items_list'),
    url(r'movies/(?P<letter>[-\w]+)/$', 'reviewclone.views.items_list',
        name='items_list_letter'),
    url(r'review/(?P<review_id>\d+)/$', 'reviewclone.views.after_review',
        name='after_review'),
    url(r'user/(?P<user_id>[-\d]+)/$', 'reviewclone.views.user_reviews',
        name='user_reviews'),
    url(r'dashboard/$', 'reviewclone.views.dashboard', name='dashboard'),
)
| {
"content_hash": "99eaaf4ee2cbddcb684e3b8c9fa175c2",
"timestamp": "",
"source": "github",
"line_count": 21,
"max_line_length": 76,
"avg_line_length": 47.80952380952381,
"alnum_prop": 0.6264940239043825,
"repo_name": "brianmacdonald/djangodash10",
"id": "68e87641cba90b8cedb618066c4aa8e66c410609",
"size": "1004",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "reviewclone/urls.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "JavaScript",
"bytes": "142944"
},
{
"name": "Python",
"bytes": "30932"
}
],
"symlink_target": ""
} |
from .volttronwebrpc import VolttronWebRPC
| {
"content_hash": "beb9f5225201b3f469b6131708644e73",
"timestamp": "",
"source": "github",
"line_count": 1,
"max_line_length": 42,
"avg_line_length": 43,
"alnum_prop": 0.8837209302325582,
"repo_name": "schandrika/volttron",
"id": "df47d694ea7a24a2c3c5554c945fbfd7d950bdf9",
"size": "2946",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "examples/WebRPC/volttronwebrpc/__init__.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "CSS",
"bytes": "33023"
},
{
"name": "HTML",
"bytes": "61489"
},
{
"name": "JavaScript",
"bytes": "497583"
},
{
"name": "Python",
"bytes": "3090478"
},
{
"name": "Shell",
"bytes": "41093"
}
],
"symlink_target": ""
} |
from time import time, sleep
from .processador import Processador
class Escalonador_Fifo(object):
    """
    A FIFO (first-come, first-served) process scheduler: each process
    runs to completion before the next one starts (non-preemptive).
    """

    def __init__(self, processador):
        # processador: the CPU abstraction that holds the running process.
        self.processador = processador
        # Ready queue, served in arrival order.
        self.processos = []

    def escalonar_processos(self):
        """
        Start executing and scheduling the queued processes via the
        helper methods below.
        """
        if self.processos:
            self.excecutar_processo()
            processo = self.processo_em_execucao()
            print "Processo {0} {1}...".format(processo.pid,
                                               processo.status_do_processo())
            segundos = 1
            while (processo.tempo_em_execucao < processo.tempo_do_processo):
                # The process keeps running (1-second ticks) until its
                # elapsed time equals its total required time.
                sleep(1)
                processo.tempo_em_execucao += 1
                print "{0} s".format(segundos)
                segundos += 1
            self.finaliza_processo(processo)

    def excecutar_processo(self):
        """
        Pop the first process off the queue, mark it as running
        (estado = 1), stamp its start time and place it on the processor.
        """
        processo = self.processos.pop(0)
        processo.estado = 1
        processo.tempo_inicio = int(time())
        self.processador.processo_em_execucao = processo

    def processo_em_execucao(self):
        """
        Return the process currently running on the processor.
        """
        return self.processador.processo_em_execucao

    def finaliza_processo(self, processo):
        """
        Finalize and remove the running process.
        """
        # NOTE(review): the `processo` argument is unused; the process
        # read back from the processor is the one finalized.
        proc = self.processador.processo_em_execucao
        print "Processo {0} Finalizado\n".format(proc.pid)
        # Free the processor.
        self.processador.processo_em_execucao = None
class Escalonador_Round_Robin(object):
    """
    A Round-Robin process scheduler: each process runs for at most
    `quantum` seconds per turn, then is re-queued until its total
    required time has been served.
    """

    def __init__(self, processador, quantum):
        self.processador = processador
        self.processos = []
        # Maximum number of 1-second ticks a process may run per turn.
        self.quantum = quantum

    def escalonar_processos(self):
        """
        Run one scheduling turn: execute the head of the queue until its
        quantum expires or it completes; re-queue it if unfinished.
        """
        if self.processos:
            self.excecutar_processo()
            processo = self.processo_em_execucao()
            print "Processo {0} {1}...".format(processo.pid,
                                               processo.status_do_processo())
            segundos = 1
            # Tick until the quantum expires or the process completes.
            while (processo.tempo_em_execucao < self.quantum and\
                   processo.tempo_em_execucao < processo.tempo_do_processo and\
                   processo.tempo_executado < processo.tempo_do_processo):
                sleep(1)
                processo.tempo_em_execucao += 1
                processo.tempo_executado += 1
                print "{0} s".format(segundos)
                segundos += 1
            if processo.tempo_executado == processo.tempo_do_processo:
                self.finaliza_processo(processo)
            else:
                # Quantum expired: back to the end of the ready queue.
                processo.estado = 0
                self.processos.append(processo)

    def excecutar_processo(self):
        """
        Pop the first process off the queue, reset its per-turn counter
        and place it on the processor.
        """
        processo = self.processos.pop(0)
        processo.estado = 1
        processo.tempo_em_execucao = 0
        # Stamp only the first time the process ever gets the CPU.
        if not processo.tempo_inicio:
            processo.tempo_inicio = int(time())
        self.processador.processo_em_execucao = processo

    def processo_em_execucao(self):
        """
        Return the process currently running on the processor.
        """
        return self.processador.processo_em_execucao

    def finaliza_processo(self, processo):
        """
        Finalize and remove the running process.
        """
        # NOTE(review): the `processo` argument is unused; the process
        # read back from the processor is the one finalized.
        proc = self.processador.processo_em_execucao
        print "Processo {0} Finalizado\n".format(proc.pid)
        # Free the processor.
        self.processador.processo_em_execucao = None
| {
"content_hash": "58fd3c064308119815da84671a255f48",
"timestamp": "",
"source": "github",
"line_count": 124,
"max_line_length": 79,
"avg_line_length": 35.895161290322584,
"alnum_prop": 0.5625702089418109,
"repo_name": "DouglasHSS/operating_system",
"id": "2dd3c1b24acff42f78c29860c980cd9b1da29e8d",
"size": "4506",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "core/escalonadores.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "9095"
}
],
"symlink_target": ""
} |
import numpy as np
import unittest
import chainer
from chainer.datasets import TupleDataset
from chainer.iterators import SerialIterator
from chainer import testing
from chainercv.extensions import DetectionVOCEvaluator
from chainercv.utils import generate_random_bbox
from chainercv.utils.testing import attr
from chainermn import create_communicator
class _DetectionStubLink(chainer.Link):
    """Deterministic stand-in for a detection model.

    Instead of running inference, :meth:`predict` replays pre-supplied
    bounding boxes and labels, advancing an internal cursor by the
    number of images it receives.
    """

    def __init__(self, bboxes, labels, initial_count=0):
        super(_DetectionStubLink, self).__init__()
        self.count = initial_count
        self.bboxes = bboxes
        self.labels = labels

    def predict(self, imgs):
        start = self.count
        stop = start + len(imgs)
        bboxes = self.bboxes[start:stop]
        labels = self.labels[start:stop]
        # Every "prediction" is reported with full confidence.
        scores = [np.ones_like(lbl) for lbl in labels]
        self.count = stop
        return bboxes, labels, scores
class TestDetectionVOCEvaluator(unittest.TestCase):

    def setUp(self):
        # The stub link replays the dataset's own bboxes/labels, so
        # predictions match ground truth exactly and mAP should be 1.
        bboxes = [generate_random_bbox(5, (256, 324), 24, 120)
                  for _ in range(10)]
        labels = np.ones((10, 5))
        self.dataset = TupleDataset(
            np.random.uniform(size=(10, 3, 32, 48)),
            bboxes,
            labels)
        self.link = _DetectionStubLink(bboxes, labels)
        self.iterator = SerialIterator(
            self.dataset, 5, repeat=False, shuffle=False)
        self.evaluator = DetectionVOCEvaluator(
            self.iterator, self.link, label_names=('cls0', 'cls1', 'cls2'))
        self.expected_ap = 1

    def test_evaluate(self):
        reporter = chainer.Reporter()
        reporter.add_observer('target', self.link)
        with reporter:
            mean = self.evaluator.evaluate()
        # No observation is reported to the current reporter. Instead the
        # evaluator collect results in order to calculate their mean.
        self.assertEqual(len(reporter.observation), 0)
        # All labels in the data are 1, so cls0/cls2 have no samples
        # and their AP is NaN.
        np.testing.assert_equal(mean['target/map'], self.expected_ap)
        np.testing.assert_equal(mean['target/ap/cls0'], np.nan)
        np.testing.assert_equal(mean['target/ap/cls1'], self.expected_ap)
        np.testing.assert_equal(mean['target/ap/cls2'], np.nan)

    def test_call(self):
        mean = self.evaluator()
        # main is used as default
        np.testing.assert_equal(mean['main/map'], self.expected_ap)
        np.testing.assert_equal(mean['main/ap/cls0'], np.nan)
        np.testing.assert_equal(mean['main/ap/cls1'], self.expected_ap)
        np.testing.assert_equal(mean['main/ap/cls2'], np.nan)

    def test_evaluator_name(self):
        self.evaluator.name = 'eval'
        mean = self.evaluator()
        # name is used as a prefix
        np.testing.assert_equal(mean['eval/main/map'], self.expected_ap)
        np.testing.assert_equal(mean['eval/main/ap/cls0'], np.nan)
        np.testing.assert_equal(mean['eval/main/ap/cls1'], self.expected_ap)
        np.testing.assert_equal(mean['eval/main/ap/cls2'], np.nan)

    def test_current_report(self):
        reporter = chainer.Reporter()
        with reporter:
            mean = self.evaluator()
        # The result is reported to the current reporter.
        self.assertEqual(reporter.observation, mean)
@attr.mpi
class TestDetectionVOCEvaluatorMPI(unittest.TestCase):

    def setUp(self):
        self.comm = create_communicator('naive')

        batchsize_per_process = 5
        batchsize = batchsize_per_process * self.comm.size

        # Rank 0 generates the data, then broadcasts it so every rank
        # works on an identical dataset.
        if self.comm.rank == 0:
            bboxes = [generate_random_bbox(5, (256, 324), 24, 120)
                      for _ in range(10)]
            labels = [np.random.choice(np.arange(3, dtype=np.int32), size=(5,))
                      for _ in range(10)]
        else:
            bboxes = None
            labels = None
        # Each rank's stub link starts at its own offset into the data.
        initial_count = self.comm.rank * batchsize_per_process

        bboxes = self.comm.bcast_obj(bboxes)
        labels = self.comm.bcast_obj(labels)
        self.bboxes = bboxes
        self.labels = labels

        self.dataset = TupleDataset(
            np.random.uniform(size=(10, 3, 32, 48)),
            bboxes, labels)
        self.initial_count = initial_count
        self.batchsize = batchsize

    def test_consistency(self):
        # Multi-process evaluation must match a single-process run.
        reporter = chainer.Reporter()

        if self.comm.rank == 0:
            multi_iterator = SerialIterator(
                self.dataset, self.batchsize, repeat=False, shuffle=False)
        else:
            multi_iterator = None
        multi_link = _DetectionStubLink(
            self.bboxes, self.labels, self.initial_count)
        multi_evaluator = DetectionVOCEvaluator(
            multi_iterator, multi_link,
            label_names=('cls0', 'cls1', 'cls2'),
            comm=self.comm)
        reporter.add_observer('target', multi_link)
        with reporter:
            multi_mean = multi_evaluator.evaluate()

        # Non-root ranks get an empty result; only rank 0 aggregates.
        if self.comm.rank != 0:
            self.assertEqual(multi_mean, {})
            return

        single_iterator = SerialIterator(
            self.dataset, self.batchsize, repeat=False, shuffle=False)
        single_link = _DetectionStubLink(
            self.bboxes, self.labels)
        single_evaluator = DetectionVOCEvaluator(
            single_iterator, single_link,
            label_names=('cls0', 'cls1', 'cls2'))
        reporter.add_observer('target', single_link)
        with reporter:
            single_mean = single_evaluator.evaluate()

        self.assertEqual(set(multi_mean.keys()), set(single_mean.keys()))
        for key in multi_mean.keys():
            np.testing.assert_equal(single_mean[key], multi_mean[key])
# Allow this test file to be executed directly.
testing.run_module(__name__, __file__)
| {
"content_hash": "255ff34c24bc88837125c048a9438fe8",
"timestamp": "",
"source": "github",
"line_count": 160,
"max_line_length": 79,
"avg_line_length": 35.30625,
"alnum_prop": 0.6149761019649496,
"repo_name": "yuyu2172/chainercv",
"id": "acc1bb0c546b97d811e0ec4cc9a3eb2665e46b52",
"size": "5649",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "tests/extensions_tests/evaluator_tests/test_detection_voc_evaluator.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Dockerfile",
"bytes": "3080"
},
{
"name": "Python",
"bytes": "1201052"
},
{
"name": "Shell",
"bytes": "10815"
}
],
"symlink_target": ""
} |
import sys
import json

# Filter: read JSON on stdin and pretty-print it on stdout with sorted
# keys, 2-space indent and unescaped non-ASCII, encoded as UTF-8.
# Python 2 syntax: print statement, json.load(..., encoding=...).
print json.dumps(json.load(sys.stdin, encoding='utf-8'),
                 sort_keys=True, indent=2, ensure_ascii=False).encode('utf-8')
| {
"content_hash": "1e9fb62338bfcf3845e7da85f71de645",
"timestamp": "",
"source": "github",
"line_count": 5,
"max_line_length": 78,
"avg_line_length": 32,
"alnum_prop": 0.6625,
"repo_name": "nqpz/ku-intranet-studiebeskeder",
"id": "26939cc89749d034a8ddec9b1382e48b3a222e76",
"size": "183",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "format_json.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Nginx",
"bytes": "615"
},
{
"name": "Perl",
"bytes": "1862"
},
{
"name": "Python",
"bytes": "1483"
},
{
"name": "Shell",
"bytes": "214"
}
],
"symlink_target": ""
} |
"""Generic Z-Wave Entity Classes."""
import copy
import logging
from openzwavemqtt.const import (
EVENT_INSTANCE_STATUS_CHANGED,
EVENT_VALUE_CHANGED,
OZW_READY_STATES,
CommandClass,
ValueIndex,
)
from openzwavemqtt.models.node import OZWNode
from openzwavemqtt.models.value import OZWValue
from homeassistant.core import callback
from homeassistant.helpers.dispatcher import (
async_dispatcher_connect,
async_dispatcher_send,
)
from homeassistant.helpers.entity import Entity
from . import const
from .const import DOMAIN, PLATFORMS
from .discovery import check_node_schema, check_value_schema
_LOGGER = logging.getLogger(__name__)

# Raw integer values of the OZW instance states that count as "ready";
# compared against instance_status.status in ZWaveDeviceEntity.available.
OZW_READY_STATES_VALUES = {st.value for st in OZW_READY_STATES}
class ZWaveDeviceEntityValues:
    """Manages entity access to the underlying Z-Wave value objects."""

    def __init__(self, hass, options, schema, primary_value):
        """Initialize the values object with the passed entity schema."""
        self._hass = hass
        self._entity_created = False
        # Deep copy so the per-instance schema edits below don't leak
        # into the shared discovery schema.
        self._schema = copy.deepcopy(schema)
        self._values = {}
        self.options = options

        # Go through values listed in the discovery schema, initialize them,
        # and add a check to the schema to make sure the Instance matches.
        for name, disc_settings in self._schema[const.DISC_VALUES].items():
            self._values[name] = None
            disc_settings[const.DISC_INSTANCE] = (primary_value.instance,)

        self._values[const.DISC_PRIMARY] = primary_value
        self._node = primary_value.node
        self._schema[const.DISC_NODE_ID] = [self._node.node_id]

    def async_setup(self):
        """Set up values instance."""
        # Check values that have already been discovered for node
        # and see if they match the schema and need added to the entity.
        for value in self._node.values():
            self.async_check_value(value)

        # Check if all the _required_ values in the schema are present and
        # create the entity.
        self._async_check_entity_ready()

    def __getattr__(self, name):
        """Get the specified value for this entity."""
        # Unknown names resolve to None rather than raising AttributeError.
        return self._values.get(name, None)

    def __iter__(self):
        """Allow iteration over all values."""
        return iter(self._values.values())

    def __contains__(self, name):
        """Check if the specified name/key exists in the values."""
        return name in self._values

    @callback
    def async_check_value(self, value):
        """Check if the new value matches a missing value for this entity.

        If a match is found, it is added to the values mapping.
        """
        # Make sure the node matches the schema for this entity.
        if not check_node_schema(value.node, self._schema):
            return

        # Go through the possible values for this entity defined by the schema.
        for name, name_value in self._values.items():
            # Skip if it's already been added.
            if name_value is not None:
                continue
            # Skip if the value doesn't match the schema.
            if not check_value_schema(value, self._schema[const.DISC_VALUES][name]):
                continue

            # Add value to mapping.
            self._values[name] = value

            # If the entity has already been created, notify it of the new value.
            if self._entity_created:
                async_dispatcher_send(
                    self._hass, f"{DOMAIN}_{self.values_id}_value_added"
                )

            # Check if entity has all required values and create the entity if needed.
            self._async_check_entity_ready()

    @callback
    def _async_check_entity_ready(self):
        """Check if all required values are discovered and create entity."""
        # Abort if the entity has already been created
        if self._entity_created:
            return

        # Go through values defined in the schema and abort if a required value is missing.
        for name, disc_settings in self._schema[const.DISC_VALUES].items():
            if self._values[name] is None and not disc_settings.get(
                const.DISC_OPTIONAL
            ):
                return

        # We have all the required values, so create the entity.
        component = self._schema[const.DISC_COMPONENT]

        _LOGGER.debug(
            "Adding Node_id=%s Generic_command_class=%s, "
            "Specific_command_class=%s, "
            "Command_class=%s, Index=%s, Value type=%s, "
            "Genre=%s as %s",
            self._node.node_id,
            self._node.node_generic,
            self._node.node_specific,
            self.primary.command_class,
            self.primary.index,
            self.primary.type,
            self.primary.genre,
            component,
        )
        self._entity_created = True

        # Hand this values collection to the matching platform so it can
        # create the actual entity.
        if component in PLATFORMS:
            async_dispatcher_send(self._hass, f"{DOMAIN}_new_{component}", self)

    @property
    def values_id(self):
        """Identification for this values collection."""
        return create_value_id(self.primary)
class ZWaveDeviceEntity(Entity):
    """Generic Entity Class for a Z-Wave Device."""

    def __init__(self, values):
        """Initialize a generic Z-Wave device entity."""
        self.values = values
        self.options = values.options

    @callback
    def on_value_update(self):
        """Call when a value is added/updated in the entity EntityValues Collection.

        To be overridden by platforms needing this event.
        """

    async def async_added_to_hass(self):
        """Call when entity is added."""
        # Add dispatcher and OZW listeners callbacks.
        # Add to on_remove so they will be cleaned up on entity removal.
        self.async_on_remove(
            self.options.listen(EVENT_VALUE_CHANGED, self._value_changed)
        )
        self.async_on_remove(
            self.options.listen(EVENT_INSTANCE_STATUS_CHANGED, self._instance_updated)
        )
        self.async_on_remove(
            async_dispatcher_connect(
                self.hass, const.SIGNAL_DELETE_ENTITY, self._delete_callback
            )
        )
        self.async_on_remove(
            async_dispatcher_connect(
                self.hass,
                f"{DOMAIN}_{self.values.values_id}_value_added",
                self._value_added,
            )
        )

    @property
    def device_info(self):
        """Return device information for the device registry."""
        node = self.values.primary.node
        node_instance = self.values.primary.instance
        dev_id = create_device_id(node, self.values.primary.instance)
        node_firmware = node.get_value(
            CommandClass.VERSION, ValueIndex.VERSION_APPLICATION
        )
        device_info = {
            "identifiers": {(DOMAIN, dev_id)},
            "name": create_device_name(node),
            "manufacturer": node.node_manufacturer_name,
            "model": node.node_product_name,
        }
        if node_firmware is not None:
            device_info["sw_version"] = node_firmware.value

        # device with multiple instances is split up into virtual devices for each instance
        if node_instance > 1:
            parent_dev_id = create_device_id(node)
            device_info["name"] += f" - Instance {node_instance}"
            device_info["via_device"] = (DOMAIN, parent_dev_id)
        return device_info

    @property
    def extra_state_attributes(self):
        """Return the device specific state attributes."""
        return {const.ATTR_NODE_ID: self.values.primary.node.node_id}

    @property
    def name(self):
        """Return the name of the entity."""
        node = self.values.primary.node
        return f"{create_device_name(node)}: {self.values.primary.label}"

    @property
    def unique_id(self):
        """Return the unique_id of the entity."""
        return self.values.values_id

    @property
    def available(self) -> bool:
        """Return entity availability."""
        # Use OZW Daemon status for availability.
        instance_status = self.values.primary.ozw_instance.get_status()
        return instance_status and instance_status.status in OZW_READY_STATES_VALUES

    @callback
    def _value_changed(self, value):
        """Call when a value from ZWaveDeviceEntityValues is changed.

        Should not be overridden by subclasses.
        """
        # Only react when the changed value belongs to this entity's
        # values collection.
        if value.value_id_key in (v.value_id_key for v in self.values if v):
            self.on_value_update()
            self.async_write_ha_state()

    @callback
    def _value_added(self):
        """Call when a value from ZWaveDeviceEntityValues is added.

        Should not be overridden by subclasses.
        """
        self.on_value_update()

    @callback
    def _instance_updated(self, new_status):
        """Call when the instance status changes.

        Should not be overridden by subclasses.
        """
        self.on_value_update()
        self.async_write_ha_state()

    @property
    def should_poll(self):
        """No polling needed."""
        return False

    async def _delete_callback(self, values_id):
        """Remove this entity."""
        if not self.values:
            return  # race condition: delete already requested
        if values_id == self.values.values_id:
            await self.async_remove(force_remove=True)
def create_device_name(node: OZWNode):
    """Generate a sensible (short) default device name from an OZWNode."""
    # Preference order: custom name set in OZWAdmin, short metadata name,
    # product/device-type strings, then a bare node-id fallback.
    if node.node_name:
        return node.node_name
    meta = node.meta_data
    if meta and meta.get("Name"):
        return meta["Name"]
    for candidate in (
        node.node_product_name,
        node.node_device_type_string,
        node.node_specific_string,
    ):
        if candidate:
            return candidate
    # Last resort: use the node id (should never happen, but just in case).
    return f"Node {node.id}"
def create_device_id(node: OZWNode, node_instance: int = 1):
    """Generate unique device_id from a OZWNode."""
    # Format: [OZW_INSTANCE_ID].[NODE_ID].[NODE_INSTANCE]
    return f"{node.parent.id}.{node.node_id}.{node_instance}"
def create_value_id(value: OZWValue):
    """Generate unique value_id from an OZWValue."""
    ozw_instance = value.node.parent.id
    node_id = value.node.id
    # [OZW_INSTANCE_ID]-[NODE_ID]-[VALUE_ID_KEY]
    return f"{ozw_instance}-{node_id}-{value.value_id_key}"
| {
"content_hash": "7189c0e33210a9c51a79c0c38cafb415",
"timestamp": "",
"source": "github",
"line_count": 303,
"max_line_length": 91,
"avg_line_length": 35.00330033003301,
"alnum_prop": 0.6183292475957005,
"repo_name": "lukas-hetzenecker/home-assistant",
"id": "d5cafa615df82aa3b86c8af650d48408201b252f",
"size": "10606",
"binary": false,
"copies": "4",
"ref": "refs/heads/dev",
"path": "homeassistant/components/ozw/entity.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "2443"
},
{
"name": "Python",
"bytes": "38023745"
},
{
"name": "Shell",
"bytes": "4910"
}
],
"symlink_target": ""
} |
"""search.py: Django company"""
from appsearch.registry import ModelSearch, search
from .models import Company
__author__ = "Steven Klass"
__date__ = "08/07/2019 21:59"
__copyright__ = "Copyright 2011-2020 Pivotal Energy Solutions. All rights reserved."
__credits__ = [
"Artem Hruzd",
"Steven Klass",
]
class CompanySearch(ModelSearch):
    """
    Company search available for all users
    """

    # Columns shown in the result grid, and the fields users may query.
    display_fields = ("name", "slug", "company_type")
    search_fields = ("name", "company_type")

    def user_has_perm(self, user):
        # Unrestricted: every user may run this search.
        return True


# Register the configuration so Company is searchable via appsearch.
search.register(Company, CompanySearch)
| {
"content_hash": "5be2e338f98af1559f6f5e389d6b61c2",
"timestamp": "",
"source": "github",
"line_count": 31,
"max_line_length": 84,
"avg_line_length": 19.677419354838708,
"alnum_prop": 0.6573770491803279,
"repo_name": "pivotal-energy-solutions/django-appsearch",
"id": "a8d50e31b53b1ed750a536519807ed0c45672f12",
"size": "634",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "demo_app/company/search.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "13049"
},
{
"name": "JavaScript",
"bytes": "8225"
},
{
"name": "Python",
"bytes": "78018"
},
{
"name": "Shell",
"bytes": "210"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    """Alter userprofile.avatar: set a default image and upload path."""

    dependencies = [
        ('core', '0017_userprofile_avatar'),
    ]

    operations = [
        migrations.AlterField(
            model_name='userprofile',
            name='avatar',
            field=models.ImageField(default=b'/static/images/avatar.png', upload_to=b'/static/avatars/'),
        ),
    ]
| {
"content_hash": "b6805b84493130250309d11459d325e4",
"timestamp": "",
"source": "github",
"line_count": 18,
"max_line_length": 105,
"avg_line_length": 24.166666666666668,
"alnum_prop": 0.6137931034482759,
"repo_name": "tfiers/arenberg-online",
"id": "d8959a444b5651b2548d982ba91760ce0df9e51d",
"size": "459",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "core/migrations/0018_auto_20151223_1651.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "62"
},
{
"name": "CSS",
"bytes": "31305"
},
{
"name": "HTML",
"bytes": "230598"
},
{
"name": "JavaScript",
"bytes": "96170"
},
{
"name": "Python",
"bytes": "178246"
}
],
"symlink_target": ""
} |
class Pessoa():
    """A person identified by name; equality and hashing are name-based."""

    # Class attribute shared by all instances (demo of class vs instance attrs).
    OLHOS = 2

    def __init__(self, nome):
        print('Iniciando __init__ de Pessoa')
        self.nome = nome

    def cumprimentar(self):
        """Return a greeting introducing this person."""
        # Renamed the non-idiomatic `this` parameter to `self`.
        return 'Olá, meu nome é %s' % self.nome

    def __eq__(self, other):
        # Delegate to Python's fallback (identity) for non-Pessoa operands
        # instead of raising AttributeError on `other.nome`.
        if not isinstance(other, Pessoa):
            return NotImplemented
        return self.nome == other.nome

    def __hash__(self):
        # Consistent with __eq__: equal names -> equal hashes.
        return hash(self.nome)

    def __repr__(self):
        return 'Pessoa(%s)' % self.nome
# --- Demo: instance attributes vs. class attributes ---
pessoa = Pessoa('Renzo')
denis = Pessoa('Denis')
print(pessoa.nome)
print(denis.nome)
print(denis.cumprimentar())
print(denis.cumprimentar())
print(pessoa.cumprimentar())
# Creates an *instance* attribute on denis that shadows the class attribute.
denis.OLHOS = 3
# Rebinds the *class* attribute, visible to all instances without their own.
Pessoa.OLHOS = 4
print(Pessoa.OLHOS)
print(pessoa.OLHOS)  # reads the class attribute (4)
print(denis.OLHOS)   # reads the instance attribute (3)
# Deleting the instance attribute re-exposes the class attribute.
del denis.OLHOS
print(denis.OLHOS)
"content_hash": "95b6cfddf15accb6cca3118e296eb0b3",
"timestamp": "",
"source": "github",
"line_count": 33,
"max_line_length": 45,
"avg_line_length": 20.87878787878788,
"alnum_prop": 0.6386066763425254,
"repo_name": "renzon/poo-python",
"id": "c4ab4bec43cddbd0e116884954493d039bf76bf7",
"size": "691",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "aula5/pessoa.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "12465"
}
],
"symlink_target": ""
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.