| column | stats |
|---|---|
| commit | stringlengths 40–40 |
| old_file | stringlengths 4–118 |
| new_file | stringlengths 4–118 |
| old_contents | stringlengths 0–2.94k |
| new_contents | stringlengths 1–4.43k |
| subject | stringlengths 15–444 |
| message | stringlengths 16–3.45k |
| lang | stringclasses 1 value |
| license | stringclasses 13 values |
| repos | stringlengths 5–43.2k |
| prompt | stringlengths 17–4.58k |
| response | stringlengths 1–4.43k |
| prompt_tagged | stringlengths 58–4.62k |
| response_tagged | stringlengths 1–4.43k |
| text | stringlengths 132–7.29k |
| text_tagged | stringlengths 173–7.33k |

Each record below lists these sixteen fields in this order, delimited by `|` lines.
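A minimal sketch of loading and filtering rows with this schema using the Hugging Face `datasets` library; the dataset name here is hypothetical.

```python
from datasets import load_dataset

# Hypothetical dataset id; substitute the real one.
ds = load_dataset("some-org/python-commits", split="train")

# Keep only MIT-licensed rows and inspect one record.
mit_rows = ds.filter(lambda row: row["license"] == "mit")
print(mit_rows[0]["subject"], mit_rows[0]["new_file"])
```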
dccc36a49111cd978add1ea9cd1fe26e6526c69f
|
data/calculate_totals.py
|
data/calculate_totals.py
|
# calculate_totals.py
import json
import datetime
from collections import defaultdict

# http://stackoverflow.com/questions/6999726/how-can-i-convert-a-datetime-object-to-milliseconds-since-epoch-unix-time-in-p
def unix_time(dt):
    epoch = datetime.datetime.utcfromtimestamp(0)
    delta = dt - epoch
    return delta.total_seconds()

def unix_time_millis(dt):
    return unix_time(dt) * 1000

event_rsvps = defaultdict(int)
event_counts = defaultdict(int)
json_object = {}

with open('groups.json') as group_json:
    group_data = json.load(group_json)
    for g in group_data[0]["groups"]:
        #print(g)
        mfile = "meetup_history_" + g + ".json"
        #print(mfile)
        with open(mfile) as mjson:
            dat = json.load(mjson)
            for d in dat:
                msepoch = d['time']
                #print(msepoch, d['yes_rsvp_count'])
                x = datetime.date.fromtimestamp(msepoch/1000)
                #print(x)
                monthdate = datetime.datetime(x.year, x.month, 1)
                #print(monthdate)
                event_rsvps[monthdate] += d['yes_rsvp_count']
                event_counts[monthdate] += 1

for k, v in sorted(event_rsvps.iteritems()):
    print(k, v, event_counts[k])
    millis = unix_time_millis(k)
    #print('%f' % millis)
    #print('{0:f}'.format(millis))
    print(int(millis))
|
Add a py script to get monthly totals.
|
Add a py script to get monthly totals.
|
Python
|
mit
|
jimbo00000/meetup-attendance-graph
|
Add a py script to get monthly totals.
|
(response: identical to the script shown above)
|
<commit_before><commit_msg>Add a py script to get monthly totals.<commit_after>
|
(response_tagged: identical to the script shown above)
|
(text: the subject line concatenated with the same script)
|
(text_tagged: the tagged prompt concatenated with the same script)
|
|
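The script above targets Python 2 (`dict.iteritems`, manual epoch subtraction). A sketch of the same millisecond-epoch conversion in Python 3, where `datetime.timestamp()` removes the need for the epoch arithmetic; it assumes naive datetimes are meant as UTC, matching `utcfromtimestamp(0)` in the original.

```python
import datetime

def unix_time_millis(dt):
    # Treat the naive datetime as UTC, matching the UTC epoch used above.
    return int(dt.replace(tzinfo=datetime.timezone.utc).timestamp() * 1000)

print(unix_time_millis(datetime.datetime(2014, 1, 1)))  # 1388534400000
```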
920e578c7ff192621a982b154f0b22c408667fed
|
main_circle.py
|
main_circle.py
|
#!/usr/bin/python2
import sys
sys.path = ['', 'pyglet-1.1.4'] + sys.path

import math
import pyglet
from pyglet.graphics import vertex_list
from pyglet import gl
from pyglet.window import key

window = pyglet.window.Window(width=800, height=600, resizable=True)
fps_display = pyglet.clock.ClockDisplay()

num_points = 12
num_lines = 10
num_points = 12
num_lines = 12
epsilon = 0.01

lines = [[0 for i in range(num_points)] for i in range(num_lines)]
vlists = [vertex_list(num_points, 'v2f', 'c4B') for i in range(num_lines)]
for vlist in vlists:
    for i in range(len(vlist.colors)):
        vlist.colors[i] = 255

@window.event
def on_resize(w, h):
    print "on resize %d %d" % (w, h)

@window.event
def on_draw():
    window.clear()
    for vlist in vlists:
        vlist.draw(gl.GL_LINE_STRIP)
    fps_display.draw()

def update(dt):
    size = min(window.width, window.height)
    assert len(vlists) == len(lines)
    for i in range(len(lines)):
        for j in range(len(lines[i])):
            lines[i][j] += i*j*dt*epsilon
            vlists[i].vertices[2*j] = math.cos(lines[i][j])*size/2 + size/2
            vlists[i].vertices[2*j+1] = math.sin(lines[i][j])*size/2 + size/2
        assert lines[i][0] == 0.0

@window.event
def on_key_press(sym, mod):
    pass

pyglet.clock.schedule_interval(update, 1.0/200.0)
pyglet.app.run()
|
Add magice circle demo app
|
Add magice circle demo app
|
Python
|
mit
|
dragonfi/snowfall,dragonfi/snowfall
|
Add magice circle demo app
|
(response: identical to the script shown above)
|
<commit_before><commit_msg>Add magice circle demo app<commit_after>
|
(response_tagged: identical to the script shown above)
|
(text: the subject line concatenated with the same script)
|
(text_tagged: the tagged prompt concatenated with the same script)
|
|
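The vertex math in `update()` above places point `j` of line `i` at angle `lines[i][j]` on a circle of diameter `size`, offset into the window's lower-left square. A self-contained sketch of just that mapping, with an illustrative window size, so it can be checked without pyglet:

```python
import math

size = 600  # illustrative window dimension
angles = [2 * math.pi * j / 12 for j in range(12)]
points = [(math.cos(a) * size / 2 + size / 2,
           math.sin(a) * size / 2 + size / 2) for a in angles]
print(points[0])  # approximately (600.0, 300.0): angle 0 is the circle's right edge
```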
243744ce6f692b524c4cf65508f2aecba8a035d2
|
nodes/transform_stamped_to_tf2.py
|
nodes/transform_stamped_to_tf2.py
|
#!/usr/bin/env python

import rospy
import tf2_ros
from geometry_msgs.msg import TransformStamped


class TransformBroadcaster(object):
    '''This is hack to avoid tf2_ros on python3
    '''
    def __init__(self):
        self._sub = rospy.Subscriber('transforms', TransformStamped,
                                     self.broadcast_tf)
        self._br = tf2_ros.TransformBroadcaster()

    def broadcast_tf(self, transform_msg):
        self._br.sendTransform(transform_msg)


if __name__ == '__main__':
    rospy.init_node('transform_stamped_to_tf2')
    br = TransformBroadcaster()
    rospy.spin()
|
Add node to use tf2
|
Add node to use tf2
|
Python
|
apache-2.0
|
sem23/cozmo_driver,OTL/cozmo_driver
|
Add node to use tf2
|
(response: identical to the script shown above)
|
<commit_before><commit_msg>Add node to use tf2<commit_after>
|
(response_tagged: identical to the script shown above)
|
(text: the subject line concatenated with the same script)
|
(text_tagged: the tagged prompt concatenated with the same script)
|
|
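A sketch of feeding the rebroadcaster node above from another ROS node: publish a `TransformStamped` on the `transforms` topic and let the node hand it to tf2. The frame names here are illustrative, not from the original package.

```python
import rospy
from geometry_msgs.msg import TransformStamped

rospy.init_node('transform_publisher')
pub = rospy.Publisher('transforms', TransformStamped, queue_size=1)
rospy.sleep(1.0)  # allow the subscriber to connect

t = TransformStamped()
t.header.stamp = rospy.Time.now()
t.header.frame_id = 'base_link'   # illustrative parent frame
t.child_frame_id = 'camera_link'  # illustrative child frame
t.transform.rotation.w = 1.0      # identity rotation
pub.publish(t)
```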
1ec07fa16d3be0796b7783fe3b5a3ba0728fb7bc
|
prototype/speed/rpcsize.py
|
prototype/speed/rpcsize.py
|
#!/usr/bin/env python

from pyon.net.endpoint import RPCClient
#from interface.services.idatastore_service import IDatastoreService
from interface.services.ihello_service import IHelloService
from pyon.net.messaging import make_node
import gevent
import time
import base64
import os
import argparse
import msgpack

parser = argparse.ArgumentParser()
parser.add_argument('-d', '--datasize', type=int, help='Maximum size of data in bytes')
parser.add_argument('-m', '--msgpack', action='store_true', help='Encode data with msgpack')
parser.set_defaults(datasize=1024*1024, parallel=1)
opts = parser.parse_args()

node, iowat = make_node()

#dsclient = RPCClient(node=node, name="datastore", iface=IDatastoreService)
hsclient = RPCClient(node=node, name="hello", iface=IHelloService)

def notif(*args, **kwargs):
    print "GOT A BACKPRESSURE NOTICE", str(args), str(kwargs)

#node.client.add_backpressure_callback(notif)

# make data (bytes)
DATA_SIZE = opts.datasize

# base64 encoding wastes a lot of space, truncate it at the exact data size we requested
data = base64.urlsafe_b64encode(os.urandom(DATA_SIZE))[:DATA_SIZE]
if opts.msgpack:
    data = msgpack.dumps(data)

counter = 0
st = 0

def tick():
    global counter, st
    while True:
        time.sleep(2)
        ct = time.time()
        elapsed_s = ct - st
        sc = sum(counter)
        mps = sc / elapsed_s
        print counter, sc, "requests, per sec:", mps

def work(ds):
    curdata = data[:ds]
    global counter
    global st
    counter = 0
    st = time.time()
    while counter < 1000:
        hsclient.noop(curdata)
        #hsclient.hello(str(counter[wid]))
        counter += 1
    et = time.time()
    return et - st

#_gt = gevent.spawn(tick)

results = {}
for size in [1024, 2048, 4096, 8192, 16384, 32768, 65536, 131072, 262144]:
    _gl = gevent.spawn(work, size)
    try:
        rs = _gl.get(timeout=10)
    except gevent.Timeout:
        print "10s elapsed, cutting it"
        rs = time.time() - st
    results[size] = {"elapsed": rs, "count": counter, "ps": counter/rs}
    print "Size:", size, str(results[size])

import pprint
pprint.pprint(results)
|
Add more direct RPC size tests
|
Add more direct RPC size tests
|
Python
|
bsd-2-clause
|
mkl-/scioncc,ooici/pyon,scionrep/scioncc,scionrep/scioncc,mkl-/scioncc,crchemist/scioncc,scionrep/scioncc,mkl-/scioncc,crchemist/scioncc,ooici/pyon,crchemist/scioncc
|
Add more direct RPC size tests
|
(response: identical to the script shown above)
|
<commit_before><commit_msg>Add more direct RPC size tests<commit_after>
|
(response_tagged: identical to the script shown above)
|
(text: the subject line concatenated with the same script)
|
(text_tagged: the tagged prompt concatenated with the same script)
|
|
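Note that `tick()` above would raise a `TypeError` if enabled, since `sum()` is applied to the integer `counter` (likely a leftover from a version where each worker kept its own counter in a list). The benchmark loop itself generalizes to a small standalone pattern; a sketch using `time.perf_counter()` for resolution, with a trivial stand-in for the RPC call:

```python
import time

def benchmark(send, data, sizes, calls=1000):
    results = {}
    for size in sizes:
        payload = data[:size]
        start = time.perf_counter()
        for _ in range(calls):
            send(payload)          # stand-in for hsclient.noop(payload)
        elapsed = time.perf_counter() - start
        results[size] = {'elapsed': elapsed, 'count': calls, 'ps': calls / elapsed}
    return results

print(benchmark(len, b'x' * 4096, [1024, 2048, 4096]))
```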
2c071f2d8cdcf92f0d383422017ddc2a24189f68
|
scripts/add_new_plugin.py
|
scripts/add_new_plugin.py
|
import os

import click


@click.command()
@click.option(
    '--matcher',
    type=click.Choice(['url', 'body', 'header']),
    required=True,
    help='Set the matcher type.',
)
@click.option(
    '--value',
    type=str,
    required=True,
    help='Set the matcher value.',
)
@click.option(
    '--category',
    type=click.Choice(['frontend', 'backend', 'wordpress', 'infraestructure']),
    required=True,
    help='Set plugin category.',
)
@click.argument('name')
def main(name, category, matcher, value):
    directory = os.path.dirname(__file__)
    if matcher == 'header':
        value = tuple(value.split(','))
    create_plugin_file(directory, name, category, matcher, value)
    create_test_file(directory, name, matcher)


def create_plugin_file(directory, name, category, matcher, value):
    plugin_template = '''
from detectem.plugin import Plugin


class {title}Plugin(Plugin):
    name = '{name}'
    matchers = [
        {{'{matcher}': '{value}'}},
    ]
'''.format(name=name, title=name.title(), matcher=matcher, value=value).lstrip()

    plugin_filename = name + '.py'
    plugin_filepath = os.path.abspath(
        os.path.join(directory, '..', 'detectem', 'plugins', category, plugin_filename)
    )
    if os.path.exists(plugin_filepath):
        raise FileExistsError('Plugin file already exists.')

    with open(plugin_filepath, mode='w') as f:
        f.write(plugin_template)
    print('Created plugin file at {}'.format(plugin_filepath))


def create_test_file(directory, name, matcher):
    test_template = '''
- plugin: {name}
  matches:
    - {matcher}:
      version:
'''.format(name=name, matcher=matcher).lstrip()

    test_filename = name + '.yml'
    test_filepath = os.path.abspath(
        os.path.join(directory, '..', 'tests', 'plugins', 'fixtures', test_filename)
    )
    if os.path.exists(test_filepath):
        raise FileExistsError('Test file already exists.')

    with open(test_filepath, mode='w') as f:
        f.write(test_template)
    print('Created test file at {}'.format(test_filepath))


if __name__ == "__main__":
    main()
|
Add script to create plugins faster
|
Add script to create plugins faster
|
Python
|
mit
|
spectresearch/detectem
|
Add script to create plugins faster
|
(response: identical to the script shown above)
|
<commit_before><commit_msg>Add script to create plugins faster<commit_after>
|
(response_tagged: identical to the script shown above)
|
(text: the subject line concatenated with the same script)
|
(text_tagged: the tagged prompt concatenated with the same script)
|
|
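The command above can be exercised in-process with click's test runner rather than a shell, which is convenient for checking option parsing; a sketch assuming the script is importable as `add_new_plugin` (a hypothetical module path).

```python
from click.testing import CliRunner
from add_new_plugin import main  # hypothetical import path for the script above

runner = CliRunner()
result = runner.invoke(
    main,
    ['--matcher', 'url', '--value', 'jquery', '--category', 'frontend', 'jquery'],
)
print(result.exit_code, result.output)
```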
cb1a0aa689f9f86ba576f9637a8548cfaeaf0439
|
tests/src/test_data_seeder.py
|
tests/src/test_data_seeder.py
|
# -*- coding: utf-8 -*-
#
# Copyright (c) 2010-2012 Cidadania S. Coop. Galega
#
# This file is part of e-cidadania.
#
# e-cidadania is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# e-cidadania is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with e-cidadania. If not, see <http://www.gnu.org/licenses/>.

import unittest

from tests.data_seeder import seeder
from core.spaces.models import Space
from django.contrib.auth.models import User


class TestDataSeeder(unittest.TestCase):
    """Tests the DataSeeder class methods.
    """

    def testInstanceIsCreated(self):
        """Tests if the correct instance of a model is generated.
        """
        created_model = seeder.seed(Space)
        self.assertTrue(isinstance(created_model, Space))

    def testCorrectNumberOfInstancesAreGenerated(self):
        """Tests if correct number of model instances are generated.
        """
        count = 5
        actual_list = seeder.seedn(count, Space)
        self.assertEqual(len(actual_list), count)

    def testIfInstanceIsGeneratedWithRequiredAttributes(self):
        """Tests if the generated instance has the desired properties.
        """
        properties = {
            'name': 'Test Space',
            'description': 'Temporary Description',
            'public': 'False',
        }
        instance = seeder.seed(Space, model_properties=properties)
        self.assertEqual(instance.name, properties['name'])
        self.assertEqual(instance.description, properties['description'])
        self.assertEqual(instance.public, properties['public'])
        #Space.author is a Foreign Key. Since generate_fk is False by default,
        #Space.author should be None as it will not be populated.
        self.assertEqual(instance.author, None)
        self.assertFalse(isinstance(instance.author, User))

    def testIfForeignKeyFieldsOfaModelIsPopulated(self):
        """Tests if the foreign key fields of a model is populated if
        generate_fk is set to True
        """
        instance = seeder.seed(Space)
        self.assertEqual(instance.author, None)
        instance = seeder.seed(Space, generate_fk=True)
        self.assertTrue(isinstance(instance.author, User))
        User.objects.all().delete()
|
Add tests for the data seeder.
|
Add tests for the data seeder.
|
Python
|
apache-2.0
|
cidadania/e-cidadania,cidadania/e-cidadania
|
Add tests for the data seeder.
|
(response: identical to the script shown above)
|
<commit_before><commit_msg>Add tests for the data seeder.<commit_after>
|
(response_tagged: identical to the script shown above)
|
(text: the subject line concatenated with the same script)
|
(text_tagged: the tagged prompt concatenated with the same script)
|
|
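The seeder implementation itself is not part of this record; a minimal sketch of the contract the tests above exercise (instantiate the model, apply `model_properties` overrides, save), offered purely as an illustration rather than the project's actual code:

```python
def seed(model_class, model_properties=None, generate_fk=False):
    instance = model_class()
    for field, value in (model_properties or {}).items():
        setattr(instance, field, value)
    # generate_fk=True would also create and attach ForeignKey targets (omitted).
    instance.save()
    return instance

def seedn(count, model_class, **kwargs):
    return [seed(model_class, **kwargs) for _ in range(count)]
```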
e8b3d1ea64407322cc578752c49eaa1a33027d40
|
mars_r00_harmatheque.py
|
mars_r00_harmatheque.py
|
#!/usr/bin/env python
'''
Script for removing Harmatheque e-book records from MARS R00 report.
Created for the Harvard Library ITS MARS Reports Pilot Project, 2014.
'''
import codecs
import csv
import glob
import requests
import time
from lxml import html

nets = []
not_nets = []
counter = 0

for file in glob.glob('*.csv'):
    with open(file, 'rb') as mars_csv:
        reader = csv.reader(mars_csv)
        for row in reader:
            if row[15] == 'NET (GEN)':
                bib = row[1]
                marc_url = 'http://webservices.lib.harvard.edu/rest/marc/hollis/' + bib
                presto = requests.get(marc_url)
                marc_xml = presto.content.replace('<record xmlns="http://www.loc.gov/MARC21/slim" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://www.loc.gov/MARC21/slim http://www.loc.gov/standards/marcxml/schema/MARC21slim.xsd">','<record>')
                marc_record = html.fromstring(marc_xml)
                if marc_record.xpath('//leader/text()'):
                    h09 = marc_record.xpath('//datafield[@tag="H09"]/subfield[@code="m"]/text()') # Get list of H09 subfield m fields
                    if 'harmatheque' in h09:
                        nets.append(row)
                    else:
                        not_nets.append(row)
                    time.sleep(1)
                else:
                    not_nets.append(row)

if len(nets) > 0:
    with open('r00_harmatheque.csv', 'wb') as output:
        output.write(codecs.BOM_UTF8)
        writer = csv.writer(output, quoting=csv.QUOTE_ALL, quotechar='"')
        writer.writerows(nets)

if len(not_nets) > 0:
    with open('r00_not_harmatheque.csv', 'wb') as output:
        output.write(codecs.BOM_UTF8)
        writer = csv.writer(output, quoting=csv.QUOTE_ALL, quotechar='"')
        writer.writerows(not_nets)
|
Add script for removing e-book records
|
Add script for removing e-book records
Added temporary script to remove Harmatheque e-book records from R00
report. Script should be integrated into mars_enhance.csv.
|
Python
|
mit
|
mbeckett7/mars-reports-project
|
Add script for removing e-book records
Added temporary script to remove Harmatheque e-book records from R00
report. Script should be integrated into mars_enhance.csv.
|
(response: identical to the script shown above)
|
<commit_before><commit_msg>Add script for removing e-book records
Added temporary script to remove Harmatheque e-book records from R00
report. Script should be integrated into mars_enhance.csv.<commit_after>
|
(response_tagged: identical to the script shown above)
|
(text: the subject and message concatenated with the same script)
|
(text_tagged: the tagged prompt concatenated with the same script)
|
|
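A side note on the XML handling in the script above: it strips the MARC namespace with a string replace before parsing. A minimal namespace-aware sketch of the same H09 lookup, assuming the same presto response object (the 'marc' prefix name is illustrative):

from lxml import etree

MARC_NS = {'marc': 'http://www.loc.gov/MARC21/slim'}
record = etree.fromstring(presto.content)  # keep the namespace intact
h09 = record.xpath(
    '//marc:datafield[@tag="H09"]/marc:subfield[@code="m"]/text()',
    namespaces=MARC_NS)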
3a6dc8d05f158f08d064fd6bccd6d93843651deb
|
enqueue-test.py
|
enqueue-test.py
|
#!/usr/bin/env python
# encoding: utf-8
"""
enqueue-test.py
Created by Gavin M. Roy on 2009-09-11.
Copyright (c) 2009 Insider Guides, Inc.. All rights reserved.
"""
import amqplib.client_0_8 as amqp
import sys
import os
def main():
    conn = amqp.Connection(host="mq07:5672", userid="guest",
password="guest", virtual_host="/", insist=False)
chan = conn.channel()
chan.queue_declare(queue="Hotmail", durable=True,
exclusive=False, auto_delete=False)
chan.exchange_declare(exchange="Email", type="direct", durable=True,
auto_delete=False,)
chan.queue_bind(queue="Hotmail", exchange="Email",
routing_key="Email.Hotmail")
for i in range(0, 100000):
msg = amqp.Message("Test message %i!" % i)
msg.properties["delivery_mode"] = 2
chan.basic_publish(msg,exchange="Email",routing_key="Email.Hotmail")
pass
if __name__ == '__main__':
main()
|
Throw things into a queue.
|
Throw things into a queue.
|
Python
|
bsd-3-clause
|
gmr/rejected,gmr/rejected
|
Throw things into a queue.
|
#!/usr/bin/env python
# encoding: utf-8
"""
enqueue-test.py
Created by Gavin M. Roy on 2009-09-11.
Copyright (c) 2009 Insider Guides, Inc.. All rights reserved.
"""
import amqplib.client_0_8 as amqp
import sys
import os
def main():
    conn = amqp.Connection(host="mq07:5672", userid="guest",
password="guest", virtual_host="/", insist=False)
chan = conn.channel()
chan.queue_declare(queue="Hotmail", durable=True,
exclusive=False, auto_delete=False)
chan.exchange_declare(exchange="Email", type="direct", durable=True,
auto_delete=False,)
chan.queue_bind(queue="Hotmail", exchange="Email",
routing_key="Email.Hotmail")
for i in range(0, 100000):
msg = amqp.Message("Test message %i!" % i)
msg.properties["delivery_mode"] = 2
chan.basic_publish(msg,exchange="Email",routing_key="Email.Hotmail")
pass
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Throw things into a queue.<commit_after>
|
#!/usr/bin/env python
# encoding: utf-8
"""
enqueue-test.py
Created by Gavin M. Roy on 2009-09-11.
Copyright (c) 2009 Insider Guides, Inc.. All rights reserved.
"""
import amqplib.client_0_8 as amqp
import sys
import os
def main():
    conn = amqp.Connection(host="mq07:5672", userid="guest",
password="guest", virtual_host="/", insist=False)
chan = conn.channel()
chan.queue_declare(queue="Hotmail", durable=True,
exclusive=False, auto_delete=False)
chan.exchange_declare(exchange="Email", type="direct", durable=True,
auto_delete=False,)
chan.queue_bind(queue="Hotmail", exchange="Email",
routing_key="Email.Hotmail")
for i in range(0, 100000):
msg = amqp.Message("Test message %i!" % i)
msg.properties["delivery_mode"] = 2
chan.basic_publish(msg,exchange="Email",routing_key="Email.Hotmail")
pass
if __name__ == '__main__':
main()
|
Throw things into a queue.#!/usr/bin/env python
# encoding: utf-8
"""
enqueue-test.py
Created by Gavin M. Roy on 2009-09-11.
Copyright (c) 2009 Insider Guides, Inc.. All rights reserved.
"""
import amqplib.client_0_8 as amqp
import sys
import os
def main():
    conn = amqp.Connection(host="mq07:5672", userid="guest",
password="guest", virtual_host="/", insist=False)
chan = conn.channel()
chan.queue_declare(queue="Hotmail", durable=True,
exclusive=False, auto_delete=False)
chan.exchange_declare(exchange="Email", type="direct", durable=True,
auto_delete=False,)
chan.queue_bind(queue="Hotmail", exchange="Email",
routing_key="Email.Hotmail")
for i in range(0, 100000):
msg = amqp.Message("Test message %i!" % i)
msg.properties["delivery_mode"] = 2
chan.basic_publish(msg,exchange="Email",routing_key="Email.Hotmail")
pass
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Throw things into a queue.<commit_after>#!/usr/bin/env python
# encoding: utf-8
"""
enqueue-test.py
Created by Gavin M. Roy on 2009-09-11.
Copyright (c) 2009 Insider Guides, Inc.. All rights reserved.
"""
import amqplib.client_0_8 as amqp
import sys
import os
def main():
    conn = amqp.Connection(host="mq07:5672", userid="guest",
password="guest", virtual_host="/", insist=False)
chan = conn.channel()
chan.queue_declare(queue="Hotmail", durable=True,
exclusive=False, auto_delete=False)
chan.exchange_declare(exchange="Email", type="direct", durable=True,
auto_delete=False,)
chan.queue_bind(queue="Hotmail", exchange="Email",
routing_key="Email.Hotmail")
for i in range(0, 100000):
msg = amqp.Message("Test message %i!" % i)
msg.properties["delivery_mode"] = 2
chan.basic_publish(msg,exchange="Email",routing_key="Email.Hotmail")
pass
if __name__ == '__main__':
main()
|
|
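For context, a minimal consumer counterpart to the publisher above; a sketch using the same amqplib 0.8 API (connection parameters are assumed to match the publisher's):

import amqplib.client_0_8 as amqp

def consume():
    conn = amqp.Connection(host="mq07:5672", userid="guest",
                           password="guest", virtual_host="/", insist=False)
    chan = conn.channel()

    def on_message(msg):
        # process msg.body here, then acknowledge so the broker
        # removes the persistent message from the queue
        chan.basic_ack(msg.delivery_tag)

    chan.basic_consume(queue="Hotmail", callback=on_message)
    while chan.callbacks:
        chan.wait()  # blocks until the next delivery arrives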
4c1d5641ec1d1bc74c7738558b3e9a6312114cba
|
morsefit/leastsq2opt.py
|
morsefit/leastsq2opt.py
|
"""Defines subroutines for converting a leastsq closure to minimize closure
Since the :py:func:`scipy.optimize.leastsq` seems to be limited in terms of its
performance, the more generate :py:func:`scipy.optimize.minimize` function can be
tried to be used. However, they required different kind of closures for
computing the residues and the Jacobian. Here some utility functions are
provided to translate the closures for least square minimization into closures
for general minimizers.
The convention for naming argument is always :math:`N` parameter and :math:`M`
configuration problem.
"""
import numpy as np
def conv_residue(residue_closure, N, M):
"""Converts a residue closure into the residue square closure
:param func residue_closure: The closure returning the :math:`M` residues.
:param int N: The number of parameters, not actually used here.
:param int M: The number of configurations.
"""
def norm_sq_closure(param):
residues = residue_closure(param)
return sum(i_residue ** 2 for i_residue in residues)
return norm_sq_closure
def conv_jacobian(jacobian_closure, residue_closure, N, M):
"""Converts a Jacobian closure into the Jacobian for minimize
:param func jacobian_closure: The closure for computing the Jacobian matrix.
:param func residue_closure: The closure for computing the residue vector.
:param int N: The number of parameters
:param int M: The number of residues
"""
def jacobian(param):
residue = residue_closure(param)
jacobian = jacobian_closure(param)
result = np.empty(N, dtype=np.float64)
for i in xrange(0, N):
result[i] = sum(
2 * residue[j] * jacobian[i][j] for j in xrange(0, M)
)
continue
return result
return jacobian
|
Add the closure conversion functions
|
Add the closure conversion functions
In order to utilize the general minimization functions of scipy,
conversion routines are added to convert the previous residue and
Jacobian closure to be used with the general function.
|
Python
|
mpl-2.0
|
tschijnmo/morsefit
|
Add the closure conversion functions
In order to utilize the general minimization functions of scipy,
conversion routines are added to convert the previous residue and
Jacobian closure to be used with the general function.
|
"""Defines subroutines for converting a leastsq closure to minimize closure
Since the :py:func:`scipy.optimize.leastsq` seems to be limited in terms of its
performance, the more generate :py:func:`scipy.optimize.minimize` function can be
tried to be used. However, they required different kind of closures for
computing the residues and the Jacobian. Here some utility functions are
provided to translate the closures for least square minimization into closures
for general minimizers.
The convention for naming argument is always :math:`N` parameter and :math:`M`
configuration problem.
"""
import numpy as np
def conv_residue(residue_closure, N, M):
"""Converts a residue closure into the residue square closure
:param func residue_closure: The closure returning the :math:`M` residues.
:param int N: The number of parameters, not actually used here.
:param int M: The number of configurations.
"""
def norm_sq_closure(param):
residues = residue_closure(param)
return sum(i_residue ** 2 for i_residue in residues)
return norm_sq_closure
def conv_jacobian(jacobian_closure, residue_closure, N, M):
"""Converts a Jacobian closure into the Jacobian for minimize
:param func jacobian_closure: The closure for computing the Jacobian matrix.
:param func residue_closure: The closure for computing the residue vector.
:param int N: The number of parameters
:param int M: The number of residues
"""
def jacobian(param):
residue = residue_closure(param)
jacobian = jacobian_closure(param)
result = np.empty(N, dtype=np.float64)
for i in xrange(0, N):
result[i] = sum(
2 * residue[j] * jacobian[i][j] for j in xrange(0, M)
)
continue
return result
return jacobian
|
<commit_before><commit_msg>Add the closure conversion functions
In order to utilize the general minimization functions of scipy,
conversion routines are added to convert the previous residue and
Jacobian closure to be used with the general function.<commit_after>
|
"""Defines subroutines for converting a leastsq closure to minimize closure
Since the :py:func:`scipy.optimize.leastsq` seems to be limited in terms of its
performance, the more generate :py:func:`scipy.optimize.minimize` function can be
tried to be used. However, they required different kind of closures for
computing the residues and the Jacobian. Here some utility functions are
provided to translate the closures for least square minimization into closures
for general minimizers.
The convention for naming argument is always :math:`N` parameter and :math:`M`
configuration problem.
"""
import numpy as np
def conv_residue(residue_closure, N, M):
"""Converts a residue closure into the residue square closure
:param func residue_closure: The closure returning the :math:`M` residues.
:param int N: The number of parameters, not actually used here.
:param int M: The number of configurations.
"""
def norm_sq_closure(param):
residues = residue_closure(param)
return sum(i_residue ** 2 for i_residue in residues)
return norm_sq_closure
def conv_jacobian(jacobian_closure, residue_closure, N, M):
"""Converts a Jacobian closure into the Jacobian for minimize
:param func jacobian_closure: The closure for computing the Jacobian matrix.
:param func residue_closure: The closure for computing the residue vector.
:param int N: The number of parameters
:param int M: The number of residues
"""
def jacobian(param):
residue = residue_closure(param)
jacobian = jacobian_closure(param)
result = np.empty(N, dtype=np.float64)
for i in xrange(0, N):
result[i] = sum(
2 * residue[j] * jacobian[i][j] for j in xrange(0, M)
)
continue
return result
return jacobian
|
Add the closure conversion functions
In order to utilize the general minimization functions of scipy,
conversion routines are added to convert the previous residue and
Jacobian closure to be used with the general function."""Defines subroutines for converting a leastsq closure into a minimize closure
Since :py:func:`scipy.optimize.leastsq` seems to be limited in terms of its
performance, the more general :py:func:`scipy.optimize.minimize` function can be
used instead. However, the two require different kinds of closures for
computing the residues and the Jacobian. Here some utility functions are
provided to translate the closures for least square minimization into closures
for general minimizers.
The naming convention for arguments is always an :math:`N`-parameter and
:math:`M`-configuration problem.
"""
import numpy as np
def conv_residue(residue_closure, N, M):
"""Converts a residue closure into the residue square closure
:param func residue_closure: The closure returning the :math:`M` residues.
:param int N: The number of parameters, not actually used here.
:param int M: The number of configurations.
"""
def norm_sq_closure(param):
residues = residue_closure(param)
return sum(i_residue ** 2 for i_residue in residues)
return norm_sq_closure
def conv_jacobian(jacobian_closure, residue_closure, N, M):
"""Converts a Jacobian closure into the Jacobian for minimize
:param func jacobian_closure: The closure for computing the Jacobian matrix.
:param func residue_closure: The closure for computing the residue vector.
:param int N: The number of parameters
:param int M: The number of residues
"""
def jacobian(param):
residue = residue_closure(param)
jacobian = jacobian_closure(param)
result = np.empty(N, dtype=np.float64)
for i in xrange(0, N):
result[i] = sum(
2 * residue[j] * jacobian[i][j] for j in xrange(0, M)
)
continue
return result
return jacobian
|
<commit_before><commit_msg>Add the closure conversion functions
In order to utilize the general minimization functions of scipy,
conversion routines are added to convert the previous residue and
Jacobian closure to be used with the general function.<commit_after>"""Defines subroutines for converting a leastsq closure into a minimize closure
Since :py:func:`scipy.optimize.leastsq` seems to be limited in terms of its
performance, the more general :py:func:`scipy.optimize.minimize` function can be
used instead. However, the two require different kinds of closures for
computing the residues and the Jacobian. Here some utility functions are
provided to translate the closures for least square minimization into closures
for general minimizers.
The naming convention for arguments is always an :math:`N`-parameter and
:math:`M`-configuration problem.
"""
import numpy as np
def conv_residue(residue_closure, N, M):
"""Converts a residue closure into the residue square closure
:param func residue_closure: The closure returning the :math:`M` residues.
:param int N: The number of parameters, not actually used here.
:param int M: The number of configurations.
"""
def norm_sq_closure(param):
residues = residue_closure(param)
return sum(i_residue ** 2 for i_residue in residues)
return norm_sq_closure
def conv_jacobian(jacobian_closure, residue_closure, N, M):
"""Converts a Jacobian closure into the Jacobian for minimize
:param func jacobian_closure: The closure for computing the Jacobian matrix.
:param func residue_closure: The closure for computing the residue vector.
:param int N: The number of parameters
:param int M: The number of residues
"""
def jacobian(param):
residue = residue_closure(param)
jacobian = jacobian_closure(param)
result = np.empty(N, dtype=np.float64)
for i in xrange(0, N):
result[i] = sum(
2 * residue[j] * jacobian[i][j] for j in xrange(0, M)
)
continue
return result
return jacobian
|
|
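A minimal usage sketch for the converters above, wiring illustrative residue/Jacobian closures into scipy.optimize.minimize (the exponential model and data are made up; Python 2 is assumed, matching the xrange calls, and conv_residue/conv_jacobian are assumed importable from the module above):

import numpy as np
from scipy.optimize import minimize

N, M = 2, 40
xdata = np.linspace(0.0, 1.0, M)
ydata = 3.0 * np.exp(-1.5 * xdata)

def residue_closure(param):
    a, b = param
    return a * np.exp(-b * xdata) - ydata

def jacobian_closure(param):
    a, b = param
    decay = np.exp(-b * xdata)
    # rows indexed by parameter, columns by configuration,
    # matching the jacobian[i][j] convention used above
    return np.vstack([decay, -a * xdata * decay])

norm_sq = conv_residue(residue_closure, N, M)
grad = conv_jacobian(jacobian_closure, residue_closure, N, M)
result = minimize(norm_sq, x0=np.ones(N), jac=grad, method='BFGS')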
842035001d5119e1cb5effd192ab245450be01d8
|
examples/mayavi/surface_from_irregular_data.py
|
examples/mayavi/surface_from_irregular_data.py
|
"""
An example which shows how to plot a surface from data acquired
irregularly.
Data giving the variation of a parameter 'z' as a function of two others
('x' and 'y') is often plotted as a `carpet plot`, using a surface to
visualize the underlying function. When the data has been acquired on a
regular grid for parameters 'x' and 'y', it can simply be viewed with the
mlab.surf function. However, when there are some missing points, or the
data has been acquired at random, the surf function cannot be used.
The difficulty stems from the fact that points positioned in 3D do
not define a surface if no connectivity information is given. With the
surf function, this information is implicit from the shape of the input
arrays.
In this example, randomly-positioned points in the (x, y) plane are
embedded in a surface in the z axis. We first visualize the points using
mlab.points3d. We then use the delaunay2d filter to extract the mesh by
nearest-neighbor matching, and visualize it using the surf module.
"""
# Author: Gael Varoquaux <gael.varoquaux@normalesup.org>
# Copyright (c) 2009, Enthought, Inc.
# License: BSD Style.
import numpy as np
# Create data with x and y random in the [-2, 2] segment, and z a
# Gaussian function of x and y.
np.random.seed(12345)
x = 4*(np.random.random(500) - 0.5)
y = 4*(np.random.random(500) - 0.5)
def f(x, y):
return np.exp(-(x**2 + y**2))
z = f(x, y)
from enthought.mayavi import mlab
mlab.figure(1, fgcolor=(0, 0, 0), bgcolor=(1, 1, 1))
# Visualize the points
pts = mlab.points3d(x, y, z, z, scale_mode='none', scale_factor=0.2)
# Create and visualize the mesh
mesh = mlab.pipeline.delaunay2d(pts)
surf = mlab.pipeline.surface(mesh)
mlab.view(47, 57, 8.2, (0.1, 0.15, 0.14))
mlab.show()
|
Add an example showing the use of the delaunay2d filter to build a surface from a scattered set of points.
|
Add an example showing the use of the delaunay2d filter to build a surface from a scattered set of points.
|
Python
|
bsd-3-clause
|
alexandreleroux/mayavi,liulion/mayavi,dmsurti/mayavi,liulion/mayavi,alexandreleroux/mayavi,dmsurti/mayavi
|
Add an example showing the use of the delaunay2d filter to build a surface from a scattered set of points.
|
"""
An example which shows how to plot a surface from data acquired
irregularly.
Data giving the variation of a parameter 'z' as a function of two others
('x' and 'y') is often plotted as a `carpet plot`, using a surface to
visualize the underlying function. When the data has been acquired on a
regular grid for parameters 'x' and 'y', it can simply be viewed with the
mlab.surf function. However, when there are some missing points, or the
data has been acquired at random, the surf function cannot be used.
The difficulty stems from the fact that points positioned in 3D do
not define a surface if no connectivity information is given. With the
surf function, this information is implicit from the shape of the input
arrays.
In this example, randomly-positioned points in the (x, y) plane are
embedded in a surface in the z axis. We first visualize the points using
mlab.points3d. We then use the delaunay2d filter to extract the mesh by
nearest-neighbor matching, and visualize it using the surf module.
"""
# Author: Gael Varoquaux <gael.varoquaux@normalesup.org>
# Copyright (c) 2009, Enthought, Inc.
# License: BSD Style.
import numpy as np
# Create data with x and y random in the [-2, 2] segment, and z a
# Gaussian function of x and y.
np.random.seed(12345)
x = 4*(np.random.random(500) - 0.5)
y = 4*(np.random.random(500) - 0.5)
def f(x, y):
return np.exp(-(x**2 + y**2))
z = f(x, y)
from enthought.mayavi import mlab
mlab.figure(1, fgcolor=(0, 0, 0), bgcolor=(1, 1, 1))
# Visualize the points
pts = mlab.points3d(x, y, z, z, scale_mode='none', scale_factor=0.2)
# Create and visualize the mesh
mesh = mlab.pipeline.delaunay2d(pts)
surf = mlab.pipeline.surface(mesh)
mlab.view(47, 57, 8.2, (0.1, 0.15, 0.14))
mlab.show()
|
<commit_before><commit_msg>Add an example showing the use of the delaunay2d filter to build a surface from a scattered set of points.<commit_after>
|
"""
An example which shows how to plot a surface from data acquired
irregularly.
Data giving the variation of a parameter 'z' as a function of two others
('x' and 'y') is often plotted as a `carpet plot`, using a surface to
visualize the underlying function. When the data has been acquired on a
regular grid for parameters 'x' and 'y', it can simply be viewed with the
mlab.surf function. However, when there are some missing points, or the
data has been acquired at random, the surf function cannot be used.
The difficulty stems from the fact that points positioned in 3D do
not define a surface if no connectivity information is given. With the
surf function, this information is implicit from the shape of the input
arrays.
In this example, randomly-positioned points in the (x, y) plane are
embedded in a surface in the z axis. We first visualize the points using
mlab.points3d. We then use the delaunay2d filter to extract the mesh by
nearest-neighbor matching, and visualize it using the surf module.
"""
# Author: Gael Varoquaux <gael.varoquaux@normalesup.org>
# Copyright (c) 2009, Enthought, Inc.
# License: BSD Style.
import numpy as np
# Create data with x and y random in the [-2, 2] segment, and z a
# Gaussian function of x and y.
np.random.seed(12345)
x = 4*(np.random.random(500) - 0.5)
y = 4*(np.random.random(500) - 0.5)
def f(x, y):
return np.exp(-(x**2 + y**2))
z = f(x, y)
from enthought.mayavi import mlab
mlab.figure(1, fgcolor=(0, 0, 0), bgcolor=(1, 1, 1))
# Visualize the points
pts = mlab.points3d(x, y, z, z, scale_mode='none', scale_factor=0.2)
# Create and visualize the mesh
mesh = mlab.pipeline.delaunay2d(pts)
surf = mlab.pipeline.surface(mesh)
mlab.view(47, 57, 8.2, (0.1, 0.15, 0.14))
mlab.show()
|
Add an example showing the use of the delaunay2d filter to build a surface from a scattered set of points."""
An example which shows how to plot a surface from data acquired
irregularly.
Data giving the variation of a parameter 'z' as a function of two others
('x' and 'y') is often plotted as a `carpet plot`, using a surface to
visualize the underlying function. When the data has been acquired on a
regular grid for parameters 'x' and 'y', it can simply be viewed with the
mlab.surf function. However, when there are some missing points, or the
data has been acquired at random, the surf function cannot be used.
The difficulty stems from the fact that points positioned in 3D do
not define a surface if no connectivity information is given. With the
surf function, this information is implicit from the shape of the input
arrays.
In this example, randomly-positioned points in the (x, y) plane are
embedded in a surface in the z axis. We first visualize the points using
mlab.points3d. We then use the delaunay2d filter to extract the mesh by
nearest-neighbor matching, and visualize it using the surf module.
"""
# Author: Gael Varoquaux <gael.varoquaux@normalesup.org>
# Copyright (c) 2009, Enthought, Inc.
# License: BSD Style.
import numpy as np
# Create data with x and y random in the [-2, 2] segment, and z a
# Gaussian function of x and y.
np.random.seed(12345)
x = 4*(np.random.random(500) - 0.5)
y = 4*(np.random.random(500) - 0.5)
def f(x, y):
return np.exp(-(x**2 + y**2))
z = f(x, y)
from enthought.mayavi import mlab
mlab.figure(1, fgcolor=(0, 0, 0), bgcolor=(1, 1, 1))
# Visualize the points
pts = mlab.points3d(x, y, z, z, scale_mode='none', scale_factor=0.2)
# Create and visualize the mesh
mesh = mlab.pipeline.delaunay2d(pts)
surf = mlab.pipeline.surface(mesh)
mlab.view(47, 57, 8.2, (0.1, 0.15, 0.14))
mlab.show()
|
<commit_before><commit_msg>Add an example showing the use of the delaunay2d filter to build a surface from a scattered set of points.<commit_after>"""
An example which shows how to plot a surface from data acquired
irregularly.
Data giving the variation of a parameter 'z' as a function of two others
('x' and 'y') is often plotted as a `carpet plot`, using a surface to
visualize the underlying function. When the data has been acquired on a
regular grid for parameters 'x' and 'y', it can simply be viewed with the
mlab.surf function. However, when there are some missing points, or the
data has been acquired at random, the surf function cannot be used.
The difficulty stems from the fact that points positioned in 3D do
not define a surface if no connectivity information is given. With the
surf function, this information is implicit from the shape of the input
arrays.
In this example, randomly-positioned points in the (x, y) plane are
embedded in a surface in the z axis. We first visualize the points using
mlab.points3d. We then use the delaunay2d filter to extract the mesh by
nearest-neighbor matching, and visualize it using the surf module.
"""
# Author: Gael Varoquaux <gael.varoquaux@normalesup.org>
# Copyright (c) 2009, Enthought, Inc.
# License: BSD Style.
import numpy as np
# Create data with x and y random in the [-2, 2] segment, and z a
# Gaussian function of x and y.
np.random.seed(12345)
x = 4*(np.random.random(500) - 0.5)
y = 4*(np.random.random(500) - 0.5)
def f(x, y):
return np.exp(-(x**2 + y**2))
z = f(x, y)
from enthought.mayavi import mlab
mlab.figure(1, fgcolor=(0, 0, 0), bgcolor=(1, 1, 1))
# Visualize the points
pts = mlab.points3d(x, y, z, z, scale_mode='none', scale_factor=0.2)
# Create and visualize the mesh
mesh = mlab.pipeline.delaunay2d(pts)
surf = mlab.pipeline.surface(mesh)
mlab.view(47, 57, 8.2, (0.1, 0.15, 0.14))
mlab.show()
|
|
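For comparison, a hedged sketch of the same scattered-data surface with matplotlib, which performs the Delaunay triangulation of the (x, y) points internally (the colormap and figure styling are illustrative):

import numpy as np
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D  # noqa: F401, needed on older matplotlib

np.random.seed(12345)
x = 4 * (np.random.random(500) - 0.5)
y = 4 * (np.random.random(500) - 0.5)
z = np.exp(-(x ** 2 + y ** 2))

ax = plt.figure().add_subplot(projection='3d')
ax.plot_trisurf(x, y, z, cmap='viridis')  # triangulates the scattered points
plt.show()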
cb45cea953880bf87a774bec4120bb0e7331d480
|
tcconfig/parser/_model.py
|
tcconfig/parser/_model.py
|
from simplesqlite.model import Integer, Model, Text
from .._const import Tc
class Filter(Model):
device = Text(attr_name=Tc.Param.DEVICE, not_null=True)
filter_id = Text(attr_name=Tc.Param.FILTER_ID)
flowid = Text(attr_name=Tc.Param.FLOW_ID)
protocol = Text(attr_name=Tc.Param.PROTOCOL)
priority = Integer(attr_name=Tc.Param.PRIORITY)
src_network = Text(attr_name=Tc.Param.SRC_NETWORK)
dst_network = Text(attr_name=Tc.Param.DST_NETWORK)
src_port = Integer(attr_name=Tc.Param.SRC_PORT)
dst_port = Integer(attr_name=Tc.Param.DST_PORT)
classid = Text(attr_name=Tc.Param.CLASS_ID)
handle = Integer(attr_name=Tc.Param.HANDLE)
class Qdisc(Model):
device = Text(attr_name=Tc.Param.DEVICE, not_null=True)
direct_qlen = Integer()
parent = Text(attr_name=Tc.Param.PARENT, not_null=True)
handle = Text(attr_name=Tc.Param.HANDLE, not_null=True)
delay = Text()
delay_distro = Text(attr_name="delay-distro")
loss = Text()
duplicate = Text()
corrupt = Text()
reorder = Text()
rate = Text()
|
Add ORM models for filter/qdisc
|
Add ORM models for filter/qdisc
|
Python
|
mit
|
thombashi/tcconfig,thombashi/tcconfig
|
Add ORM models for filter/qdisc
|
from simplesqlite.model import Integer, Model, Text
from .._const import Tc
class Filter(Model):
device = Text(attr_name=Tc.Param.DEVICE, not_null=True)
filter_id = Text(attr_name=Tc.Param.FILTER_ID)
flowid = Text(attr_name=Tc.Param.FLOW_ID)
protocol = Text(attr_name=Tc.Param.PROTOCOL)
priority = Integer(attr_name=Tc.Param.PRIORITY)
src_network = Text(attr_name=Tc.Param.SRC_NETWORK)
dst_network = Text(attr_name=Tc.Param.DST_NETWORK)
src_port = Integer(attr_name=Tc.Param.SRC_PORT)
dst_port = Integer(attr_name=Tc.Param.DST_PORT)
classid = Text(attr_name=Tc.Param.CLASS_ID)
handle = Integer(attr_name=Tc.Param.HANDLE)
class Qdisc(Model):
device = Text(attr_name=Tc.Param.DEVICE, not_null=True)
direct_qlen = Integer()
parent = Text(attr_name=Tc.Param.PARENT, not_null=True)
handle = Text(attr_name=Tc.Param.HANDLE, not_null=True)
delay = Text()
delay_distro = Text(attr_name="delay-distro")
loss = Text()
duplicate = Text()
corrupt = Text()
reorder = Text()
rate = Text()
|
<commit_before><commit_msg>Add ORM models for filter/qdisc<commit_after>
|
from simplesqlite.model import Integer, Model, Text
from .._const import Tc
class Filter(Model):
device = Text(attr_name=Tc.Param.DEVICE, not_null=True)
filter_id = Text(attr_name=Tc.Param.FILTER_ID)
flowid = Text(attr_name=Tc.Param.FLOW_ID)
protocol = Text(attr_name=Tc.Param.PROTOCOL)
priority = Integer(attr_name=Tc.Param.PRIORITY)
src_network = Text(attr_name=Tc.Param.SRC_NETWORK)
dst_network = Text(attr_name=Tc.Param.DST_NETWORK)
src_port = Integer(attr_name=Tc.Param.SRC_PORT)
dst_port = Integer(attr_name=Tc.Param.DST_PORT)
classid = Text(attr_name=Tc.Param.CLASS_ID)
handle = Integer(attr_name=Tc.Param.HANDLE)
class Qdisc(Model):
device = Text(attr_name=Tc.Param.DEVICE, not_null=True)
direct_qlen = Integer()
parent = Text(attr_name=Tc.Param.PARENT, not_null=True)
handle = Text(attr_name=Tc.Param.HANDLE, not_null=True)
delay = Text()
delay_distro = Text(attr_name="delay-distro")
loss = Text()
duplicate = Text()
corrupt = Text()
reorder = Text()
rate = Text()
|
Add ORM models for filter/qdiscfrom simplesqlite.model import Integer, Model, Text
from .._const import Tc
class Filter(Model):
device = Text(attr_name=Tc.Param.DEVICE, not_null=True)
filter_id = Text(attr_name=Tc.Param.FILTER_ID)
flowid = Text(attr_name=Tc.Param.FLOW_ID)
protocol = Text(attr_name=Tc.Param.PROTOCOL)
priority = Integer(attr_name=Tc.Param.PRIORITY)
src_network = Text(attr_name=Tc.Param.SRC_NETWORK)
dst_network = Text(attr_name=Tc.Param.DST_NETWORK)
src_port = Integer(attr_name=Tc.Param.SRC_PORT)
dst_port = Integer(attr_name=Tc.Param.DST_PORT)
classid = Text(attr_name=Tc.Param.CLASS_ID)
handle = Integer(attr_name=Tc.Param.HANDLE)
class Qdisc(Model):
device = Text(attr_name=Tc.Param.DEVICE, not_null=True)
direct_qlen = Integer()
parent = Text(attr_name=Tc.Param.PARENT, not_null=True)
handle = Text(attr_name=Tc.Param.HANDLE, not_null=True)
delay = Text()
delay_distro = Text(attr_name="delay-distro")
loss = Text()
duplicate = Text()
corrupt = Text()
reorder = Text()
rate = Text()
|
<commit_before><commit_msg>Add ORM models for filter/qdisc<commit_after>from simplesqlite.model import Integer, Model, Text
from .._const import Tc
class Filter(Model):
device = Text(attr_name=Tc.Param.DEVICE, not_null=True)
filter_id = Text(attr_name=Tc.Param.FILTER_ID)
flowid = Text(attr_name=Tc.Param.FLOW_ID)
protocol = Text(attr_name=Tc.Param.PROTOCOL)
priority = Integer(attr_name=Tc.Param.PRIORITY)
src_network = Text(attr_name=Tc.Param.SRC_NETWORK)
dst_network = Text(attr_name=Tc.Param.DST_NETWORK)
src_port = Integer(attr_name=Tc.Param.SRC_PORT)
dst_port = Integer(attr_name=Tc.Param.DST_PORT)
classid = Text(attr_name=Tc.Param.CLASS_ID)
handle = Integer(attr_name=Tc.Param.HANDLE)
class Qdisc(Model):
device = Text(attr_name=Tc.Param.DEVICE, not_null=True)
direct_qlen = Integer()
parent = Text(attr_name=Tc.Param.PARENT, not_null=True)
handle = Text(attr_name=Tc.Param.HANDLE, not_null=True)
delay = Text()
delay_distro = Text(attr_name="delay-distro")
loss = Text()
duplicate = Text()
corrupt = Text()
reorder = Text()
rate = Text()
|
|
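A hedged usage sketch for these models, following the simplesqlite ORM pattern (the database path and field values are illustrative; the exact keyword support should be checked against the installed simplesqlite version):

from simplesqlite import SimpleSQLite

# assuming Filter is importable from the module above
con = SimpleSQLite("tcconfig.sqlite", "w")
Filter.attach(con)
Filter.create()  # creates the table from the field definitions
Filter.insert(Filter(device="eth0", protocol="ip", src_port=8080))
for record in Filter.select():
    print(record)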
0291ac95161db254e5daa111670c422fdd2b1571
|
test/expressions/expr8.py
|
test/expressions/expr8.py
|
assert a or b, 'aaa'
assert : keyword.control.flow.python, source.python
a : source.python
or : keyword.operator.python, source.python
b, : source.python
' : punctuation.definition.string.begin.python, source.python, string.quoted.single.python
aaa : source.python, string.quoted.single.python
' : punctuation.definition.string.end.python, source.python, string.quoted.single.python
|
Add a test for the assert statement
|
Add a test for the assert statement
|
Python
|
mit
|
MagicStack/MagicPython,MagicStack/MagicPython,MagicStack/MagicPython
|
Add a test for the assert statement
|
assert a or b, 'aaa'
assert : keyword.control.flow.python, source.python
a : source.python
or : keyword.operator.python, source.python
b, : source.python
' : punctuation.definition.string.begin.python, source.python, string.quoted.single.python
aaa : source.python, string.quoted.single.python
' : punctuation.definition.string.end.python, source.python, string.quoted.single.python
|
<commit_before><commit_msg>Add a test for the assert statement<commit_after>
|
assert a or b, 'aaa'
assert : keyword.control.flow.python, source.python
a : source.python
or : keyword.operator.python, source.python
b, : source.python
' : punctuation.definition.string.begin.python, source.python, string.quoted.single.python
aaa : source.python, string.quoted.single.python
' : punctuation.definition.string.end.python, source.python, string.quoted.single.python
|
Add a test for the assert statementassert a or b, 'aaa'
assert : keyword.control.flow.python, source.python
a : source.python
or : keyword.operator.python, source.python
b, : source.python
' : punctuation.definition.string.begin.python, source.python, string.quoted.single.python
aaa : source.python, string.quoted.single.python
' : punctuation.definition.string.end.python, source.python, string.quoted.single.python
|
<commit_before><commit_msg>Add a test for the assert statement<commit_after>assert a or b, 'aaa'
assert : keyword.control.flow.python, source.python
a : source.python
or : keyword.operator.python, source.python
b, : source.python
' : punctuation.definition.string.begin.python, source.python, string.quoted.single.python
aaa : source.python, string.quoted.single.python
' : punctuation.definition.string.end.python, source.python, string.quoted.single.python
|
|
804f2dbc31a36d55d422c69d35cc93e645634f09
|
test/lib/test_download.py
|
test/lib/test_download.py
|
# Run the following command to test:
#
# (in /usr/local/googkit)
# $ python -m {test_module_name}
#
# See also: http://docs.python.org/3.3/library/unittest.html#command-line-interface
#
# We cannot use unittest.mock on python 2.x!
# Please install the Mock module when you use Python 2.x.
#
# $ easy_install -U Mock
#
# See also: http://www.voidspace.org.uk/python/mock/#installing
import unittest
import os
try:
# Python 3.3 or later
import unittest.mock as mock
except ImportError:
# Python 2.x or 3.2-
import mock
urlretrieveMock = mock.MagicMock()
try:
# Python 2.x
import urllib
#urllib.urlretrieve
urllib.urlretrieve = urlretrieveMock
except ImportError:
# Python 3.x or later
import urllib.request
urllib.request.urlretrieve = urlretrieveMock
import lib.download
class TestDownload(unittest.TestCase):
# run {{{
def test_run(self):
        lib.download.run('https://example.com/example.zip', '/dir1/dir2')
        urlretrieveMock.assert_called_once_with('https://example.com/example.zip', '/dir1/dir2')
# }}}
if __name__ == '__main__':
unittest.main()
# vim: fdm=marker
|
Add a test for lib.download
|
Add a test for lib.download
|
Python
|
mit
|
googkit/googkit,googkit/googkit,googkit/googkit
|
Add a test for lib.download
|
# Run the following command to test:
#
# (in /usr/local/googkit)
# $ python -m {test_module_name}
#
# See also: http://docs.python.org/3.3/library/unittest.html#command-line-interface
#
# We cannot use unittest.mock on python 2.x!
# Please install the Mock module when you use Python 2.x.
#
# $ easy_install -U Mock
#
# See also: http://www.voidspace.org.uk/python/mock/#installing
import unittest
import os
try:
# Python 3.3 or later
import unittest.mock as mock
except ImportError:
# Python 2.x or 3.2-
import mock
urlretrieveMock = mock.MagicMock()
try:
# Python 2.x
import urllib
#urllib.urlretrieve
urllib.urlretrieve = urlretrieveMock
except ImportError:
# Python 3.x or later
import urllib.request
urllib.request.urlretrieve = urlretrieveMock
import lib.download
class TestDownload(unittest.TestCase):
# run {{{
def test_run(self):
        lib.download.run('https://example.com/example.zip', '/dir1/dir2')
        urlretrieveMock.assert_called_once_with('https://example.com/example.zip', '/dir1/dir2')
# }}}
if __name__ == '__main__':
unittest.main()
# vim: fdm=marker
|
<commit_before><commit_msg>Add a test for lib.download<commit_after>
|
# Run the following command to test:
#
# (in /usr/local/googkit)
# $ python -m {test_module_name}
#
# See also: http://docs.python.org/3.3/library/unittest.html#command-line-interface
#
# We cannot use unittest.mock on python 2.x!
# Please install the Mock module when you use Python 2.x.
#
# $ easy_install -U Mock
#
# See also: http://www.voidspace.org.uk/python/mock/#installing
import unittest
import os
try:
# Python 3.3 or later
import unittest.mock as mock
except ImportError:
# Python 2.x or 3.2-
import mock
urlretrieveMock = mock.MagicMock()
try:
# Python 2.x
import urllib
#urllib.urlretrieve
urllib.urlretrieve = urlretrieveMock
except ImportError:
# Python 3.x or later
import urllib.request
urllib.request.urlretrieve = urlretrieveMock
import lib.download
class TestDownload(unittest.TestCase):
# run {{{
def test_run(self):
        lib.download.run('https://example.com/example.zip', '/dir1/dir2')
        urlretrieveMock.assert_called_once_with('https://example.com/example.zip', '/dir1/dir2')
# }}}
if __name__ == '__main__':
unittest.main()
# vim: fdm=marker
|
Add a test for lib.download# Run the following command to test:
#
# (in /usr/local/googkit)
# $ python -m {test_module_name}
#
# See also: http://docs.python.org/3.3/library/unittest.html#command-line-interface
#
# We cannot use unittest.mock on python 2.x!
# Please install the Mock module when you use Python 2.x.
#
# $ easy_install -U Mock
#
# See also: http://www.voidspace.org.uk/python/mock/#installing
import unittest
import os
try:
# Python 3.3 or later
import unittest.mock as mock
except ImportError:
# Python 2.x or 3.2-
import mock
urlretrieveMock = mock.MagicMock()
try:
# Python 2.x
import urllib
#urllib.urlretrieve
urllib.urlretrieve = urlretrieveMock
except ImportError:
# Python 3.x or later
import urllib.request
urllib.request.urlretrieve = urlretrieveMock
import lib.download
class TestDownload(unittest.TestCase):
# run {{{
def test_run(self):
        lib.download.run('https://example.com/example.zip', '/dir1/dir2')
        urlretrieveMock.assert_called_once_with('https://example.com/example.zip', '/dir1/dir2')
# }}}
if __name__ == '__main__':
unittest.main()
# vim: fdm=marker
|
<commit_before><commit_msg>Add a test for lib.download<commit_after># Run the following command to test:
#
# (in /usr/local/googkit)
# $ python -m {test_module_name}
#
# See also: http://docs.python.org/3.3/library/unittest.html#command-line-interface
#
# We cannot use unittest.mock on python 2.x!
# Please install the Mock module when you use Python 2.x.
#
# $ easy_install -U Mock
#
# See also: http://www.voidspace.org.uk/python/mock/#installing
import unittest
import os
try:
# Python 3.3 or later
import unittest.mock as mock
except ImportError:
# Python 2.x or 3.2-
import mock
urlretrieveMock = mock.MagicMock()
try:
# Python 2.x
import urllib
#urllib.urlretrieve
urllib.urlretrieve = urlretrieveMock
except ImportError:
# Python 3.x or later
import urllib.request
urllib.request.urlretrieve = urlretrieveMock
import lib.download
class TestDownload(unittest.TestCase):
# run {{{
def test_run(self):
        lib.download.run('https://example.com/example.zip', '/dir1/dir2')
        urlretrieveMock.assert_called_once_with('https://example.com/example.zip', '/dir1/dir2')
# }}}
if __name__ == '__main__':
unittest.main()
# vim: fdm=marker
|
|
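For contrast, a minimal Python 3 sketch of the same assertion using unittest.mock.patch; it assumes lib.download binds urlretrieve as a module-level name, so the patch targets the name where it is used rather than where it is defined (which is also why the original must install its mock before importing lib.download):

import unittest
from unittest import mock

import lib.download


class TestDownloadPy3(unittest.TestCase):
    def test_run(self):
        # patch the name inside lib.download, not urllib.request itself
        with mock.patch('lib.download.urlretrieve') as retrieve:
            lib.download.run('https://example.com/example.zip', '/dir1/dir2')
            retrieve.assert_called_once_with(
                'https://example.com/example.zip', '/dir1/dir2')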
ac3447251395a0f6ee445d76e1c32910505a5bd4
|
scripts/remove_after_use/reindex_quickfiles.py
|
scripts/remove_after_use/reindex_quickfiles.py
|
import sys
import progressbar
from django.core.paginator import Paginator
from website.app import setup_django
setup_django()
from website.search.search import update_file
from osf.models import QuickFilesNode
PAGE_SIZE = 50
def reindex_quickfiles(dry):
qs = QuickFilesNode.objects.all().order_by('id')
count = qs.count()
paginator = Paginator(qs, PAGE_SIZE)
progress_bar = progressbar.ProgressBar(maxval=count).start()
n_processed = 0
for page_num in paginator.page_range:
page = paginator.page(page_num)
for quickfiles in page.object_list:
for file_ in quickfiles.files.all():
if not dry:
update_file(file_)
n_processed += len(page.object_list)
progress_bar.update(n_processed)
if __name__ == '__main__':
dry = '--dry' in sys.argv
reindex_quickfiles(dry=dry)
|
Add script to re-index users' files in quickfiles nodes
|
Add script to re-index users' files in quickfiles nodes
|
Python
|
apache-2.0
|
caseyrollins/osf.io,sloria/osf.io,erinspace/osf.io,aaxelb/osf.io,pattisdr/osf.io,sloria/osf.io,CenterForOpenScience/osf.io,pattisdr/osf.io,HalcyonChimera/osf.io,icereval/osf.io,adlius/osf.io,brianjgeiger/osf.io,mattclark/osf.io,Johnetordoff/osf.io,adlius/osf.io,aaxelb/osf.io,caseyrollins/osf.io,CenterForOpenScience/osf.io,cslzchen/osf.io,CenterForOpenScience/osf.io,sloria/osf.io,erinspace/osf.io,brianjgeiger/osf.io,felliott/osf.io,mfraezz/osf.io,cslzchen/osf.io,HalcyonChimera/osf.io,saradbowman/osf.io,aaxelb/osf.io,saradbowman/osf.io,brianjgeiger/osf.io,Johnetordoff/osf.io,mattclark/osf.io,brianjgeiger/osf.io,Johnetordoff/osf.io,pattisdr/osf.io,mfraezz/osf.io,adlius/osf.io,erinspace/osf.io,aaxelb/osf.io,icereval/osf.io,mfraezz/osf.io,baylee-d/osf.io,felliott/osf.io,HalcyonChimera/osf.io,felliott/osf.io,cslzchen/osf.io,CenterForOpenScience/osf.io,cslzchen/osf.io,baylee-d/osf.io,baylee-d/osf.io,adlius/osf.io,Johnetordoff/osf.io,felliott/osf.io,HalcyonChimera/osf.io,icereval/osf.io,caseyrollins/osf.io,mattclark/osf.io,mfraezz/osf.io
|
Add script to re-index users' files in quickfiles nodes
|
import sys
import progressbar
from django.core.paginator import Paginator
from website.app import setup_django
setup_django()
from website.search.search import update_file
from osf.models import QuickFilesNode
PAGE_SIZE = 50
def reindex_quickfiles(dry):
qs = QuickFilesNode.objects.all().order_by('id')
count = qs.count()
paginator = Paginator(qs, PAGE_SIZE)
progress_bar = progressbar.ProgressBar(maxval=count).start()
n_processed = 0
for page_num in paginator.page_range:
page = paginator.page(page_num)
for quickfiles in page.object_list:
for file_ in quickfiles.files.all():
if not dry:
update_file(file_)
n_processed += len(page.object_list)
progress_bar.update(n_processed)
if __name__ == '__main__':
dry = '--dry' in sys.argv
reindex_quickfiles(dry=dry)
|
<commit_before><commit_msg>Add script to re-index users' files in quickfiles nodes<commit_after>
|
import sys
import progressbar
from django.core.paginator import Paginator
from website.app import setup_django
setup_django()
from website.search.search import update_file
from osf.models import QuickFilesNode
PAGE_SIZE = 50
def reindex_quickfiles(dry):
qs = QuickFilesNode.objects.all().order_by('id')
count = qs.count()
paginator = Paginator(qs, PAGE_SIZE)
progress_bar = progressbar.ProgressBar(maxval=count).start()
n_processed = 0
for page_num in paginator.page_range:
page = paginator.page(page_num)
for quickfiles in page.object_list:
for file_ in quickfiles.files.all():
if not dry:
update_file(file_)
n_processed += len(page.object_list)
progress_bar.update(n_processed)
if __name__ == '__main__':
dry = '--dry' in sys.argv
reindex_quickfiles(dry=dry)
|
Add script to re-index users' files in quickfiles nodesimport sys
import progressbar
from django.core.paginator import Paginator
from website.app import setup_django
setup_django()
from website.search.search import update_file
from osf.models import QuickFilesNode
PAGE_SIZE = 50
def reindex_quickfiles(dry):
qs = QuickFilesNode.objects.all().order_by('id')
count = qs.count()
paginator = Paginator(qs, PAGE_SIZE)
progress_bar = progressbar.ProgressBar(maxval=count).start()
n_processed = 0
for page_num in paginator.page_range:
page = paginator.page(page_num)
for quickfiles in page.object_list:
for file_ in quickfiles.files.all():
if not dry:
update_file(file_)
n_processed += len(page.object_list)
progress_bar.update(n_processed)
if __name__ == '__main__':
dry = '--dry' in sys.argv
reindex_quickfiles(dry=dry)
|
<commit_before><commit_msg>Add script to re-index users' files in quickfiles nodes<commit_after>import sys
import progressbar
from django.core.paginator import Paginator
from website.app import setup_django
setup_django()
from website.search.search import update_file
from osf.models import QuickFilesNode
PAGE_SIZE = 50
def reindex_quickfiles(dry):
qs = QuickFilesNode.objects.all().order_by('id')
count = qs.count()
paginator = Paginator(qs, PAGE_SIZE)
progress_bar = progressbar.ProgressBar(maxval=count).start()
n_processed = 0
for page_num in paginator.page_range:
page = paginator.page(page_num)
for quickfiles in page.object_list:
for file_ in quickfiles.files.all():
if not dry:
update_file(file_)
n_processed += len(page.object_list)
progress_bar.update(n_processed)
if __name__ == '__main__':
dry = '--dry' in sys.argv
reindex_quickfiles(dry=dry)
|
|
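The page-by-page loop above is a reusable pattern for walking large querysets with bounded memory; a small sketch extracting it as a generator (the names are illustrative, not part of the script):

from django.core.paginator import Paginator

def iter_queryset(queryset, page_size=50):
    # walk a large queryset in fixed-size pages to bound memory use
    paginator = Paginator(queryset.order_by('id'), page_size)
    for page_num in paginator.page_range:
        for obj in paginator.page(page_num).object_list:
            yield obj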
2e5dbd5f76839e65e7d6658a0f5fabe9ce00f3d4
|
tests/unit/modules/test_tomcat.py
|
tests/unit/modules/test_tomcat.py
|
# -*- coding: utf-8 -*-
# Import future libs
from __future__ import absolute_import, print_function, unicode_literals
# Import Salt Testing libs
from tests.support.mixins import LoaderModuleMockMixin
from tests.support.unit import TestCase
from tests.support.mock import MagicMock, patch
# Import salt module
import salt.modules.tomcat as tomcat
# Import 3rd-party libs
from io import StringIO, BytesIO
class TomcatTestCase(TestCase, LoaderModuleMockMixin):
    '''
    Test cases for salt.modules.tomcat
'''
def setup_loader_modules(self):
return {tomcat: {}}
def test_tomcat_wget_no_bytestring(self):
responses = {
'string': StringIO('Best response ever\r\nAnd you know it!'),
'bytes': BytesIO(b'Best response ever\r\nAnd you know it!')
}
        string_mock = MagicMock(return_value=responses['string'])
        bytes_mock = MagicMock(return_value=responses['bytes'])
with patch('salt.modules.tomcat._auth', MagicMock(return_value=True)):
with patch('salt.modules.tomcat._urlopen', string_mock):
response = tomcat._wget('tomcat.wait', url='http://localhost:8080/nofail')
for line in response['msg']:
self.assertEqual(type(line).__name__, 'str')
with patch('salt.modules.tomcat._urlopen', bytes_mock):
try:
response = tomcat._wget('tomcat.wait', url='http://localhost:8080/nofail')
except TypeError as type_error:
if type_error.args[0] == 'startswith first arg must be bytes or a tuple of bytes, not str':
self.fail('Got back a byte string, should\'ve been a string')
else:
raise type_error
for line in response['msg']:
self.assertEqual(type(line).__name__, 'str')
|
Add unit test for _wget method in tomcat module
|
Add unit test for _wget method in tomcat module
|
Python
|
apache-2.0
|
saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt
|
Add unit test for _wget method in tomcat module
|
# -*- coding: utf-8 -*-
# Import future libs
from __future__ import absolute_import, print_function, unicode_literals
# Import Salt Testing libs
from tests.support.mixins import LoaderModuleMockMixin
from tests.support.unit import TestCase
from tests.support.mock import MagicMock, patch
# Import salt module
import salt.modules.tomcat as tomcat
# Import 3rd-party libs
from io import StringIO, BytesIO
class TomcatTestCase(TestCase, LoaderModuleMockMixin):
    '''
    Test cases for salt.modules.tomcat
'''
def setup_loader_modules(self):
return {tomcat: {}}
def test_tomcat_wget_no_bytestring(self):
responses = {
'string': StringIO('Best response ever\r\nAnd you know it!'),
'bytes': BytesIO(b'Best response ever\r\nAnd you know it!')
}
        string_mock = MagicMock(return_value=responses['string'])
        bytes_mock = MagicMock(return_value=responses['bytes'])
with patch('salt.modules.tomcat._auth', MagicMock(return_value=True)):
with patch('salt.modules.tomcat._urlopen', string_mock):
response = tomcat._wget('tomcat.wait', url='http://localhost:8080/nofail')
for line in response['msg']:
self.assertEqual(type(line).__name__, 'str')
with patch('salt.modules.tomcat._urlopen', bytes_mock):
try:
response = tomcat._wget('tomcat.wait', url='http://localhost:8080/nofail')
except TypeError as type_error:
if type_error.args[0] == 'startswith first arg must be bytes or a tuple of bytes, not str':
self.fail('Got back a byte string, should\'ve been a string')
else:
raise type_error
for line in response['msg']:
self.assertEqual(type(line).__name__, 'str')
|
<commit_before><commit_msg>Add unit test for _wget method in tomcat module<commit_after>
|
# -*- coding: utf-8 -*-
# Import future libs
from __future__ import absolute_import, print_function, unicode_literals
# Import Salt Testing libs
from tests.support.mixins import LoaderModuleMockMixin
from tests.support.unit import TestCase
from tests.support.mock import MagicMock, patch
# Import salt module
import salt.modules.tomcat as tomcat
# Import 3rd-party libs
from io import StringIO, BytesIO
class TomcatTestCase(TestCase, LoaderModuleMockMixin):
    '''
    Test cases for salt.modules.tomcat
'''
def setup_loader_modules(self):
return {tomcat: {}}
def test_tomcat_wget_no_bytestring(self):
responses = {
'string': StringIO('Best response ever\r\nAnd you know it!'),
'bytes': BytesIO(b'Best response ever\r\nAnd you know it!')
}
        string_mock = MagicMock(return_value=responses['string'])
        bytes_mock = MagicMock(return_value=responses['bytes'])
with patch('salt.modules.tomcat._auth', MagicMock(return_value=True)):
with patch('salt.modules.tomcat._urlopen', string_mock):
response = tomcat._wget('tomcat.wait', url='http://localhost:8080/nofail')
for line in response['msg']:
self.assertEqual(type(line).__name__, 'str')
with patch('salt.modules.tomcat._urlopen', bytes_mock):
try:
response = tomcat._wget('tomcat.wait', url='http://localhost:8080/nofail')
except TypeError as type_error:
if type_error.args[0] == 'startswith first arg must be bytes or a tuple of bytes, not str':
self.fail('Got back a byte string, should\'ve been a string')
else:
raise type_error
for line in response['msg']:
self.assertEqual(type(line).__name__, 'str')
|
Add unit test for _wget method in tomcat module# -*- coding: utf-8 -*-
# Import future libs
from __future__ import absolute_import, print_function, unicode_literals
# Import Salt Testing libs
from tests.support.mixins import LoaderModuleMockMixin
from tests.support.unit import TestCase
from tests.support.mock import MagicMock, patch
# Import salt module
import salt.modules.tomcat as tomcat
# Import 3rd-party libs
from io import StringIO, BytesIO
class TomcatTestCase(TestCase, LoaderModuleMockMixin):
    '''
    Test cases for salt.modules.tomcat
'''
def setup_loader_modules(self):
return {tomcat: {}}
def test_tomcat_wget_no_bytestring(self):
responses = {
'string': StringIO('Best response ever\r\nAnd you know it!'),
'bytes': BytesIO(b'Best response ever\r\nAnd you know it!')
}
        string_mock = MagicMock(return_value=responses['string'])
        bytes_mock = MagicMock(return_value=responses['bytes'])
with patch('salt.modules.tomcat._auth', MagicMock(return_value=True)):
with patch('salt.modules.tomcat._urlopen', string_mock):
response = tomcat._wget('tomcat.wait', url='http://localhost:8080/nofail')
for line in response['msg']:
self.assertEqual(type(line).__name__, 'str')
with patch('salt.modules.tomcat._urlopen', bytes_mock):
try:
response = tomcat._wget('tomcat.wait', url='http://localhost:8080/nofail')
except TypeError as type_error:
if type_error.args[0] == 'startswith first arg must be bytes or a tuple of bytes, not str':
self.fail('Got back a byte string, should\'ve been a string')
else:
raise type_error
for line in response['msg']:
self.assertEqual(type(line).__name__, 'str')
|
<commit_before><commit_msg>Add unit test for _wget method in tomcat module<commit_after># -*- coding: utf-8 -*-
# Import future libs
from __future__ import absolute_import, print_function, unicode_literals
# Import Salt Testing libs
from tests.support.mixins import LoaderModuleMockMixin
from tests.support.unit import TestCase
from tests.support.mock import MagicMock, patch
# Import salt module
import salt.modules.tomcat as tomcat
# Import 3rd-party libs
from io import StringIO, BytesIO
class TomcatTestCase(TestCase, LoaderModuleMockMixin):
    '''
    Test cases for salt.modules.tomcat
'''
def setup_loader_modules(self):
return {tomcat: {}}
def test_tomcat_wget_no_bytestring(self):
responses = {
'string': StringIO('Best response ever\r\nAnd you know it!'),
'bytes': BytesIO(b'Best response ever\r\nAnd you know it!')
}
        string_mock = MagicMock(return_value=responses['string'])
        bytes_mock = MagicMock(return_value=responses['bytes'])
with patch('salt.modules.tomcat._auth', MagicMock(return_value=True)):
with patch('salt.modules.tomcat._urlopen', string_mock):
response = tomcat._wget('tomcat.wait', url='http://localhost:8080/nofail')
for line in response['msg']:
self.assertEqual(type(line).__name__, 'str')
with patch('salt.modules.tomcat._urlopen', bytes_mock):
try:
response = tomcat._wget('tomcat.wait', url='http://localhost:8080/nofail')
except TypeError as type_error:
if type_error.args[0] == 'startswith first arg must be bytes or a tuple of bytes, not str':
self.fail('Got back a byte string, should\'ve been a string')
else:
raise type_error
for line in response['msg']:
self.assertEqual(type(line).__name__, 'str')
|
|
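The behaviour the test above pins down is bytes-to-text normalisation; a hedged sketch of the kind of helper it implies (not the actual salt implementation):

def _lines_as_text(handle, encoding='utf-8'):
    # urlopen returns str on Python 2 but bytes on Python 3;
    # normalise every line to text before callers use startswith()
    lines = []
    for line in handle.read().splitlines():
        if isinstance(line, bytes):
            line = line.decode(encoding)
        lines.append(line)
    return lines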
ebfe1254ea11112689fa606cd6c29100a26e058d
|
acme/acme/__init__.py
|
acme/acme/__init__.py
|
"""ACME protocol implementation.
This module is an implementation of the `ACME protocol`_. Latest
supported version: `v02`_.
.. _`ACME protocol`: https://github.com/letsencrypt/acme-spec
.. _`v02`:
https://github.com/letsencrypt/acme-spec/commit/d328fea2d507deb9822793c512830d827a4150c4
"""
|
"""ACME protocol implementation.
This module is an implementation of the `ACME protocol`_. Latest
supported version: `draft-ietf-acme-01`_.
.. _`ACME protocol`: https://github.com/ietf-wg-acme/acme/
.. _`draft-ietf-acme-01`:
https://github.com/ietf-wg-acme/acme/tree/draft-ietf-acme-acme-01
"""
|
Update the ACME github repository URL.
|
Update the ACME github repository URL.
|
Python
|
apache-2.0
|
jsha/letsencrypt,letsencrypt/letsencrypt,mitnk/letsencrypt,stweil/letsencrypt,bsmr-misc-forks/letsencrypt,stweil/letsencrypt,letsencrypt/letsencrypt,VladimirTyrin/letsencrypt,lmcro/letsencrypt,lmcro/letsencrypt,DavidGarciaCat/letsencrypt,DavidGarciaCat/letsencrypt,twstrike/le_for_patching,brentdax/letsencrypt,mitnk/letsencrypt,bsmr-misc-forks/letsencrypt,TheBoegl/letsencrypt,kuba/letsencrypt,wteiken/letsencrypt,kuba/letsencrypt,jtl999/certbot,VladimirTyrin/letsencrypt,thanatos/lets-encrypt-preview,dietsche/letsencrypt,TheBoegl/letsencrypt,jtl999/certbot,thanatos/lets-encrypt-preview,dietsche/letsencrypt,jsha/letsencrypt,brentdax/letsencrypt,twstrike/le_for_patching,wteiken/letsencrypt
|
"""ACME protocol implementation.
This module is an implementation of the `ACME protocol`_. Latest
supported version: `v02`_.
.. _`ACME protocol`: https://github.com/letsencrypt/acme-spec
.. _`v02`:
https://github.com/letsencrypt/acme-spec/commit/d328fea2d507deb9822793c512830d827a4150c4
"""
Update the ACME github repository URL.
|
"""ACME protocol implementation.
This module is an implementation of the `ACME protocol`_. Latest
supported version: `draft-ietf-acme-01`_.
.. _`ACME protocol`: https://github.com/ietf-wg-acme/acme/
.. _`draft-ietf-acme-01`:
https://github.com/ietf-wg-acme/acme/tree/draft-ietf-acme-acme-01
"""
|
<commit_before>"""ACME protocol implementation.
This module is an implementation of the `ACME protocol`_. Latest
supported version: `v02`_.
.. _`ACME protocol`: https://github.com/letsencrypt/acme-spec
.. _`v02`:
https://github.com/letsencrypt/acme-spec/commit/d328fea2d507deb9822793c512830d827a4150c4
"""
<commit_msg>Update the ACME github repository URL.<commit_after>
|
"""ACME protocol implementation.
This module is an implementation of the `ACME protocol`_. Latest
supported version: `draft-ietf-acme-01`_.
.. _`ACME protocol`: https://github.com/ietf-wg-acme/acme/
.. _`draft-ietf-acme-01`:
https://github.com/ietf-wg-acme/acme/tree/draft-ietf-acme-acme-01
"""
|
"""ACME protocol implementation.
This module is an implementation of the `ACME protocol`_. Latest
supported version: `v02`_.
.. _`ACME protocol`: https://github.com/letsencrypt/acme-spec
.. _`v02`:
https://github.com/letsencrypt/acme-spec/commit/d328fea2d507deb9822793c512830d827a4150c4
"""
Update the ACME github repository URL."""ACME protocol implementation.
This module is an implementation of the `ACME protocol`_. Latest
supported version: `draft-ietf-acme-01`_.
.. _`ACME protocol`: https://github.com/ietf-wg-acme/acme/
.. _`draft-ietf-acme-01`:
https://github.com/ietf-wg-acme/acme/tree/draft-ietf-acme-acme-01
"""
|
<commit_before>"""ACME protocol implementation.
This module is an implementation of the `ACME protocol`_. Latest
supported version: `v02`_.
.. _`ACME protocol`: https://github.com/letsencrypt/acme-spec
.. _`v02`:
https://github.com/letsencrypt/acme-spec/commit/d328fea2d507deb9822793c512830d827a4150c4
"""
<commit_msg>Update the ACME github repository URL.<commit_after>"""ACME protocol implementation.
This module is an implementation of the `ACME protocol`_. Latest
supported version: `draft-ietf-acme-01`_.
.. _`ACME protocol`: https://github.com/ietf-wg-acme/acme/
.. _`draft-ietf-acme-01`:
https://github.com/ietf-wg-acme/acme/tree/draft-ietf-acme-acme-01
"""
|
dac411035f12f92f336d6c42aa3103b3c04f01ab
|
backend/populate_dimkarakostas.py
|
backend/populate_dimkarakostas.py
|
from string import ascii_lowercase
import django
import os
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'backend.settings')
django.setup()
from breach.models import Target, Victim
endpoint = 'https://dimkarakostas.com/rupture/test.php?ref=%s'
prefix = 'imper'
alphabet = ascii_lowercase
secretlength = 9
target_1 = Target(
endpoint=endpoint,
prefix=prefix,
alphabet=alphabet,
secretlength=secretlength
)
target_1.save()
print 'Created Target:\n\tendpoint: {}\n\tprefix: {}\n\talphabet: {}\n\tsecretlength: {}'.format(endpoint, prefix, alphabet, secretlength)
snifferendpoint = 'http://127.0.0.1:9000'
sourceip = '192.168.1.70'
victim_1 = Victim(
target=target_1,
snifferendpoint=snifferendpoint,
sourceip=sourceip
)
victim_1.save()
print 'Created Victim:\n\tvictim_id: {}\n\tsnifferendpoint: {}\n\tsourceip: {}'.format(victim_1.id, snifferendpoint, sourceip)
|
from string import ascii_lowercase
import django
import os
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'backend.settings')
django.setup()
from breach.models import Target, Victim
endpoint = 'https://dimkarakostas.com/rupture/test.php?ref=%s'
prefix = 'imper'
alphabet = ascii_lowercase
secretlength = 9
target_1 = Target(
endpoint=endpoint,
prefix=prefix,
alphabet=alphabet,
secretlength=secretlength
)
target_1.save()
print 'Created Target:\n\tendpoint: {}\n\tprefix: {}\n\talphabet: {}\n\tsecretlength: {}'.format(endpoint, prefix, alphabet, secretlength)
snifferendpoint = 'http://127.0.0.1:9000'
sourceip = '192.168.1.70'
victim_1 = Victim(
target=target_1,
snifferendpoint=snifferendpoint,
sourceip=sourceip,
# method='serial'
)
victim_1.save()
print 'Created Victim:\n\tvictim_id: {}\n\tsnifferendpoint: {}\n\tsourceip: {}'.format(victim_1.id, snifferendpoint, sourceip)
|
Add method comment to population script for easy deploy
|
Add method comment to population script for easy deploy
|
Python
|
mit
|
dionyziz/rupture,dimkarakostas/rupture,esarafianou/rupture,dionyziz/rupture,dimriou/rupture,dimkarakostas/rupture,dionyziz/rupture,dimriou/rupture,dimkarakostas/rupture,dimkarakostas/rupture,dionyziz/rupture,esarafianou/rupture,dimriou/rupture,dimriou/rupture,esarafianou/rupture,esarafianou/rupture,dimkarakostas/rupture,dimriou/rupture,dionyziz/rupture
|
from string import ascii_lowercase
import django
import os
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'backend.settings')
django.setup()
from breach.models import Target, Victim
endpoint = 'https://dimkarakostas.com/rupture/test.php?ref=%s'
prefix = 'imper'
alphabet = ascii_lowercase
secretlength = 9
target_1 = Target(
endpoint=endpoint,
prefix=prefix,
alphabet=alphabet,
secretlength=secretlength
)
target_1.save()
print 'Created Target:\n\tendpoint: {}\n\tprefix: {}\n\talphabet: {}\n\tsecretlength: {}'.format(endpoint, prefix, alphabet, secretlength)
snifferendpoint = 'http://127.0.0.1:9000'
sourceip = '192.168.1.70'
victim_1 = Victim(
target=target_1,
snifferendpoint=snifferendpoint,
sourceip=sourceip
)
victim_1.save()
print 'Created Victim:\n\tvictim_id: {}\n\tsnifferendpoint: {}\n\tsourceip: {}'.format(victim_1.id, snifferendpoint, sourceip)
Add method comment to population script for easy deploy
|
from string import ascii_lowercase
import django
import os
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'backend.settings')
django.setup()
from breach.models import Target, Victim
endpoint = 'https://dimkarakostas.com/rupture/test.php?ref=%s'
prefix = 'imper'
alphabet = ascii_lowercase
secretlength = 9
target_1 = Target(
endpoint=endpoint,
prefix=prefix,
alphabet=alphabet,
secretlength=secretlength
)
target_1.save()
print 'Created Target:\n\tendpoint: {}\n\tprefix: {}\n\talphabet: {}\n\tsecretlength: {}'.format(endpoint, prefix, alphabet, secretlength)
snifferendpoint = 'http://127.0.0.1:9000'
sourceip = '192.168.1.70'
victim_1 = Victim(
target=target_1,
snifferendpoint=snifferendpoint,
sourceip=sourceip,
# method='serial'
)
victim_1.save()
print 'Created Victim:\n\tvictim_id: {}\n\tsnifferendpoint: {}\n\tsourceip: {}'.format(victim_1.id, snifferendpoint, sourceip)
|
<commit_before>from string import ascii_lowercase
import django
import os
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'backend.settings')
django.setup()
from breach.models import Target, Victim
endpoint = 'https://dimkarakostas.com/rupture/test.php?ref=%s'
prefix = 'imper'
alphabet = ascii_lowercase
secretlength = 9
target_1 = Target(
endpoint=endpoint,
prefix=prefix,
alphabet=alphabet,
secretlength=secretlength
)
target_1.save()
print 'Created Target:\n\tendpoint: {}\n\tprefix: {}\n\talphabet: {}\n\tsecretlength: {}'.format(endpoint, prefix, alphabet, secretlength)
snifferendpoint = 'http://127.0.0.1:9000'
sourceip = '192.168.1.70'
victim_1 = Victim(
target=target_1,
snifferendpoint=snifferendpoint,
sourceip=sourceip
)
victim_1.save()
print 'Created Victim:\n\tvictim_id: {}\n\tsnifferendpoint: {}\n\tsourceip: {}'.format(victim_1.id, snifferendpoint, sourceip)
<commit_msg>Add method comment to population script for easy deploy<commit_after>
|
from string import ascii_lowercase
import django
import os
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'backend.settings')
django.setup()
from breach.models import Target, Victim
endpoint = 'https://dimkarakostas.com/rupture/test.php?ref=%s'
prefix = 'imper'
alphabet = ascii_lowercase
secretlength = 9
target_1 = Target(
endpoint=endpoint,
prefix=prefix,
alphabet=alphabet,
secretlength=secretlength
)
target_1.save()
print 'Created Target:\n\tendpoint: {}\n\tprefix: {}\n\talphabet: {}\n\tsecretlength: {}'.format(endpoint, prefix, alphabet, secretlength)
snifferendpoint = 'http://127.0.0.1:9000'
sourceip = '192.168.1.70'
victim_1 = Victim(
target=target_1,
snifferendpoint=snifferendpoint,
sourceip=sourceip,
# method='serial'
)
victim_1.save()
print 'Created Victim:\n\tvictim_id: {}\n\tsnifferendpoint: {}\n\tsourceip: {}'.format(victim_1.id, snifferendpoint, sourceip)
|
from string import ascii_lowercase
import django
import os
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'backend.settings')
django.setup()
from breach.models import Target, Victim
endpoint = 'https://dimkarakostas.com/rupture/test.php?ref=%s'
prefix = 'imper'
alphabet = ascii_lowercase
secretlength = 9
target_1 = Target(
endpoint=endpoint,
prefix=prefix,
alphabet=alphabet,
secretlength=secretlength
)
target_1.save()
print 'Created Target:\n\tendpoint: {}\n\tprefix: {}\n\talphabet: {}\n\tsecretlength: {}'.format(endpoint, prefix, alphabet, secretlength)
snifferendpoint = 'http://127.0.0.1:9000'
sourceip = '192.168.1.70'
victim_1 = Victim(
target=target_1,
snifferendpoint=snifferendpoint,
sourceip=sourceip
)
victim_1.save()
print 'Created Victim:\n\tvictim_id: {}\n\tsnifferendpoint: {}\n\tsourceip: {}'.format(victim_1.id, snifferendpoint, sourceip)
Add method comment to population script for easy deployfrom string import ascii_lowercase
import django
import os
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'backend.settings')
django.setup()
from breach.models import Target, Victim
endpoint = 'https://dimkarakostas.com/rupture/test.php?ref=%s'
prefix = 'imper'
alphabet = ascii_lowercase
secretlength = 9
target_1 = Target(
endpoint=endpoint,
prefix=prefix,
alphabet=alphabet,
secretlength=secretlength
)
target_1.save()
print 'Created Target:\n\tendpoint: {}\n\tprefix: {}\n\talphabet: {}\n\tsecretlength: {}'.format(endpoint, prefix, alphabet, secretlength)
snifferendpoint = 'http://127.0.0.1:9000'
sourceip = '192.168.1.70'
victim_1 = Victim(
target=target_1,
snifferendpoint=snifferendpoint,
sourceip=sourceip,
# method='serial'
)
victim_1.save()
print 'Created Victim:\n\tvictim_id: {}\n\tsnifferendpoint: {}\n\tsourceip: {}'.format(victim_1.id, snifferendpoint, sourceip)
|
<commit_before>from string import ascii_lowercase
import django
import os
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'backend.settings')
django.setup()
from breach.models import Target, Victim
endpoint = 'https://dimkarakostas.com/rupture/test.php?ref=%s'
prefix = 'imper'
alphabet = ascii_lowercase
secretlength = 9
target_1 = Target(
endpoint=endpoint,
prefix=prefix,
alphabet=alphabet,
secretlength=secretlength
)
target_1.save()
print 'Created Target:\n\tendpoint: {}\n\tprefix: {}\n\talphabet: {}\n\tsecretlength: {}'.format(endpoint, prefix, alphabet, secretlength)
snifferendpoint = 'http://127.0.0.1:9000'
sourceip = '192.168.1.70'
victim_1 = Victim(
target=target_1,
snifferendpoint=snifferendpoint,
sourceip=sourceip
)
victim_1.save()
print 'Created Victim:\n\tvictim_id: {}\n\tsnifferendpoint: {}\n\tsourceip: {}'.format(victim_1.id, snifferendpoint, sourceip)
<commit_msg>Add method comment to population script for easy deploy<commit_after>from string import ascii_lowercase
import django
import os
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'backend.settings')
django.setup()
from breach.models import Target, Victim
endpoint = 'https://dimkarakostas.com/rupture/test.php?ref=%s'
prefix = 'imper'
alphabet = ascii_lowercase
secretlength = 9
target_1 = Target(
endpoint=endpoint,
prefix=prefix,
alphabet=alphabet,
secretlength=secretlength
)
target_1.save()
print 'Created Target:\n\tendpoint: {}\n\tprefix: {}\n\talphabet: {}\n\tsecretlength: {}'.format(endpoint, prefix, alphabet, secretlength)
snifferendpoint = 'http://127.0.0.1:9000'
sourceip = '192.168.1.70'
victim_1 = Victim(
target=target_1,
snifferendpoint=snifferendpoint,
sourceip=sourceip,
# method='serial'
)
victim_1.save()
print 'Created Victim:\n\tvictim_id: {}\n\tsnifferendpoint: {}\n\tsourceip: {}'.format(victim_1.id, snifferendpoint, sourceip)
|
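The populate script in the record above leans on the standalone-Django bootstrap (set DJANGO_SETTINGS_MODULE, then django.setup()) before touching the ORM. Below is a minimal, self-contained sketch of the same pattern that uses settings.configure() so it runs without the backend project; the app list and the in-memory database are illustrative assumptions, not part of rupture.

import django
from django.conf import settings

# Configure minimal settings in code instead of pointing
# DJANGO_SETTINGS_MODULE at a project, as the script above does.
settings.configure(
    INSTALLED_APPS=[
        "django.contrib.contenttypes",
        "django.contrib.auth",
    ],
    DATABASES={"default": {"ENGINE": "django.db.backends.sqlite3",
                           "NAME": ":memory:"}},
)
django.setup()  # populate the app registry; model imports are safe from here on

from django.contrib.auth import get_user_model
print(get_user_model())  # the ORM is wired up: prints the User model class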
1eb2d3b9fa773455e9c69921b58529241e59b00e
|
thezombies/management/commands/report_invalid_urls.py
|
thezombies/management/commands/report_invalid_urls.py
|
from __future__ import division
from django.core.management.base import BaseCommand
from django.utils import timezone
from thezombies.models import (Agency, Probe)
REPORT_DATE_FORMATTER = u"{:%Y-%m-%d %I:%M%p %Z}\n"
class Command(BaseCommand):
"""Show some information on invalid/bad urls"""
def handle(self, *args, **kwargs):
agency_list = Agency.objects.all()
self.stdout.write(u"# Invalid URL Report\n")
report_date = REPORT_DATE_FORMATTER.format(timezone.localtime(timezone.now()))
self.stdout.write(u"Report generated: {0}\n\n".format(report_date))
for agency in agency_list:
self.stdout.write('## Agency: {0}\n\n'.format(agency.name))
probe_list = Probe.objects.filter(audit__agency=agency, result__contains={'valid_url': 'false'})
if probe_list.count() == 0:
self.stdout.write('None!\n\n')
for probe in probe_list:
self.stdout.write('* {0}'.format(probe.result.get('initial_url', '???')))
if probe_list.count() > 0:
self.stdout.write('\n')
|
from __future__ import division
from django.core.management.base import BaseCommand
from django.utils import timezone
from thezombies.models import (Agency, Probe)
REPORT_DATE_FORMATTER = u"{:%Y-%m-%d %I:%M%p %Z}\n"
class Command(BaseCommand):
"""Show some information on invalid/bad urls"""
def handle(self, *args, **kwargs):
agency_list = Agency.objects.all()
self.stdout.write(u"# Invalid URL Report\n")
report_date = REPORT_DATE_FORMATTER.format(timezone.localtime(timezone.now()))
self.stdout.write(u"Report generated: {0}\n\n".format(report_date))
for agency in agency_list:
self.stdout.write('## Agency: {0}\n\n'.format(agency.name))
probe_list = Probe.objects.filter(audit__agency=agency, result__contains={'valid_url': 'false'})
if probe_list.count() == 0:
self.stdout.write('None!\n\n')
else:
self.stdout.write('URL Count: {0}\n\n'.format(probe_list.count()))
for probe in probe_list:
self.stdout.write('* {0}'.format(probe.result.get('initial_url', '???')))
if probe_list.count() > 0:
self.stdout.write('\n')
|
Add url count per agency to the invalid url report
|
Add url count per agency to the invalid url report
|
Python
|
bsd-3-clause
|
sunlightlabs/thezombies,sunlightlabs/thezombies,sunlightlabs/thezombies,sunlightlabs/thezombies
|
from __future__ import division
from django.core.management.base import BaseCommand
from django.utils import timezone
from thezombies.models import (Agency, Probe)
REPORT_DATE_FORMATTER = u"{:%Y-%m-%d %I:%M%p %Z}\n"
class Command(BaseCommand):
"""Show some information on invalid/bad urls"""
def handle(self, *args, **kwargs):
agency_list = Agency.objects.all()
self.stdout.write(u"# Invalid URL Report\n")
report_date = REPORT_DATE_FORMATTER.format(timezone.localtime(timezone.now()))
self.stdout.write(u"Report generated: {0}\n\n".format(report_date))
for agency in agency_list:
self.stdout.write('## Agency: {0}\n\n'.format(agency.name))
probe_list = Probe.objects.filter(audit__agency=agency, result__contains={'valid_url': 'false'})
if probe_list.count() == 0:
self.stdout.write('None!\n\n')
for probe in probe_list:
self.stdout.write('* {0}'.format(probe.result.get('initial_url', '???')))
if probe_list.count() > 0:
self.stdout.write('\n')
Add url count per agency to the invalid url report
|
from __future__ import division
from django.core.management.base import BaseCommand
from django.utils import timezone
from thezombies.models import (Agency, Probe)
REPORT_DATE_FORMATTER = u"{:%Y-%m-%d %I:%M%p %Z}\n"
class Command(BaseCommand):
"""Show some information on invalid/bad urls"""
def handle(self, *args, **kwargs):
agency_list = Agency.objects.all()
self.stdout.write(u"# Invalid URL Report\n")
report_date = REPORT_DATE_FORMATTER.format(timezone.localtime(timezone.now()))
self.stdout.write(u"Report generated: {0}\n\n".format(report_date))
for agency in agency_list:
self.stdout.write('## Agency: {0}\n\n'.format(agency.name))
probe_list = Probe.objects.filter(audit__agency=agency, result__contains={'valid_url': 'false'})
if probe_list.count() == 0:
self.stdout.write('None!\n\n')
else:
self.stdout.write('URL Count: {0}\n\n'.format(probe_list.count()))
for probe in probe_list:
self.stdout.write('* {0}'.format(probe.result.get('initial_url', '???')))
if probe_list.count() > 0:
self.stdout.write('\n')
|
<commit_before>from __future__ import division
from django.core.management.base import BaseCommand
from django.utils import timezone
from thezombies.models import (Agency, Probe)
REPORT_DATE_FORMATTER = u"{:%Y-%m-%d %I:%M%p %Z}\n"
class Command(BaseCommand):
"""Show some information on invalid/bad urls"""
def handle(self, *args, **kwargs):
agency_list = Agency.objects.all()
self.stdout.write(u"# Invalid URL Report\n")
report_date = REPORT_DATE_FORMATTER.format(timezone.localtime(timezone.now()))
self.stdout.write(u"Report generated: {0}\n\n".format(report_date))
for agency in agency_list:
self.stdout.write('## Agency: {0}\n\n'.format(agency.name))
probe_list = Probe.objects.filter(audit__agency=agency, result__contains={'valid_url': 'false'})
if probe_list.count() == 0:
self.stdout.write('None!\n\n')
for probe in probe_list:
self.stdout.write('* {0}'.format(probe.result.get('initial_url', '???')))
if probe_list.count() > 0:
self.stdout.write('\n')
<commit_msg>Add url count per agency to the invalid url report<commit_after>
|
from __future__ import division
from django.core.management.base import BaseCommand
from django.utils import timezone
from thezombies.models import (Agency, Probe)
REPORT_DATE_FORMATTER = u"{:%Y-%m-%d %I:%M%p %Z}\n"
class Command(BaseCommand):
"""Show some information on invalid/bad urls"""
def handle(self, *args, **kwargs):
agency_list = Agency.objects.all()
self.stdout.write(u"# Invalid URL Report\n")
report_date = REPORT_DATE_FORMATTER.format(timezone.localtime(timezone.now()))
self.stdout.write(u"Report generated: {0}\n\n".format(report_date))
for agency in agency_list:
self.stdout.write('## Agency: {0}\n\n'.format(agency.name))
probe_list = Probe.objects.filter(audit__agency=agency, result__contains={'valid_url': 'false'})
if probe_list.count() == 0:
self.stdout.write('None!\n\n')
else:
self.stdout.write('URL Count: {0}\n\n'.format(probe_list.count()))
for probe in probe_list:
self.stdout.write('* {0}'.format(probe.result.get('initial_url', '???')))
if probe_list.count() > 0:
self.stdout.write('\n')
|
from __future__ import division
from django.core.management.base import BaseCommand
from django.utils import timezone
from thezombies.models import (Agency, Probe)
REPORT_DATE_FORMATTER = u"{:%Y-%m-%d %I:%M%p %Z}\n"
class Command(BaseCommand):
"""Show some information on invalid/bad urls"""
def handle(self, *args, **kwargs):
agency_list = Agency.objects.all()
self.stdout.write(u"# Invalid URL Report\n")
report_date = REPORT_DATE_FORMATTER.format(timezone.localtime(timezone.now()))
self.stdout.write(u"Report generated: {0}\n\n".format(report_date))
for agency in agency_list:
self.stdout.write('## Agency: {0}\n\n'.format(agency.name))
probe_list = Probe.objects.filter(audit__agency=agency, result__contains={'valid_url': 'false'})
if probe_list.count() == 0:
self.stdout.write('None!\n\n')
for probe in probe_list:
self.stdout.write('* {0}'.format(probe.result.get('initial_url', '???')))
if probe_list.count() > 0:
self.stdout.write('\n')
Add url count per agency to the invalid url reportfrom __future__ import division
from django.core.management.base import BaseCommand
from django.utils import timezone
from thezombies.models import (Agency, Probe)
REPORT_DATE_FORMATTER = u"{:%Y-%m-%d %I:%M%p %Z}\n"
class Command(BaseCommand):
"""Show some information on invalid/bad urls"""
def handle(self, *args, **kwargs):
agency_list = Agency.objects.all()
self.stdout.write(u"# Invalid URL Report\n")
report_date = REPORT_DATE_FORMATTER.format(timezone.localtime(timezone.now()))
self.stdout.write(u"Report generated: {0}\n\n".format(report_date))
for agency in agency_list:
self.stdout.write('## Agency: {0}\n\n'.format(agency.name))
probe_list = Probe.objects.filter(audit__agency=agency, result__contains={'valid_url': 'false'})
if probe_list.count() == 0:
self.stdout.write('None!\n\n')
else:
self.stdout.write('URL Count: {0}\n\n'.format(probe_list.count()))
for probe in probe_list:
self.stdout.write('* {0}'.format(probe.result.get('initial_url', '???')))
if probe_list.count() > 0:
self.stdout.write('\n')
|
<commit_before>from __future__ import division
from django.core.management.base import BaseCommand
from django.utils import timezone
from thezombies.models import (Agency, Probe)
REPORT_DATE_FORMATTER = u"{:%Y-%m-%d %I:%M%p %Z}\n"
class Command(BaseCommand):
"""Show some information on invalid/bad urls"""
def handle(self, *args, **kwargs):
agency_list = Agency.objects.all()
self.stdout.write(u"# Invalid URL Report\n")
report_date = REPORT_DATE_FORMATTER.format(timezone.localtime(timezone.now()))
self.stdout.write(u"Report generated: {0}\n\n".format(report_date))
for agency in agency_list:
self.stdout.write('## Agency: {0}\n\n'.format(agency.name))
probe_list = Probe.objects.filter(audit__agency=agency, result__contains={'valid_url': 'false'})
if probe_list.count() == 0:
self.stdout.write('None!\n\n')
for probe in probe_list:
self.stdout.write('* {0}'.format(probe.result.get('initial_url', '???')))
if probe_list.count() > 0:
self.stdout.write('\n')
<commit_msg>Add url count per agency to the invalid url report<commit_after>from __future__ import division
from django.core.management.base import BaseCommand
from django.utils import timezone
from thezombies.models import (Agency, Probe)
REPORT_DATE_FORMATTER = u"{:%Y-%m-%d %I:%M%p %Z}\n"
class Command(BaseCommand):
"""Show some information on invalid/bad urls"""
def handle(self, *args, **kwargs):
agency_list = Agency.objects.all()
self.stdout.write(u"# Invalid URL Report\n")
report_date = REPORT_DATE_FORMATTER.format(timezone.localtime(timezone.now()))
self.stdout.write(u"Report generated: {0}\n\n".format(report_date))
for agency in agency_list:
self.stdout.write('## Agency: {0}\n\n'.format(agency.name))
probe_list = Probe.objects.filter(audit__agency=agency, result__contains={'valid_url': 'false'})
if probe_list.count() == 0:
self.stdout.write('None!\n\n')
else:
self.stdout.write('URL Count: {0}\n\n'.format(probe_list.count()))
for probe in probe_list:
self.stdout.write('* {0}'.format(probe.result.get('initial_url', '???')))
if probe_list.count() > 0:
self.stdout.write('\n')
|
462b8e53c8a1add0f471f53d31718816939f1372
|
cineapp/utils.py
|
cineapp/utils.py
|
# -*- coding: utf-8 -*-
def frange(start, end, step):
tmp = start
while(tmp <= end):
yield tmp
tmp += step
|
# -*- coding: utf-8 -*-
from cineapp import db
from cineapp.models import Movie, Mark
from sqlalchemy.sql.expression import literal, desc
def frange(start, end, step):
tmp = start
while(tmp <= end):
yield tmp
tmp += step
def get_activity_list(start, length):
"""
Returns an array containing activity records ordered by descending date
Params are a range of records we want to have in the returned array
"""
# Object_items
object_dict={"count": 0, "list": []}
object_list=[]
# Movie Query
movies_query=db.session.query(Movie.id,literal("user_id").label("user_id"),Movie.added_when.label("entry_date"),literal("movies").label("entry_type"))
# Marks Query
marks_query=db.session.query(Mark.movie_id,Mark.user_id.label("user_id"),Mark.updated_when.label("entry_date"),literal("marks").label("entry_type")).filter(Mark.mark != None)
# Homework Query
homework_query=db.session.query(Mark.movie_id,Mark.user_id.label("user_id"),Mark.homework_when.label("entry_date"),literal("homeworks").label("entry_type")).filter(Mark.homework_when != None)
# Build the union request
activity_list = movies_query.union(marks_query,homework_query).order_by(desc("entry_date")).slice(int(start),int(start) + int(length))
for cur_item in activity_list:
if cur_item.entry_type == "movies":
object_list.append({"entry_type": "movies", "object" : Movie.query.get(cur_item.id)})
elif cur_item.entry_type == "marks":
object_list.append({"entry_type": "marks", "object" : Mark.query.get((cur_item.user_id,cur_item.id))})
elif cur_item.entry_type == "homeworks":
object_list.append({"entry_type" : "homeworks", "object" : Mark.query.get((cur_item.user_id,cur_item.id))})
# Count activity number (Will be used for the datatable pagination)
object_dict["count"]=movies_query.union(marks_query,homework_query).order_by(desc("entry_date")).count()
object_dict["list"]=object_list
# Return the filled object
return object_dict
|
Move activity SQL query into a dedicated function
|
Move activity SQL query into a dedicated function
The SQL query based on the UNION predicate has been moved into a dedicated
function which takes as parameters the range of records we want in the result.
This function will be used for the activity dashboard and also for the new
global activity page using the datatable plugin.
The subqueries have also been updated in order to exclude some records which
shouldn't be fetched:
- For the homeworks, don't fetch records without a homework date
- For the marks, don't fetch records without a mark
|
Python
|
mit
|
ptitoliv/cineapp,ptitoliv/cineapp,ptitoliv/cineapp
|
# -*- coding: utf-8 -*-
def frange(start, end, step):
tmp = start
while(tmp <= end):
yield tmp
tmp += step
Move activity SQL query into a dedicated function
The SQL query based on the UNION predicate has been moved into a dedicated
function which takes as parameters the range of records we want in the result.
This function will be used for the activity dashboard and also for the new
global activity page using the datatable plugin.
The subqueries have also been updated in order to exclude some records which
shouldn't be fetched:
- For the homeworks, don't fetch records without a homework date
- For the marks, don't fetch records without a mark
|
# -*- coding: utf-8 -*-
from cineapp import db
from cineapp.models import Movie, Mark
from sqlalchemy.sql.expression import literal, desc
def frange(start, end, step):
tmp = start
while(tmp <= end):
yield tmp
tmp += step
def get_activity_list(start, length):
"""
Returns an array containing activity records ordered by descending date
Params are a range of records we want to have in the returned array
"""
# Object_items
object_dict={"count": 0, "list": []}
object_list=[]
# Movie Query
movies_query=db.session.query(Movie.id,literal("user_id").label("user_id"),Movie.added_when.label("entry_date"),literal("movies").label("entry_type"))
# Marks Query
marks_query=db.session.query(Mark.movie_id,Mark.user_id.label("user_id"),Mark.updated_when.label("entry_date"),literal("marks").label("entry_type")).filter(Mark.mark != None)
# Homework Query
homework_query=db.session.query(Mark.movie_id,Mark.user_id.label("user_id"),Mark.homework_when.label("entry_date"),literal("homeworks").label("entry_type")).filter(Mark.homework_when != None)
# Build the union request
activity_list = movies_query.union(marks_query,homework_query).order_by(desc("entry_date")).slice(int(start),int(start) + int(length))
for cur_item in activity_list:
if cur_item.entry_type == "movies":
object_list.append({"entry_type": "movies", "object" : Movie.query.get(cur_item.id)})
elif cur_item.entry_type == "marks":
object_list.append({"entry_type": "marks", "object" : Mark.query.get((cur_item.user_id,cur_item.id))})
elif cur_item.entry_type == "homeworks":
object_list.append({"entry_type" : "homeworks", "object" : Mark.query.get((cur_item.user_id,cur_item.id))})
# Count activity number (Will be used for the datatable pagination)
object_dict["count"]=movies_query.union(marks_query,homework_query).order_by(desc("entry_date")).count()
object_dict["list"]=object_list
# Return the filled object
return object_dict
|
<commit_before># -*- coding: utf-8 -*-
def frange(start, end, step):
tmp = start
while(tmp <= end):
yield tmp
tmp += step
<commit_msg>Move activity SQL query into a dedicated function
The SQL query based on the UNION predicate has been moved into a dedicated
function which takes as parameters the range of records we want in the result.
This function will be used for the activity dashboard and also for the new
global activity page using the datatable plugin.
The subqueries have also been updated in order to exclude some records which
shouldn't be fetched:
- For the homeworks, don't fetch records without a homework date
- For the marks, don't fetch records without a mark<commit_after>
|
# -*- coding: utf-8 -*-
from cineapp import db
from cineapp.models import Movie, Mark
from sqlalchemy.sql.expression import literal, desc
def frange(start, end, step):
tmp = start
while(tmp <= end):
yield tmp
tmp += step
def get_activity_list(start, length):
"""
Returns an array containing activity records ordered by descending date
Params are a range of records we want to have in the returned array
"""
# Object_items
object_dict={"count": 0, "list": []}
object_list=[]
# Movie Query
movies_query=db.session.query(Movie.id,literal("user_id").label("user_id"),Movie.added_when.label("entry_date"),literal("movies").label("entry_type"))
# Marks Query
marks_query=db.session.query(Mark.movie_id,Mark.user_id.label("user_id"),Mark.updated_when.label("entry_date"),literal("marks").label("entry_type")).filter(Mark.mark != None)
# Homework Query
homework_query=db.session.query(Mark.movie_id,Mark.user_id.label("user_id"),Mark.homework_when.label("entry_date"),literal("homeworks").label("entry_type")).filter(Mark.homework_when != None)
# Build the union request
activity_list = movies_query.union(marks_query,homework_query).order_by(desc("entry_date")).slice(int(start),int(start) + int(length))
for cur_item in activity_list:
if cur_item.entry_type == "movies":
object_list.append({"entry_type": "movies", "object" : Movie.query.get(cur_item.id)})
elif cur_item.entry_type == "marks":
object_list.append({"entry_type": "marks", "object" : Mark.query.get((cur_item.user_id,cur_item.id))})
elif cur_item.entry_type == "homeworks":
object_list.append({"entry_type" : "homeworks", "object" : Mark.query.get((cur_item.user_id,cur_item.id))})
# Count activity number (Will be used for the datatable pagination)
object_dict["count"]=movies_query.union(marks_query,homework_query).order_by(desc("entry_date")).count()
object_dict["list"]=object_list
# Return the filled object
return object_dict
|
# -*- coding: utf-8 -*-
def frange(start, end, step):
tmp = start
while(tmp <= end):
yield tmp
tmp += step
Move activity SQL query into a dedicated function
The SQL query based on the UNION predicate has been moved into a dedicated
function which takes as parameters the range of records we want in the result.
This function will be used for the activity dashboard and also for the new
global activity page using the datatable plugin.
The subqueries have also been updated in order to exclude some records which
shouldn't be fetched:
- For the homeworks, don't fetch records without a homework date
- For the marks, don't fetch records without a mark# -*- coding: utf-8 -*-
from cineapp import db
from cineapp.models import Movie, Mark
from sqlalchemy.sql.expression import literal, desc
def frange(start, end, step):
tmp = start
while(tmp <= end):
yield tmp
tmp += step
def get_activity_list(start, length):
"""
Returns an array containing activity records ordered by descending date
Params are a range of records we want to have in the returned array
"""
# Object_items
object_dict={"count": 0, "list": []}
object_list=[]
# Movie Query
movies_query=db.session.query(Movie.id,literal("user_id").label("user_id"),Movie.added_when.label("entry_date"),literal("movies").label("entry_type"))
# Marks Query
marks_query=db.session.query(Mark.movie_id,Mark.user_id.label("user_id"),Mark.updated_when.label("entry_date"),literal("marks").label("entry_type")).filter(Mark.mark != None)
# Homework Query
homework_query=db.session.query(Mark.movie_id,Mark.user_id.label("user_id"),Mark.homework_when.label("entry_date"),literal("homeworks").label("entry_type")).filter(Mark.homework_when != None)
# Build the union request
activity_list = movies_query.union(marks_query,homework_query).order_by(desc("entry_date")).slice(int(start),int(start) + int(length))
for cur_item in activity_list:
if cur_item.entry_type == "movies":
object_list.append({"entry_type": "movies", "object" : Movie.query.get(cur_item.id)})
elif cur_item.entry_type == "marks":
object_list.append({"entry_type": "marks", "object" : Mark.query.get((cur_item.user_id,cur_item.id))})
elif cur_item.entry_type == "homeworks":
object_list.append({"entry_type" : "homeworks", "object" : Mark.query.get((cur_item.user_id,cur_item.id))})
# Count activity number (Will be used for the datatable pagination)
object_dict["count"]=movies_query.union(marks_query,homework_query).order_by(desc("entry_date")).count()
object_dict["list"]=object_list
# Return the filled object
return object_dict
|
<commit_before># -*- coding: utf-8 -*-
def frange(start, end, step):
tmp = start
while(tmp <= end):
yield tmp
tmp += step
<commit_msg>Move activity SQL query into a dedicated function
The SQL query based on the UNION predicate has been moved into a dedicated
function which takes as parameters the range of records we want in the result.
This function will be used for the activity dashboard and also for the new
global activity page using the datatable plugin.
The subqueries have also been updated in order to exclude some records which
shouldn't be fetched:
- For the homeworks, don't fetch records without a homework date
- For the marks, don't fetch records without a mark<commit_after># -*- coding: utf-8 -*-
from cineapp import db
from cineapp.models import Movie, Mark
from sqlalchemy.sql.expression import literal, desc
def frange(start, end, step):
tmp = start
while(tmp <= end):
yield tmp
tmp += step
def get_activity_list(start, length):
"""
Returns an array containing activity records ordered by descending date
Params are a range of records we want to have in the returned array
"""
# Object_items
object_dict={"count": 0, "list": []}
object_list=[]
# Movie Query
movies_query=db.session.query(Movie.id,literal("user_id").label("user_id"),Movie.added_when.label("entry_date"),literal("movies").label("entry_type"))
# Marks Query
marks_query=db.session.query(Mark.movie_id,Mark.user_id.label("user_id"),Mark.updated_when.label("entry_date"),literal("marks").label("entry_type")).filter(Mark.mark != None)
# Homework Query
homework_query=db.session.query(Mark.movie_id,Mark.user_id.label("user_id"),Mark.homework_when.label("entry_date"),literal("homeworks").label("entry_type")).filter(Mark.homework_when != None)
# Build the union request
activity_list = movies_query.union(marks_query,homework_query).order_by(desc("entry_date")).slice(int(start),int(start) + int(length))
for cur_item in activity_list:
if cur_item.entry_type == "movies":
object_list.append({"entry_type": "movies", "object" : Movie.query.get(cur_item.id)})
elif cur_item.entry_type == "marks":
object_list.append({"entry_type": "marks", "object" : Mark.query.get((cur_item.user_id,cur_item.id))})
elif cur_item.entry_type == "homeworks":
object_list.append({"entry_type" : "homeworks", "object" : Mark.query.get((cur_item.user_id,cur_item.id))})
# Count activity number (Will be used for the datatable pagination)
object_dict["count"]=movies_query.union(marks_query,homework_query).order_by(desc("entry_date")).count()
object_dict["list"]=object_list
# Return the filled object
return object_dict
|
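A condensed sketch of the pagination idea the commit message above describes: union several labelled queries, order the merged stream by a shared date label, and slice out one page. It targets SQLAlchemy 1.4+ with an in-memory SQLite database; the two toy models and their column names are assumptions for illustration, not cineapp's schema.

import datetime
from sqlalchemy import Column, DateTime, Integer, create_engine
from sqlalchemy.orm import declarative_base, sessionmaker
from sqlalchemy.sql.expression import desc, literal

Base = declarative_base()

class Movie(Base):
    __tablename__ = "movies"
    id = Column(Integer, primary_key=True)
    added_when = Column(DateTime)

class Mark(Base):
    __tablename__ = "marks"
    id = Column(Integer, primary_key=True)
    updated_when = Column(DateTime)

engine = create_engine("sqlite://")
Base.metadata.create_all(engine)
session = sessionmaker(bind=engine)()

t0 = datetime.datetime(2016, 1, 1, 12, 0)
session.add_all([Movie(added_when=t0),
                 Mark(updated_when=t0 + datetime.timedelta(hours=1))])
session.commit()

# Label each source so rows in the merged stream stay distinguishable.
movies = session.query(Movie.id, Movie.added_when.label("entry_date"),
                       literal("movies").label("entry_type"))
marks = session.query(Mark.id, Mark.updated_when.label("entry_date"),
                      literal("marks").label("entry_type"))

# One page of the combined activity stream, newest entries first.
page = movies.union(marks).order_by(desc("entry_date")).slice(0, 10)
for row in page:
    print(row.entry_type, row.entry_date)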
2280624b54ec8f1ebae656336fab13d032f504ad
|
antevents/__init__.py
|
antevents/__init__.py
|
# Copyright 2016 by MPI-SWS and Data-Ken Research.
# Licensed under the Apache 2.0 License.
"""
This is the main package for antevents. Directly within this package you will
find the following modules:
* `base` - the core abstractions and classes of the system.
* `sensor` - defines data types and functions specifically for sensor events.
The rest of the functionality is in sub-packages:
* `adapters` - components to read/write events outside the system
* `internal` - some internal definitions
* `linq` - filters that allow linq-style query pipelines over event streams
"""
__version__ = "1.0"
|
# Copyright 2016 by MPI-SWS and Data-Ken Research.
# Licensed under the Apache 2.0 License.
"""
This is the main package for antevents. Directly within this package you will
find the following module:
* `base` - the core abstractions and classes of the system.
The rest of the functionality is in sub-packages:
* `adapters` - components to read/write events outside the system
* `internal` - some internal definitions
* `linq` - filters that allow linq-style query pipelines over event streams
* `sensors` - interfaces to sensors go here
"""
__version__ = "1.0"
|
Update doc string on location of sensors
|
Update doc string on location of sensors
|
Python
|
apache-2.0
|
mpi-sws-rse/thingflow-python,mpi-sws-rse/antevents-python,mpi-sws-rse/thingflow-python,mpi-sws-rse/antevents-python
|
# Copyright 2016 by MPI-SWS and Data-Ken Research.
# Licensed under the Apache 2.0 License.
"""
This is the main package for antevents. Directly within this package you will
find the following modules:
* `base` - the core abstractions and classes of the system.
* `sensor` - defines data types and functions specifically for sensor events.
The rest of the functionality is in sub-packages:
* `adapters` - components to read/write events outside the system
* `internal` - some internal definitions
* `linq` - filters that allow linq-style query pipelines over event streams
"""
__version__ = "1.0"
Update doc string on location of sensors
|
# Copyright 2016 by MPI-SWS and Data-Ken Research.
# Licensed under the Apache 2.0 License.
"""
This is the main package for antevents. Directly within this package you will
find the following module:
* `base` - the core abstractions and classes of the system.
The rest of the functionality is in sub-packages:
* `adapters` - components to read/write events outside the system
* `internal` - some internal definitions
* `linq` - filters that allow linq-style query pipelines over event streams
* `sensors` - interfaces to sensors go here
"""
__version__ = "1.0"
|
<commit_before># Copyright 2016 by MPI-SWS and Data-Ken Research.
# Licensed under the Apache 2.0 License.
"""
This is the main package for antevents. Directly within this package you will
find the following modules:
* `base` - the core abstractions and classes of the system.
* `sensor` - defines data types and functions specifically for sensor events.
The rest of the functionality is in sub-packages:
* `adapters` - components to read/write events outside the system
* `internal` - some internal definitions
* `linq` - filters that allow linq-style query pipelines over event streams
"""
__version__ = "1.0"
<commit_msg>Update doc string on location of sensors<commit_after>
|
# Copyright 2016 by MPI-SWS and Data-Ken Research.
# Licensed under the Apache 2.0 License.
"""
This is the main package for antevents. Directly within this package you will
find the following module:
* `base` - the core abstractions and classes of the system.
The rest of the functionality is in sub-packages:
* `adapters` - components to read/write events outside the system
* `internal` - some internal definitions
* `linq` - filters that allow linq-style query pipelines over event streams
* `sensors` - interfaces to sensors go here
"""
__version__ = "1.0"
|
# Copyright 2016 by MPI-SWS and Data-Ken Research.
# Licensed under the Apache 2.0 License.
"""
This is the main package for antevents. Directly within this package you will
find the following modules:
* `base` - the core abstractions and classes of the system.
* `sensor` - defines data types and functions specifically for sensor events.
The rest of the functionality is in sub-packages:
* `adapters` - components to read/write events outside the system
* `internal` - some internal definitions
* `linq` - filters that allow linq-style query pipelines over event streams
"""
__version__ = "1.0"
Update doc string on location of sensors# Copyright 2016 by MPI-SWS and Data-Ken Research.
# Licensed under the Apache 2.0 License.
"""
This is the main package for antevents. Directly within this package you will
find the following module:
* `base` - the core abstractions and classes of the system.
The rest of the functionality is in sub-packages:
* `adapters` - components to read/write events outside the system
* `internal` - some internal definitions
* `linq` - filters that allow linq-style query pipelines over event streams
* `sensors` - interfaces to sensors go here
"""
__version__ = "1.0"
|
<commit_before># Copyright 2016 by MPI-SWS and Data-Ken Research.
# Licensed under the Apache 2.0 License.
"""
This is the main package for antevents. Directly within this package you will
find the following modules:
* `base` - the core abstractions and classes of the system.
* `sensor` - defines data types and functions specifically for sensor events.
The rest of the functionality is in sub-packages:
* `adapters` - components to read/write events outside the system
* `internal` - some internal definitions
* `linq` - filters that allow linq-style query pipelines over event streams
"""
__version__ = "1.0"
<commit_msg>Update doc string on location of sensors<commit_after># Copyright 2016 by MPI-SWS and Data-Ken Research.
# Licensed under the Apache 2.0 License.
"""
This is the main package for antevents. Directly within this package you will
find the following module:
* `base` - the core abstractions and classes of the system.
The rest of the functionality is in sub-packages:
* `adapters` - components to read/write events outside the system
* `internal` - some internal definitions
* `linq` - filters that allow linq-style query pipelines over event streams
* `sensors` - interfaces to sensors go here
"""
__version__ = "1.0"
|
d36053764e8a5776d3c37a7e35beb9ba5cb67386
|
dask/diagnostics/__init__.py
|
dask/diagnostics/__init__.py
|
from .profile import Profiler, ResourceProfiler
from .progress import ProgressBar
|
from .profile import Profiler, ResourceProfiler
from .progress import ProgressBar
try:
from .profile_visualize import visualize
except ImportError:
pass
|
Add visualize to diagnostics import
|
Add visualize to diagnostics import
|
Python
|
bsd-3-clause
|
PhE/dask,mraspaud/dask,vikhyat/dask,dask/dask,jcrist/dask,vikhyat/dask,mrocklin/dask,mrocklin/dask,cpcloud/dask,pombredanne/dask,ContinuumIO/dask,ContinuumIO/dask,dask/dask,gameduell/dask,blaze/dask,blaze/dask,jakirkham/dask,chrisbarber/dask,jcrist/dask,PhE/dask,mikegraham/dask,jakirkham/dask,cowlicks/dask,mraspaud/dask,pombredanne/dask
|
from .profile import Profiler, ResourceProfiler
from .progress import ProgressBar
Add visualize to diagnostics import
|
from .profile import Profiler, ResourceProfiler
from .progress import ProgressBar
try:
from .profile_visualize import visualize
except ImportError:
pass
|
<commit_before>from .profile import Profiler, ResourceProfiler
from .progress import ProgressBar
<commit_msg>Add visualize to diagnostics import<commit_after>
|
from .profile import Profiler, ResourceProfiler
from .progress import ProgressBar
try:
from .profile_visualize import visualize
except ImportError:
pass
|
from .profile import Profiler, ResourceProfiler
from .progress import ProgressBar
Add visualize to diagnostics importfrom .profile import Profiler, ResourceProfiler
from .progress import ProgressBar
try:
from .profile_visualize import visualize
except ImportError:
pass
|
<commit_before>from .profile import Profiler, ResourceProfiler
from .progress import ProgressBar
<commit_msg>Add visualize to diagnostics import<commit_after>from .profile import Profiler, ResourceProfiler
from .progress import ProgressBar
try:
from .profile_visualize import visualize
except ImportError:
pass
|
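The dask change above is the standard optional-dependency pattern: attempt the import, and degrade gracefully when the extra package is absent. A self-contained sketch of the same idea, with matplotlib standing in for the optional dependency (the function name is made up for the example):

try:
    import matplotlib.pyplot as plt  # optional plotting dependency
    HAS_PLOTTING = True
except ImportError:
    plt = None
    HAS_PLOTTING = False

def plot_series(values):
    """Plot values when plotting support is installed; fail loudly otherwise."""
    if not HAS_PLOTTING:
        raise RuntimeError("install matplotlib to enable plot_series()")
    plt.plot(list(values))
    plt.show()

if HAS_PLOTTING:
    plot_series([1, 3, 2])
else:
    print("plotting disabled: matplotlib not installed")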
bae391e1f5485dfb3f973144ef8d8413a2ac2f75
|
circuits/app/dropprivileges.py
|
circuits/app/dropprivileges.py
|
from pwd import getpwnam
from grp import getgrnam
from traceback import format_exc
from os import getuid, setgroups, setgid, setuid, umask
from circuits.core import handler, BaseComponent
class DropPrivileges(BaseComponent):
def init(self, user="nobody", group="nobody", **kwargs):
self.user = user
self.group = group
def drop_privileges(self):
if getuid() > 0:
# Running as non-root. Ignore.
return
try:
# Get the uid/gid from the name
uid = getpwnam(self.user).pw_uid
gid = getgrnam(self.group).gr_gid
except KeyError as error:
print("ERROR: Could not drop privileges {0:s}".format(error))
print(format_exc())
raise SystemExit(-1)
try:
# Remove group privileges
setgroups([])
# Try setting the new uid/gid
setgid(gid)
setuid(uid)
# Ensure a very conservative umask
umask(077)
except Exception as error:
print("ERROR: Could not drop privileges {0:s}".format(error))
print(format_exc())
raise SystemExit(-1)
@handler("ready", channel="*")
def on_ready(self, server, bind):
try:
self.drop_privileges()
finally:
self.unregister()
|
from pwd import getpwnam
from grp import getgrnam
from traceback import format_exc
from os import getuid, setgroups, setgid, setuid, umask
from circuits.core import handler, BaseComponent
class DropPrivileges(BaseComponent):
def init(self, user="nobody", group="nobody", **kwargs):
self.user = user
self.group = group
def drop_privileges(self):
if getuid() > 0:
# Running as non-root. Ignore.
return
try:
# Get the uid/gid from the name
uid = getpwnam(self.user).pw_uid
gid = getgrnam(self.group).gr_gid
except KeyError as error:
print("ERROR: Could not drop privileges {0:s}".format(error))
print(format_exc())
raise SystemExit(-1)
try:
# Remove group privileges
setgroups([])
# Try setting the new uid/gid
setgid(gid)
setuid(uid)
# Ensure a very conservative umask
umask(0o077)
except Exception as error:
print("ERROR: Could not drop privileges {0:s}".format(error))
print(format_exc())
raise SystemExit(-1)
@handler("ready", channel="*")
def on_ready(self, server, bind):
try:
self.drop_privileges()
finally:
self.unregister()
|
Fix DropPrivileges component to be compatible with python3
|
Fix DropPrivileges component to be compatible with python3
|
Python
|
mit
|
nizox/circuits,treemo/circuits,treemo/circuits,eriol/circuits,eriol/circuits,eriol/circuits,treemo/circuits
|
from pwd import getpwnam
from grp import getgrnam
from traceback import format_exc
from os import getuid, setgroups, setgid, setuid, umask
from circuits.core import handler, BaseComponent
class DropPrivileges(BaseComponent):
def init(self, user="nobody", group="nobody", **kwargs):
self.user = user
self.group = group
def drop_privileges(self):
if getuid() > 0:
# Running as non-root. Ignore.
return
try:
# Get the uid/gid from the name
uid = getpwnam(self.user).pw_uid
gid = getgrnam(self.group).gr_gid
except KeyError as error:
print("ERROR: Could not drop privileges {0:s}".format(error))
print(format_exc())
raise SystemExit(-1)
try:
# Remove group privileges
setgroups([])
# Try setting the new uid/gid
setgid(gid)
setuid(uid)
# Ensure a very conservative umask
umask(077)
except Exception as error:
print("ERROR: Could not drop privileges {0:s}".format(error))
print(format_exc())
raise SystemExit(-1)
@handler("ready", channel="*")
def on_ready(self, server, bind):
try:
self.drop_privileges()
finally:
self.unregister()
Fix DropPrivileges component to be compatible with python3
|
from pwd import getpwnam
from grp import getgrnam
from traceback import format_exc
from os import getuid, setgroups, setgid, setuid, umask
from circuits.core import handler, BaseComponent
class DropPrivileges(BaseComponent):
def init(self, user="nobody", group="nobody", **kwargs):
self.user = user
self.group = group
def drop_privileges(self):
if getuid() > 0:
# Running as non-root. Ignore.
return
try:
# Get the uid/gid from the name
uid = getpwnam(self.user).pw_uid
gid = getgrnam(self.group).gr_gid
except KeyError as error:
print("ERROR: Could not drop privileges {0:s}".format(error))
print(format_exc())
raise SystemExit(-1)
try:
# Remove group privileges
setgroups([])
# Try setting the new uid/gid
setgid(gid)
setuid(uid)
# Ensure a very conservative umask
umask(0o077)
except Exception as error:
print("ERROR: Could not drop privileges {0:s}".format(error))
print(format_exc())
raise SystemExit(-1)
@handler("ready", channel="*")
def on_ready(self, server, bind):
try:
self.drop_privileges()
finally:
self.unregister()
|
<commit_before>from pwd import getpwnam
from grp import getgrnam
from traceback import format_exc
from os import getuid, setgroups, setgid, setuid, umask
from circuits.core import handler, BaseComponent
class DropPrivileges(BaseComponent):
def init(self, user="nobody", group="nobody", **kwargs):
self.user = user
self.group = group
def drop_privileges(self):
if getuid() > 0:
# Running as non-root. Ignore.
return
try:
# Get the uid/gid from the name
uid = getpwnam(self.user).pw_uid
gid = getgrnam(self.group).gr_gid
except KeyError as error:
print("ERROR: Could not drop privileges {0:s}".format(error))
print(format_exc())
raise SystemExit(-1)
try:
# Remove group privileges
setgroups([])
# Try setting the new uid/gid
setgid(gid)
setuid(uid)
# Ensure a very conservative umask
umask(077)
except Exception as error:
print("ERROR: Could not drop privileges {0:s}".format(error))
print(format_exc())
raise SystemExit(-1)
@handler("ready", channel="*")
def on_ready(self, server, bind):
try:
self.drop_privileges()
finally:
self.unregister()
<commit_msg>Fix DropPrivileges component to be compatible with python3<commit_after>
|
from pwd import getpwnam
from grp import getgrnam
from traceback import format_exc
from os import getuid, setgroups, setgid, setuid, umask
from circuits.core import handler, BaseComponent
class DropPrivileges(BaseComponent):
def init(self, user="nobody", group="nobody", **kwargs):
self.user = user
self.group = group
def drop_privileges(self):
if getuid() > 0:
# Running as non-root. Ignore.
return
try:
# Get the uid/gid from the name
uid = getpwnam(self.user).pw_uid
gid = getgrnam(self.group).gr_gid
except KeyError as error:
print("ERROR: Could not drop privileges {0:s}".format(error))
print(format_exc())
raise SystemExit(-1)
try:
# Remove group privileges
setgroups([])
# Try setting the new uid/gid
setgid(gid)
setuid(uid)
# Ensure a very conservative umask
umask(0o077)
except Exception as error:
print("ERROR: Could not drop privileges {0:s}".format(error))
print(format_exc())
raise SystemExit(-1)
@handler("ready", channel="*")
def on_ready(self, server, bind):
try:
self.drop_privileges()
finally:
self.unregister()
|
from pwd import getpwnam
from grp import getgrnam
from traceback import format_exc
from os import getuid, setgroups, setgid, setuid, umask
from circuits.core import handler, BaseComponent
class DropPrivileges(BaseComponent):
def init(self, user="nobody", group="nobody", **kwargs):
self.user = user
self.group = group
def drop_privileges(self):
if getuid() > 0:
# Running as non-root. Ignore.
return
try:
# Get the uid/gid from the name
uid = getpwnam(self.user).pw_uid
gid = getgrnam(self.group).gr_gid
except KeyError as error:
print("ERROR: Could not drop privileges {0:s}".format(error))
print(format_exc())
raise SystemExit(-1)
try:
# Remove group privileges
setgroups([])
# Try setting the new uid/gid
setgid(gid)
setuid(uid)
# Ensure a very conservative umask
umask(077)
except Exception as error:
print("ERROR: Could not drop privileges {0:s}".format(error))
print(format_exc())
raise SystemExit(-1)
@handler("ready", channel="*")
def on_ready(self, server, bind):
try:
self.drop_privileges()
finally:
self.unregister()
Fix DropPrivileges component to be compatible with python3from pwd import getpwnam
from grp import getgrnam
from traceback import format_exc
from os import getuid, setgroups, setgid, setuid, umask
from circuits.core import handler, BaseComponent
class DropPrivileges(BaseComponent):
def init(self, user="nobody", group="nobody", **kwargs):
self.user = user
self.group = group
def drop_privileges(self):
if getuid() > 0:
# Running as non-root. Ignore.
return
try:
# Get the uid/gid from the name
uid = getpwnam(self.user).pw_uid
gid = getgrnam(self.group).gr_gid
except KeyError as error:
print("ERROR: Could not drop privileges {0:s}".format(error))
print(format_exc())
raise SystemExit(-1)
try:
# Remove group privileges
setgroups([])
# Try setting the new uid/gid
setgid(gid)
setuid(uid)
# Ensure a very conservative umask
umask(0o077)
except Exception as error:
print("ERROR: Could not drop privileges {0:s}".format(error))
print(format_exc())
raise SystemExit(-1)
@handler("ready", channel="*")
def on_ready(self, server, bind):
try:
self.drop_privileges()
finally:
self.unregister()
|
<commit_before>from pwd import getpwnam
from grp import getgrnam
from traceback import format_exc
from os import getuid, setgroups, setgid, setuid, umask
from circuits.core import handler, BaseComponent
class DropPrivileges(BaseComponent):
def init(self, user="nobody", group="nobody", **kwargs):
self.user = user
self.group = group
def drop_privileges(self):
if getuid() > 0:
# Running as non-root. Ignore.
return
try:
# Get the uid/gid from the name
uid = getpwnam(self.user).pw_uid
gid = getgrnam(self.group).gr_gid
except KeyError as error:
print("ERROR: Could not drop privileges {0:s}".format(error))
print(format_exc())
raise SystemExit(-1)
try:
# Remove group privileges
setgroups([])
# Try setting the new uid/gid
setgid(gid)
setuid(uid)
# Ensure a very conservative umask
umask(077)
except Exception as error:
print("ERROR: Could not drop privileges {0:s}".format(error))
print(format_exc())
raise SystemExit(-1)
@handler("ready", channel="*")
def on_ready(self, server, bind):
try:
self.drop_privileges()
finally:
self.unregister()
<commit_msg>Fix DropPrivileges component to be compatible with python3<commit_after>from pwd import getpwnam
from grp import getgrnam
from traceback import format_exc
from os import getuid, setgroups, setgid, setuid, umask
from circuits.core import handler, BaseComponent
class DropPrivileges(BaseComponent):
def init(self, user="nobody", group="nobody", **kwargs):
self.user = user
self.group = group
def drop_privileges(self):
if getuid() > 0:
# Running as non-root. Ignore.
return
try:
# Get the uid/gid from the name
uid = getpwnam(self.user).pw_uid
gid = getgrnam(self.group).gr_gid
except KeyError as error:
print("ERROR: Could not drop privileges {0:s}".format(error))
print(format_exc())
raise SystemExit(-1)
try:
# Remove group privileges
setgroups([])
# Try setting the new uid/gid
setgid(gid)
setuid(uid)
# Ensure a very conservative umask
umask(0o077)
except Exception as error:
print("ERROR: Could not drop privileges {0:s}".format(error))
print(format_exc())
raise SystemExit(-1)
@handler("ready", channel="*")
def on_ready(self, server, bind):
try:
self.drop_privileges()
finally:
self.unregister()
|
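The one-character circuits fix above exists because PEP 3127 removed the bare leading-zero octal notation: umask(077) is a SyntaxError on Python 3, while the 0o prefix parses on Python 2.6+ and Python 3 alike and denotes the same value (63). A small sketch:

import os

# 0o077 == 63: strip group/other permission bits from newly created files.
previous = os.umask(0o077)
print(oct(previous))  # the mask that was in effect before
os.umask(previous)    # restore the original mask so the demo has no side effects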
6c417f49ebd0f466ebf8100a28006e7c5ea2ff3d
|
tests/lib/__init__.py
|
tests/lib/__init__.py
|
from os.path import abspath, dirname, join
import sh
import psycopg2
import requests
PROJECT_FOLDER=dirname(dirname(abspath(__file__)))
DOCKER_FOLDER=join(PROJECT_FOLDER, 'docker')
def docker_compose(version, *args):
sh.docker_compose('-f', 'docker/{version}/docker-compose.yml'.format(version=version), *args)
def test_pg():
try:
with psycopg2.connect(host='localhost', port=5432, user='postgres', dbname='postgres') as conn:
with conn.cursor() as cursor:
cursor.execute('select 1 + 1;')
return cursor.fetchone()[0] == 2
except Exception:
return False
def test_es():
try:
return requests.get('http://localhost:9200').json()['tagline'] == 'You Know, for Search'
except Exception:
return False
|
from os.path import abspath, dirname, join
import sh
import psycopg2
import requests
PROJECT_FOLDER=dirname(dirname(abspath(__file__)))
DOCKER_FOLDER=join(PROJECT_FOLDER, 'docker')
def docker_compose(version, *args):
sh.docker_compose('-f', 'docker/{version}/docker-compose.yml'.format(version=version), *args)
def test_pg():
def callback(cursor):
cursor.execute('select 1 + 1;')
return cursor.fetchone()[0] == 2
def error():
return False
return sql(callback, error)
def test_es():
try:
return requests.get('http://localhost:9200').json()['tagline'] == 'You Know, for Search'
except Exception:
return False
def sql(callback, error):
try:
with psycopg2.connect(host='localhost', port=5432, user='postgres', dbname='postgres') as conn:
with conn.cursor() as cursor:
return callback(cursor)
except Exception:
return error()
|
Create somewhat questionable sql function
|
Create somewhat questionable sql function
|
Python
|
mit
|
matthewfranglen/postgres-elasticsearch-fdw
|
from os.path import abspath, dirname, join
import sh
import psycopg2
import requests
PROJECT_FOLDER=dirname(dirname(abspath(__file__)))
DOCKER_FOLDER=join(PROJECT_FOLDER, 'docker')
def docker_compose(version, *args):
sh.docker_compose('-f', 'docker/{version}/docker-compose.yml'.format(version=version), *args)
def test_pg():
try:
with psycopg2.connect(host='localhost', port=5432, user='postgres', dbname='postgres') as conn:
with conn.cursor() as cursor:
cursor.execute('select 1 + 1;')
return cursor.fetchone()[0] == 2
except Exception:
return False
def test_es():
try:
return requests.get('http://localhost:9200').json()['tagline'] == 'You Know, for Search'
except Exception:
return False
Create somewhat questionable sql function
|
from os.path import abspath, dirname, join
import sh
import psycopg2
import requests
PROJECT_FOLDER=dirname(dirname(abspath(__file__)))
DOCKER_FOLDER=join(PROJECT_FOLDER, 'docker')
def docker_compose(version, *args):
sh.docker_compose('-f', 'docker/{version}/docker-compose.yml'.format(version=version), *args)
def test_pg():
def callback(cursor):
cursor.execute('select 1 + 1;')
return cursor.fetchone()[0] == 2
def error():
return False
return sql(callback, error)
def test_es():
try:
return requests.get('http://localhost:9200').json()['tagline'] == 'You Know, for Search'
except Exception:
return False
def sql(callback, error):
try:
with psycopg2.connect(host='localhost', port=5432, user='postgres', dbname='postgres') as conn:
with conn.cursor() as cursor:
return callback(cursor)
except Exception:
return error()
|
<commit_before>from os.path import abspath, dirname, join
import sh
import psycopg2
import requests
PROJECT_FOLDER=dirname(dirname(abspath(__file__)))
DOCKER_FOLDER=join(PROJECT_FOLDER, 'docker')
def docker_compose(version, *args):
sh.docker_compose('-f', 'docker/{version}/docker-compose.yml'.format(version=version), *args)
def test_pg():
try:
with psycopg2.connect(host='localhost', port=5432, user='postgres', dbname='postgres') as conn:
with conn.cursor() as cursor:
cursor.execute('select 1 + 1;')
return cursor.fetchone()[0] == 2
except Exception:
return False
def test_es():
try:
return requests.get('http://localhost:9200').json()['tagline'] == 'You Know, for Search'
except Exception:
return False
<commit_msg>Create somewhat questionable sql function<commit_after>
|
from os.path import abspath, dirname, join
import sh
import psycopg2
import requests
PROJECT_FOLDER=dirname(dirname(abspath(__file__)))
DOCKER_FOLDER=join(PROJECT_FOLDER, 'docker')
def docker_compose(version, *args):
sh.docker_compose('-f', 'docker/{version}/docker-compose.yml'.format(version=version), *args)
def test_pg():
def callback(cursor):
cursor.execute('select 1 + 1;')
return cursor.fetchone()[0] == 2
def error():
return False
return sql(callback, error)
def test_es():
try:
return requests.get('http://localhost:9200').json()['tagline'] == 'You Know, for Search'
except Exception:
return False
def sql(callback, error):
try:
with psycopg2.connect(host='localhost', port=5432, user='postgres', dbname='postgres') as conn:
with conn.cursor() as cursor:
return callback(cursor)
except Exception:
return error()
|
from os.path import abspath, dirname, join
import sh
import psycopg2
import requests
PROJECT_FOLDER=dirname(dirname(abspath(__file__)))
DOCKER_FOLDER=join(PROJECT_FOLDER, 'docker')
def docker_compose(version, *args):
sh.docker_compose('-f', 'docker/{version}/docker-compose.yml'.format(version=version), *args)
def test_pg():
try:
with psycopg2.connect(host='localhost', port=5432, user='postgres', dbname='postgres') as conn:
with conn.cursor() as cursor:
cursor.execute('select 1 + 1;')
return cursor.fetchone()[0] == 2
except Exception:
return False
def test_es():
try:
return requests.get('http://localhost:9200').json()['tagline'] == 'You Know, for Search'
except Exception:
return False
Create somewhat questionable sql functionfrom os.path import abspath, dirname, join
import sh
import psycopg2
import requests
PROJECT_FOLDER=dirname(dirname(abspath(__file__)))
DOCKER_FOLDER=join(PROJECT_FOLDER, 'docker')
def docker_compose(version, *args):
sh.docker_compose('-f', 'docker/{version}/docker-compose.yml'.format(version=version), *args)
def test_pg():
def callback(cursor):
cursor.execute('select 1 + 1;')
return cursor.fetchone()[0] == 2
def error():
return False
return sql(callback, error)
def test_es():
try:
return requests.get('http://localhost:9200').json()['tagline'] == 'You Know, for Search'
except Exception:
return False
def sql(callback, error):
try:
with psycopg2.connect(host='localhost', port=5432, user='postgres', dbname='postgres') as conn:
with conn.cursor() as cursor:
return callback(cursor)
except Exception:
return error()
|
<commit_before>from os.path import abspath, dirname, join
import sh
import psycopg2
import requests
PROJECT_FOLDER=dirname(dirname(abspath(__file__)))
DOCKER_FOLDER=join(PROJECT_FOLDER, 'docker')
def docker_compose(version, *args):
sh.docker_compose('-f', 'docker/{version}/docker-compose.yml'.format(version=version), *args)
def test_pg():
try:
with psycopg2.connect(host='localhost', port=5432, user='postgres', dbname='postgres') as conn:
with conn.cursor() as cursor:
cursor.execute('select 1 + 1;')
return cursor.fetchone()[0] == 2
except Exception:
return False
def test_es():
try:
return requests.get('http://localhost:9200').json()['tagline'] == 'You Know, for Search'
except Exception:
return False
<commit_msg>Create somewhat questionable sql function<commit_after>from os.path import abspath, dirname, join
import sh
import psycopg2
import requests
PROJECT_FOLDER=dirname(dirname(abspath(__file__)))
DOCKER_FOLDER=join(PROJECT_FOLDER, 'docker')
def docker_compose(version, *args):
sh.docker_compose('-f', 'docker/{version}/docker-compose.yml'.format(version=version), *args)
def test_pg():
def callback(cursor):
cursor.execute('select 1 + 1;')
return cursor.fetchone()[0] == 2
def error():
return False
return sql(callback, error)
def test_es():
try:
return requests.get('http://localhost:9200').json()['tagline'] == 'You Know, for Search'
except Exception:
return False
def sql(callback, error):
try:
with psycopg2.connect(host='localhost', port=5432, user='postgres', dbname='postgres') as conn:
with conn.cursor() as cursor:
return callback(cursor)
except Exception:
return error()
|
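A minimal, self-contained sketch of the callback pattern this record introduces, using sqlite3 instead of psycopg2 so it runs without a database server (the helper names mirror the record; everything else is illustrative):

import sqlite3

def sql(callback, error, db_path=':memory:'):
    # Open a connection, hand a cursor to the callback, and fall back to
    # the error handler on any failure -- the same shape as the record's helper.
    try:
        with sqlite3.connect(db_path) as conn:
            return callback(conn.cursor())
    except Exception:
        return error()

def test_arithmetic():
    def callback(cursor):
        cursor.execute('select 1 + 1;')
        return cursor.fetchone()[0] == 2
    return sql(callback, lambda: False)

print(test_arithmetic())  # True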
3fc1637350cb85b3c83d1a4561493bf526cea810
|
kolibri/__init__.py
|
kolibri/__init__.py
|
from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
from .utils.version import get_version
#: This may not be the exact version as it's subject to modification with
#: get_version() - use ``kolibri.__version__`` for the exact version string.
VERSION = (0, 9, 0, 'alpha', 0)
__author__ = 'Learning Equality'
__email__ = 'info@learningequality.org'
__version__ = str(get_version(VERSION))
|
from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
from .utils.version import get_version
#: This may not be the exact version as it's subject to modification with
#: get_version() - use ``kolibri.__version__`` for the exact version string.
VERSION = (0, 10, 0, 'alpha', 0)
__author__ = 'Learning Equality'
__email__ = 'info@learningequality.org'
__version__ = str(get_version(VERSION))
|
Update 0.9 alpha to 0.10 alpha
|
Update 0.9 alpha to 0.10 alpha
|
Python
|
mit
|
benjaoming/kolibri,indirectlylit/kolibri,lyw07/kolibri,mrpau/kolibri,DXCanas/kolibri,indirectlylit/kolibri,indirectlylit/kolibri,DXCanas/kolibri,benjaoming/kolibri,benjaoming/kolibri,jonboiser/kolibri,learningequality/kolibri,lyw07/kolibri,mrpau/kolibri,jonboiser/kolibri,lyw07/kolibri,lyw07/kolibri,learningequality/kolibri,jonboiser/kolibri,DXCanas/kolibri,learningequality/kolibri,jonboiser/kolibri,mrpau/kolibri,DXCanas/kolibri,mrpau/kolibri,indirectlylit/kolibri,learningequality/kolibri,benjaoming/kolibri
|
from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
from .utils.version import get_version
#: This may not be the exact version as it's subject to modification with
#: get_version() - use ``kolibri.__version__`` for the exact version string.
VERSION = (0, 9, 0, 'alpha', 0)
__author__ = 'Learning Equality'
__email__ = 'info@learningequality.org'
__version__ = str(get_version(VERSION))
Update 0.9 alpha to 0.10 alpha
|
from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
from .utils.version import get_version
#: This may not be the exact version as it's subject to modification with
#: get_version() - use ``kolibri.__version__`` for the exact version string.
VERSION = (0, 10, 0, 'alpha', 0)
__author__ = 'Learning Equality'
__email__ = 'info@learningequality.org'
__version__ = str(get_version(VERSION))
|
<commit_before>from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
from .utils.version import get_version
#: This may not be the exact version as it's subject to modification with
#: get_version() - use ``kolibri.__version__`` for the exact version string.
VERSION = (0, 9, 0, 'alpha', 0)
__author__ = 'Learning Equality'
__email__ = 'info@learningequality.org'
__version__ = str(get_version(VERSION))
<commit_msg>Update 0.9 alpha to 0.10 alpha<commit_after>
|
from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
from .utils.version import get_version
#: This may not be the exact version as it's subject to modification with
#: get_version() - use ``kolibri.__version__`` for the exact version string.
VERSION = (0, 10, 0, 'alpha', 0)
__author__ = 'Learning Equality'
__email__ = 'info@learningequality.org'
__version__ = str(get_version(VERSION))
|
from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
from .utils.version import get_version
#: This may not be the exact version as it's subject to modification with
#: get_version() - use ``kolibri.__version__`` for the exact version string.
VERSION = (0, 9, 0, 'alpha', 0)
__author__ = 'Learning Equality'
__email__ = 'info@learningequality.org'
__version__ = str(get_version(VERSION))
Update 0.9 alpha to 0.10 alphafrom __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
from .utils.version import get_version
#: This may not be the exact version as it's subject to modification with
#: get_version() - use ``kolibri.__version__`` for the exact version string.
VERSION = (0, 10, 0, 'alpha', 0)
__author__ = 'Learning Equality'
__email__ = 'info@learningequality.org'
__version__ = str(get_version(VERSION))
|
<commit_before>from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
from .utils.version import get_version
#: This may not be the exact version as it's subject to modification with
#: get_version() - use ``kolibri.__version__`` for the exact version string.
VERSION = (0, 9, 0, 'alpha', 0)
__author__ = 'Learning Equality'
__email__ = 'info@learningequality.org'
__version__ = str(get_version(VERSION))
<commit_msg>Update 0.9 alpha to 0.10 alpha<commit_after>from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
from .utils.version import get_version
#: This may not be the exact version as it's subject to modification with
#: get_version() - use ``kolibri.__version__`` for the exact version string.
VERSION = (0, 10, 0, 'alpha', 0)
__author__ = 'Learning Equality'
__email__ = 'info@learningequality.org'
__version__ = str(get_version(VERSION))
|
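get_version() here is Kolibri's own utility and is not shown in the record; a hypothetical minimal equivalent that turns a (major, minor, patch, stage, n) tuple into a version string might look like this (the real helper also consults git metadata, omitted here):

def get_version(version):
    # Hypothetical sketch: join the numeric parts, then append a
    # pre-release suffix for non-final stages.
    major, minor, patch, stage, n = version
    base = '{}.{}.{}'.format(major, minor, patch)
    if stage == 'final':
        return base
    suffix = {'alpha': 'a', 'beta': 'b', 'rc': 'rc'}[stage]
    return '{}{}{}'.format(base, suffix, n)

print(get_version((0, 10, 0, 'alpha', 0)))  # 0.10.0a0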
6ecc64a7a22b9b57958dad704d309b18028140e1
|
tests/test_drivers.py
|
tests/test_drivers.py
|
import logging
import os.path
import shutil
import sys
import tempfile
import fiona
def test_options(tmpdir=None):
"""Test that setting CPL_DEBUG=ON works"""
if tmpdir is None:
tempdir = tempfile.mkdtemp()
logfile = os.path.join(tempdir, 'example.log')
else:
logfile = str(tmpdir.join('example.log'))
logger = logging.getLogger('Fiona')
logger.setLevel(logging.DEBUG)
fh = logging.FileHandler(logfile)
fh.setLevel(logging.DEBUG)
logger.addHandler(fh)
with fiona.drivers(CPL_DEBUG=True):
c = fiona.open("docs/data/test_uk.shp")
c.close()
log = open(logfile).read()
assert "OGR Error 0: OGR: OGROpen(docs/data/test_uk.shp" in log
if tempdir and tmpdir is None:
shutil.rmtree(tempdir)
|
import logging
import os.path
import shutil
import sys
import tempfile
import fiona
def test_options(tmpdir=None):
"""Test that setting CPL_DEBUG=ON works"""
if tmpdir is None:
tempdir = tempfile.mkdtemp()
logfile = os.path.join(tempdir, 'example.log')
else:
logfile = str(tmpdir.join('example.log'))
logger = logging.getLogger('Fiona')
logger.setLevel(logging.DEBUG)
fh = logging.FileHandler(logfile)
fh.setLevel(logging.DEBUG)
logger.addHandler(fh)
with fiona.drivers(CPL_DEBUG=True):
c = fiona.open("docs/data/test_uk.shp")
c.close()
log = open(logfile).read()
assert "Option CPL_DEBUG" in log
if tempdir and tmpdir is None:
shutil.rmtree(tempdir)
|
Test log for a string with no "'b"
|
Test log for a string with no "'b"
|
Python
|
bsd-3-clause
|
perrygeo/Fiona,Toblerity/Fiona,rbuffat/Fiona,perrygeo/Fiona,rbuffat/Fiona,Toblerity/Fiona,johanvdw/Fiona
|
import logging
import os.path
import shutil
import sys
import tempfile
import fiona
def test_options(tmpdir=None):
"""Test that setting CPL_DEBUG=ON works"""
if tmpdir is None:
tempdir = tempfile.mkdtemp()
logfile = os.path.join(tempdir, 'example.log')
else:
logfile = str(tmpdir.join('example.log'))
logger = logging.getLogger('Fiona')
logger.setLevel(logging.DEBUG)
fh = logging.FileHandler(logfile)
fh.setLevel(logging.DEBUG)
logger.addHandler(fh)
with fiona.drivers(CPL_DEBUG=True):
c = fiona.open("docs/data/test_uk.shp")
c.close()
log = open(logfile).read()
assert "OGR Error 0: OGR: OGROpen(docs/data/test_uk.shp" in log
if tempdir and tmpdir is None:
shutil.rmtree(tempdir)
Test log for a string with no "'b"
|
import logging
import os.path
import shutil
import sys
import tempfile
import fiona
def test_options(tmpdir=None):
"""Test that setting CPL_DEBUG=ON works"""
if tmpdir is None:
tempdir = tempfile.mkdtemp()
logfile = os.path.join(tempdir, 'example.log')
else:
logfile = str(tmpdir.join('example.log'))
logger = logging.getLogger('Fiona')
logger.setLevel(logging.DEBUG)
fh = logging.FileHandler(logfile)
fh.setLevel(logging.DEBUG)
logger.addHandler(fh)
with fiona.drivers(CPL_DEBUG=True):
c = fiona.open("docs/data/test_uk.shp")
c.close()
log = open(logfile).read()
assert "Option CPL_DEBUG" in log
if tempdir and tmpdir is None:
shutil.rmtree(tempdir)
|
<commit_before>
import logging
import os.path
import shutil
import sys
import tempfile
import fiona
def test_options(tmpdir=None):
"""Test that setting CPL_DEBUG=ON works"""
if tmpdir is None:
tempdir = tempfile.mkdtemp()
logfile = os.path.join(tempdir, 'example.log')
else:
logfile = str(tmpdir.join('example.log'))
logger = logging.getLogger('Fiona')
logger.setLevel(logging.DEBUG)
fh = logging.FileHandler(logfile)
fh.setLevel(logging.DEBUG)
logger.addHandler(fh)
with fiona.drivers(CPL_DEBUG=True):
c = fiona.open("docs/data/test_uk.shp")
c.close()
log = open(logfile).read()
assert "OGR Error 0: OGR: OGROpen(docs/data/test_uk.shp" in log
if tempdir and tmpdir is None:
shutil.rmtree(tempdir)
<commit_msg>Test log for a string with no "'b"<commit_after>
|
import logging
import os.path
import shutil
import sys
import tempfile
import fiona
def test_options(tmpdir=None):
"""Test that setting CPL_DEBUG=ON works"""
if tmpdir is None:
tempdir = tempfile.mkdtemp()
logfile = os.path.join(tempdir, 'example.log')
else:
logfile = str(tmpdir.join('example.log'))
logger = logging.getLogger('Fiona')
logger.setLevel(logging.DEBUG)
fh = logging.FileHandler(logfile)
fh.setLevel(logging.DEBUG)
logger.addHandler(fh)
with fiona.drivers(CPL_DEBUG=True):
c = fiona.open("docs/data/test_uk.shp")
c.close()
log = open(logfile).read()
assert "Option CPL_DEBUG" in log
if tempdir and tmpdir is None:
shutil.rmtree(tempdir)
|
import logging
import os.path
import shutil
import sys
import tempfile
import fiona
def test_options(tmpdir=None):
"""Test that setting CPL_DEBUG=ON works"""
if tmpdir is None:
tempdir = tempfile.mkdtemp()
logfile = os.path.join(tempdir, 'example.log')
else:
logfile = str(tmpdir.join('example.log'))
logger = logging.getLogger('Fiona')
logger.setLevel(logging.DEBUG)
fh = logging.FileHandler(logfile)
fh.setLevel(logging.DEBUG)
logger.addHandler(fh)
with fiona.drivers(CPL_DEBUG=True):
c = fiona.open("docs/data/test_uk.shp")
c.close()
log = open(logfile).read()
assert "OGR Error 0: OGR: OGROpen(docs/data/test_uk.shp" in log
if tempdir and tmpdir is None:
shutil.rmtree(tempdir)
Test log for a string with no "'b"
import logging
import os.path
import shutil
import sys
import tempfile
import fiona
def test_options(tmpdir=None):
"""Test that setting CPL_DEBUG=ON works"""
if tmpdir is None:
tempdir = tempfile.mkdtemp()
logfile = os.path.join(tempdir, 'example.log')
else:
logfile = str(tmpdir.join('example.log'))
logger = logging.getLogger('Fiona')
logger.setLevel(logging.DEBUG)
fh = logging.FileHandler(logfile)
fh.setLevel(logging.DEBUG)
logger.addHandler(fh)
with fiona.drivers(CPL_DEBUG=True):
c = fiona.open("docs/data/test_uk.shp")
c.close()
log = open(logfile).read()
assert "Option CPL_DEBUG" in log
if tempdir and tmpdir is None:
shutil.rmtree(tempdir)
|
<commit_before>
import logging
import os.path
import shutil
import sys
import tempfile
import fiona
def test_options(tmpdir=None):
"""Test that setting CPL_DEBUG=ON works"""
if tmpdir is None:
tempdir = tempfile.mkdtemp()
logfile = os.path.join(tempdir, 'example.log')
else:
logfile = str(tmpdir.join('example.log'))
logger = logging.getLogger('Fiona')
logger.setLevel(logging.DEBUG)
fh = logging.FileHandler(logfile)
fh.setLevel(logging.DEBUG)
logger.addHandler(fh)
with fiona.drivers(CPL_DEBUG=True):
c = fiona.open("docs/data/test_uk.shp")
c.close()
log = open(logfile).read()
assert "OGR Error 0: OGR: OGROpen(docs/data/test_uk.shp" in log
if tempdir and tmpdir is None:
shutil.rmtree(tempdir)
<commit_msg>Test log for a string with no "'b"<commit_after>
import logging
import os.path
import shutil
import sys
import tempfile
import fiona
def test_options(tmpdir=None):
"""Test that setting CPL_DEBUG=ON works"""
if tmpdir is None:
tempdir = tempfile.mkdtemp()
logfile = os.path.join(tempdir, 'example.log')
else:
logfile = str(tmpdir.join('example.log'))
logger = logging.getLogger('Fiona')
logger.setLevel(logging.DEBUG)
fh = logging.FileHandler(logfile)
fh.setLevel(logging.DEBUG)
logger.addHandler(fh)
with fiona.drivers(CPL_DEBUG=True):
c = fiona.open("docs/data/test_uk.shp")
c.close()
log = open(logfile).read()
assert "Option CPL_DEBUG" in log
if tempdir and tmpdir is None:
shutil.rmtree(tempdir)
|
b69301c57076f86e99f738d5434dd75fd912753d
|
tests/test_preview.py
|
tests/test_preview.py
|
import pytest
from web_test_base import *
class TestIATIPreview(WebTestBase):
requests_to_load = {
'IATI Preview': {
'url': 'http://preview.iatistandard.org/'
}
}
def test_contains_links(self, loaded_request):
"""
Test that each page contains links to the defined URLs.
"""
result = utility.get_links_from_page(loaded_request)
assert "http://www.iatistandard.org/" in result
|
import pytest
from web_test_base import *
class TestIATIPreview(WebTestBase):
requests_to_load = {
'IATI Preview': {
'url': 'http://preview.iatistandard.org/'
}
}
def test_contains_links(self, loaded_request):
"""
Test that each page contains links to the defined URLs.
"""
result = utility.get_links_from_page(loaded_request)
assert "http://www.iatistandard.org/" in result
@pytest.mark.parametrize("target_request", ["IATI Preview"])
def test_xml_web_address_form_presence(self, target_request):
"""
Test that there is a form to enter a URL of a valid XML file on the Preview Tool.
"""
req = self.loaded_request_from_test_name(target_request)
form_xpath = '//*[@id="main"]/div/div/div[1]/div/form'
form_action_xpath = '//*[@id="main"]/div/div/div[1]/div/form/@action'
form_method_xpath = '//*[@id="main"]/div/div/div[1]/div/form/@method'
input_xpath = '//*[@id="url"]'
button_xpath = '//*[@id="main"]/div/div/div[1]/div/form/div/div/span/button'
forms = utility.locate_xpath_result(req, form_xpath)
form_action = utility.locate_xpath_result(req, form_action_xpath)
form_method = utility.locate_xpath_result(req, form_method_xpath)
form_inputs = utility.locate_xpath_result(req, input_xpath)
form_buttons = utility.locate_xpath_result(req, input_xpath)
assert len(forms) == 1
assert form_action == ['index.php']
assert form_method == ['get']
assert len(form_inputs) == 1
assert len(form_buttons) == 1
|
Add test for form on Preview Tool
|
Add test for form on Preview Tool
Test to see that the form to enter a URL to use the Preview Tool
exists, has the correct action and has the correct elements.
|
Python
|
mit
|
IATI/IATI-Website-Tests
|
import pytest
from web_test_base import *
class TestIATIPreview(WebTestBase):
requests_to_load = {
'IATI Preview': {
'url': 'http://preview.iatistandard.org/'
}
}
def test_contains_links(self, loaded_request):
"""
Test that each page contains links to the defined URLs.
"""
result = utility.get_links_from_page(loaded_request)
assert "http://www.iatistandard.org/" in result
Add test for form on Preview Tool
Test to see that the form to enter a URL to use the Preview Tool
exists, has the correct action and has the correct elements.
|
import pytest
from web_test_base import *
class TestIATIPreview(WebTestBase):
requests_to_load = {
'IATI Preview': {
'url': 'http://preview.iatistandard.org/'
}
}
def test_contains_links(self, loaded_request):
"""
Test that each page contains links to the defined URLs.
"""
result = utility.get_links_from_page(loaded_request)
assert "http://www.iatistandard.org/" in result
@pytest.mark.parametrize("target_request", ["IATI Preview"])
def test_xml_web_address_form_presence(self, target_request):
"""
Test that there is a form to enter a URL of a valid XML file on the Preview Tool.
"""
req = self.loaded_request_from_test_name(target_request)
form_xpath = '//*[@id="main"]/div/div/div[1]/div/form'
form_action_xpath = '//*[@id="main"]/div/div/div[1]/div/form/@action'
form_method_xpath = '//*[@id="main"]/div/div/div[1]/div/form/@method'
input_xpath = '//*[@id="url"]'
button_xpath = '//*[@id="main"]/div/div/div[1]/div/form/div/div/span/button'
forms = utility.locate_xpath_result(req, form_xpath)
form_action = utility.locate_xpath_result(req, form_action_xpath)
form_method = utility.locate_xpath_result(req, form_method_xpath)
form_inputs = utility.locate_xpath_result(req, input_xpath)
form_buttons = utility.locate_xpath_result(req, input_xpath)
assert len(forms) == 1
assert form_action == ['index.php']
assert form_method == ['get']
assert len(form_inputs) == 1
assert len(form_buttons) == 1
|
<commit_before>import pytest
from web_test_base import *
class TestIATIPreview(WebTestBase):
requests_to_load = {
'IATI Preview': {
'url': 'http://preview.iatistandard.org/'
}
}
def test_contains_links(self, loaded_request):
"""
Test that each page contains links to the defined URLs.
"""
result = utility.get_links_from_page(loaded_request)
assert "http://www.iatistandard.org/" in result
<commit_msg>Add test for form on Preview Tool
Test to see that the form to enter a URL to use the Preview Tool
exists, has the correct action and has the correct elements.<commit_after>
|
import pytest
from web_test_base import *
class TestIATIPreview(WebTestBase):
requests_to_load = {
'IATI Preview': {
'url': 'http://preview.iatistandard.org/'
}
}
def test_contains_links(self, loaded_request):
"""
Test that each page contains links to the defined URLs.
"""
result = utility.get_links_from_page(loaded_request)
assert "http://www.iatistandard.org/" in result
@pytest.mark.parametrize("target_request", ["IATI Preview"])
def test_xml_web_address_form_presence(self, target_request):
"""
Test that there is a form to enter a URL of a valid XML file on the Preview Tool.
"""
req = self.loaded_request_from_test_name(target_request)
form_xpath = '//*[@id="main"]/div/div/div[1]/div/form'
form_action_xpath = '//*[@id="main"]/div/div/div[1]/div/form/@action'
form_method_xpath = '//*[@id="main"]/div/div/div[1]/div/form/@method'
input_xpath = '//*[@id="url"]'
button_xpath = '//*[@id="main"]/div/div/div[1]/div/form/div/div/span/button'
forms = utility.locate_xpath_result(req, form_xpath)
form_action = utility.locate_xpath_result(req, form_action_xpath)
form_method = utility.locate_xpath_result(req, form_method_xpath)
form_inputs = utility.locate_xpath_result(req, input_xpath)
form_buttons = utility.locate_xpath_result(req, input_xpath)
assert len(forms) == 1
assert form_action == ['index.php']
assert form_method == ['get']
assert len(form_inputs) == 1
assert len(form_buttons) == 1
|
import pytest
from web_test_base import *
class TestIATIPreview(WebTestBase):
requests_to_load = {
'IATI Preview': {
'url': 'http://preview.iatistandard.org/'
}
}
def test_contains_links(self, loaded_request):
"""
Test that each page contains links to the defined URLs.
"""
result = utility.get_links_from_page(loaded_request)
assert "http://www.iatistandard.org/" in result
Add test for form on Preview Tool
Test to see that the form to enter a URL to use the Preview Tool
exists, has the correct action and has the correct elements.import pytest
from web_test_base import *
class TestIATIPreview(WebTestBase):
requests_to_load = {
'IATI Preview': {
'url': 'http://preview.iatistandard.org/'
}
}
def test_contains_links(self, loaded_request):
"""
Test that each page contains links to the defined URLs.
"""
result = utility.get_links_from_page(loaded_request)
assert "http://www.iatistandard.org/" in result
@pytest.mark.parametrize("target_request", ["IATI Preview"])
def test_xml_web_address_form_presence(self, target_request):
"""
Test that there is a form to enter a URL of a valid XML file on the Preview Tool.
"""
req = self.loaded_request_from_test_name(target_request)
form_xpath = '//*[@id="main"]/div/div/div[1]/div/form'
form_action_xpath = '//*[@id="main"]/div/div/div[1]/div/form/@action'
form_method_xpath = '//*[@id="main"]/div/div/div[1]/div/form/@method'
input_xpath = '//*[@id="url"]'
button_xpath = '//*[@id="main"]/div/div/div[1]/div/form/div/div/span/button'
forms = utility.locate_xpath_result(req, form_xpath)
form_action = utility.locate_xpath_result(req, form_action_xpath)
form_method = utility.locate_xpath_result(req, form_method_xpath)
form_inputs = utility.locate_xpath_result(req, input_xpath)
form_buttons = utility.locate_xpath_result(req, input_xpath)
assert len(forms) == 1
assert form_action == ['index.php']
assert form_method == ['get']
assert len(form_inputs) == 1
assert len(form_buttons) == 1
|
<commit_before>import pytest
from web_test_base import *
class TestIATIPreview(WebTestBase):
requests_to_load = {
'IATI Preview': {
'url': 'http://preview.iatistandard.org/'
}
}
def test_contains_links(self, loaded_request):
"""
Test that each page contains links to the defined URLs.
"""
result = utility.get_links_from_page(loaded_request)
assert "http://www.iatistandard.org/" in result
<commit_msg>Add test for form on Preview Tool
Test to see that the form to enter a URL to use the Preview Tool
exists, has the correct action and has the correct elements.<commit_after>import pytest
from web_test_base import *
class TestIATIPreview(WebTestBase):
requests_to_load = {
'IATI Preview': {
'url': 'http://preview.iatistandard.org/'
}
}
def test_contains_links(self, loaded_request):
"""
Test that each page contains links to the defined URLs.
"""
result = utility.get_links_from_page(loaded_request)
assert "http://www.iatistandard.org/" in result
@pytest.mark.parametrize("target_request", ["IATI Preview"])
def test_xml_web_address_form_presence(self, target_request):
"""
Test that there is a form to enter a URL of a valid XML file on the Preview Tool.
"""
req = self.loaded_request_from_test_name(target_request)
form_xpath = '//*[@id="main"]/div/div/div[1]/div/form'
form_action_xpath = '//*[@id="main"]/div/div/div[1]/div/form/@action'
form_method_xpath = '//*[@id="main"]/div/div/div[1]/div/form/@method'
input_xpath = '//*[@id="url"]'
button_xpath = '//*[@id="main"]/div/div/div[1]/div/form/div/div/span/button'
forms = utility.locate_xpath_result(req, form_xpath)
form_action = utility.locate_xpath_result(req, form_action_xpath)
form_method = utility.locate_xpath_result(req, form_method_xpath)
form_inputs = utility.locate_xpath_result(req, input_xpath)
form_buttons = utility.locate_xpath_result(req, input_xpath)
assert len(forms) == 1
assert form_action == ['index.php']
assert form_method == ['get']
assert len(form_inputs) == 1
assert len(form_buttons) == 1
|
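utility.locate_xpath_result in this record belongs to the project's own web_test_base helpers (note the record assigns form_buttons from input_xpath, so button_xpath goes unused there). A rough standalone equivalent with lxml, run against a made-up HTML snippet:

from lxml import html

def locate_xpath_result(page_source, xpath):
    # Parse the page and return whatever nodes or attributes the XPath matches.
    return html.fromstring(page_source).xpath(xpath)

page = '<form action="index.php" method="get"><input id="url"/></form>'
assert locate_xpath_result(page, '//form/@action') == ['index.php']
assert locate_xpath_result(page, '//form/@method') == ['get']
assert len(locate_xpath_result(page, '//*[@id="url"]')) == 1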
1b0d153b0f08e0ca5b962b0b9d839f745a035c62
|
tests/test_stock.py
|
tests/test_stock.py
|
import unittest
from datetime import datetime
from stock import Stock
class StockTest(unittest.TestCase):
def test_price_of_a_new_stock_class_should_be_None(self):
stock = Stock("GOOG")
self.assertIsNone(stock.price)
def test_stock_update(self):
"""An update should set the price on the stock object.
Notes:
We will be using the `datetime` module for the timestamp.
"""
goog = Stock("GOOG")
goog.update(datetime(2014, 2, 12), price=10)
self.assertEqual(10, goog.price)
if __name__ == "__main__":
unittest.main()
|
import unittest
from datetime import datetime
from stock import Stock
class StockTest(unittest.TestCase):
def test_new_stock_price(self):
"""A new stock should have a price that is None.
"""
stock = Stock("GOOG")
self.assertIsNone(stock.price)
def test_stock_update(self):
"""An update should set the price on the stock object.
Notes:
We will be using the `datetime` module for the timestamp.
"""
stock = Stock("GOOG")
stock.update(datetime(2014, 2, 12), price=10)
self.assertEqual(10, stock.price)
if __name__ == "__main__":
unittest.main()
|
Add comment and clean up code.
|
Add comment and clean up code.
|
Python
|
mit
|
bsmukasa/stock_alerter
|
import unittest
from datetime import datetime
from stock import Stock
class StockTest(unittest.TestCase):
def test_price_of_a_new_stock_class_should_be_None(self):
stock = Stock("GOOG")
self.assertIsNone(stock.price)
def test_stock_update(self):
"""An update should set the price on the stock object.
Notes:
We will be using the `datetime` module for the timestamp.
"""
goog = Stock("GOOG")
goog.update(datetime(2014, 2, 12), price=10)
self.assertEqual(10, goog.price)
if __name__ == "__main__":
unittest.main()
Add comment and clean up code.
|
import unittest
from datetime import datetime
from stock import Stock
class StockTest(unittest.TestCase):
def test_new_stock_price(self):
"""A new stock should have a price that is None.
"""
stock = Stock("GOOG")
self.assertIsNone(stock.price)
def test_stock_update(self):
"""An update should set the price on the stock object.
Notes:
We will be using the `datetime` module for the timestamp.
"""
stock = Stock("GOOG")
stock.update(datetime(2014, 2, 12), price=10)
self.assertEqual(10, stock.price)
if __name__ == "__main__":
unittest.main()
|
<commit_before>import unittest
from datetime import datetime
from stock import Stock
class StockTest(unittest.TestCase):
def test_price_of_a_new_stock_class_should_be_None(self):
stock = Stock("GOOG")
self.assertIsNone(stock.price)
def test_stock_update(self):
"""An update should set the price on the stock object.
Notes:
We will be using the `datetime` module for the timestamp.
"""
goog = Stock("GOOG")
goog.update(datetime(2014, 2, 12), price=10)
self.assertEqual(10, goog.price)
if __name__ == "__main__":
unittest.main()
<commit_msg>Add comment and clean up code.<commit_after>
|
import unittest
from datetime import datetime
from stock import Stock
class StockTest(unittest.TestCase):
def test_new_stock_price(self):
"""A new stock should have a price that is None.
"""
stock = Stock("GOOG")
self.assertIsNone(stock.price)
def test_stock_update(self):
"""An update should set the price on the stock object.
Notes:
We will be using the `datetime` module for the timestamp.
"""
stock = Stock("GOOG")
stock.update(datetime(2014, 2, 12), price=10)
self.assertEqual(10, stock.price)
if __name__ == "__main__":
unittest.main()
|
import unittest
from datetime import datetime
from stock import Stock
class StockTest(unittest.TestCase):
def test_price_of_a_new_stock_class_should_be_None(self):
stock = Stock("GOOG")
self.assertIsNone(stock.price)
def test_stock_update(self):
"""An update should set the price on the stock object.
Notes:
We will be using the `datetime` module for the timestamp.
"""
goog = Stock("GOOG")
goog.update(datetime(2014, 2, 12), price=10)
self.assertEqual(10, goog.price)
if __name__ == "__main__":
unittest.main()
Add comment and clean up code.import unittest
from datetime import datetime
from stock import Stock
class StockTest(unittest.TestCase):
def test_new_stock_price(self):
"""A new stock should have a price that is None.
"""
stock = Stock("GOOG")
self.assertIsNone(stock.price)
def test_stock_update(self):
"""An update should set the price on the stock object.
Notes:
We will be using the `datetime` module for the timestamp.
"""
stock = Stock("GOOG")
stock.update(datetime(2014, 2, 12), price=10)
self.assertEqual(10, stock.price)
if __name__ == "__main__":
unittest.main()
|
<commit_before>import unittest
from datetime import datetime
from stock import Stock
class StockTest(unittest.TestCase):
def test_price_of_a_new_stock_class_should_be_None(self):
stock = Stock("GOOG")
self.assertIsNone(stock.price)
def test_stock_update(self):
"""An update should set the price on the stock object.
Notes:
We will be using the `datetime` module for the timestamp.
"""
goog = Stock("GOOG")
goog.update(datetime(2014, 2, 12), price=10)
self.assertEqual(10, goog.price)
if __name__ == "__main__":
unittest.main()
<commit_msg>Add comment and clean up code.<commit_after>import unittest
from datetime import datetime
from stock import Stock
class StockTest(unittest.TestCase):
def test_new_stock_price(self):
"""A new stock should have a price that is None.
"""
stock = Stock("GOOG")
self.assertIsNone(stock.price)
def test_stock_update(self):
"""An update should set the price on the stock object.
Notes:
We will be using the `datetime` module for the timestamp.
"""
stock = Stock("GOOG")
stock.update(datetime(2014, 2, 12), price=10)
self.assertEqual(10, stock.price)
if __name__ == "__main__":
unittest.main()
|
9138112f3abef61a96485ade7e0b484a43429b81
|
tests/unit/actions.py
|
tests/unit/actions.py
|
"""Unit tests for `pycall.actions`."""
class TestActions(TestCase):
"""Test all `pycall.actions` classes to ensure they are actual
`pycall.actions.Action` subclasses.
"""
pass
|
"""Unit tests for `pycall.actions`."""
|
Revert "Adding test case stub."
|
Revert "Adding test case stub."
This reverts commit 6c6b08a63b308690144d73f54b98000e3b1b5672.
|
Python
|
unlicense
|
rdegges/pycall
|
"""Unit tests for `pycall.actions`."""
class TestActions(TestCase):
"""Test all `pycall.actions` classes to ensure they are actual
`pycall.actions.Action` subclasses.
"""
pass
Revert "Adding test case stub."
This reverts commit 6c6b08a63b308690144d73f54b98000e3b1b5672.
|
"""Unit tests for `pycall.actions`."""
|
<commit_before>"""Unit tests for `pycall.actions`."""
class TestActions(TestCase):
"""Test all `pycall.actions` classes to ensure they are actual
`pycall.actions.Action` subclasses.
"""
pass
<commit_msg>Revert "Adding test case stub."
This reverts commit 6c6b08a63b308690144d73f54b98000e3b1b5672.<commit_after>
|
"""Unit tests for `pycall.actions`."""
|
"""Unit tests for `pycall.actions`."""
class TestActions(TestCase):
"""Test all `pycall.actions` classes to ensure they are actual
`pycall.actions.Action` subclasses.
"""
pass
Revert "Adding test case stub."
This reverts commit 6c6b08a63b308690144d73f54b98000e3b1b5672."""Unit tests for `pycall.actions`."""
|
<commit_before>"""Unit tests for `pycall.actions`."""
class TestActions(TestCase):
"""Test all `pycall.actions` classes to ensure they are actual
`pycall.actions.Action` subclasses.
"""
pass
<commit_msg>Revert "Adding test case stub."
This reverts commit 6c6b08a63b308690144d73f54b98000e3b1b5672.<commit_after>"""Unit tests for `pycall.actions`."""
|
ec6099421bad222595be15f4f0b2596952d8c9cc
|
username_to_uuid.py
|
username_to_uuid.py
|
""" Username to UUID
Converts a Minecraft username to its UUID equivalent.
Uses the official Mojang API to fetch player data.
"""
import http.client
import json
class UsernameToUUID:
def __init__(self, username):
self.username = username
def get_uuid(self, timestamp=None):
"""
Get the UUID of the player.
Parameters
----------
timestamp : long integer
The time at which the player used this name, expressed as a Unix timestamp.
"""
get_args = "" if timestamp is None else "?at=" + str(timestamp)
http_conn = http.client.HTTPSConnection("api.mojang.com");
http_conn.request("GET", "/users/profiles/minecraft/" + self.username + get_args,
headers={'User-Agent':'Minecraft Username -> UUID', 'Content-Type':'application/json'});
response = http_conn.getresponse().read().decode("utf-8")
if (not response and timestamp is None): # No response & no timestamp
return self.get_uuid(0) # Let's retry with the Unix timestamp 0.
if (not response): # No response (player probably doesn't exist)
return ""
json_data = json.loads(response)
uuid = json_data['id']
return uuid
|
""" Username to UUID
Converts a Minecraft username to its UUID equivalent.
Uses the official Mojang API to fetch player data.
"""
import http.client
import json
class UsernameToUUID:
def __init__(self, username):
self.username = username
def get_uuid(self, timestamp=None):
"""
Get the UUID of the player.
Parameters
----------
timestamp : long integer
The time at which the player used this name, expressed as a Unix timestamp.
"""
get_args = "" if timestamp is None else "?at=" + str(timestamp)
http_conn = http.client.HTTPSConnection("api.mojang.com");
http_conn.request("GET", "/users/profiles/minecraft/" + self.username + get_args,
headers={'User-Agent':'Minecraft Username -> UUID', 'Content-Type':'application/json'});
response = http_conn.getresponse().read().decode("utf-8")
if (not response and timestamp is None): # No response & no timestamp
return self.get_uuid(0) # Let's retry with the Unix timestamp 0.
if (not response): # No response (player probably doesn't exist)
return ""
json_data = json.loads(response)
try:
uuid = json_data['id']
except KeyError as e:
print("KeyError raised:", e);
return uuid
|
Improve robustness: surround the 'id' fetch from result array with a try clause.
|
Improve robustness: surround the 'id' fetch from result array with a try clause.
|
Python
|
mit
|
mrlolethan/MinecraftUsernameToUUID
|
""" Username to UUID
Converts a Minecraft username to its UUID equivalent.
Uses the official Mojang API to fetch player data.
"""
import http.client
import json
class UsernameToUUID:
def __init__(self, username):
self.username = username
def get_uuid(self, timestamp=None):
"""
Get the UUID of the player.
Parameters
----------
timestamp : long integer
The time at which the player used this name, expressed as a Unix timestamp.
"""
get_args = "" if timestamp is None else "?at=" + str(timestamp)
http_conn = http.client.HTTPSConnection("api.mojang.com");
http_conn.request("GET", "/users/profiles/minecraft/" + self.username + get_args,
headers={'User-Agent':'Minecraft Username -> UUID', 'Content-Type':'application/json'});
response = http_conn.getresponse().read().decode("utf-8")
if (not response and timestamp is None): # No response & no timestamp
return self.get_uuid(0) # Let's retry with the Unix timestamp 0.
if (not response): # No response (player probably doesn't exist)
return ""
json_data = json.loads(response)
uuid = json_data['id']
return uuid
Improve robustness: surround the 'id' fetch from result array with a try clause.
|
""" Username to UUID
Converts a Minecraft username to its UUID equivalent.
Uses the official Mojang API to fetch player data.
"""
import http.client
import json
class UsernameToUUID:
def __init__(self, username):
self.username = username
def get_uuid(self, timestamp=None):
"""
Get the UUID of the player.
Parameters
----------
timestamp : long integer
The time at which the player used this name, expressed as a Unix timestamp.
"""
get_args = "" if timestamp is None else "?at=" + str(timestamp)
http_conn = http.client.HTTPSConnection("api.mojang.com");
http_conn.request("GET", "/users/profiles/minecraft/" + self.username + get_args,
headers={'User-Agent':'Minecraft Username -> UUID', 'Content-Type':'application/json'});
response = http_conn.getresponse().read().decode("utf-8")
if (not response and timestamp is None): # No response & no timestamp
return self.get_uuid(0) # Let's retry with the Unix timestamp 0.
if (not response): # No response (player probably doesn't exist)
return ""
json_data = json.loads(response)
try:
uuid = json_data['id']
except KeyError as e:
print("KeyError raised:", e);
return uuid
|
<commit_before>""" Username to UUID
Converts a Minecraft username to its UUID equivalent.
Uses the official Mojang API to fetch player data.
"""
import http.client
import json
class UsernameToUUID:
def __init__(self, username):
self.username = username
def get_uuid(self, timestamp=None):
"""
Get the UUID of the player.
Parameters
----------
timestamp : long integer
The time at which the player used this name, expressed as a Unix timestamp.
"""
get_args = "" if timestamp is None else "?at=" + str(timestamp)
http_conn = http.client.HTTPSConnection("api.mojang.com");
http_conn.request("GET", "/users/profiles/minecraft/" + self.username + get_args,
headers={'User-Agent':'Minecraft Username -> UUID', 'Content-Type':'application/json'});
response = http_conn.getresponse().read().decode("utf-8")
if (not response and timestamp is None): # No response & no timestamp
return self.get_uuid(0) # Let's retry with the Unix timestamp 0.
if (not response): # No response (player probably doesn't exist)
return ""
json_data = json.loads(response)
uuid = json_data['id']
return uuid
<commit_msg>Improve robustness: surround the 'id' fetch from result array with a try clause.<commit_after>
|
""" Username to UUID
Converts a Minecraft username to its UUID equivalent.
Uses the official Mojang API to fetch player data.
"""
import http.client
import json
class UsernameToUUID:
def __init__(self, username):
self.username = username
def get_uuid(self, timestamp=None):
"""
Get the UUID of the player.
Parameters
----------
timestamp : long integer
The time at which the player used this name, expressed as a Unix timestamp.
"""
get_args = "" if timestamp is None else "?at=" + str(timestamp)
http_conn = http.client.HTTPSConnection("api.mojang.com");
http_conn.request("GET", "/users/profiles/minecraft/" + self.username + get_args,
headers={'User-Agent':'Minecraft Username -> UUID', 'Content-Type':'application/json'});
response = http_conn.getresponse().read().decode("utf-8")
if (not response and timestamp is None): # No response & no timestamp
return self.get_uuid(0) # Let's retry with the Unix timestamp 0.
if (not response): # No response (player probably doesn't exist)
return ""
json_data = json.loads(response)
try:
uuid = json_data['id']
except KeyError as e:
print("KeyError raised:", e);
return uuid
|
""" Username to UUID
Converts a Minecraft username to its UUID equivalent.
Uses the official Mojang API to fetch player data.
"""
import http.client
import json
class UsernameToUUID:
def __init__(self, username):
self.username = username
def get_uuid(self, timestamp=None):
"""
Get the UUID of the player.
Parameters
----------
timestamp : long integer
The time at which the player used this name, expressed as a Unix timestamp.
"""
get_args = "" if timestamp is None else "?at=" + str(timestamp)
http_conn = http.client.HTTPSConnection("api.mojang.com");
http_conn.request("GET", "/users/profiles/minecraft/" + self.username + get_args,
headers={'User-Agent':'Minecraft Username -> UUID', 'Content-Type':'application/json'});
response = http_conn.getresponse().read().decode("utf-8")
if (not response and timestamp is None): # No response & no timestamp
return self.get_uuid(0) # Let's retry with the Unix timestamp 0.
if (not response): # No response (player probably doesn't exist)
return ""
json_data = json.loads(response)
uuid = json_data['id']
return uuid
Improve robustness: surround the 'id' fetch from result array with a try clause.""" Username to UUID
Converts a Minecraft username to its UUID equivalent.
Uses the official Mojang API to fetch player data.
"""
import http.client
import json
class UsernameToUUID:
def __init__(self, username):
self.username = username
def get_uuid(self, timestamp=None):
"""
Get the UUID of the player.
Parameters
----------
timestamp : long integer
The time at which the player used this name, expressed as a Unix timestamp.
"""
get_args = "" if timestamp is None else "?at=" + str(timestamp)
http_conn = http.client.HTTPSConnection("api.mojang.com");
http_conn.request("GET", "/users/profiles/minecraft/" + self.username + get_args,
headers={'User-Agent':'Minecraft Username -> UUID', 'Content-Type':'application/json'});
response = http_conn.getresponse().read().decode("utf-8")
if (not response and timestamp is None): # No response & no timestamp
return self.get_uuid(0) # Let's retry with the Unix timestamp 0.
if (not response): # No response (player probably doesn't exist)
return ""
json_data = json.loads(response)
try:
uuid = json_data['id']
except KeyError as e:
print("KeyError raised:", e);
return uuid
|
<commit_before>""" Username to UUID
Converts a Minecraft username to its UUID equivalent.
Uses the official Mojang API to fetch player data.
"""
import http.client
import json
class UsernameToUUID:
def __init__(self, username):
self.username = username
def get_uuid(self, timestamp=None):
"""
Get the UUID of the player.
Parameters
----------
timestamp : long integer
The time at which the player used this name, expressed as a Unix timestamp.
"""
get_args = "" if timestamp is None else "?at=" + str(timestamp)
http_conn = http.client.HTTPSConnection("api.mojang.com");
http_conn.request("GET", "/users/profiles/minecraft/" + self.username + get_args,
headers={'User-Agent':'Minecraft Username -> UUID', 'Content-Type':'application/json'});
response = http_conn.getresponse().read().decode("utf-8")
if (not response and timestamp is None): # No response & no timestamp
return self.get_uuid(0) # Let's retry with the Unix timestamp 0.
if (not response): # No response (player probably doesn't exist)
return ""
json_data = json.loads(response)
uuid = json_data['id']
return uuid
<commit_msg>Improve robustness: surround the 'id' fetch from result array with a try clause.<commit_after>""" Username to UUID
Converts a Minecraft username to its UUID equivalent.
Uses the official Mojang API to fetch player data.
"""
import http.client
import json
class UsernameToUUID:
def __init__(self, username):
self.username = username
def get_uuid(self, timestamp=None):
"""
Get the UUID of the player.
Parameters
----------
timestamp : long integer
The time at which the player used this name, expressed as a Unix timestamp.
"""
get_args = "" if timestamp is None else "?at=" + str(timestamp)
http_conn = http.client.HTTPSConnection("api.mojang.com");
http_conn.request("GET", "/users/profiles/minecraft/" + self.username + get_args,
headers={'User-Agent':'Minecraft Username -> UUID', 'Content-Type':'application/json'});
response = http_conn.getresponse().read().decode("utf-8")
if (not response and timestamp is None): # No response & no timestamp
return self.get_uuid(0) # Let's retry with the Unix timestamp 0.
if (not response): # No response (player probably doesn't exist)
return ""
json_data = json.loads(response)
try:
uuid = json_data['id']
except KeyError as e:
print("KeyError raised:", e);
return uuid
|
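One caveat in the record's after-code: on a KeyError it prints the error but still returns uuid, which is unbound when 'id' is missing. A defensive variant of the same lookup, assuming the same response shape, uses dict.get with a default:

import json

def extract_uuid(response_text):
    # Return the player's UUID, or '' when the payload carries no 'id' key
    # (for example an error object from the API).
    try:
        return json.loads(response_text).get('id', '')
    except ValueError:  # malformed JSON
        return ''

print(extract_uuid('{"id": "069a79f4", "name": "Notch"}'))  # 069a79f4
print(extract_uuid('{"error": "Not Found"}'))               # prints an empty string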
fcb5c5756299a2804c08c8682430d4a0545ae3d9
|
linkatos/message.py
|
linkatos/message.py
|
import re
link_re = re.compile("(\s|^)<(https?://[\w./?+]+)>(\s|$)")
def extract_url(message):
"""
Returns the first url in a message. If there aren't any returns None
"""
answer = link_re.search(message)
if answer is not None:
answer = answer.group(2).strip()
return answer
|
import re
link_re = re.compile("(?:\s|^)<(https?://[\w./?+]+)>(?:\s|$)")
def extract_url(message):
"""
Returns the first url in a message. If there aren't any returns None
"""
answer = link_re.search(message)
if answer is not None:
answer = answer.group(1).strip()
return answer
|
Change regex to not capture the useless groups
|
feature: Change regex to not capture the useless groups
|
Python
|
mit
|
iwi/linkatos,iwi/linkatos
|
import re
link_re = re.compile("(\s|^)<(https?://[\w./?+]+)>(\s|$)")
def extract_url(message):
"""
Returns the first url in a message. If there aren't any returns None
"""
answer = link_re.search(message)
if answer is not None:
answer = answer.group(2).strip()
return answer
feature: Change regex to not capture the useless groups
|
import re
link_re = re.compile("(?:\s|^)<(https?://[\w./?+]+)>(?:\s|$)")
def extract_url(message):
"""
Returns the first url in a message. If there aren't any returns None
"""
answer = link_re.search(message)
if answer is not None:
answer = answer.group(1).strip()
return answer
|
<commit_before>import re
link_re = re.compile("(\s|^)<(https?://[\w./?+]+)>(\s|$)")
def extract_url(message):
"""
Returns the first url in a message. If there aren't any returns None
"""
answer = link_re.search(message)
if answer is not None:
answer = answer.group(2).strip()
return answer
<commit_msg>feature: Change regex to not capture the useless groups<commit_after>
|
import re
link_re = re.compile("(?:\s|^)<(https?://[\w./?+]+)>(?:\s|$)")
def extract_url(message):
"""
Returns the first url in a message. If there aren't any returns None
"""
answer = link_re.search(message)
if answer is not None:
answer = answer.group(1).strip()
return answer
|
import re
link_re = re.compile("(\s|^)<(https?://[\w./?+]+)>(\s|$)")
def extract_url(message):
"""
Returns the first url in a message. If there aren't any returns None
"""
answer = link_re.search(message)
if answer is not None:
answer = answer.group(2).strip()
return answer
feature: Change regex to not capture the useless groupsimport re
link_re = re.compile("(?:\s|^)<(https?://[\w./?+]+)>(?:\s|$)")
def extract_url(message):
"""
Returns the first url in a message. If there aren't any returns None
"""
answer = link_re.search(message)
if answer is not None:
answer = answer.group(1).strip()
return answer
|
<commit_before>import re
link_re = re.compile("(\s|^)<(https?://[\w./?+]+)>(\s|$)")
def extract_url(message):
"""
Returns the first url in a message. If there aren't any returns None
"""
answer = link_re.search(message)
if answer is not None:
answer = answer.group(2).strip()
return answer
<commit_msg>feature: Change regex to not capture the useless groups<commit_after>import re
link_re = re.compile("(?:\s|^)<(https?://[\w./?+]+)>(?:\s|$)")
def extract_url(message):
"""
Returns the first url in a message. If there aren't any returns None
"""
answer = link_re.search(message)
if answer is not None:
answer = answer.group(1).strip()
return answer
|
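The change above turns the surrounding whitespace groups into non-capturing (?:...) groups, so the URL moves from group(2) to group(1). A quick illustration of the difference:

import re

capturing = re.compile(r"(\s|^)<(https?://[\w./?+]+)>(\s|$)")
non_capturing = re.compile(r"(?:\s|^)<(https?://[\w./?+]+)>(?:\s|$)")

msg = "see <https://example.com/page> please"
print(capturing.search(msg).group(2))      # https://example.com/page
print(non_capturing.search(msg).group(1))  # same URL, now in group 1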
88bd75c4b0e039c208a1471d84006cdfb4bbaf93
|
starbowmodweb/site/templatetags/bbformat.py
|
starbowmodweb/site/templatetags/bbformat.py
|
"""
This module defines all of our bbcode capabilities.
To add a new bbcode tag do the following:
def bbcode_<tag_name>(tag_name, value, options, parent, context):
return formatted_html
bbcode_parser.add_formatter("<tag_name>", func_name, **tag_options)
For more information on the different arguments and options available see the bbcode docs:
http://bbcode.readthedocs.org/en/latest/
"""
from django import template
import bbcode
def bbcode_img(tag_name, value, options, parent, context):
if tag_name in options and 'x' in options[tag_name]:
options['width'], options['height'] = options[tag_name].split('x', 1)
del options[tag_name]
attrs = ' '.join([name+'="{}"' for name in options.keys()])
return ('<img src="{}" '+attrs+' />').format(value, *options.values())
bbcode_parser = bbcode.Parser()
bbcode_parser.add_formatter("img", bbcode_img, replace_links=False)
def bbformat(value):
return bbcode_parser.format(value)
register = template.Library()
register.filter('bbformat', bbformat)
|
"""
This module defines all of our bbcode capabilities.
To add a new bbcode tag do the following:
def bbcode_<tag_name>(tag_name, value, options, parent, context):
return formatted_html
bbcode_parser.add_formatter("<tag_name>", func_name, **tag_options)
For more information on the different arguments and options available see the bbcode docs:
http://bbcode.readthedocs.org/en/latest/
"""
from django import template
import bbcode
def bbcode_img(tag_name, value, options, parent, context):
if tag_name in options and 'x' in options[tag_name]:
options['width'], options['height'] = options[tag_name].split('x', 1)
del options[tag_name]
attrs = ' '.join([name+'="{}"' for name in options.keys()])
return ('<img src="{}" '+attrs+' />').format(value, *options.values())
def bbcode_email(tag_name, value, options, parent, context):
return '<a href="mailto:{}">{}</a>'.format(value, value)
def bbcode_font(tag_name, value, options, parent, context):
return '<span style="font-family: {}">{}</span>'.format(options[tag_name], value)
bbcode_parser = bbcode.Parser()
bbcode_parser.add_formatter("img", bbcode_img, replace_links=False)
bbcode_parser.add_formatter("email", bbcode_email)
bbcode_parser.add_formatter("font", bbcode_font)
def bbformat(value):
return bbcode_parser.format(value)
register = template.Library()
register.filter('bbformat', bbformat)
|
Add support for email and font bbcode tags.
|
Add support for email and font bbcode tags.
|
Python
|
mit
|
Starbow/StarbowWebSite,Starbow/StarbowWebSite,Starbow/StarbowWebSite
|
"""
This module defines all of our bbcode capabilities.
To add a new bbcode tag do the following:
def bbcode_<tag_name>(tag_name, value, options, parent, context):
return formatted_html
bbcode_parser.add_formatter("<tag_name>", func_name, **tag_options)
For more information on the different arguments and options available see the bbcode docs:
http://bbcode.readthedocs.org/en/latest/
"""
from django import template
import bbcode
def bbcode_img(tag_name, value, options, parent, context):
if tag_name in options and 'x' in options[tag_name]:
options['width'], options['height'] = options[tag_name].split('x', 1)
del options[tag_name]
attrs = ' '.join([name+'="{}"' for name in options.keys()])
return ('<img src="{}" '+attrs+' />').format(value, *options.values())
bbcode_parser = bbcode.Parser()
bbcode_parser.add_formatter("img", bbcode_img, replace_links=False)
def bbformat(value):
return bbcode_parser.format(value)
register = template.Library()
register.filter('bbformat', bbformat)
Add support for email and font bbcode tags.
|
"""
This module defines all of our bbcode capabilities.
To add a new bbcode tag do the following:
def bbcode_<tag_name>(tag_name, value, options, parent, context):
return formatted_html
bbcode_parser.add_formatter("<tag_name>", func_name, **tag_options)
For more information on the different arguments and options available see the bbcode docs:
http://bbcode.readthedocs.org/en/latest/
"""
from django import template
import bbcode
def bbcode_img(tag_name, value, options, parent, context):
if tag_name in options and 'x' in options[tag_name]:
options['width'], options['height'] = options[tag_name].split('x', 1)
del options[tag_name]
attrs = ' '.join([name+'="{}"' for name in options.keys()])
return ('<img src="{}" '+attrs+' />').format(value, *options.values())
def bbcode_email(tag_name, value, options, parent, context):
return '<a href="mailto:{}">{}</a>'.format(value, value)
def bbcode_font(tag_name, value, options, parent, context):
return '<span style="font-family: {}">{}</span>'.format(options[tag_name], value)
bbcode_parser = bbcode.Parser()
bbcode_parser.add_formatter("img", bbcode_img, replace_links=False)
bbcode_parser.add_formatter("email", bbcode_email)
bbcode_parser.add_formatter("font", bbcode_font)
def bbformat(value):
return bbcode_parser.format(value)
register = template.Library()
register.filter('bbformat', bbformat)
|
<commit_before>"""
This module defines all of our bbcode capabilities.
To add a new bbcode tag do the following:
def bbcode_<tag_name>(tag_name, value, options, parent, context):
return formatted_html
bbcode_parser.add_formatter("<tag_name>", func_name, **tag_options)
For more information on the different arguments and options available see the bbcode docs:
http://bbcode.readthedocs.org/en/latest/
"""
from django import template
import bbcode
def bbcode_img(tag_name, value, options, parent, context):
if tag_name in options and 'x' in options[tag_name]:
options['width'], options['height'] = options[tag_name].split('x', 1)
del options[tag_name]
attrs = ' '.join([name+'="{}"' for name in options.keys()])
return ('<img src="{}" '+attrs+' />').format(value, *options.values())
bbcode_parser = bbcode.Parser()
bbcode_parser.add_formatter("img", bbcode_img, replace_links=False)
def bbformat(value):
return bbcode_parser.format(value)
register = template.Library()
register.filter('bbformat', bbformat)
<commit_msg>Add support for email and font bbcode tags.<commit_after>
|
"""
This module defines all of our bbcode capabilities.
To add a new bbcode tag do the following:
def bbcode_<tag_name>(tag_name, value, options, parent, context):
return formatted_html
bbcode_parser.add_formatter("<tag_name>", func_name, **tag_options)
For more information on the different arguments and options available see the bbcode docs:
http://bbcode.readthedocs.org/en/latest/
"""
from django import template
import bbcode
def bbcode_img(tag_name, value, options, parent, context):
if tag_name in options and 'x' in options[tag_name]:
options['width'], options['height'] = options[tag_name].split('x', 1)
del options[tag_name]
attrs = ' '.join([name+'="{}"' for name in options.keys()])
return ('<img src="{}" '+attrs+' />').format(value, *options.values())
def bbcode_email(tag_name, value, options, parent, context):
return '<a href="mailto:{}">{}</a>'.format(value, value)
def bbcode_font(tag_name, value, options, parent, context):
return '<span style="font-family: {}">{}</span>'.format(options[tag_name], value)
bbcode_parser = bbcode.Parser()
bbcode_parser.add_formatter("img", bbcode_img, replace_links=False)
bbcode_parser.add_formatter("email", bbcode_email)
bbcode_parser.add_formatter("font", bbcode_font)
def bbformat(value):
return bbcode_parser.format(value)
register = template.Library()
register.filter('bbformat', bbformat)
|
"""
This module defines all of our bbcode capabilities.
To add a new bbcode tag do the following:
def bbcode_<tag_name>(tag_name, value, options, parent, context):
return formatted_html
bbcode_parser.add_formatter("<tag_name>", func_name, **tag_options)
For more information on the different arguments and options available see the bbcode docs:
http://bbcode.readthedocs.org/en/latest/
"""
from django import template
import bbcode
def bbcode_img(tag_name, value, options, parent, context):
if tag_name in options and 'x' in options[tag_name]:
options['width'], options['height'] = options[tag_name].split('x', 1)
del options[tag_name]
attrs = ' '.join([name+'="{}"' for name in options.keys()])
return ('<img src="{}" '+attrs+' />').format(value, *options.values())
bbcode_parser = bbcode.Parser()
bbcode_parser.add_formatter("img", bbcode_img, replace_links=False)
def bbformat(value):
return bbcode_parser.format(value)
register = template.Library()
register.filter('bbformat', bbformat)
Add support for email and font bbcode tags."""
This module defines all of our bbcode capabilities.
To add a new bbcode tag do the following:
def bbcode_<tag_name>(tag_name, value, options, parent, context):
return formatted_html
bbcode_parser.add_formatter("<tag_name>", func_name, **tag_options)
For more information on the different arguments and options available see the bbcode docs:
http://bbcode.readthedocs.org/en/latest/
"""
from django import template
import bbcode
def bbcode_img(tag_name, value, options, parent, context):
if tag_name in options and 'x' in options[tag_name]:
options['width'], options['height'] = options[tag_name].split('x', 1)
del options[tag_name]
attrs = ' '.join([name+'="{}"' for name in options.keys()])
return ('<img src="{}" '+attrs+' />').format(value, *options.values())
def bbcode_email(tag_name, value, options, parent, context):
return '<a href="mailto:{}">{}</a>'.format(value, value)
def bbcode_font(tag_name, value, options, parent, context):
return '<span style="font-family: {}">{}</span>'.format(options[tag_name], value)
bbcode_parser = bbcode.Parser()
bbcode_parser.add_formatter("img", bbcode_img, replace_links=False)
bbcode_parser.add_formatter("email", bbcode_email)
bbcode_parser.add_formatter("font", bbcode_font)
def bbformat(value):
return bbcode_parser.format(value)
register = template.Library()
register.filter('bbformat', bbformat)
|
<commit_before>"""
This module defines all of our bbcode capabilities.
To add a new bbcode tag do the following:
def bbcode_<tag_name>(tag_name, value, options, parent, context):
return formatted_html
bbcode_parser.add_formatter("<tag_name>", func_name, **tag_options)
For more information on the different arguments and options available see the bbcode docs:
http://bbcode.readthedocs.org/en/latest/
"""
from django import template
import bbcode
def bbcode_img(tag_name, value, options, parent, context):
if tag_name in options and 'x' in options[tag_name]:
options['width'], options['height'] = options[tag_name].split('x', 1)
del options[tag_name]
attrs = ' '.join([name+'="{}"' for name in options.keys()])
return ('<img src="{}" '+attrs+' />').format(value, *options.values())
bbcode_parser = bbcode.Parser()
bbcode_parser.add_formatter("img", bbcode_img, replace_links=False)
def bbformat(value):
return bbcode_parser.format(value)
register = template.Library()
register.filter('bbformat', bbformat)
<commit_msg>Add support for email and font bbcode tags.<commit_after>"""
This module defines all of our bbcode capabilities.
To add a new bbcode tag do the following:
def bbcode_<tag_name>(tag_name, value, options, parent, context):
return formatted_html
bbcode_parser.add_formatter("<tag_name>", func_name, **tag_options)
For more information on the different arguments and options available see the bbcode docs:
http://bbcode.readthedocs.org/en/latest/
"""
from django import template
import bbcode
def bbcode_img(tag_name, value, options, parent, context):
if tag_name in options and 'x' in options[tag_name]:
options['width'], options['height'] = options[tag_name].split('x', 1)
del options[tag_name]
attrs = ' '.join([name+'="{}"' for name in options.keys()])
return ('<img src="{}" '+attrs+' />').format(value, *options.values())
def bbcode_email(tag_name, value, options, parent, context):
return '<a href="mailto:{}">{}</a>'.format(value, value)
def bbcode_font(tag_name, value, options, parent, context):
return '<span style="font-family: {}">{}</span>'.format(options[tag_name], value)
bbcode_parser = bbcode.Parser()
bbcode_parser.add_formatter("img", bbcode_img, replace_links=False)
bbcode_parser.add_formatter("email", bbcode_email)
bbcode_parser.add_formatter("font", bbcode_font)
def bbformat(value):
return bbcode_parser.format(value)
register = template.Library()
register.filter('bbformat', bbformat)
|
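The module docstring above spells out the extension recipe: write a formatter with the (tag_name, value, options, parent, context) signature and register it with add_formatter. A minimal sketch of one more tag in that style; the [color] tag and its fallback value are illustrative, not part of the recorded commit.
def bbcode_color(tag_name, value, options, parent, context):
    # option values arrive keyed by the tag name, e.g. [color=red]...[/color]
    color = options.get(tag_name, 'inherit')
    return '<span style="color: {}">{}</span>'.format(color, value)
bbcode_parser.add_formatter("color", bbcode_color)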
689f700fcfcdfcdc7d027f204a9654b101ac9ecb
|
docs/conf.py
|
docs/conf.py
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
extensions = [
'sphinx.ext.autodoc',
'jaraco.packaging.sphinx',
'rst.linker',
]
master_doc = 'index'
link_files = {
'../CHANGES.rst': dict(
using=dict(
GH='https://github.com',
),
replace=[
dict(
pattern=r"(Issue )?#(?P<issue>\d+)",
url='{package_url}/issues/{issue}',
),
dict(
pattern=r"^(?m)((?P<scm_version>v?\d+(\.\d+){1,2}))\n[-=]+\n",
with_scm="{text}\n{rev[timestamp]:%d %b %Y}\n",
),
dict(
pattern=r"PEP[- ](?P<pep_number>\d+)",
url='https://www.python.org/dev/peps/pep-{pep_number:0>4}/',
),
],
),
}
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
extensions = [
'sphinx.ext.autodoc',
'jaraco.packaging.sphinx',
'rst.linker',
]
master_doc = 'index'
link_files = {
'../CHANGES.rst': dict(
using=dict(
GH='https://github.com',
),
replace=[
dict(
pattern=r'(Issue )?#(?P<issue>\d+)',
url='{package_url}/issues/{issue}',
),
dict(
pattern=r'^(?m)((?P<scm_version>v?\d+(\.\d+){1,2}))\n[-=]+\n',
with_scm='{text}\n{rev[timestamp]:%d %b %Y}\n',
),
dict(
pattern=r'PEP[- ](?P<pep_number>\d+)',
url='https://www.python.org/dev/peps/pep-{pep_number:0>4}/',
),
],
),
}
|
Use single-quotes to satisfy the style nazis.
|
Use single-quotes to satisfy the style nazis.
|
Python
|
mit
|
yougov/mettle,cherrypy/magicbus,python/importlib_metadata,jaraco/keyring,jaraco/jaraco.context,pwdyson/inflect.py,yougov/mettle,yougov/librarypaste,jaraco/jaraco.text,jaraco/jaraco.collections,jaraco/jaraco.functools,jaraco/hgtools,yougov/pmxbot,jaraco/zipp,hugovk/inflect.py,jaraco/jaraco.path,yougov/mettle,jaraco/jaraco.itertools,pytest-dev/pytest-runner,jazzband/inflect,jaraco/calendra,yougov/mettle,cherrypy/cheroot,jaraco/portend,jaraco/irc,jaraco/rwt,yougov/librarypaste,jaraco/jaraco.stream,jaraco/backports.functools_lru_cache,yougov/pmxbot,jaraco/jaraco.classes,yougov/pmxbot,jaraco/jaraco.logging,jaraco/tempora
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
extensions = [
'sphinx.ext.autodoc',
'jaraco.packaging.sphinx',
'rst.linker',
]
master_doc = 'index'
link_files = {
'../CHANGES.rst': dict(
using=dict(
GH='https://github.com',
),
replace=[
dict(
pattern=r"(Issue )?#(?P<issue>\d+)",
url='{package_url}/issues/{issue}',
),
dict(
pattern=r"^(?m)((?P<scm_version>v?\d+(\.\d+){1,2}))\n[-=]+\n",
with_scm="{text}\n{rev[timestamp]:%d %b %Y}\n",
),
dict(
pattern=r"PEP[- ](?P<pep_number>\d+)",
url='https://www.python.org/dev/peps/pep-{pep_number:0>4}/',
),
],
),
}
Use single-quotes to satisfy the style nazis.
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
extensions = [
'sphinx.ext.autodoc',
'jaraco.packaging.sphinx',
'rst.linker',
]
master_doc = 'index'
link_files = {
'../CHANGES.rst': dict(
using=dict(
GH='https://github.com',
),
replace=[
dict(
pattern=r'(Issue )?#(?P<issue>\d+)',
url='{package_url}/issues/{issue}',
),
dict(
pattern=r'^(?m)((?P<scm_version>v?\d+(\.\d+){1,2}))\n[-=]+\n',
with_scm='{text}\n{rev[timestamp]:%d %b %Y}\n',
),
dict(
pattern=r'PEP[- ](?P<pep_number>\d+)',
url='https://www.python.org/dev/peps/pep-{pep_number:0>4}/',
),
],
),
}
|
<commit_before>#!/usr/bin/env python3
# -*- coding: utf-8 -*-
extensions = [
'sphinx.ext.autodoc',
'jaraco.packaging.sphinx',
'rst.linker',
]
master_doc = 'index'
link_files = {
'../CHANGES.rst': dict(
using=dict(
GH='https://github.com',
),
replace=[
dict(
pattern=r"(Issue )?#(?P<issue>\d+)",
url='{package_url}/issues/{issue}',
),
dict(
pattern=r"^(?m)((?P<scm_version>v?\d+(\.\d+){1,2}))\n[-=]+\n",
with_scm="{text}\n{rev[timestamp]:%d %b %Y}\n",
),
dict(
pattern=r"PEP[- ](?P<pep_number>\d+)",
url='https://www.python.org/dev/peps/pep-{pep_number:0>4}/',
),
],
),
}
<commit_msg>Use single-quotes to satisfy the style nazis.<commit_after>
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
extensions = [
'sphinx.ext.autodoc',
'jaraco.packaging.sphinx',
'rst.linker',
]
master_doc = 'index'
link_files = {
'../CHANGES.rst': dict(
using=dict(
GH='https://github.com',
),
replace=[
dict(
pattern=r'(Issue )?#(?P<issue>\d+)',
url='{package_url}/issues/{issue}',
),
dict(
pattern=r'^(?m)((?P<scm_version>v?\d+(\.\d+){1,2}))\n[-=]+\n',
with_scm='{text}\n{rev[timestamp]:%d %b %Y}\n',
),
dict(
pattern=r'PEP[- ](?P<pep_number>\d+)',
url='https://www.python.org/dev/peps/pep-{pep_number:0>4}/',
),
],
),
}
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
extensions = [
'sphinx.ext.autodoc',
'jaraco.packaging.sphinx',
'rst.linker',
]
master_doc = 'index'
link_files = {
'../CHANGES.rst': dict(
using=dict(
GH='https://github.com',
),
replace=[
dict(
pattern=r"(Issue )?#(?P<issue>\d+)",
url='{package_url}/issues/{issue}',
),
dict(
pattern=r"^(?m)((?P<scm_version>v?\d+(\.\d+){1,2}))\n[-=]+\n",
with_scm="{text}\n{rev[timestamp]:%d %b %Y}\n",
),
dict(
pattern=r"PEP[- ](?P<pep_number>\d+)",
url='https://www.python.org/dev/peps/pep-{pep_number:0>4}/',
),
],
),
}
Use single-quotes to satisfy the style nazis.#!/usr/bin/env python3
# -*- coding: utf-8 -*-
extensions = [
'sphinx.ext.autodoc',
'jaraco.packaging.sphinx',
'rst.linker',
]
master_doc = 'index'
link_files = {
'../CHANGES.rst': dict(
using=dict(
GH='https://github.com',
),
replace=[
dict(
pattern=r'(Issue )?#(?P<issue>\d+)',
url='{package_url}/issues/{issue}',
),
dict(
pattern=r'^(?m)((?P<scm_version>v?\d+(\.\d+){1,2}))\n[-=]+\n',
with_scm='{text}\n{rev[timestamp]:%d %b %Y}\n',
),
dict(
pattern=r'PEP[- ](?P<pep_number>\d+)',
url='https://www.python.org/dev/peps/pep-{pep_number:0>4}/',
),
],
),
}
|
<commit_before>#!/usr/bin/env python3
# -*- coding: utf-8 -*-
extensions = [
'sphinx.ext.autodoc',
'jaraco.packaging.sphinx',
'rst.linker',
]
master_doc = 'index'
link_files = {
'../CHANGES.rst': dict(
using=dict(
GH='https://github.com',
),
replace=[
dict(
pattern=r"(Issue )?#(?P<issue>\d+)",
url='{package_url}/issues/{issue}',
),
dict(
pattern=r"^(?m)((?P<scm_version>v?\d+(\.\d+){1,2}))\n[-=]+\n",
with_scm="{text}\n{rev[timestamp]:%d %b %Y}\n",
),
dict(
pattern=r"PEP[- ](?P<pep_number>\d+)",
url='https://www.python.org/dev/peps/pep-{pep_number:0>4}/',
),
],
),
}
<commit_msg>Use single-quotes to satisfy the style nazis.<commit_after>#!/usr/bin/env python3
# -*- coding: utf-8 -*-
extensions = [
'sphinx.ext.autodoc',
'jaraco.packaging.sphinx',
'rst.linker',
]
master_doc = 'index'
link_files = {
'../CHANGES.rst': dict(
using=dict(
GH='https://github.com',
),
replace=[
dict(
pattern=r'(Issue )?#(?P<issue>\d+)',
url='{package_url}/issues/{issue}',
),
dict(
pattern=r'^(?m)((?P<scm_version>v?\d+(\.\d+){1,2}))\n[-=]+\n',
with_scm='{text}\n{rev[timestamp]:%d %b %Y}\n',
),
dict(
pattern=r'PEP[- ](?P<pep_number>\d+)',
url='https://www.python.org/dev/peps/pep-{pep_number:0>4}/',
),
],
),
}
|
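Each replace entry above pairs a named-group regex with a URL template whose placeholders are the group names, which is roughly what rst.linker applies to the changelog. A self-contained sketch of that mechanism, using a hypothetical CVE pattern rather than anything from the project:
import re
pattern = r'CVE-(?P<cve_year>\d{4})-(?P<cve_number>\d+)'
url = 'https://nvd.nist.gov/vuln/detail/CVE-{cve_year}-{cve_number}'
def link(match):
    # fill the template with the regex's named groups
    return url.format(**match.groupdict())
print(re.sub(pattern, link, 'Fixed CVE-2021-12345 in this release.'))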
4594ed6599d98f1773a6e393c617c3230a1d8bec
|
django_evolution/__init__.py
|
django_evolution/__init__.py
|
"""Django Evolution version and package information.
These variables and functions can be used to identify the version of
Django Evolution. They're largely used for packaging purposes.
"""
from __future__ import unicode_literals
# The version of Django Evolution
#
# This is in the format of:
#
# (Major, Minor, Micro, alpha/beta/rc/final, Release Number, Released)
#
VERSION = (2, 1, 0, 'alpha', 0, False)
def get_version_string():
version = '%s.%s' % (VERSION[0], VERSION[1])
if VERSION[2]:
version += ".%s" % VERSION[2]
if VERSION[3] != 'final':
if VERSION[3] == 'rc':
version += ' RC%s' % VERSION[4]
else:
version += ' %s %s' % (VERSION[3], VERSION[4])
if not is_release():
version += " (dev)"
return version
def get_package_version():
version = '%s.%s' % (VERSION[0], VERSION[1])
if VERSION[2]:
version += ".%s" % VERSION[2]
if VERSION[3] != 'final':
version += '%s%s' % (VERSION[3], VERSION[4])
return version
def is_release():
return VERSION[5]
__version_info__ = VERSION[:-1]
__version__ = get_package_version()
|
"""Django Evolution version and package information.
These variables and functions can be used to identify the version of
Django Evolution. They're largely used for packaging purposes.
"""
from __future__ import unicode_literals
# The version of Django Evolution
#
# This is in the format of:
#
# (Major, Minor, Micro, alpha/beta/rc/final, Release Number, Released)
#
VERSION = (2, 1, 0, 'alpha', 0, False)
def get_version_string():
version = '%s.%s' % (VERSION[0], VERSION[1])
if VERSION[2]:
version += ".%s" % VERSION[2]
if VERSION[3] != 'final':
if VERSION[3] == 'rc':
version += ' RC%s' % VERSION[4]
else:
version += ' %s %s' % (VERSION[3], VERSION[4])
if not is_release():
version += " (dev)"
return version
def get_package_version():
version = '%s.%s' % (VERSION[0], VERSION[1])
if VERSION[2]:
version += ".%s" % VERSION[2]
tag = VERSION[3]
if tag != 'final':
if tag == 'alpha':
tag = 'a'
elif tag == 'beta':
tag = 'b'
version += '%s%s' % (tag, VERSION[4])
return version
def is_release():
return VERSION[5]
__version_info__ = VERSION[:-1]
__version__ = get_package_version()
|
Remove a deprecation warning when computing the package version.
|
Remove a deprecation warning when computing the package version.
In pretty much all of our Python packages, we generate a package version
using legacy identifiers of "alpha" and "beta". These get turned into
"a" and "b" by `pkg_resources`, and a warning is thrown to inform us
that we're doing it wrong.
To reduce those warnings, this change converts the legacy naming to
modern naming when generating the package version.
Testing Done:
Built packages. Saw the correct identifier in the package file, without
seeing any warnings.
Reviewed at https://reviews.reviewboard.org/r/11264/
|
Python
|
bsd-3-clause
|
beanbaginc/django-evolution
|
"""Django Evolution version and package information.
These variables and functions can be used to identify the version of
Django Evolution. They're largely used for packaging purposes.
"""
from __future__ import unicode_literals
# The version of Django Evolution
#
# This is in the format of:
#
# (Major, Minor, Micro, alpha/beta/rc/final, Release Number, Released)
#
VERSION = (2, 1, 0, 'alpha', 0, False)
def get_version_string():
version = '%s.%s' % (VERSION[0], VERSION[1])
if VERSION[2]:
version += ".%s" % VERSION[2]
if VERSION[3] != 'final':
if VERSION[3] == 'rc':
version += ' RC%s' % VERSION[4]
else:
version += ' %s %s' % (VERSION[3], VERSION[4])
if not is_release():
version += " (dev)"
return version
def get_package_version():
version = '%s.%s' % (VERSION[0], VERSION[1])
if VERSION[2]:
version += ".%s" % VERSION[2]
if VERSION[3] != 'final':
version += '%s%s' % (VERSION[3], VERSION[4])
return version
def is_release():
return VERSION[5]
__version_info__ = VERSION[:-1]
__version__ = get_package_version()
Remove a deprecation warning when computing the package version.
In pretty much all of our Python packages, we generate a package version
using legacy identifiers of "alpha" and "beta". These get turned into
"a" and "b" by `pkg_resources`, and a warning is thrown to inform us
that we're doing it wrong.
To reduce those warnings, this change converts the legacy naming to
modern naming when generating the package version.
Testing Done:
Built packages. Saw the correct identifier in the package file, without
seeing any warnings.
Reviewed at https://reviews.reviewboard.org/r/11264/
|
"""Django Evolution version and package information.
These variables and functions can be used to identify the version of
Django Evolution. They're largely used for packaging purposes.
"""
from __future__ import unicode_literals
# The version of Django Evolution
#
# This is in the format of:
#
# (Major, Minor, Micro, alpha/beta/rc/final, Release Number, Released)
#
VERSION = (2, 1, 0, 'alpha', 0, False)
def get_version_string():
version = '%s.%s' % (VERSION[0], VERSION[1])
if VERSION[2]:
version += ".%s" % VERSION[2]
if VERSION[3] != 'final':
if VERSION[3] == 'rc':
version += ' RC%s' % VERSION[4]
else:
version += ' %s %s' % (VERSION[3], VERSION[4])
if not is_release():
version += " (dev)"
return version
def get_package_version():
version = '%s.%s' % (VERSION[0], VERSION[1])
if VERSION[2]:
version += ".%s" % VERSION[2]
tag = VERSION[3]
if tag != 'final':
if tag == 'alpha':
tag = 'a'
elif tag == 'beta':
tag = 'b'
version += '%s%s' % (tag, VERSION[4])
return version
def is_release():
return VERSION[5]
__version_info__ = VERSION[:-1]
__version__ = get_package_version()
|
<commit_before>"""Django Evolution version and package information.
These variables and functions can be used to identify the version of
Django Evolution. They're largely used for packaging purposes.
"""
from __future__ import unicode_literals
# The version of Django Evolution
#
# This is in the format of:
#
# (Major, Minor, Micro, alpha/beta/rc/final, Release Number, Released)
#
VERSION = (2, 1, 0, 'alpha', 0, False)
def get_version_string():
version = '%s.%s' % (VERSION[0], VERSION[1])
if VERSION[2]:
version += ".%s" % VERSION[2]
if VERSION[3] != 'final':
if VERSION[3] == 'rc':
version += ' RC%s' % VERSION[4]
else:
version += ' %s %s' % (VERSION[3], VERSION[4])
if not is_release():
version += " (dev)"
return version
def get_package_version():
version = '%s.%s' % (VERSION[0], VERSION[1])
if VERSION[2]:
version += ".%s" % VERSION[2]
if VERSION[3] != 'final':
version += '%s%s' % (VERSION[3], VERSION[4])
return version
def is_release():
return VERSION[5]
__version_info__ = VERSION[:-1]
__version__ = get_package_version()
<commit_msg>Remove a deprecation warning when computing the package version.
In pretty much all of our Python packages, we generate a package version
using legacy identifiers of "alpha" and "beta". These get turned into
"a" and "b" by `pkg_resources`, and a warning is thrown to inform us
that we're doing it wrong.
To reduce those warnings, this change converts the legacy naming to
modern naming when generating the package version.
Testing Done:
Built packages. Saw the correct identifier in the package file, without
seeing any warnings.
Reviewed at https://reviews.reviewboard.org/r/11264/<commit_after>
|
"""Django Evolution version and package information.
These variables and functions can be used to identify the version of
Django Evolution. They're largely used for packaging purposes.
"""
from __future__ import unicode_literals
# The version of Django Evolution
#
# This is in the format of:
#
# (Major, Minor, Micro, alpha/beta/rc/final, Release Number, Released)
#
VERSION = (2, 1, 0, 'alpha', 0, False)
def get_version_string():
version = '%s.%s' % (VERSION[0], VERSION[1])
if VERSION[2]:
version += ".%s" % VERSION[2]
if VERSION[3] != 'final':
if VERSION[3] == 'rc':
version += ' RC%s' % VERSION[4]
else:
version += ' %s %s' % (VERSION[3], VERSION[4])
if not is_release():
version += " (dev)"
return version
def get_package_version():
version = '%s.%s' % (VERSION[0], VERSION[1])
if VERSION[2]:
version += ".%s" % VERSION[2]
tag = VERSION[3]
if tag != 'final':
if tag == 'alpha':
tag = 'a'
elif tag == 'beta':
tag = 'b'
version += '%s%s' % (tag, VERSION[4])
return version
def is_release():
return VERSION[5]
__version_info__ = VERSION[:-1]
__version__ = get_package_version()
|
"""Django Evolution version and package information.
These variables and functions can be used to identify the version of
Django Evolution. They're largely used for packaging purposes.
"""
from __future__ import unicode_literals
# The version of Django Evolution
#
# This is in the format of:
#
# (Major, Minor, Micro, alpha/beta/rc/final, Release Number, Released)
#
VERSION = (2, 1, 0, 'alpha', 0, False)
def get_version_string():
version = '%s.%s' % (VERSION[0], VERSION[1])
if VERSION[2]:
version += ".%s" % VERSION[2]
if VERSION[3] != 'final':
if VERSION[3] == 'rc':
version += ' RC%s' % VERSION[4]
else:
version += ' %s %s' % (VERSION[3], VERSION[4])
if not is_release():
version += " (dev)"
return version
def get_package_version():
version = '%s.%s' % (VERSION[0], VERSION[1])
if VERSION[2]:
version += ".%s" % VERSION[2]
if VERSION[3] != 'final':
version += '%s%s' % (VERSION[3], VERSION[4])
return version
def is_release():
return VERSION[5]
__version_info__ = VERSION[:-1]
__version__ = get_package_version()
Remove a deprecation warning when computing the package version.
In pretty much all of our Python packages, we generate a package version
using legacy identifiers of "alpha" and "beta". These get turned into
"a" and "b" by `pkg_resources`, and a warning is thrown to inform us
that we're doing it wrong.
To reduce those warnings, this change converts the legacy naming to
modern naming when generating the package version.
Testing Done:
Built packages. Saw the correct identifier in the package file, without
seeing any warnings.
Reviewed at https://reviews.reviewboard.org/r/11264/"""Django Evolution version and package information.
These variables and functions can be used to identify the version of
Django Evolution. They're largely used for packaging purposes.
"""
from __future__ import unicode_literals
# The version of Django Evolution
#
# This is in the format of:
#
# (Major, Minor, Micro, alpha/beta/rc/final, Release Number, Released)
#
VERSION = (2, 1, 0, 'alpha', 0, False)
def get_version_string():
version = '%s.%s' % (VERSION[0], VERSION[1])
if VERSION[2]:
version += ".%s" % VERSION[2]
if VERSION[3] != 'final':
if VERSION[3] == 'rc':
version += ' RC%s' % VERSION[4]
else:
version += ' %s %s' % (VERSION[3], VERSION[4])
if not is_release():
version += " (dev)"
return version
def get_package_version():
version = '%s.%s' % (VERSION[0], VERSION[1])
if VERSION[2]:
version += ".%s" % VERSION[2]
tag = VERSION[3]
if tag != 'final':
if tag == 'alpha':
tag = 'a'
elif tag == 'beta':
tag = 'b'
version += '%s%s' % (tag, VERSION[4])
return version
def is_release():
return VERSION[5]
__version_info__ = VERSION[:-1]
__version__ = get_package_version()
|
<commit_before>"""Django Evolution version and package information.
These variables and functions can be used to identify the version of
Django Evolution. They're largely used for packaging purposes.
"""
from __future__ import unicode_literals
# The version of Django Evolution
#
# This is in the format of:
#
# (Major, Minor, Micro, alpha/beta/rc/final, Release Number, Released)
#
VERSION = (2, 1, 0, 'alpha', 0, False)
def get_version_string():
version = '%s.%s' % (VERSION[0], VERSION[1])
if VERSION[2]:
version += ".%s" % VERSION[2]
if VERSION[3] != 'final':
if VERSION[3] == 'rc':
version += ' RC%s' % VERSION[4]
else:
version += ' %s %s' % (VERSION[3], VERSION[4])
if not is_release():
version += " (dev)"
return version
def get_package_version():
version = '%s.%s' % (VERSION[0], VERSION[1])
if VERSION[2]:
version += ".%s" % VERSION[2]
if VERSION[3] != 'final':
version += '%s%s' % (VERSION[3], VERSION[4])
return version
def is_release():
return VERSION[5]
__version_info__ = VERSION[:-1]
__version__ = get_package_version()
<commit_msg>Remove a deprecation warning when computing the package version.
In pretty much all of our Python packages, we generate a package version
using legacy identifiers of "alpha" and "beta". These get turned into
"a" and "b" by `pkg_resources`, and a warning is thrown to inform us
that we're doing it wrong.
To reduce those warnings, this change converts the legacy naming to
modern naming when generating the package version.
Testing Done:
Built packages. Saw the correct identifier in the package file, without
seeing any warnings.
Reviewed at https://reviews.reviewboard.org/r/11264/<commit_after>"""Django Evolution version and package information.
These variables and functions can be used to identify the version of
Django Evolution. They're largely used for packaging purposes.
"""
from __future__ import unicode_literals
# The version of Django Evolution
#
# This is in the format of:
#
# (Major, Minor, Micro, alpha/beta/rc/final, Release Number, Released)
#
VERSION = (2, 1, 0, 'alpha', 0, False)
def get_version_string():
version = '%s.%s' % (VERSION[0], VERSION[1])
if VERSION[2]:
version += ".%s" % VERSION[2]
if VERSION[3] != 'final':
if VERSION[3] == 'rc':
version += ' RC%s' % VERSION[4]
else:
version += ' %s %s' % (VERSION[3], VERSION[4])
if not is_release():
version += " (dev)"
return version
def get_package_version():
version = '%s.%s' % (VERSION[0], VERSION[1])
if VERSION[2]:
version += ".%s" % VERSION[2]
tag = VERSION[3]
if tag != 'final':
if tag == 'alpha':
tag = 'a'
elif tag == 'beta':
tag = 'b'
version += '%s%s' % (tag, VERSION[4])
return version
def is_release():
return VERSION[5]
__version_info__ = VERSION[:-1]
__version__ = get_package_version()
|
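The fix above normalizes the legacy tag names before appending the release number, so 'alpha' and 'beta' become the PEP 440 spellings 'a' and 'b'. A compact worked check of that mapping with sample tuples; the helper below is a sketch, not the project's API.
def package_version(version):
    # version has the record's shape: (major, minor, micro, tag, num, released)
    v = '%s.%s' % (version[0], version[1])
    if version[2]:
        v += '.%s' % version[2]
    tag = {'alpha': 'a', 'beta': 'b'}.get(version[3], version[3])
    if tag != 'final':
        v += '%s%s' % (tag, version[4])
    return v
print(package_version((2, 1, 0, 'alpha', 0, False)))  # 2.1a0, no pkg_resources warning
print(package_version((2, 0, 1, 'final', 0, True)))   # 2.0.1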
14c872b3405326079ba01f9309622bb0188bf8ce
|
Install/toolbox/scripts/utils.py
|
Install/toolbox/scripts/utils.py
|
import sys
import collections
def parameters_from_args(defaults_tuple=None, sys_args=None):
"""Provided a set of tuples for default values, return a list of mapped
variables."""
defaults = collections.OrderedDict(defaults_tuple)
if defaults_tuple is not None:
args = len(sys_args) - 1
for i, key in enumerate(defaults.keys()):
idx = i + 1
if idx <= args:
defaults[key] = sys_args[idx]
return defaults
def msg(output_msg, mtype='message', exception=None):
if mtype == 'error':
arcpy_messages = arcpy.GetMessages()
tb = sys.exc_info()[2]
tbinfo = traceback.format_tb(tb)[0]
if config.mode == 'script':
if exception:
# print the raw exception
print exception
# Arcpy and Python stuff, hopefully also helpful
err_msg = "ArcPy Error: {msg_text}\nPython Error: ${tbinfo}".format(
msg_text=arcpy_messages, tbinfo=tbinfo)
else:
arcpy.AddMessage(output_msg)
if exception:
arcpy.AddError(exception)
arcpy.AddError(arcpy_messages)
arcpy.AddMessage("Python Error: ${tbinfo}".format(tbinfo=tbinfo))
elif config.mode == 'script':
print output_msg
else:
if mtype == 'message':
arcpy.AddMessage(output_msg)
elif mtype == 'warning':
arcpy.AddWarning(output_msg)
|
# -*- coding: utf-8 -*-
import csv
import collections
import sys
import re
import os
import binascii
def parameters_from_args(defaults_tuple=None, sys_args=None):
"""Provided a set of tuples for default values, return a list of mapped
variables."""
defaults = collections.OrderedDict(defaults_tuple)
if defaults_tuple is not None:
args = len(sys_args) - 1
for i, key in enumerate(defaults.keys()):
idx = i + 1
if idx <= args:
defaults[key] = sys_args[idx]
return defaults
def msg(output_msg, mtype='message', exception=None):
if mtype == 'error':
arcpy_messages = arcpy.GetMessages()
tb = sys.exc_info()[2]
tbinfo = traceback.format_tb(tb)[0]
if config.mode == 'script':
if exception:
# print the raw exception
print exception
# Arcpy and Python stuff, hopefully also helpful
err_msg = "ArcPy Error: {msg_text}\nPython Error: ${tbinfo}".format(
msg_text=arcpy_messages, tbinfo=tbinfo)
else:
arcpy.AddMessage(output_msg)
if exception:
arcpy.AddError(exception)
arcpy.AddError(arcpy_messages)
arcpy.AddMessage("Python Error: ${tbinfo}".format(tbinfo=tbinfo))
elif config.mode == 'script':
print output_msg
else:
if mtype == 'message':
arcpy.AddMessage(output_msg)
elif mtype == 'warning':
arcpy.AddWarning(output_msg)
|
Update includes; set encoding for file.
|
Update includes; set encoding for file.
|
Python
|
mpl-2.0
|
genegis/genegis,genegis/genegis,genegis/genegis
|
import sys
import collections
def parameters_from_args(defaults_tuple=None, sys_args=None):
"""Provided a set of tuples for default values, return a list of mapped
variables."""
defaults = collections.OrderedDict(defaults_tuple)
if defaults_tuple is not None:
args = len(sys_args) - 1
for i, key in enumerate(defaults.keys()):
idx = i + 1
if idx <= args:
defaults[key] = sys_args[idx]
return defaults
def msg(output_msg, mtype='message', exception=None):
if mtype == 'error':
arcpy_messages = arcpy.GetMessages()
tb = sys.exc_info()[2]
tbinfo = traceback.format_tb(tb)[0]
if config.mode == 'script':
if exception:
# print the raw exception
print exception
# Arcpy and Python stuff, hopefully also helpful
err_msg = "ArcPy Error: {msg_text}\nPython Error: ${tbinfo}".format(
msg_text=arcpy_messages, tbinfo=tbinfo)
else:
arcpy.AddMessage(output_msg)
if exception:
arcpy.AddError(exception)
arcpy.AddError(arcpy_messages)
arcpy.AddMessage("Python Error: ${tbinfo}".format(tbinfo=tbinfo))
elif config.mode == 'script':
print output_msg
else:
if mtype == 'message':
arcpy.AddMessage(output_msg)
elif mtype == 'warning':
arcpy.AddWarning(output_msg)
Update includes; set encoding for file.
|
# -*- coding: utf-8 -*-
import csv
import collections
import sys
import re
import os
import binascii
def parameters_from_args(defaults_tuple=None, sys_args=None):
"""Provided a set of tuples for default values, return a list of mapped
variables."""
defaults = collections.OrderedDict(defaults_tuple)
if defaults_tuple is not None:
args = len(sys_args) - 1
for i, key in enumerate(defaults.keys()):
idx = i + 1
if idx <= args:
defaults[key] = sys_args[idx]
return defaults
def msg(output_msg, mtype='message', exception=None):
if mtype == 'error':
arcpy_messages = arcpy.GetMessages()
tb = sys.exc_info()[2]
tbinfo = traceback.format_tb(tb)[0]
if config.mode == 'script':
if exception:
# print the raw exception
print exception
# Arcpy and Python stuff, hopefully also helpful
err_msg = "ArcPy Error: {msg_text}\nPython Error: ${tbinfo}".format(
msg_text=arcpy_messages, tbinfo=tbinfo)
else:
arcpy.AddMessage(output_msg)
if exception:
arcpy.AddError(exception)
arcpy.AddError(arcpy_messages)
arcpy.AddMessage("Python Error: ${tbinfo}".format(tbinfo=tbinfo))
elif config.mode == 'script':
print output_msg
else:
if mtype == 'message':
arcpy.AddMessage(output_msg)
elif mtype == 'warning':
arcpy.AddWarning(output_msg)
|
<commit_before>import sys
import collections
def parameters_from_args(defaults_tuple=None, sys_args=None):
"""Provided a set of tuples for default values, return a list of mapped
variables."""
defaults = collections.OrderedDict(defaults_tuple)
if defaults_tuple is not None:
args = len(sys_args) - 1
for i, key in enumerate(defaults.keys()):
idx = i + 1
if idx <= args:
defaults[key] = sys_args[idx]
return defaults
def msg(output_msg, mtype='message', exception=None):
if mtype == 'error':
arcpy_messages = arcpy.GetMessages()
tb = sys.exc_info()[2]
tbinfo = traceback.format_tb(tb)[0]
if config.mode == 'script':
if exception:
# print the raw exception
print exception
# Arcpy and Python stuff, hopefully also helpful
err_msg = "ArcPy Error: {msg_text}\nPython Error: ${tbinfo}".format(
msg_text=arcpy_messages, tbinfo=tbinfo)
else:
arcpy.AddMessage(output_msg)
if exception:
arcpy.AddError(exception)
arcpy.AddError(arcpy_messages)
arcpy.AddMessage("Python Error: ${tbinfo}".format(tbinfo=tbinfo))
elif config.mode == 'script':
print output_msg
else:
if mtype == 'message':
arcpy.AddMessage(output_msg)
elif mtype == 'warning':
arcpy.AddWarning(output_msg)
<commit_msg>Update includes; set encoding for file.<commit_after>
|
# -*- coding: utf-8 -*-
import csv
import collections
import sys
import re
import os
import binascii
def parameters_from_args(defaults_tuple=None, sys_args=None):
"""Provided a set of tuples for default values, return a list of mapped
variables."""
defaults = collections.OrderedDict(defaults_tuple)
if defaults_tuple is not None:
args = len(sys_args) - 1
for i, key in enumerate(defaults.keys()):
idx = i + 1
if idx <= args:
defaults[key] = sys_args[idx]
return defaults
def msg(output_msg, mtype='message', exception=None):
if mtype == 'error':
arcpy_messages = arcpy.GetMessages()
tb = sys.exc_info()[2]
tbinfo = traceback.format_tb(tb)[0]
if config.mode == 'script':
if exception:
# print the raw exception
print exception
# Arcpy and Python stuff, hopefully also helpful
err_msg = "ArcPy Error: {msg_text}\nPython Error: ${tbinfo}".format(
msg_text=arcpy_messages, tbinfo=tbinfo)
else:
arcpy.AddMessage(output_msg)
if exception:
arcpy.AddError(exception)
arcpy.AddError(arcpy_messages)
arcpy.AddMessage("Python Error: ${tbinfo}".format(tbinfo=tbinfo))
elif config.mode == 'script':
print output_msg
else:
if mtype == 'message':
arcpy.AddMessage(output_msg)
elif mtype == 'warning':
arcpy.AddWarning(output_msg)
|
import sys
import collections
def parameters_from_args(defaults_tuple=None, sys_args=None):
"""Provided a set of tuples for default values, return a list of mapped
variables."""
defaults = collections.OrderedDict(defaults_tuple)
if defaults_tuple is not None:
args = len(sys_args) - 1
for i, key in enumerate(defaults.keys()):
idx = i + 1
if idx <= args:
defaults[key] = sys_args[idx]
return defaults
def msg(output_msg, mtype='message', exception=None):
if mtype == 'error':
arcpy_messages = arcpy.GetMessages()
tb = sys.exc_info()[2]
tbinfo = traceback.format_tb(tb)[0]
if config.mode == 'script':
if exception:
# print the raw exception
print exception
# Arcpy and Python stuff, hopefully also helpful
err_msg = "ArcPy Error: {msg_text}\nPython Error: ${tbinfo}".format(
msg_text=arcpy_messages, tbinfo=tbinfo)
else:
arcpy.AddMessage(output_msg)
if exception:
arcpy.AddError(exception)
arcpy.AddError(arcpy_messages)
arcpy.AddMessage("Python Error: ${tbinfo}".format(tbinfo=tbinfo))
elif config.mode == 'script':
print output_msg
else:
if mtype == 'message':
arcpy.AddMessage(output_msg)
elif mtype == 'warning':
arcpy.AddWarning(output_msg)
Update includes; set encoding for file.# -*- coding: utf-8 -*-
import csv
import collections
import sys
import re
import os
import binascii
def parameters_from_args(defaults_tuple=None, sys_args=None):
"""Provided a set of tuples for default values, return a list of mapped
variables."""
defaults = collections.OrderedDict(defaults_tuple)
if defaults_tuple is not None:
args = len(sys_args) - 1
for i, key in enumerate(defaults.keys()):
idx = i + 1
if idx <= args:
defaults[key] = sys_args[idx]
return defaults
def msg(output_msg, mtype='message', exception=None):
if mtype == 'error':
arcpy_messages = arcpy.GetMessages()
tb = sys.exc_info()[2]
tbinfo = traceback.format_tb(tb)[0]
if config.mode == 'script':
if exception:
# print the raw exception
print exception
# Arcpy and Python stuff, hopefully also helpful
err_msg = "ArcPy Error: {msg_text}\nPython Error: ${tbinfo}".format(
msg_text=arcpy_messages, tbinfo=tbinfo)
else:
arcpy.AddMessage(output_msg)
if exception:
arcpy.AddError(exception)
arcpy.AddError(arcpy_messages)
arcpy.AddMessage("Python Error: ${tbinfo}".format(tbinfo=tbinfo))
elif config.mode == 'script':
print output_msg
else:
if mtype == 'message':
arcpy.AddMessage(output_msg)
elif mtype == 'warning':
arcpy.AddWarning(output_msg)
|
<commit_before>import sys
import collections
def parameters_from_args(defaults_tuple=None, sys_args=None):
"""Provided a set of tuples for default values, return a list of mapped
variables."""
defaults = collections.OrderedDict(defaults_tuple)
if defaults_tuple is not None:
args = len(sys_args) - 1
for i, key in enumerate(defaults.keys()):
idx = i + 1
if idx <= args:
defaults[key] = sys_args[idx]
return defaults
def msg(output_msg, mtype='message', exception=None):
if mtype == 'error':
arcpy_messages = arcpy.GetMessages()
tb = sys.exc_info()[2]
tbinfo = traceback.format_tb(tb)[0]
if config.mode == 'script':
if exception:
# print the raw exception
print exception
# Arcpy and Python stuff, hopefully also helpful
err_msg = "ArcPy Error: {msg_text}\nPython Error: ${tbinfo}".format(
msg_text=arcpy_messages, tbinfo=tbinfo)
else:
arcpy.AddMessage(output_msg)
if exception:
arcpy.AddError(exception)
arcpy.AddError(arcpy_messages)
arcpy.AddMessage("Python Error: ${tbinfo}".format(tbinfo=tbinfo))
elif config.mode == 'script':
print output_msg
else:
if mtype == 'message':
arcpy.AddMessage(output_msg)
elif mtype == 'warning':
arcpy.AddWarning(output_msg)
<commit_msg>Update includes; set encoding for file.<commit_after># -*- coding: utf-8 -*-
import csv
import collections
import sys
import re
import os
import binascii
def parameters_from_args(defaults_tuple=None, sys_args=None):
"""Provided a set of tuples for default values, return a list of mapped
variables."""
defaults = collections.OrderedDict(defaults_tuple)
if defaults_tuple is not None:
args = len(sys_args) - 1
for i, key in enumerate(defaults.keys()):
idx = i + 1
if idx <= args:
defaults[key] = sys_args[idx]
return defaults
def msg(output_msg, mtype='message', exception=None):
if mtype == 'error':
arcpy_messages = arcpy.GetMessages()
tb = sys.exc_info()[2]
tbinfo = traceback.format_tb(tb)[0]
if config.mode == 'script':
if exception:
# print the raw exception
print exception
# Arcpy and Python stuff, hopefully also helpful
err_msg = "ArcPy Error: {msg_text}\nPython Error: ${tbinfo}".format(
msg_text=arcpy_messages, tbinfo=tbinfo)
else:
arcpy.AddMessage(output_msg)
if exception:
arcpy.AddError(exception)
arcpy.AddError(arcpy_messages)
arcpy.AddMessage("Python Error: ${tbinfo}".format(tbinfo=tbinfo))
elif config.mode == 'script':
print output_msg
else:
if mtype == 'message':
arcpy.AddMessage(output_msg)
elif mtype == 'warning':
arcpy.AddWarning(output_msg)
|
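The msg() helper above fans a single call out to either plain printing (script mode) or the arcpy.AddMessage/AddWarning/AddError channel (toolbox mode), keyed off config.mode. A stripped-down sketch of that dispatch, with arcpy stubbed out because it only exists inside ArcGIS; the names here are illustrative.
MODE = 'script'  # the record reads this from a config module instead
def emit(text, mtype='message'):
    if MODE == 'script':
        print(text)
    else:
        # a toolbox run would route through arcpy.AddMessage / AddWarning / AddError
        raise RuntimeError('arcpy is only importable inside ArcGIS')
emit('processing started')
emit('field missing, using a default', mtype='warning')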
d4c9603e4c5913b02746af3dec21f682d906e001
|
nn/file/__init__.py
|
nn/file/__init__.py
|
import functools
import tensorflow as tf
from . import cnn_dailymail_rc
from .. import collections
from ..flags import FLAGS
from ..util import func_scope, dtypes
from .util import batch_queue, add_queue_runner
READERS = { "cnn_dailymail_rc": cnn_dailymail_rc.read_files }
@func_scope()
def read_files(file_pattern, file_format):
return monitored_batch_queue(
*READERS[file_format](_file_pattern_to_names(file_pattern)))
@func_scope()
def _file_pattern_to_names(pattern):
return tf.train.string_input_producer(tf.train.match_filenames_once(pattern),
num_epochs=FLAGS.num_epochs,
capacity=FLAGS.filename_queue_capacity)
@func_scope()
def monitored_batch_queue(*tensors):
queue = batch_queue(dtypes(*tensors))
collections.add_metric(queue.size(), "batches_in_queue")
add_queue_runner(queue, [queue.enqueue(tensors)])
results = queue.dequeue()
for tensor, result in zip(tensors, results):
result.set_shape(tensor.get_shape())
return results
|
import functools
import tensorflow as tf
from . import cnn_dailymail_rc
from .. import collections
from ..flags import FLAGS
from ..util import func_scope, dtypes
from .util import batch_queue, add_queue_runner
READERS = { "cnn_dailymail_rc": cnn_dailymail_rc.read_files }
@func_scope()
def read_files(file_pattern, file_format):
return monitored_batch_queue(
*READERS[file_format](_file_pattern_to_names(file_pattern)))
@func_scope()
def _file_pattern_to_names(pattern):
return tf.train.string_input_producer(tf.train.match_filenames_once(pattern),
num_epochs=FLAGS.num_epochs,
capacity=FLAGS.filename_queue_capacity)
@func_scope()
def monitored_batch_queue(*tensors, metric_name="batches_in_queue"):
queue = batch_queue(dtypes(*tensors))
collections.add_metric(queue.size(), metric_name)
add_queue_runner(queue, [queue.enqueue(tensors)])
results = queue.dequeue()
for tensor, result in zip(tensors, results):
result.set_shape(tensor.get_shape())
return results
|
Make metric_name option of monitored_batch_queue
|
Make metric_name option of monitored_batch_queue
|
Python
|
unlicense
|
raviqqe/tensorflow-extenteten,raviqqe/tensorflow-extenteten
|
import functools
import tensorflow as tf
from . import cnn_dailymail_rc
from .. import collections
from ..flags import FLAGS
from ..util import func_scope, dtypes
from .util import batch_queue, add_queue_runner
READERS = { "cnn_dailymail_rc": cnn_dailymail_rc.read_files }
@func_scope()
def read_files(file_pattern, file_format):
return monitored_batch_queue(
*READERS[file_format](_file_pattern_to_names(file_pattern)))
@func_scope()
def _file_pattern_to_names(pattern):
return tf.train.string_input_producer(tf.train.match_filenames_once(pattern),
num_epochs=FLAGS.num_epochs,
capacity=FLAGS.filename_queue_capacity)
@func_scope()
def monitored_batch_queue(*tensors):
queue = batch_queue(dtypes(*tensors))
collections.add_metric(queue.size(), "batches_in_queue")
add_queue_runner(queue, [queue.enqueue(tensors)])
results = queue.dequeue()
for tensor, result in zip(tensors, results):
result.set_shape(tensor.get_shape())
return results
Make metric_name option of monitored_batch_queue
|
import functools
import tensorflow as tf
from . import cnn_dailymail_rc
from .. import collections
from ..flags import FLAGS
from ..util import func_scope, dtypes
from .util import batch_queue, add_queue_runner
READERS = { "cnn_dailymail_rc": cnn_dailymail_rc.read_files }
@func_scope()
def read_files(file_pattern, file_format):
return monitored_batch_queue(
*READERS[file_format](_file_pattern_to_names(file_pattern)))
@func_scope()
def _file_pattern_to_names(pattern):
return tf.train.string_input_producer(tf.train.match_filenames_once(pattern),
num_epochs=FLAGS.num_epochs,
capacity=FLAGS.filename_queue_capacity)
@func_scope()
def monitored_batch_queue(*tensors, metric_name="batches_in_queue"):
queue = batch_queue(dtypes(*tensors))
collections.add_metric(queue.size(), metric_name)
add_queue_runner(queue, [queue.enqueue(tensors)])
results = queue.dequeue()
for tensor, result in zip(tensors, results):
result.set_shape(tensor.get_shape())
return results
|
<commit_before>import functools
import tensorflow as tf
from . import cnn_dailymail_rc
from .. import collections
from ..flags import FLAGS
from ..util import func_scope, dtypes
from .util import batch_queue, add_queue_runner
READERS = { "cnn_dailymail_rc": cnn_dailymail_rc.read_files }
@func_scope()
def read_files(file_pattern, file_format):
return monitored_batch_queue(
*READERS[file_format](_file_pattern_to_names(file_pattern)))
@func_scope()
def _file_pattern_to_names(pattern):
return tf.train.string_input_producer(tf.train.match_filenames_once(pattern),
num_epochs=FLAGS.num_epochs,
capacity=FLAGS.filename_queue_capacity)
@func_scope()
def monitored_batch_queue(*tensors):
queue = batch_queue(dtypes(*tensors))
collections.add_metric(queue.size(), "batches_in_queue")
add_queue_runner(queue, [queue.enqueue(tensors)])
results = queue.dequeue()
for tensor, result in zip(tensors, results):
result.set_shape(tensor.get_shape())
return results
<commit_msg>Make metric_name option of monitored_batch_queue<commit_after>
|
import functools
import tensorflow as tf
from . import cnn_dailymail_rc
from .. import collections
from ..flags import FLAGS
from ..util import func_scope, dtypes
from .util import batch_queue, add_queue_runner
READERS = { "cnn_dailymail_rc": cnn_dailymail_rc.read_files }
@func_scope()
def read_files(file_pattern, file_format):
return monitored_batch_queue(
*READERS[file_format](_file_pattern_to_names(file_pattern)))
@func_scope()
def _file_pattern_to_names(pattern):
return tf.train.string_input_producer(tf.train.match_filenames_once(pattern),
num_epochs=FLAGS.num_epochs,
capacity=FLAGS.filename_queue_capacity)
@func_scope()
def monitored_batch_queue(*tensors, metric_name="batches_in_queue"):
queue = batch_queue(dtypes(*tensors))
collections.add_metric(queue.size(), metric_name)
add_queue_runner(queue, [queue.enqueue(tensors)])
results = queue.dequeue()
for tensor, result in zip(tensors, results):
result.set_shape(tensor.get_shape())
return results
|
import functools
import tensorflow as tf
from . import cnn_dailymail_rc
from .. import collections
from ..flags import FLAGS
from ..util import func_scope, dtypes
from .util import batch_queue, add_queue_runner
READERS = { "cnn_dailymail_rc": cnn_dailymail_rc.read_files }
@func_scope()
def read_files(file_pattern, file_format):
return monitored_batch_queue(
*READERS[file_format](_file_pattern_to_names(file_pattern)))
@func_scope()
def _file_pattern_to_names(pattern):
return tf.train.string_input_producer(tf.train.match_filenames_once(pattern),
num_epochs=FLAGS.num_epochs,
capacity=FLAGS.filename_queue_capacity)
@func_scope()
def monitored_batch_queue(*tensors):
queue = batch_queue(dtypes(*tensors))
collections.add_metric(queue.size(), "batches_in_queue")
add_queue_runner(queue, [queue.enqueue(tensors)])
results = queue.dequeue()
for tensor, result in zip(tensors, results):
result.set_shape(tensor.get_shape())
return results
Make metric_name option of monitored_batch_queueimport functools
import tensorflow as tf
from . import cnn_dailymail_rc
from .. import collections
from ..flags import FLAGS
from ..util import func_scope, dtypes
from .util import batch_queue, add_queue_runner
READERS = { "cnn_dailymail_rc": cnn_dailymail_rc.read_files }
@func_scope()
def read_files(file_pattern, file_format):
return monitored_batch_queue(
*READERS[file_format](_file_pattern_to_names(file_pattern)))
@func_scope()
def _file_pattern_to_names(pattern):
return tf.train.string_input_producer(tf.train.match_filenames_once(pattern),
num_epochs=FLAGS.num_epochs,
capacity=FLAGS.filename_queue_capacity)
@func_scope()
def monitored_batch_queue(*tensors, metric_name="batches_in_queue"):
queue = batch_queue(dtypes(*tensors))
collections.add_metric(queue.size(), metric_name)
add_queue_runner(queue, [queue.enqueue(tensors)])
results = queue.dequeue()
for tensor, result in zip(tensors, results):
result.set_shape(tensor.get_shape())
return results
|
<commit_before>import functools
import tensorflow as tf
from . import cnn_dailymail_rc
from .. import collections
from ..flags import FLAGS
from ..util import func_scope, dtypes
from .util import batch_queue, add_queue_runner
READERS = { "cnn_dailymail_rc": cnn_dailymail_rc.read_files }
@func_scope()
def read_files(file_pattern, file_format):
return monitored_batch_queue(
*READERS[file_format](_file_pattern_to_names(file_pattern)))
@func_scope()
def _file_pattern_to_names(pattern):
return tf.train.string_input_producer(tf.train.match_filenames_once(pattern),
num_epochs=FLAGS.num_epochs,
capacity=FLAGS.filename_queue_capacity)
@func_scope()
def monitored_batch_queue(*tensors):
queue = batch_queue(dtypes(*tensors))
collections.add_metric(queue.size(), "batches_in_queue")
add_queue_runner(queue, [queue.enqueue(tensors)])
results = queue.dequeue()
for tensor, result in zip(tensors, results):
result.set_shape(tensor.get_shape())
return results
<commit_msg>Make metric_name option of monitored_batch_queue<commit_after>import functools
import tensorflow as tf
from . import cnn_dailymail_rc
from .. import collections
from ..flags import FLAGS
from ..util import func_scope, dtypes
from .util import batch_queue, add_queue_runner
READERS = { "cnn_dailymail_rc": cnn_dailymail_rc.read_files }
@func_scope()
def read_files(file_pattern, file_format):
return monitored_batch_queue(
*READERS[file_format](_file_pattern_to_names(file_pattern)))
@func_scope()
def _file_pattern_to_names(pattern):
return tf.train.string_input_producer(tf.train.match_filenames_once(pattern),
num_epochs=FLAGS.num_epochs,
capacity=FLAGS.filename_queue_capacity)
@func_scope()
def monitored_batch_queue(*tensors, metric_name="batches_in_queue"):
queue = batch_queue(dtypes(*tensors))
collections.add_metric(queue.size(), metric_name)
add_queue_runner(queue, [queue.enqueue(tensors)])
results = queue.dequeue()
for tensor, result in zip(tensors, results):
result.set_shape(tensor.get_shape())
return results
|
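Because metric_name is declared after *tensors, it is keyword-only: existing positional callers keep the old 'batches_in_queue' label, and new callers can label each queue's metric distinctly. A toy stand-in showing just that calling convention; the real function builds a TensorFlow queue, which this deliberately omits.
def monitored(*tensors, metric_name='batches_in_queue'):
    # stand-in: report which label a call would register and how many tensors it got
    return metric_name, len(tensors)
print(monitored(1, 2, 3))                                    # ('batches_in_queue', 3)
print(monitored(1, 2, metric_name='eval_batches_in_queue'))  # ('eval_batches_in_queue', 2)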
4d64841bcf9a4eb4862643f9038d865c0b9dacf5
|
goodtablesio/helpers/retrieve.py
|
goodtablesio/helpers/retrieve.py
|
from goodtablesio import services
# Module API
def get_job(job_id):
"""Get job by identifier.
Args:
job_id (str): job identifier
Returns:
dict: job result if job was found, None otherwise
"""
result = services.database['jobs'].find_one(job_id=job_id)
if not result:
return None
# TODO: we need to store the status in the DB as we can no longer rely on
# the job id being the same one used by a celery task
status = 'Not Implemented'
return {'status': status, 'result': result}
def get_job_ids():
"""Get all job identifiers.
Returns:
str[]: list of job identifiers
"""
return [r['job_id']
for r in
services.database['jobs'].find(order_by=['-created'])]
|
from goodtablesio import services
# Module API
def get_job(job_id):
"""Get job by identifier.
Args:
job_id (str): job identifier
Returns:
dict: job result if job was found, None otherwise
"""
result = services.database['jobs'].find_one(job_id=job_id)
if not result:
return None
# TODO: we need to store the status in the DB as we can no longer rely on
# the job id being the same one used by a celery task
status = 'Not Implemented'
# TODO: this should not be needed after #33
if 'report' not in result:
result['report'] = None
if 'finished' not in result:
result['finished'] = None
return {'status': status, 'result': result}
def get_job_ids():
"""Get all job identifiers.
Returns:
str[]: list of job identifiers
"""
return [r['job_id']
for r in
services.database['jobs'].find(order_by=['-created'])]
|
Make sure report is part of the job dict
|
Make sure report is part of the job dict
To get around failures like this for now:
https://travis-ci.org/frictionlessdata/goodtables.io/builds/179121528
This should definitely be investigated and fixed as part of #33
|
Python
|
agpl-3.0
|
frictionlessdata/goodtables.io,frictionlessdata/goodtables.io,frictionlessdata/goodtables.io,frictionlessdata/goodtables.io
|
from goodtablesio import services
# Module API
def get_job(job_id):
"""Get job by identifier.
Args:
job_id (str): job identifier
Returns:
dict: job result if job was found, None otherwise
"""
result = services.database['jobs'].find_one(job_id=job_id)
if not result:
return None
# TODO: we need to store the status in the DB as we can no longer rely on
# the job id being the same one used by a celery task
status = 'Not Implemented'
return {'status': status, 'result': result}
def get_job_ids():
"""Get all job identifiers.
Returns:
str[]: list of job identifiers
"""
return [r['job_id']
for r in
services.database['jobs'].find(order_by=['-created'])]
Make sure report is part of the job dict
To get around failures like this for now:
https://travis-ci.org/frictionlessdata/goodtables.io/builds/179121528
This should definitely be investigated and fixed as part of #33
|
from goodtablesio import services
# Module API
def get_job(job_id):
"""Get job by identifier.
Args:
job_id (str): job identifier
Returns:
dict: job result if job was found, None otherwise
"""
result = services.database['jobs'].find_one(job_id=job_id)
if not result:
return None
# TODO: we need to store the status in the DB as we can no longer rely on
# the job id being the same one used by a celery task
status = 'Not Implemented'
# TODO: this should not be needed after #33
if 'report' not in result:
result['report'] = None
if 'finished' not in result:
result['finished'] = None
return {'status': status, 'result': result}
def get_job_ids():
"""Get all job identifiers.
Returns:
str[]: list of job identifiers
"""
return [r['job_id']
for r in
services.database['jobs'].find(order_by=['-created'])]
|
<commit_before>from goodtablesio import services
# Module API
def get_job(job_id):
"""Get job by identifier.
Args:
job_id (str): job identifier
Returns:
dict: job result if job was found, None otherwise
"""
result = services.database['jobs'].find_one(job_id=job_id)
if not result:
return None
# TODO: we need to store the status in the DB as we can no longer rely on
# the job id being the same one used by a celery task
status = 'Not Implemented'
return {'status': status, 'result': result}
def get_job_ids():
"""Get all job identifiers.
Returns:
str[]: list of job identifiers
"""
return [r['job_id']
for r in
services.database['jobs'].find(order_by=['-created'])]
<commit_msg>Make sure report is part of the job dict
To get around failures like this for now:
https://travis-ci.org/frictionlessdata/goodtables.io/builds/179121528
This should definitely be investigated and fixed as part of #33<commit_after>
|
from goodtablesio import services
# Module API
def get_job(job_id):
"""Get job by identifier.
Args:
job_id (str): job identifier
Returns:
dict: job result if job was found, None otherwise
"""
result = services.database['jobs'].find_one(job_id=job_id)
if not result:
return None
# TODO: we need to store the status in the DB as we can no longer rely on
# the job id being the same one used by a celery task
status = 'Not Implemented'
# TODO: this should not be needed after #33
if 'report' not in result:
result['report'] = None
if 'finished' not in result:
result['finished'] = None
return {'status': status, 'result': result}
def get_job_ids():
"""Get all job identifiers.
Returns:
str[]: list of job identifiers
"""
return [r['job_id']
for r in
services.database['jobs'].find(order_by=['-created'])]
|
from goodtablesio import services
# Module API
def get_job(job_id):
"""Get job by identifier.
Args:
job_id (str): job identifier
Returns:
dict: job result if job was found, None otherwise
"""
result = services.database['jobs'].find_one(job_id=job_id)
if not result:
return None
# TODO: we need to store the status in the DB as we can no longer rely on
# the job id being the same one used by a celery task
status = 'Not Implemented'
return {'status': status, 'result': result}
def get_job_ids():
"""Get all job identifiers.
Returns:
str[]: list of job identifiers
"""
return [r['job_id']
for r in
services.database['jobs'].find(order_by=['-created'])]
Make sure report is part of the job dict
To get around failures like this for now:
https://travis-ci.org/frictionlessdata/goodtables.io/builds/179121528
This should definitely be investigated and fixed as part of #33from goodtablesio import services
# Module API
def get_job(job_id):
"""Get job by identifier.
Args:
job_id (str): job identifier
Returns:
dict: job result if job was found, None otherwise
"""
result = services.database['jobs'].find_one(job_id=job_id)
if not result:
return None
# TODO: we need to store the status in the DB as we can no longer rely on
# the job id being the same one used by a celery task
status = 'Not Implemented'
# TODO: this should not be needed after #33
if 'report' not in result:
result['report'] = None
if 'finished' not in result:
result['finished'] = None
return {'status': status, 'result': result}
def get_job_ids():
"""Get all job identifiers.
Returns:
str[]: list of job identifiers
"""
return [r['job_id']
for r in
services.database['jobs'].find(order_by=['-created'])]
|
<commit_before>from goodtablesio import services
# Module API
def get_job(job_id):
"""Get job by identifier.
Args:
job_id (str): job identifier
Returns:
dict: job result if job was found, None otherwise
"""
result = services.database['jobs'].find_one(job_id=job_id)
if not result:
return None
# TODO: we need to store the status in the DB as we can no longer rely on
# the job id being the same one used by a celery task
status = 'Not Implemented'
return {'status': status, 'result': result}
def get_job_ids():
"""Get all job identifiers.
Returns:
str[]: list of job identifiers
"""
return [r['job_id']
for r in
services.database['jobs'].find(order_by=['-created'])]
<commit_msg>Make sure report is part of the job dict
To get around failures like this for now:
https://travis-ci.org/frictionlessdata/goodtables.io/builds/179121528
This should definitely be investigated and fixed as part of #33<commit_after>from goodtablesio import services
# Module API
def get_job(job_id):
"""Get job by identifier.
Args:
job_id (str): job identifier
Returns:
dict: job result if job was found, None otherwise
"""
result = services.database['jobs'].find_one(job_id=job_id)
if not result:
return None
# TODO: we need to store the status in the DB as we can no longer rely on
# the job id being the same one used by a celery task
status = 'Not Implemented'
# TODO: this should not be needed after #33
if 'report' not in result:
result['report'] = None
if 'finished' not in result:
result['finished'] = None
return {'status': status, 'result': result}
def get_job_ids():
"""Get all job identifiers.
Returns:
str[]: list of job identifiers
"""
return [r['job_id']
for r in
services.database['jobs'].find(order_by=['-created'])]
|
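The interim fix above backfills 'report' and 'finished' on rows created before those columns existed. dict.setdefault expresses the same backfill in one pass; a sketch only, not the project's code.
def backfill(row):
    # same effect as the explicit 'not in' checks in the record
    for key in ('report', 'finished'):
        row.setdefault(key, None)
    return row
print(backfill({'job_id': 'abc'}))
# {'job_id': 'abc', 'report': None, 'finished': None}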
e8ad5aafb63c2aea5b855b000cafcc1ba9248af6
|
malcolm/modules/scanning/parts/simultaneousaxespart.py
|
malcolm/modules/scanning/parts/simultaneousaxespart.py
|
from annotypes import Anno, Array, Union, Sequence, add_call_types
from malcolm.core import Part, StringArrayMeta, Widget, config_tag, \
PartRegistrar, APartName
from ..hooks import ValidateHook, AAxesToMove
with Anno("Initial value for set of axes that can be moved at the same time"):
ASimultaneousAxes = Array[str]
USimultaneousAxes = Union[ASimultaneousAxes, Sequence[str], str]
class SimultaneousAxesPart(Part):
def __init__(self, name="simultaneousAxes", value=None):
# type: (APartName, USimultaneousAxes) -> None
super(SimultaneousAxesPart, self).__init__(name)
self.attr = StringArrayMeta(
"Set of axes that can be specified in axesToMove at configure",
tags=[Widget.TABLE.tag(), config_tag()]
).create_attribute_model(value)
# Hooks
self.register_hooked(ValidateHook, self.validate)
# This will be serialized, so maintain camelCase for axesToMove
# noinspection PyPep8Naming
@add_call_types
def validate(self, axesToMove):
# type: (AAxesToMove) -> None
assert not set(axesToMove) - set(self.attr.value), \
"Can only move %s simultaneously, requested %s" % (
list(self.attr.value), axesToMove)
def setup(self, registrar):
# type: (PartRegistrar) -> None
registrar.add_attribute_model(
"simultaneousAxes", self.attr, self.attr.set_value)
|
from annotypes import Anno, Array, Union, Sequence, add_call_types
from malcolm.core import Part, StringArrayMeta, Widget, config_tag, \
PartRegistrar, APartName
from ..hooks import ValidateHook, AAxesToMove
with Anno("Initial value for set of axes that can be moved at the same time"):
ASimultaneousAxes = Array[str]
USimultaneousAxes = Union[ASimultaneousAxes, Sequence[str], str]
class SimultaneousAxesPart(Part):
def __init__(self, name="simultaneousAxes", value=None):
# type: (APartName, USimultaneousAxes) -> None
super(SimultaneousAxesPart, self).__init__(name)
self.attr = StringArrayMeta(
"Set of axes that can be specified in axesToMove at configure",
tags=[Widget.TEXTINPUT.tag(), config_tag()]
).create_attribute_model(value)
# Hooks
self.register_hooked(ValidateHook, self.validate)
# This will be serialized, so maintain camelCase for axesToMove
# noinspection PyPep8Naming
@add_call_types
def validate(self, axesToMove):
# type: (AAxesToMove) -> None
assert not set(axesToMove) - set(self.attr.value), \
"Can only move %s simultaneously, requested %s" % (
list(self.attr.value), axesToMove)
def setup(self, registrar):
# type: (PartRegistrar) -> None
registrar.add_attribute_model(
"simultaneousAxes", self.attr, self.attr.set_value)
|
Change Widget tag on SimultaneousAxes to TEXTINPUT
|
Change Widget tag on SimultaneousAxes to TEXTINPUT
|
Python
|
apache-2.0
|
dls-controls/pymalcolm,dls-controls/pymalcolm,dls-controls/pymalcolm
|
from annotypes import Anno, Array, Union, Sequence, add_call_types
from malcolm.core import Part, StringArrayMeta, Widget, config_tag, \
PartRegistrar, APartName
from ..hooks import ValidateHook, AAxesToMove
with Anno("Initial value for set of axes that can be moved at the same time"):
ASimultaneousAxes = Array[str]
USimultaneousAxes = Union[ASimultaneousAxes, Sequence[str], str]
class SimultaneousAxesPart(Part):
def __init__(self, name="simultaneousAxes", value=None):
# type: (APartName, USimultaneousAxes) -> None
super(SimultaneousAxesPart, self).__init__(name)
self.attr = StringArrayMeta(
"Set of axes that can be specified in axesToMove at configure",
tags=[Widget.TABLE.tag(), config_tag()]
).create_attribute_model(value)
# Hooks
self.register_hooked(ValidateHook, self.validate)
# This will be serialized, so maintain camelCase for axesToMove
# noinspection PyPep8Naming
@add_call_types
def validate(self, axesToMove):
# type: (AAxesToMove) -> None
assert not set(axesToMove) - set(self.attr.value), \
"Can only move %s simultaneously, requested %s" % (
list(self.attr.value), axesToMove)
def setup(self, registrar):
# type: (PartRegistrar) -> None
registrar.add_attribute_model(
"simultaneousAxes", self.attr, self.attr.set_value)
Change Widget tag on SimultaneousAxes to TEXTINPUT
|
from annotypes import Anno, Array, Union, Sequence, add_call_types
from malcolm.core import Part, StringArrayMeta, Widget, config_tag, \
PartRegistrar, APartName
from ..hooks import ValidateHook, AAxesToMove
with Anno("Initial value for set of axes that can be moved at the same time"):
ASimultaneousAxes = Array[str]
USimultaneousAxes = Union[ASimultaneousAxes, Sequence[str], str]
class SimultaneousAxesPart(Part):
def __init__(self, name="simultaneousAxes", value=None):
# type: (APartName, USimultaneousAxes) -> None
super(SimultaneousAxesPart, self).__init__(name)
self.attr = StringArrayMeta(
"Set of axes that can be specified in axesToMove at configure",
tags=[Widget.TEXTINPUT.tag(), config_tag()]
).create_attribute_model(value)
# Hooks
self.register_hooked(ValidateHook, self.validate)
# This will be serialized, so maintain camelCase for axesToMove
# noinspection PyPep8Naming
@add_call_types
def validate(self, axesToMove):
# type: (AAxesToMove) -> None
assert not set(axesToMove) - set(self.attr.value), \
"Can only move %s simultaneously, requested %s" % (
list(self.attr.value), axesToMove)
def setup(self, registrar):
# type: (PartRegistrar) -> None
registrar.add_attribute_model(
"simultaneousAxes", self.attr, self.attr.set_value)
|
<commit_before>from annotypes import Anno, Array, Union, Sequence, add_call_types
from malcolm.core import Part, StringArrayMeta, Widget, config_tag, \
PartRegistrar, APartName
from ..hooks import ValidateHook, AAxesToMove
with Anno("Initial value for set of axes that can be moved at the same time"):
ASimultaneousAxes = Array[str]
USimultaneousAxes = Union[ASimultaneousAxes, Sequence[str], str]
class SimultaneousAxesPart(Part):
def __init__(self, name="simultaneousAxes", value=None):
# type: (APartName, USimultaneousAxes) -> None
super(SimultaneousAxesPart, self).__init__(name)
self.attr = StringArrayMeta(
"Set of axes that can be specified in axesToMove at configure",
tags=[Widget.TABLE.tag(), config_tag()]
).create_attribute_model(value)
# Hooks
self.register_hooked(ValidateHook, self.validate)
# This will be serialized, so maintain camelCase for axesToMove
# noinspection PyPep8Naming
@add_call_types
def validate(self, axesToMove):
# type: (AAxesToMove) -> None
assert not set(axesToMove) - set(self.attr.value), \
"Can only move %s simultaneously, requested %s" % (
list(self.attr.value), axesToMove)
def setup(self, registrar):
# type: (PartRegistrar) -> None
registrar.add_attribute_model(
"simultaneousAxes", self.attr, self.attr.set_value)
<commit_msg>Change Widget tag on SimultaneousAxes to TEXTINPUT<commit_after>
|
from annotypes import Anno, Array, Union, Sequence, add_call_types
from malcolm.core import Part, StringArrayMeta, Widget, config_tag, \
PartRegistrar, APartName
from ..hooks import ValidateHook, AAxesToMove
with Anno("Initial value for set of axes that can be moved at the same time"):
ASimultaneousAxes = Array[str]
USimultaneousAxes = Union[ASimultaneousAxes, Sequence[str], str]
class SimultaneousAxesPart(Part):
def __init__(self, name="simultaneousAxes", value=None):
# type: (APartName, USimultaneousAxes) -> None
super(SimultaneousAxesPart, self).__init__(name)
self.attr = StringArrayMeta(
"Set of axes that can be specified in axesToMove at configure",
tags=[Widget.TEXTINPUT.tag(), config_tag()]
).create_attribute_model(value)
# Hooks
self.register_hooked(ValidateHook, self.validate)
# This will be serialized, so maintain camelCase for axesToMove
# noinspection PyPep8Naming
@add_call_types
def validate(self, axesToMove):
# type: (AAxesToMove) -> None
assert not set(axesToMove) - set(self.attr.value), \
"Can only move %s simultaneously, requested %s" % (
list(self.attr.value), axesToMove)
def setup(self, registrar):
# type: (PartRegistrar) -> None
registrar.add_attribute_model(
"simultaneousAxes", self.attr, self.attr.set_value)
|
from annotypes import Anno, Array, Union, Sequence, add_call_types
from malcolm.core import Part, StringArrayMeta, Widget, config_tag, \
PartRegistrar, APartName
from ..hooks import ValidateHook, AAxesToMove
with Anno("Initial value for set of axes that can be moved at the same time"):
ASimultaneousAxes = Array[str]
USimultaneousAxes = Union[ASimultaneousAxes, Sequence[str], str]
class SimultaneousAxesPart(Part):
def __init__(self, name="simultaneousAxes", value=None):
# type: (APartName, USimultaneousAxes) -> None
super(SimultaneousAxesPart, self).__init__(name)
self.attr = StringArrayMeta(
"Set of axes that can be specified in axesToMove at configure",
tags=[Widget.TABLE.tag(), config_tag()]
).create_attribute_model(value)
# Hooks
self.register_hooked(ValidateHook, self.validate)
# This will be serialized, so maintain camelCase for axesToMove
# noinspection PyPep8Naming
@add_call_types
def validate(self, axesToMove):
# type: (AAxesToMove) -> None
assert not set(axesToMove) - set(self.attr.value), \
"Can only move %s simultaneously, requested %s" % (
list(self.attr.value), axesToMove)
def setup(self, registrar):
# type: (PartRegistrar) -> None
registrar.add_attribute_model(
"simultaneousAxes", self.attr, self.attr.set_value)
Change Widget tag on SimultaneousAxes to TEXTINPUT
from annotypes import Anno, Array, Union, Sequence, add_call_types
from malcolm.core import Part, StringArrayMeta, Widget, config_tag, \
PartRegistrar, APartName
from ..hooks import ValidateHook, AAxesToMove
with Anno("Initial value for set of axes that can be moved at the same time"):
ASimultaneousAxes = Array[str]
USimultaneousAxes = Union[ASimultaneousAxes, Sequence[str], str]
class SimultaneousAxesPart(Part):
def __init__(self, name="simultaneousAxes", value=None):
# type: (APartName, USimultaneousAxes) -> None
super(SimultaneousAxesPart, self).__init__(name)
self.attr = StringArrayMeta(
"Set of axes that can be specified in axesToMove at configure",
tags=[Widget.TEXTINPUT.tag(), config_tag()]
).create_attribute_model(value)
# Hooks
self.register_hooked(ValidateHook, self.validate)
# This will be serialized, so maintain camelCase for axesToMove
# noinspection PyPep8Naming
@add_call_types
def validate(self, axesToMove):
# type: (AAxesToMove) -> None
assert not set(axesToMove) - set(self.attr.value), \
"Can only move %s simultaneously, requested %s" % (
list(self.attr.value), axesToMove)
def setup(self, registrar):
# type: (PartRegistrar) -> None
registrar.add_attribute_model(
"simultaneousAxes", self.attr, self.attr.set_value)
|
<commit_before>from annotypes import Anno, Array, Union, Sequence, add_call_types
from malcolm.core import Part, StringArrayMeta, Widget, config_tag, \
PartRegistrar, APartName
from ..hooks import ValidateHook, AAxesToMove
with Anno("Initial value for set of axes that can be moved at the same time"):
ASimultaneousAxes = Array[str]
USimultaneousAxes = Union[ASimultaneousAxes, Sequence[str], str]
class SimultaneousAxesPart(Part):
def __init__(self, name="simultaneousAxes", value=None):
# type: (APartName, USimultaneousAxes) -> None
super(SimultaneousAxesPart, self).__init__(name)
self.attr = StringArrayMeta(
"Set of axes that can be specified in axesToMove at configure",
tags=[Widget.TABLE.tag(), config_tag()]
).create_attribute_model(value)
# Hooks
self.register_hooked(ValidateHook, self.validate)
# This will be serialized, so maintain camelCase for axesToMove
# noinspection PyPep8Naming
@add_call_types
def validate(self, axesToMove):
# type: (AAxesToMove) -> None
assert not set(axesToMove) - set(self.attr.value), \
"Can only move %s simultaneously, requested %s" % (
list(self.attr.value), axesToMove)
def setup(self, registrar):
# type: (PartRegistrar) -> None
registrar.add_attribute_model(
"simultaneousAxes", self.attr, self.attr.set_value)
<commit_msg>Change Widget tag on SimultaneousAxes to TEXTINPUT<commit_after>from annotypes import Anno, Array, Union, Sequence, add_call_types
from malcolm.core import Part, StringArrayMeta, Widget, config_tag, \
PartRegistrar, APartName
from ..hooks import ValidateHook, AAxesToMove
with Anno("Initial value for set of axes that can be moved at the same time"):
ASimultaneousAxes = Array[str]
USimultaneousAxes = Union[ASimultaneousAxes, Sequence[str], str]
class SimultaneousAxesPart(Part):
def __init__(self, name="simultaneousAxes", value=None):
# type: (APartName, USimultaneousAxes) -> None
super(SimultaneousAxesPart, self).__init__(name)
self.attr = StringArrayMeta(
"Set of axes that can be specified in axesToMove at configure",
tags=[Widget.TEXTINPUT.tag(), config_tag()]
).create_attribute_model(value)
# Hooks
self.register_hooked(ValidateHook, self.validate)
# This will be serialized, so maintain camelCase for axesToMove
# noinspection PyPep8Naming
@add_call_types
def validate(self, axesToMove):
# type: (AAxesToMove) -> None
assert not set(axesToMove) - set(self.attr.value), \
"Can only move %s simultaneously, requested %s" % (
list(self.attr.value), axesToMove)
def setup(self, registrar):
# type: (PartRegistrar) -> None
registrar.add_attribute_model(
"simultaneousAxes", self.attr, self.attr.set_value)
|
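The validate hook in this record is a set-difference check. A self-contained sketch of the same assertion logic, with made-up axis names standing in for the attribute model the real part reads from:

# The allowed set is an assumption for illustration.
allowed = {'x', 'y'}

def validate(axes_to_move):
    extra = set(axes_to_move) - allowed
    assert not extra, "Can only move %s simultaneously, requested %s" % (
        sorted(allowed), axes_to_move)

validate(['x'])              # passes silently
try:
    validate(['x', 'z'])     # 'z' cannot move with the others
except AssertionError as err:
    print(err)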
02f8f992aca37d21b9ae119f13b46de8eb1541ae
|
gsl/utils.py
|
gsl/utils.py
|
#!/usr/bin/env python
import logging
logging.basicConfig(level=logging.INFO)
log = logging.getLogger()
def yield_packages(handle, meta=False, retcode=None):
for lineno, line in enumerate(handle):
if line.startswith('#'):
continue
try:
data = line.split('\t')
keys = ['id', 'version', 'platform', 'arch', 'url', 'sha', 'size',
'alt_url', 'comment']
if len(data) != len(keys):
log.error('[%s] data has wrong number of columns. %s != %s', lineno + 1, len(data), len(keys))
ld = {k: v for (k, v) in zip(keys, line.split('\t'))}
if meta:
yield ld, lineno, line, retcode
else:
yield ld
except Exception, e:
log.error(str(e))
|
#!/usr/bin/env python
import logging
logging.basicConfig(level=logging.INFO)
log = logging.getLogger()
def yield_packages(handle, meta=False, retcode=None):
for lineno, line in enumerate(handle):
if line.startswith('#'):
continue
try:
data = line.split('\t')
keys = ['id', 'version', 'platform', 'arch', 'url', 'sha', 'size',
'alt_url', 'comment']
if len(data) != len(keys):
log.error('[%s] data has wrong number of columns. %s != %s', lineno + 1, len(data), len(keys))
retcode = 1
ld = {k: v for (k, v) in zip(keys, line.split('\t'))}
if meta:
yield ld, lineno, line, retcode
else:
yield ld
except Exception, e:
log.error(str(e))
|
Return retcode properly iff erroring
|
Return retcode properly iff erroring
fixes #13
|
Python
|
mit
|
erasche/community-package-cache,erasche/community-package-cache,gregvonkuster/cargo-port,erasche/community-package-cache,galaxyproject/cargo-port,galaxyproject/cargo-port,gregvonkuster/cargo-port,gregvonkuster/cargo-port
|
#!/usr/bin/env python
import logging
logging.basicConfig(level=logging.INFO)
log = logging.getLogger()
def yield_packages(handle, meta=False, retcode=None):
for lineno, line in enumerate(handle):
if line.startswith('#'):
continue
try:
data = line.split('\t')
keys = ['id', 'version', 'platform', 'arch', 'url', 'sha', 'size',
'alt_url', 'comment']
if len(data) != len(keys):
log.error('[%s] data has wrong number of columns. %s != %s', lineno + 1, len(data), len(keys))
ld = {k: v for (k, v) in zip(keys, line.split('\t'))}
if meta:
yield ld, lineno, line, retcode
else:
yield ld
except Exception, e:
log.error(str(e))
Return retcode properly iff erroring
fixes #13
|
#!/usr/bin/env python
import logging
logging.basicConfig(level=logging.INFO)
log = logging.getLogger()
def yield_packages(handle, meta=False, retcode=None):
for lineno, line in enumerate(handle):
if line.startswith('#'):
continue
try:
data = line.split('\t')
keys = ['id', 'version', 'platform', 'arch', 'url', 'sha', 'size',
'alt_url', 'comment']
if len(data) != len(keys):
log.error('[%s] data has wrong number of columns. %s != %s', lineno + 1, len(data), len(keys))
retcode = 1
ld = {k: v for (k, v) in zip(keys, line.split('\t'))}
if meta:
yield ld, lineno, line, retcode
else:
yield ld
except Exception, e:
log.error(str(e))
|
<commit_before>#!/usr/bin/env python
import logging
logging.basicConfig(level=logging.INFO)
log = logging.getLogger()
def yield_packages(handle, meta=False, retcode=None):
for lineno, line in enumerate(handle):
if line.startswith('#'):
continue
try:
data = line.split('\t')
keys = ['id', 'version', 'platform', 'arch', 'url', 'sha', 'size',
'alt_url', 'comment']
if len(data) != len(keys):
log.error('[%s] data has wrong number of columns. %s != %s', lineno + 1, len(data), len(keys))
ld = {k: v for (k, v) in zip(keys, line.split('\t'))}
if meta:
yield ld, lineno, line, retcode
else:
yield ld
except Exception, e:
log.error(str(e))
<commit_msg>Return retcode properly iff erroring
fixes #13<commit_after>
|
#!/usr/bin/env python
import logging
logging.basicConfig(level=logging.INFO)
log = logging.getLogger()
def yield_packages(handle, meta=False, retcode=None):
for lineno, line in enumerate(handle):
if line.startswith('#'):
continue
try:
data = line.split('\t')
keys = ['id', 'version', 'platform', 'arch', 'url', 'sha', 'size',
'alt_url', 'comment']
if len(data) != len(keys):
log.error('[%s] data has wrong number of columns. %s != %s', lineno + 1, len(data), len(keys))
retcode = 1
ld = {k: v for (k, v) in zip(keys, line.split('\t'))}
if meta:
yield ld, lineno, line, retcode
else:
yield ld
except Exception, e:
log.error(str(e))
|
#!/usr/bin/env python
import logging
logging.basicConfig(level=logging.INFO)
log = logging.getLogger()
def yield_packages(handle, meta=False, retcode=None):
for lineno, line in enumerate(handle):
if line.startswith('#'):
continue
try:
data = line.split('\t')
keys = ['id', 'version', 'platform', 'arch', 'url', 'sha', 'size',
'alt_url', 'comment']
if len(data) != len(keys):
log.error('[%s] data has wrong number of columns. %s != %s', lineno + 1, len(data), len(keys))
ld = {k: v for (k, v) in zip(keys, line.split('\t'))}
if meta:
yield ld, lineno, line, retcode
else:
yield ld
except Exception, e:
log.error(str(e))
Return retcode properly iff erroring
fixes #13
#!/usr/bin/env python
import logging
logging.basicConfig(level=logging.INFO)
log = logging.getLogger()
def yield_packages(handle, meta=False, retcode=None):
for lineno, line in enumerate(handle):
if line.startswith('#'):
continue
try:
data = line.split('\t')
keys = ['id', 'version', 'platform', 'arch', 'url', 'sha', 'size',
'alt_url', 'comment']
if len(data) != len(keys):
log.error('[%s] data has wrong number of columns. %s != %s', lineno + 1, len(data), len(keys))
retcode = 1
ld = {k: v for (k, v) in zip(keys, line.split('\t'))}
if meta:
yield ld, lineno, line, retcode
else:
yield ld
except Exception, e:
log.error(str(e))
|
<commit_before>#!/usr/bin/env python
import logging
logging.basicConfig(level=logging.INFO)
log = logging.getLogger()
def yield_packages(handle, meta=False, retcode=None):
for lineno, line in enumerate(handle):
if line.startswith('#'):
continue
try:
data = line.split('\t')
keys = ['id', 'version', 'platform', 'arch', 'url', 'sha', 'size',
'alt_url', 'comment']
if len(data) != len(keys):
log.error('[%s] data has wrong number of columns. %s != %s', lineno + 1, len(data), len(keys))
ld = {k: v for (k, v) in zip(keys, line.split('\t'))}
if meta:
yield ld, lineno, line, retcode
else:
yield ld
except Exception, e:
log.error(str(e))
<commit_msg>Return retcode properly iff erroring
fixes #13<commit_after>#!/usr/bin/env python
import logging
logging.basicConfig(level=logging.INFO)
log = logging.getLogger()
def yield_packages(handle, meta=False, retcode=None):
for lineno, line in enumerate(handle):
if line.startswith('#'):
continue
try:
data = line.split('\t')
keys = ['id', 'version', 'platform', 'arch', 'url', 'sha', 'size',
'alt_url', 'comment']
if len(data) != len(keys):
log.error('[%s] data has wrong number of columns. %s != %s', lineno + 1, len(data), len(keys))
retcode = 1
ld = {k: v for (k, v) in zip(keys, line.split('\t'))}
if meta:
yield ld, lineno, line, retcode
else:
yield ld
except Exception, e:
log.error(str(e))
|
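Usage sketch for the generator above, rewritten with Python 3 except/dict syntax so it runs today; the two-column input line is fabricated to trigger the column-count error and show retcode=1 flowing back through the meta tuple:

import io
import logging

logging.basicConfig(level=logging.INFO)
log = logging.getLogger()
KEYS = ['id', 'version', 'platform', 'arch', 'url', 'sha', 'size',
        'alt_url', 'comment']

def yield_packages(handle, meta=False, retcode=None):
    for lineno, line in enumerate(handle):
        if line.startswith('#'):
            continue
        data = line.rstrip('\n').split('\t')
        if len(data) != len(KEYS):
            log.error('[%s] data has wrong number of columns. %s != %s',
                      lineno + 1, len(data), len(KEYS))
            retcode = 1                    # the fix: flag the bad line
        ld = dict(zip(KEYS, data))
        yield (ld, lineno, line, retcode) if meta else ld

handle = io.StringIO('pkg\t1.0\n')         # deliberately malformed row
for ld, lineno, line, rc in yield_packages(handle, meta=True):
    print(rc)                              # 1 -- caller can exit non-zero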
508167ee3c289258857aee0963d4917c39201d9a
|
tailor/listeners/mainlistener.py
|
tailor/listeners/mainlistener.py
|
from tailor.swift.swiftlistener import SwiftListener
from tailor.utils.charformat import isUpperCamelCase
class MainListener(SwiftListener):
def enterClassName(self, ctx):
className = ctx.getText()
if not isUpperCamelCase(className):
print('Line', str(ctx.start.line) + ':', 'Class names should be in UpperCamelCase')
def enterEnumName(self, ctx):
pass
def enterEnumCaseName(self, ctx):
pass
def exitStructName(self, ctx):
pass
|
from tailor.swift.swiftlistener import SwiftListener
from tailor.utils.charformat import isUpperCamelCase
class MainListener(SwiftListener):
def enterClassName(self, ctx):
self.__verify_upper_camel_case(ctx, 'Class names should be in UpperCamelCase')
def enterEnumName(self, ctx):
pass
def enterEnumCaseName(self, ctx):
pass
def enterStructName(self, ctx):
pass
@staticmethod
def __verify_upper_camel_case(ctx, err_msg):
construct_name = ctx.getText()
if not isUpperCamelCase(construct_name):
print('Line', str(ctx.start.line) + ':', err_msg)
|
Add method to handle UpperCamelCase verification
|
Add method to handle UpperCamelCase verification
|
Python
|
mit
|
sleekbyte/tailor,sleekbyte/tailor,sleekbyte/tailor,sleekbyte/tailor,sleekbyte/tailor
|
from tailor.swift.swiftlistener import SwiftListener
from tailor.utils.charformat import isUpperCamelCase
class MainListener(SwiftListener):
def enterClassName(self, ctx):
className = ctx.getText()
if not isUpperCamelCase(className):
print('Line', str(ctx.start.line) + ':', 'Class names should be in UpperCamelCase')
def enterEnumName(self, ctx):
pass
def enterEnumCaseName(self, ctx):
pass
def exitStructName(self, ctx):
pass
Add method to handle UpperCamelCase verification
|
from tailor.swift.swiftlistener import SwiftListener
from tailor.utils.charformat import isUpperCamelCase
class MainListener(SwiftListener):
def enterClassName(self, ctx):
self.__verify_upper_camel_case(ctx, 'Class names should be in UpperCamelCase')
def enterEnumName(self, ctx):
pass
def enterEnumCaseName(self, ctx):
pass
def enterStructName(self, ctx):
pass
@staticmethod
def __verify_upper_camel_case(ctx, err_msg):
construct_name = ctx.getText()
if not isUpperCamelCase(construct_name):
print('Line', str(ctx.start.line) + ':', err_msg)
|
<commit_before>from tailor.swift.swiftlistener import SwiftListener
from tailor.utils.charformat import isUpperCamelCase
class MainListener(SwiftListener):
def enterClassName(self, ctx):
className = ctx.getText()
if not isUpperCamelCase(className):
print('Line', str(ctx.start.line) + ':', 'Class names should be in UpperCamelCase')
def enterEnumName(self, ctx):
pass
def enterEnumCaseName(self, ctx):
pass
def exitStructName(self, ctx):
pass
<commit_msg>Add method to handle UpperCamelCase verification<commit_after>
|
from tailor.swift.swiftlistener import SwiftListener
from tailor.utils.charformat import isUpperCamelCase
class MainListener(SwiftListener):
def enterClassName(self, ctx):
self.__verify_upper_camel_case(ctx, 'Class names should be in UpperCamelCase')
def enterEnumName(self, ctx):
pass
def enterEnumCaseName(self, ctx):
pass
def enterStructName(self, ctx):
pass
@staticmethod
def __verify_upper_camel_case(ctx, err_msg):
construct_name = ctx.getText()
if not isUpperCamelCase(construct_name):
print('Line', str(ctx.start.line) + ':', err_msg)
|
from tailor.swift.swiftlistener import SwiftListener
from tailor.utils.charformat import isUpperCamelCase
class MainListener(SwiftListener):
def enterClassName(self, ctx):
className = ctx.getText()
if not isUpperCamelCase(className):
print('Line', str(ctx.start.line) + ':', 'Class names should be in UpperCamelCase')
def enterEnumName(self, ctx):
pass
def enterEnumCaseName(self, ctx):
pass
def exitStructName(self, ctx):
pass
Add method to handle UpperCamelCase verification
from tailor.swift.swiftlistener import SwiftListener
from tailor.utils.charformat import isUpperCamelCase
class MainListener(SwiftListener):
def enterClassName(self, ctx):
self.__verify_upper_camel_case(ctx, 'Class names should be in UpperCamelCase')
def enterEnumName(self, ctx):
pass
def enterEnumCaseName(self, ctx):
pass
def enterStructName(self, ctx):
pass
@staticmethod
def __verify_upper_camel_case(ctx, err_msg):
construct_name = ctx.getText()
if not isUpperCamelCase(construct_name):
print('Line', str(ctx.start.line) + ':', err_msg)
|
<commit_before>from tailor.swift.swiftlistener import SwiftListener
from tailor.utils.charformat import isUpperCamelCase
class MainListener(SwiftListener):
def enterClassName(self, ctx):
className = ctx.getText()
if not isUpperCamelCase(className):
print('Line', str(ctx.start.line) + ':', 'Class names should be in UpperCamelCase')
def enterEnumName(self, ctx):
pass
def enterEnumCaseName(self, ctx):
pass
def exitStructName(self, ctx):
pass
<commit_msg>Add method to handle UpperCamelCase verification<commit_after>from tailor.swift.swiftlistener import SwiftListener
from tailor.utils.charformat import isUpperCamelCase
class MainListener(SwiftListener):
def enterClassName(self, ctx):
self.__verify_upper_camel_case(ctx, 'Class names should be in UpperCamelCase')
def enterEnumName(self, ctx):
pass
def enterEnumCaseName(self, ctx):
pass
def enterStructName(self, ctx):
pass
@staticmethod
def __verify_upper_camel_case(ctx, err_msg):
construct_name = ctx.getText()
if not isUpperCamelCase(construct_name):
print('Line', str(ctx.start.line) + ':', err_msg)
|
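The record imports isUpperCamelCase from tailor.utils.charformat but never shows it. The stand-in below is an assumed definition, not the project's real helper, just enough to make the listener's check concrete:

def isUpperCamelCase(name):
    # First character uppercase, remainder alphanumeric (no underscores).
    return bool(name) and name[0].isupper() and name.isalnum()

for name in ('MyClass', 'myClass', 'My_Class'):
    print(name, isUpperCamelCase(name))    # True, False, False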
a9abf8361f8728dfb1ef18a27c5eaad84ca2f054
|
accounting/apps/clients/forms.py
|
accounting/apps/clients/forms.py
|
from django.forms import ModelForm
from .models import Client
class ClientForm(ModelForm):
class Meta:
model = Client
fields = (
"name",
"address_line_1",
"address_line_2",
"city",
"postal_code",
"country",
)
|
from django.forms import ModelForm
from .models import Client
class ClientForm(ModelForm):
class Meta:
model = Client
fields = (
"name",
"address_line_1",
"address_line_2",
"city",
"postal_code",
"country",
"organization",
)
|
Add the relationship field to the Client form
|
Add the relationship field to the Client form
|
Python
|
mit
|
kenjhim/django-accounting,dulaccc/django-accounting,kenjhim/django-accounting,kenjhim/django-accounting,dulaccc/django-accounting,dulaccc/django-accounting,kenjhim/django-accounting,dulaccc/django-accounting
|
from django.forms import ModelForm
from .models import Client
class ClientForm(ModelForm):
class Meta:
model = Client
fields = (
"name",
"address_line_1",
"address_line_2",
"city",
"postal_code",
"country",
)
Add the relationship field to the Client form
|
from django.forms import ModelForm
from .models import Client
class ClientForm(ModelForm):
class Meta:
model = Client
fields = (
"name",
"address_line_1",
"address_line_2",
"city",
"postal_code",
"country",
"organization",
)
|
<commit_before>from django.forms import ModelForm
from .models import Client
class ClientForm(ModelForm):
class Meta:
model = Client
fields = (
"name",
"address_line_1",
"address_line_2",
"city",
"postal_code",
"country",
)
<commit_msg>Add the relationship field to the Client form<commit_after>
|
from django.forms import ModelForm
from .models import Client
class ClientForm(ModelForm):
class Meta:
model = Client
fields = (
"name",
"address_line_1",
"address_line_2",
"city",
"postal_code",
"country",
"organization",
)
|
from django.forms import ModelForm
from .models import Client
class ClientForm(ModelForm):
class Meta:
model = Client
fields = (
"name",
"address_line_1",
"address_line_2",
"city",
"postal_code",
"country",
)
Add the relationship field to the Client form
from django.forms import ModelForm
from .models import Client
class ClientForm(ModelForm):
class Meta:
model = Client
fields = (
"name",
"address_line_1",
"address_line_2",
"city",
"postal_code",
"country",
"organization",
)
|
<commit_before>from django.forms import ModelForm
from .models import Client
class ClientForm(ModelForm):
class Meta:
model = Client
fields = (
"name",
"address_line_1",
"address_line_2",
"city",
"postal_code",
"country",
)
<commit_msg>Add the relationship field to the Client form<commit_after>from django.forms import ModelForm
from .models import Client
class ClientForm(ModelForm):
class Meta:
model = Client
fields = (
"name",
"address_line_1",
"address_line_2",
"city",
"postal_code",
"country",
"organization",
)
|
b95fbc22615e94cea4bd16b17c887214aba44175
|
dthm4kaiako/config/__init__.py
|
dthm4kaiako/config/__init__.py
|
"""Configuration for Django system."""
__version__ = "0.14.2"
__version_info__ = tuple(
[
int(num) if num.isdigit() else num
for num in __version__.replace("-", ".", 1).split(".")
]
)
|
"""Configuration for Django system."""
__version__ = "0.15.0"
__version_info__ = tuple(
[
int(num) if num.isdigit() else num
for num in __version__.replace("-", ".", 1).split(".")
]
)
|
Increment version number to 0.15.0
|
Increment version number to 0.15.0
|
Python
|
mit
|
uccser/cs4teachers,uccser/cs4teachers,uccser/cs4teachers,uccser/cs4teachers
|
"""Configuration for Django system."""
__version__ = "0.14.2"
__version_info__ = tuple(
[
int(num) if num.isdigit() else num
for num in __version__.replace("-", ".", 1).split(".")
]
)
Increment version number to 0.15.0
|
"""Configuration for Django system."""
__version__ = "0.15.0"
__version_info__ = tuple(
[
int(num) if num.isdigit() else num
for num in __version__.replace("-", ".", 1).split(".")
]
)
|
<commit_before>"""Configuration for Django system."""
__version__ = "0.14.2"
__version_info__ = tuple(
[
int(num) if num.isdigit() else num
for num in __version__.replace("-", ".", 1).split(".")
]
)
<commit_msg>Increment version number to 0.15.0<commit_after>
|
"""Configuration for Django system."""
__version__ = "0.15.0"
__version_info__ = tuple(
[
int(num) if num.isdigit() else num
for num in __version__.replace("-", ".", 1).split(".")
]
)
|
"""Configuration for Django system."""
__version__ = "0.14.2"
__version_info__ = tuple(
[
int(num) if num.isdigit() else num
for num in __version__.replace("-", ".", 1).split(".")
]
)
Increment version number to 0.15.0
"""Configuration for Django system."""
__version__ = "0.15.0"
__version_info__ = tuple(
[
int(num) if num.isdigit() else num
for num in __version__.replace("-", ".", 1).split(".")
]
)
|
<commit_before>"""Configuration for Django system."""
__version__ = "0.14.2"
__version_info__ = tuple(
[
int(num) if num.isdigit() else num
for num in __version__.replace("-", ".", 1).split(".")
]
)
<commit_msg>Increment version number to 0.15.0<commit_after>"""Configuration for Django system."""
__version__ = "0.15.0"
__version_info__ = tuple(
[
int(num) if num.isdigit() else num
for num in __version__.replace("-", ".", 1).split(".")
]
)
|
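Worked example of the __version_info__ comprehension in this record, including a pre-release string to show why "-" is replaced with "." exactly once before splitting:

def version_info(version):
    return tuple(int(num) if num.isdigit() else num
                 for num in version.replace('-', '.', 1).split('.'))

print(version_info('0.15.0'))        # (0, 15, 0)
print(version_info('1.2.0-alpha'))   # (1, 2, 0, 'alpha')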
a485998447ffbe5a19ce8f9b49e61ac313c8241a
|
glitter_events/search_indexes.py
|
glitter_events/search_indexes.py
|
# -*- coding: utf-8 -*-
from haystack import indexes
from .models import Event
class EventIndex(indexes.SearchIndex, indexes.Indexable):
text = indexes.CharField(document=True, use_template=True)
def get_model(self):
return Event
def index_queryset(self, using=None):
return self.get_model().objects.published().select_related()
|
# -*- coding: utf-8 -*-
import datetime
from django.conf import settings
from django.utils import timezone
from haystack import indexes
from .models import Event
class EventIndex(indexes.SearchIndex, indexes.Indexable):
text = indexes.CharField(document=True, use_template=True)
def get_model(self):
return Event
def index_queryset(self, using=None):
if getattr(settings, 'GLITTER_EVENTS_SEARCH_INDEX_EXPIRED', None):
today = datetime.datetime.combine(date=datetime.date.today(), time=datetime.time.min)
today = timezone.make_aware(today)
qs = self.get_model().objects.filter(start__gte=today).select_related()
else:
qs = self.get_model().objects.published().select_related()
return qs
|
Add option to ignore expired events for the search index
|
Add option to ignore expired events for the search index
|
Python
|
bsd-3-clause
|
blancltd/django-glitter-events,blancltd/django-glitter-events
|
# -*- coding: utf-8 -*-
from haystack import indexes
from .models import Event
class EventIndex(indexes.SearchIndex, indexes.Indexable):
text = indexes.CharField(document=True, use_template=True)
def get_model(self):
return Event
def index_queryset(self, using=None):
return self.get_model().objects.published().select_related()
Add option to ignore expired events for the search index
|
# -*- coding: utf-8 -*-
import datetime
from django.conf import settings
from django.utils import timezone
from haystack import indexes
from .models import Event
class EventIndex(indexes.SearchIndex, indexes.Indexable):
text = indexes.CharField(document=True, use_template=True)
def get_model(self):
return Event
def index_queryset(self, using=None):
if getattr(settings, 'GLITTER_EVENTS_SEARCH_INDEX_EXPIRED', None):
today = datetime.datetime.combine(date=datetime.date.today(), time=datetime.time.min)
today = timezone.make_aware(today)
qs = self.get_model().objects.filter(start__gte=today).select_related()
else:
qs = self.get_model().objects.published().select_related()
return qs
|
<commit_before># -*- coding: utf-8 -*-
from haystack import indexes
from .models import Event
class EventIndex(indexes.SearchIndex, indexes.Indexable):
text = indexes.CharField(document=True, use_template=True)
def get_model(self):
return Event
def index_queryset(self, using=None):
return self.get_model().objects.published().select_related()
<commit_msg>Add option to ignore expired events for the search index<commit_after>
|
# -*- coding: utf-8 -*-
import datetime
from django.conf import settings
from django.utils import timezone
from haystack import indexes
from .models import Event
class EventIndex(indexes.SearchIndex, indexes.Indexable):
text = indexes.CharField(document=True, use_template=True)
def get_model(self):
return Event
def index_queryset(self, using=None):
if getattr(settings, 'GLITTER_EVENTS_SEARCH_INDEX_EXPIRED', None):
today = datetime.datetime.combine(date=datetime.date.today(), time=datetime.time.min)
today = timezone.make_aware(today)
qs = self.get_model().objects.filter(start__gte=today).select_related()
else:
qs = self.get_model().objects.published().select_related()
return qs
|
# -*- coding: utf-8 -*-
from haystack import indexes
from .models import Event
class EventIndex(indexes.SearchIndex, indexes.Indexable):
text = indexes.CharField(document=True, use_template=True)
def get_model(self):
return Event
def index_queryset(self, using=None):
return self.get_model().objects.published().select_related()
Add option to ignore expired events for the search index
# -*- coding: utf-8 -*-
import datetime
from django.conf import settings
from django.utils import timezone
from haystack import indexes
from .models import Event
class EventIndex(indexes.SearchIndex, indexes.Indexable):
text = indexes.CharField(document=True, use_template=True)
def get_model(self):
return Event
def index_queryset(self, using=None):
if getattr(settings, 'GLITTER_EVENTS_SEARCH_INDEX_EXPIRED', None):
today = datetime.datetime.combine(date=datetime.date.today(), time=datetime.time.min)
today = timezone.make_aware(today)
qs = self.get_model().objects.filter(start__gte=today).select_related()
else:
qs = self.get_model().objects.published().select_related()
return qs
|
<commit_before># -*- coding: utf-8 -*-
from haystack import indexes
from .models import Event
class EventIndex(indexes.SearchIndex, indexes.Indexable):
text = indexes.CharField(document=True, use_template=True)
def get_model(self):
return Event
def index_queryset(self, using=None):
return self.get_model().objects.published().select_related()
<commit_msg>Add option to ignore expired events for the search index<commit_after># -*- coding: utf-8 -*-
import datetime
from django.conf import settings
from django.utils import timezone
from haystack import indexes
from .models import Event
class EventIndex(indexes.SearchIndex, indexes.Indexable):
text = indexes.CharField(document=True, use_template=True)
def get_model(self):
return Event
def index_queryset(self, using=None):
if getattr(settings, 'GLITTER_EVENTS_SEARCH_INDEX_EXPIRED', None):
today = datetime.datetime.combine(date=datetime.date.today(), time=datetime.time.min)
today = timezone.make_aware(today)
qs = self.get_model().objects.filter(start__gte=today).select_related()
else:
qs = self.get_model().objects.published().select_related()
return qs
|
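The interesting line in the new index_queryset is building a timezone-aware "start of today". A stdlib-only sketch of that computation, where datetime.timezone.utc stands in for Django's make_aware (which would use the project's configured zone):

import datetime

today = datetime.datetime.combine(datetime.date.today(), datetime.time.min)
today = today.replace(tzinfo=datetime.timezone.utc)  # stand-in for make_aware
print(today)   # e.g. 2024-05-01 00:00:00+00:00, usable in start__gte lookups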
cccd6e8fe76fc96b39791912ecfd07f867d8dacc
|
cms/djangoapps/export_course_metadata/management/commands/export_course_metadata_for_all_courses.py
|
cms/djangoapps/export_course_metadata/management/commands/export_course_metadata_for_all_courses.py
|
"""
Export course metadata for all courses
"""
from django.core.management.base import BaseCommand
from xmodule.modulestore.django import modulestore
from cms.djangoapps.export_course_metadata.signals import export_course_metadata
class Command(BaseCommand):
"""
Export course metadata for all courses
"""
help = 'Export course metadata for all courses'
def handle(self, *args, **options):
"""
Execute the command
"""
export_course_metadata_for_all_courses()
def export_course_metadata_for_all_courses():
"""
Export course metadata for all courses
"""
module_store = modulestore()
courses = module_store.get_courses()
for course in courses:
export_course_metadata(None, course.id)
|
"""
Export course metadata for all courses
"""
from django.core.management.base import BaseCommand
from xmodule.modulestore.django import modulestore
from cms.djangoapps.export_course_metadata.signals import export_course_metadata
from cms.djangoapps.export_course_metadata.tasks import export_course_metadata_task
class Command(BaseCommand):
"""
Export course metadata for all courses
"""
help = 'Export course metadata for all courses'
def handle(self, *args, **options):
"""
Execute the command
"""
export_course_metadata_for_all_courses()
def export_course_metadata_for_all_courses():
"""
Export course metadata for all courses
"""
module_store = modulestore()
courses = module_store.get_courses()
for course in courses:
export_course_metadata_task.delay(str(course.id))
|
Call celery task directly from management command instead of calling the signal
|
Call celery task directly from management command instead of calling the signal
AA-461
|
Python
|
agpl-3.0
|
eduNEXT/edunext-platform,EDUlib/edx-platform,arbrandes/edx-platform,edx/edx-platform,angelapper/edx-platform,edx/edx-platform,eduNEXT/edx-platform,eduNEXT/edx-platform,EDUlib/edx-platform,eduNEXT/edunext-platform,eduNEXT/edunext-platform,angelapper/edx-platform,EDUlib/edx-platform,eduNEXT/edx-platform,angelapper/edx-platform,edx/edx-platform,angelapper/edx-platform,edx/edx-platform,eduNEXT/edunext-platform,arbrandes/edx-platform,EDUlib/edx-platform,eduNEXT/edx-platform,arbrandes/edx-platform,arbrandes/edx-platform
|
"""
Export course metadata for all courses
"""
from django.core.management.base import BaseCommand
from xmodule.modulestore.django import modulestore
from cms.djangoapps.export_course_metadata.signals import export_course_metadata
class Command(BaseCommand):
"""
Export course metadata for all courses
"""
help = 'Export course metadata for all courses'
def handle(self, *args, **options):
"""
Execute the command
"""
export_course_metadata_for_all_courses()
def export_course_metadata_for_all_courses():
"""
Export course metadata for all courses
"""
module_store = modulestore()
courses = module_store.get_courses()
for course in courses:
export_course_metadata(None, course.id)
Call celery task directly from management command instead of calling the signal
AA-461
|
"""
Export course metadata for all courses
"""
from django.core.management.base import BaseCommand
from xmodule.modulestore.django import modulestore
from cms.djangoapps.export_course_metadata.signals import export_course_metadata
from cms.djangoapps.export_course_metadata.tasks import export_course_metadata_task
class Command(BaseCommand):
"""
Export course metadata for all courses
"""
help = 'Export course metadata for all courses'
def handle(self, *args, **options):
"""
Execute the command
"""
export_course_metadata_for_all_courses()
def export_course_metadata_for_all_courses():
"""
Export course metadata for all courses
"""
module_store = modulestore()
courses = module_store.get_courses()
for course in courses:
export_course_metadata_task.delay(str(course.id))
|
<commit_before>"""
Export course metadata for all courses
"""
from django.core.management.base import BaseCommand
from xmodule.modulestore.django import modulestore
from cms.djangoapps.export_course_metadata.signals import export_course_metadata
class Command(BaseCommand):
"""
Export course metadata for all courses
"""
help = 'Export course metadata for all courses'
def handle(self, *args, **options):
"""
Execute the command
"""
export_course_metadata_for_all_courses()
def export_course_metadata_for_all_courses():
"""
Export course metadata for all courses
"""
module_store = modulestore()
courses = module_store.get_courses()
for course in courses:
export_course_metadata(None, course.id)
<commit_msg>Call celery task directly from management command instead of calling the signal
AA-461<commit_after>
|
"""
Export course metadata for all courses
"""
from django.core.management.base import BaseCommand
from xmodule.modulestore.django import modulestore
from cms.djangoapps.export_course_metadata.signals import export_course_metadata
from cms.djangoapps.export_course_metadata.tasks import export_course_metadata_task
class Command(BaseCommand):
"""
Export course metadata for all courses
"""
help = 'Export course metadata for all courses'
def handle(self, *args, **options):
"""
Execute the command
"""
export_course_metadata_for_all_courses()
def export_course_metadata_for_all_courses():
"""
Export course metadata for all courses
"""
module_store = modulestore()
courses = module_store.get_courses()
for course in courses:
export_course_metadata_task.delay(str(course.id))
|
"""
Export course metadata for all courses
"""
from django.core.management.base import BaseCommand
from xmodule.modulestore.django import modulestore
from cms.djangoapps.export_course_metadata.signals import export_course_metadata
class Command(BaseCommand):
"""
Export course metadata for all courses
"""
help = 'Export course metadata for all courses'
def handle(self, *args, **options):
"""
Execute the command
"""
export_course_metadata_for_all_courses()
def export_course_metadata_for_all_courses():
"""
Export course metadata for all courses
"""
module_store = modulestore()
courses = module_store.get_courses()
for course in courses:
export_course_metadata(None, course.id)
Call celery task directly from management command instead of calling the signal
AA-461
"""
Export course metadata for all courses
"""
from django.core.management.base import BaseCommand
from xmodule.modulestore.django import modulestore
from cms.djangoapps.export_course_metadata.signals import export_course_metadata
from cms.djangoapps.export_course_metadata.tasks import export_course_metadata_task
class Command(BaseCommand):
"""
Export course metadata for all courses
"""
help = 'Export course metadata for all courses'
def handle(self, *args, **options):
"""
Execute the command
"""
export_course_metadata_for_all_courses()
def export_course_metadata_for_all_courses():
"""
Export course metadata for all courses
"""
module_store = modulestore()
courses = module_store.get_courses()
for course in courses:
export_course_metadata_task.delay(str(course.id))
|
<commit_before>"""
Export course metadata for all courses
"""
from django.core.management.base import BaseCommand
from xmodule.modulestore.django import modulestore
from cms.djangoapps.export_course_metadata.signals import export_course_metadata
class Command(BaseCommand):
"""
Export course metadata for all courses
"""
help = 'Export course metadata for all courses'
def handle(self, *args, **options):
"""
Execute the command
"""
export_course_metadata_for_all_courses()
def export_course_metadata_for_all_courses():
"""
Export course metadata for all courses
"""
module_store = modulestore()
courses = module_store.get_courses()
for course in courses:
export_course_metadata(None, course.id)
<commit_msg>Call celery task directly from management command instead of calling the signal
AA-461<commit_after>"""
Export course metadata for all courses
"""
from django.core.management.base import BaseCommand
from xmodule.modulestore.django import modulestore
from cms.djangoapps.export_course_metadata.signals import export_course_metadata
from cms.djangoapps.export_course_metadata.tasks import export_course_metadata_task
class Command(BaseCommand):
"""
Export course metadata for all courses
"""
help = 'Export course metadata for all courses'
def handle(self, *args, **options):
"""
Execute the command
"""
export_course_metadata_for_all_courses()
def export_course_metadata_for_all_courses():
"""
Export course metadata for all courses
"""
module_store = modulestore()
courses = module_store.get_courses()
for course in courses:
export_course_metadata_task.delay(str(course.id))
|
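Toy Celery sketch of the task/.delay pattern this command switched to; the app name, broker URL, and task body are assumptions, not the real cms module. Passing str(course.id) is likely about keeping the task argument serializable, since a raw CourseKey object would not survive the message queue:

from celery import Celery

app = Celery('demo', broker='memory://')

@app.task
def export_course_metadata_task(course_key_string):
    return 'exported %s' % course_key_string

# .delay(...) enqueues for a worker; .apply(...) runs eagerly in-process,
# which keeps this sketch runnable without a broker or worker.
result = export_course_metadata_task.apply(args=('course-v1:Demo+X+2024',))
print(result.get())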
e04cec6c4260a181c773371406323758d9f162bf
|
examples/adaptive_scan_demo.py
|
examples/adaptive_scan_demo.py
|
import matplotlib.pyplot as plt
from bluesky import RunEngine, Mover, SynGauss
from bluesky.examples import adaptive_scan
RE = RunEngine()
RE.verbose = False
motor = Mover('motor', ['pos'])
det = SynGauss('det', motor, 'pos', center=0, Imax=1, sigma=1)
def live_scalar_plotter(ax, y, x):
x_data, y_data = [], []
line, = ax.plot([], [], 'ro', markersize=10)
def update_plot(doc):
# Update with the latest data.
x_data.append(doc['data'][x])
y_data.append(doc['data'][y])
line.set_data(x_data, y_data)
# Rescale and redraw.
ax.relim(visible_only=True)
ax.autoscale_view(tight=True)
ax.figure.canvas.draw()
ax.figure.canvas.flush_events()
return update_plot
fig, ax = plt.subplots()
plt.show()
ax.set_xlim([-15, 5])
ax.set_ylim([0, 2])
# Point the function to our axes above, and specify what to plot.
my_plotter = live_scalar_plotter(ax, 'det', 'pos')
ad_scan = adaptive_scan(motor, det, 'pos', 'det', -15, 5, .01, 1, .05)
RE.run(ad_scan, subscriptions={'event': my_plotter})
|
import matplotlib.pyplot as plt
from bluesky import RunEngine
from bluesky.scans import AdaptiveAscan
from bluesky.examples import Mover, SynGauss
from bluesky.callbacks import LivePlot, LiveTable
from bluesky.tests.utils import setup_test_run_engine
#plt.ion()
RE = setup_test_run_engine()
motor = Mover('motor', ['pos'])
det = SynGauss('det', motor, 'pos', center=0, Imax=1, sigma=1)
#fig, ax = plt.subplots()
#ax.set_xlim([-15, 5])
#ax.set_ylim([0, 2])
# Point the function to our axes above, and specify what to plot.
#my_plotter = LivePlot('det', 'pos')
table = LiveTable(['det', 'pos'])
ad_scan = AdaptiveAscan(motor, [det], 'det', -15, 5, .01, 1, .05, True)
RE(ad_scan, subscriptions={'all': [table]}) #, my_plotter})
|
Fix adaptive example with LivePlot
|
WIP: Fix adaptive example with LivePlot
|
Python
|
bsd-3-clause
|
dchabot/bluesky,sameera2004/bluesky,ericdill/bluesky,klauer/bluesky,ericdill/bluesky,sameera2004/bluesky,dchabot/bluesky,klauer/bluesky
|
import matplotlib.pyplot as plt
from bluesky import RunEngine, Mover, SynGauss
from bluesky.examples import adaptive_scan
RE = RunEngine()
RE.verbose = False
motor = Mover('motor', ['pos'])
det = SynGauss('det', motor, 'pos', center=0, Imax=1, sigma=1)
def live_scalar_plotter(ax, y, x):
x_data, y_data = [], []
line, = ax.plot([], [], 'ro', markersize=10)
def update_plot(doc):
# Update with the latest data.
x_data.append(doc['data'][x])
y_data.append(doc['data'][y])
line.set_data(x_data, y_data)
# Rescale and redraw.
ax.relim(visible_only=True)
ax.autoscale_view(tight=True)
ax.figure.canvas.draw()
ax.figure.canvas.flush_events()
return update_plot
fig, ax = plt.subplots()
plt.show()
ax.set_xlim([-15, 5])
ax.set_ylim([0, 2])
# Point the function to our axes above, and specify what to plot.
my_plotter = live_scalar_plotter(ax, 'det', 'pos')
ad_scan = adaptive_scan(motor, det, 'pos', 'det', -15, 5, .01, 1, .05)
RE.run(ad_scan, subscriptions={'event': my_plotter})
WIP: Fix adaptive example with LivePlot
|
import matplotlib.pyplot as plt
from bluesky import RunEngine
from bluesky.scans import AdaptiveAscan
from bluesky.examples import Mover, SynGauss
from bluesky.callbacks import LivePlot, LiveTable
from bluesky.tests.utils import setup_test_run_engine
#plt.ion()
RE = setup_test_run_engine()
motor = Mover('motor', ['pos'])
det = SynGauss('det', motor, 'pos', center=0, Imax=1, sigma=1)
#fig, ax = plt.subplots()
#ax.set_xlim([-15, 5])
#ax.set_ylim([0, 2])
# Point the function to our axes above, and specify what to plot.
#my_plotter = LivePlot('det', 'pos')
table = LiveTable(['det', 'pos'])
ad_scan = AdaptiveAscan(motor, [det], 'det', -15, 5, .01, 1, .05, True)
RE(ad_scan, subscriptions={'all': [table]}) #, my_plotter})
|
<commit_before>import matplotlib.pyplot as plt
from bluesky import RunEngine, Mover, SynGauss
from bluesky.examples import adaptive_scan
RE = RunEngine()
RE.verbose = False
motor = Mover('motor', ['pos'])
det = SynGauss('det', motor, 'pos', center=0, Imax=1, sigma=1)
def live_scalar_plotter(ax, y, x):
x_data, y_data = [], []
line, = ax.plot([], [], 'ro', markersize=10)
def update_plot(doc):
# Update with the latest data.
x_data.append(doc['data'][x])
y_data.append(doc['data'][y])
line.set_data(x_data, y_data)
# Rescale and redraw.
ax.relim(visible_only=True)
ax.autoscale_view(tight=True)
ax.figure.canvas.draw()
ax.figure.canvas.flush_events()
return update_plot
fig, ax = plt.subplots()
plt.show()
ax.set_xlim([-15, 5])
ax.set_ylim([0, 2])
# Point the function to our axes above, and specify what to plot.
my_plotter = live_scalar_plotter(ax, 'det', 'pos')
ad_scan = adaptive_scan(motor, det, 'pos', 'det', -15, 5, .01, 1, .05)
RE.run(ad_scan, subscriptions={'event': my_plotter})
<commit_msg>WIP: Fix adaptive example with LivePlot<commit_after>
|
import matplotlib.pyplot as plt
from bluesky import RunEngine
from bluesky.scans import AdaptiveAscan
from bluesky.examples import Mover, SynGauss
from bluesky.callbacks import LivePlot, LiveTable
from bluesky.tests.utils import setup_test_run_engine
#plt.ion()
RE = setup_test_run_engine()
motor = Mover('motor', ['pos'])
det = SynGauss('det', motor, 'pos', center=0, Imax=1, sigma=1)
#fig, ax = plt.subplots()
#ax.set_xlim([-15, 5])
#ax.set_ylim([0, 2])
# Point the function to our axes above, and specify what to plot.
#my_plotter = LivePlot('det', 'pos')
table = LiveTable(['det', 'pos'])
ad_scan = AdaptiveAscan(motor, [det], 'det', -15, 5, .01, 1, .05, True)
RE(ad_scan, subscriptions={'all': [table]}) #, my_plotter})
|
import matplotlib.pyplot as plt
from bluesky import RunEngine, Mover, SynGauss
from bluesky.examples import adaptive_scan
RE = RunEngine()
RE.verbose = False
motor = Mover('motor', ['pos'])
det = SynGauss('det', motor, 'pos', center=0, Imax=1, sigma=1)
def live_scalar_plotter(ax, y, x):
x_data, y_data = [], []
line, = ax.plot([], [], 'ro', markersize=10)
def update_plot(doc):
# Update with the latest data.
x_data.append(doc['data'][x])
y_data.append(doc['data'][y])
line.set_data(x_data, y_data)
# Rescale and redraw.
ax.relim(visible_only=True)
ax.autoscale_view(tight=True)
ax.figure.canvas.draw()
ax.figure.canvas.flush_events()
return update_plot
fig, ax = plt.subplots()
plt.show()
ax.set_xlim([-15, 5])
ax.set_ylim([0, 2])
# Point the function to our axes above, and specify what to plot.
my_plotter = live_scalar_plotter(ax, 'det', 'pos')
ad_scan = adaptive_scan(motor, det, 'pos', 'det', -15, 5, .01, 1, .05)
RE.run(ad_scan, subscriptions={'event': my_plotter})
WIP: Fix adaptive example with LivePlot
import matplotlib.pyplot as plt
from bluesky import RunEngine
from bluesky.scans import AdaptiveAscan
from bluesky.examples import Mover, SynGauss
from bluesky.callbacks import LivePlot, LiveTable
from bluesky.tests.utils import setup_test_run_engine
#plt.ion()
RE = setup_test_run_engine()
motor = Mover('motor', ['pos'])
det = SynGauss('det', motor, 'pos', center=0, Imax=1, sigma=1)
#fig, ax = plt.subplots()
#ax.set_xlim([-15, 5])
#ax.set_ylim([0, 2])
# Point the function to our axes above, and specify what to plot.
#my_plotter = LivePlot('det', 'pos')
table = LiveTable(['det', 'pos'])
ad_scan = AdaptiveAscan(motor, [det], 'det', -15, 5, .01, 1, .05, True)
RE(ad_scan, subscriptions={'all': [table]}) #, my_plotter})
|
<commit_before>import matplotlib.pyplot as plt
from bluesky import RunEngine, Mover, SynGauss
from bluesky.examples import adaptive_scan
RE = RunEngine()
RE.verbose = False
motor = Mover('motor', ['pos'])
det = SynGauss('det', motor, 'pos', center=0, Imax=1, sigma=1)
def live_scalar_plotter(ax, y, x):
x_data, y_data = [], []
line, = ax.plot([], [], 'ro', markersize=10)
def update_plot(doc):
# Update with the latest data.
x_data.append(doc['data'][x])
y_data.append(doc['data'][y])
line.set_data(x_data, y_data)
# Rescale and redraw.
ax.relim(visible_only=True)
ax.autoscale_view(tight=True)
ax.figure.canvas.draw()
ax.figure.canvas.flush_events()
return update_plot
fig, ax = plt.subplots()
plt.show()
ax.set_xlim([-15, 5])
ax.set_ylim([0, 2])
# Point the function to our axes above, and specify what to plot.
my_plotter = live_scalar_plotter(ax, 'det', 'pos')
ad_scan = adaptive_scan(motor, det, 'pos', 'det', -15, 5, .01, 1, .05)
RE.run(ad_scan, subscriptions={'event': my_plotter})
<commit_msg>WIP: Fix adaptive example with LivePlot<commit_after>import matplotlib.pyplot as plt
from bluesky import RunEngine
from bluesky.scans import AdaptiveAscan
from bluesky.examples import Mover, SynGauss
from bluesky.callbacks import LivePlot, LiveTable
from bluesky.tests.utils import setup_test_run_engine
#plt.ion()
RE = setup_test_run_engine()
motor = Mover('motor', ['pos'])
det = SynGauss('det', motor, 'pos', center=0, Imax=1, sigma=1)
#fig, ax = plt.subplots()
#ax.set_xlim([-15, 5])
#ax.set_ylim([0, 2])
# Point the function to our axes above, and specify what to plot.
#my_plotter = LivePlot('det', 'pos')
table = LiveTable(['det', 'pos'])
ad_scan = AdaptiveAscan(motor, [det], 'det', -15, 5, .01, 1, .05, True)
RE(ad_scan, subscriptions={'all': [table]}) #, my_plotter})
|
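For the plotting half that ended up commented out, here is a generic matplotlib live-update loop equivalent in spirit to the removed live_scalar_plotter; the synthetic document stream and the Agg backend are assumptions so the sketch runs headless, independent of any bluesky version:

import matplotlib
matplotlib.use('Agg')                 # headless backend for the demo
import matplotlib.pyplot as plt

fig, ax = plt.subplots()
line, = ax.plot([], [], 'ro', markersize=10)
xs, ys = [], []

for doc in ({'data': {'pos': p, 'det': p * p}} for p in range(5)):
    xs.append(doc['data']['pos'])     # same field access as the callback
    ys.append(doc['data']['det'])
    line.set_data(xs, ys)
    ax.relim()
    ax.autoscale_view(tight=True)
    fig.canvas.draw()

fig.savefig('live_points.png')        # proof the loop accumulated points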
c3ed431f97e4ca24a00ff979a5204d65b251dd87
|
greenlight/views/__init__.py
|
greenlight/views/__init__.py
|
from .base import APIView
from django.http import Http404
from three import Three
class QCThree(Three):
def __init__(self):
self.endpoint = "http://dev-api.ville.quebec.qc.ca/open311/v2/"
self.format = "json"
self.jurisdiction = "ville.quebec.qc.ca"
QC_three = QCThree()
class ServicesView(APIView):
def get(self, request):
return self.OkAPIResponse(QC_three.services())
class RequestsView(APIView):
def get(self, request):
return self.OkAPIResponse(QC_three.requests())
class RequestView(APIView):
def get(self, request, id):
requests = QC_three.request(id)
if requests:
return self.OkAPIResponse(requests[0])
else:
raise Http404
|
from three import Three
from django.http import Http404
from .base import APIView
QC_three = Three(
endpoint = "http://dev-api.ville.quebec.qc.ca/open311/v2/",
format = "json",
jurisdiction = "ville.quebec.qc.ca",
)
class ServicesView(APIView):
def get(self, request):
return self.OkAPIResponse(QC_three.services())
class RequestsView(APIView):
def get(self, request):
return self.OkAPIResponse(QC_three.requests())
class RequestView(APIView):
def get(self, request, id):
requests = QC_three.request(id)
if requests:
return self.OkAPIResponse(requests[0])
else:
raise Http404
|
Initialize the three API wrapper differently to fix a bug.
|
Initialize the three API wrapper differently to fix a bug.
|
Python
|
mit
|
ironweb/lesfeuxverts-backend
|
from .base import APIView
from django.http import Http404
from three import Three
class QCThree(Three):
def __init__(self):
self.endpoint = "http://dev-api.ville.quebec.qc.ca/open311/v2/"
self.format = "json"
self.jurisdiction = "ville.quebec.qc.ca"
QC_three = QCThree()
class ServicesView(APIView):
def get(self, request):
return self.OkAPIResponse(QC_three.services())
class RequestsView(APIView):
def get(self, request):
return self.OkAPIResponse(QC_three.requests())
class RequestView(APIView):
def get(self, request, id):
requests = QC_three.request(id)
if requests:
return self.OkAPIResponse(requests[0])
else:
raise Http404
Initialize the three API wrapper differently to fix a bug.
|
from three import Three
from django.http import Http404
from .base import APIView
QC_three = Three(
endpoint = "http://dev-api.ville.quebec.qc.ca/open311/v2/",
format = "json",
jurisdiction = "ville.quebec.qc.ca",
)
class ServicesView(APIView):
def get(self, request):
return self.OkAPIResponse(QC_three.services())
class RequestsView(APIView):
def get(self, request):
return self.OkAPIResponse(QC_three.requests())
class RequestView(APIView):
def get(self, request, id):
requests = QC_three.request(id)
if requests:
return self.OkAPIResponse(requests[0])
else:
raise Http404
|
<commit_before>from .base import APIView
from django.http import Http404
from three import Three
class QCThree(Three):
def __init__(self):
self.endpoint = "http://dev-api.ville.quebec.qc.ca/open311/v2/"
self.format = "json"
self.jurisdiction = "ville.quebec.qc.ca"
QC_three = QCThree()
class ServicesView(APIView):
def get(self, request):
return self.OkAPIResponse(QC_three.services())
class RequestsView(APIView):
def get(self, request):
return self.OkAPIResponse(QC_three.requests())
class RequestView(APIView):
def get(self, request, id):
requests = QC_three.request(id)
if requests:
return self.OkAPIResponse(requests[0])
else:
raise Http404
<commit_msg>Initialize the three API wrapper differently to fix a bug.<commit_after>
|
from three import Three
from django.http import Http404
from .base import APIView
QC_three = Three(
endpoint = "http://dev-api.ville.quebec.qc.ca/open311/v2/",
format = "json",
jurisdiction = "ville.quebec.qc.ca",
)
class ServicesView(APIView):
def get(self, request):
return self.OkAPIResponse(QC_three.services())
class RequestsView(APIView):
def get(self, request):
return self.OkAPIResponse(QC_three.requests())
class RequestView(APIView):
def get(self, request, id):
requests = QC_three.request(id)
if requests:
return self.OkAPIResponse(requests[0])
else:
raise Http404
|
from .base import APIView
from django.http import Http404
from three import Three
class QCThree(Three):
def __init__(self):
self.endpoint = "http://dev-api.ville.quebec.qc.ca/open311/v2/"
self.format = "json"
self.jurisdiction = "ville.quebec.qc.ca"
QC_three = QCThree()
class ServicesView(APIView):
def get(self, request):
return self.OkAPIResponse(QC_three.services())
class RequestsView(APIView):
def get(self, request):
return self.OkAPIResponse(QC_three.requests())
class RequestView(APIView):
def get(self, request, id):
requests = QC_three.request(id)
if requests:
return self.OkAPIResponse(requests[0])
else:
raise Http404
Initialize the three API wrapper differently to fix a bug.
from three import Three
from django.http import Http404
from .base import APIView
QC_three = Three(
endpoint = "http://dev-api.ville.quebec.qc.ca/open311/v2/",
format = "json",
jurisdiction = "ville.quebec.qc.ca",
)
class ServicesView(APIView):
def get(self, request):
return self.OkAPIResponse(QC_three.services())
class RequestsView(APIView):
def get(self, request):
return self.OkAPIResponse(QC_three.requests())
class RequestView(APIView):
def get(self, request, id):
requests = QC_three.request(id)
if requests:
return self.OkAPIResponse(requests[0])
else:
raise Http404
|
<commit_before>from .base import APIView
from django.http import Http404
from three import Three
class QCThree(Three):
def __init__(self):
self.endpoint = "http://dev-api.ville.quebec.qc.ca/open311/v2/"
self.format = "json"
self.jurisdiction = "ville.quebec.qc.ca"
QC_three = QCThree()
class ServicesView(APIView):
def get(self, request):
return self.OkAPIResponse(QC_three.services())
class RequestsView(APIView):
def get(self, request):
return self.OkAPIResponse(QC_three.requests())
class RequestView(APIView):
def get(self, request, id):
requests = QC_three.request(id)
if requests:
return self.OkAPIResponse(requests[0])
else:
raise Http404
<commit_msg>Initialize the three API wrapper differently to fix a bug.<commit_after>from three import Three
from django.http import Http404
from .base import APIView
QC_three = Three(
endpoint = "http://dev-api.ville.quebec.qc.ca/open311/v2/",
format = "json",
jurisdiction = "ville.quebec.qc.ca",
)
class ServicesView(APIView):
def get(self, request):
return self.OkAPIResponse(QC_three.services())
class RequestsView(APIView):
def get(self, request):
return self.OkAPIResponse(QC_three.requests())
class RequestView(APIView):
def get(self, request, id):
requests = QC_three.request(id)
if requests:
return self.OkAPIResponse(requests[0])
else:
raise Http404
|
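The record above hinges on a common Python pitfall: a subclass that defines __init__ without delegating to the parent silently skips the base class's setup. The Base/Broken/Fixed classes below are a minimal hypothetical sketch of that failure mode, not part of the three library.

class Base(object):
    def __init__(self, **kwargs):
        # Imagine the parent building sessions, validating options, etc.
        self.configured = True
        self.options = kwargs

class Broken(Base):
    def __init__(self):
        # Assigns its own attributes but never runs Base.__init__.
        self.endpoint = "http://example.invalid/"

class Fixed(Base):
    def __init__(self):
        # Delegation keeps the parent's initialisation intact; passing
        # configuration as keyword arguments mirrors the fix above.
        super(Fixed, self).__init__(endpoint="http://example.invalid/")

assert not hasattr(Broken(), "configured")  # parent setup never ran
assert Fixed().configured                   # setup ran as intended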
9c11fa9d0a26d1e4caa47d2b3f0f1bf92cf8e965
|
examples/enable/gadgets/vu_demo.py
|
examples/enable/gadgets/vu_demo.py
|
from traits.api import HasTraits, Instance
from traitsui.api import View, UItem, Item, RangeEditor, Group, VGroup, HGroup
from enable.api import ComponentEditor
from enable.gadgets.vu_meter import VUMeter
class Demo(HasTraits):
vu = Instance(VUMeter)
traits_view = \
View(
HGroup(
VGroup(
VGroup(
Group(
UItem('vu', editor=ComponentEditor(size=(60, 60)),
style='custom'),
),
Item('object.vu.percent',
editor=RangeEditor(low=0.0, high=200.0,
mode='slider')),
),
'_',
VGroup(
Item('object.vu.angle', label="angle",
editor=RangeEditor(low=0.0, high=89.0,
mode='slider')),
Item('object.vu._beta',
editor=RangeEditor(low=0.0, high=1.0,
mode='slider')),
),
),
),
width=450,
height=380,
title="VU Meter",
resizable=True,
)
if __name__ == "__main__":
color = (0.9, 0.85, 0.7)
vu = VUMeter(border_visible=True, border_width=2, bgcolor=color)
demo = Demo(vu=vu)
demo.configure_traits()
|
from traits.api import HasTraits, Instance
from traitsui.api import View, UItem, Item, RangeEditor, Group, VGroup
from enable.api import ComponentEditor
from enable.gadgets.vu_meter import VUMeter
class Demo(HasTraits):
vu = Instance(VUMeter)
traits_view = \
View(
VGroup(
Group(
UItem('vu', editor=ComponentEditor(size=(60, 60)),
style='custom'),
),
Item('object.vu.percent',
editor=RangeEditor(low=0.0, high=200.0,
mode='slider')),
),
'_',
VGroup(
Item('object.vu.angle', label="angle",
editor=RangeEditor(low=0.0, high=89.0,
mode='slider')),
Item('object.vu._beta',
editor=RangeEditor(low=0.0, high=1.0,
mode='slider')),
),
width=450,
height=380,
title="VU Meter",
resizable=True,
)
if __name__ == "__main__":
color = (0.9, 0.85, 0.7)
vu = VUMeter(border_visible=True, border_width=2, bgcolor=color)
demo = Demo(vu=vu)
demo.configure_traits()
|
Remove extraneous Groups from the View in the VU Meter demo.
|
Remove extraneous Groups from the View in the VU Meter demo.
|
Python
|
bsd-3-clause
|
tommy-u/enable,tommy-u/enable,tommy-u/enable,tommy-u/enable
|
from traits.api import HasTraits, Instance
from traitsui.api import View, UItem, Item, RangeEditor, Group, VGroup, HGroup
from enable.api import ComponentEditor
from enable.gadgets.vu_meter import VUMeter
class Demo(HasTraits):
vu = Instance(VUMeter)
traits_view = \
View(
HGroup(
VGroup(
VGroup(
Group(
UItem('vu', editor=ComponentEditor(size=(60, 60)),
style='custom'),
),
Item('object.vu.percent',
editor=RangeEditor(low=0.0, high=200.0,
mode='slider')),
),
'_',
VGroup(
Item('object.vu.angle', label="angle",
editor=RangeEditor(low=0.0, high=89.0,
mode='slider')),
Item('object.vu._beta',
editor=RangeEditor(low=0.0, high=1.0,
mode='slider')),
),
),
),
width=450,
height=380,
title="VU Meter",
resizable=True,
)
if __name__ == "__main__":
color = (0.9, 0.85, 0.7)
vu = VUMeter(border_visible=True, border_width=2, bgcolor=color)
demo = Demo(vu=vu)
demo.configure_traits()
Remove extraneous Groups from the View in the VU Meter demo.
|
from traits.api import HasTraits, Instance
from traitsui.api import View, UItem, Item, RangeEditor, Group, VGroup
from enable.api import ComponentEditor
from enable.gadgets.vu_meter import VUMeter
class Demo(HasTraits):
vu = Instance(VUMeter)
traits_view = \
View(
VGroup(
Group(
UItem('vu', editor=ComponentEditor(size=(60, 60)),
style='custom'),
),
Item('object.vu.percent',
editor=RangeEditor(low=0.0, high=200.0,
mode='slider')),
),
'_',
VGroup(
Item('object.vu.angle', label="angle",
editor=RangeEditor(low=0.0, high=89.0,
mode='slider')),
Item('object.vu._beta',
editor=RangeEditor(low=0.0, high=1.0,
mode='slider')),
),
width=450,
height=380,
title="VU Meter",
resizable=True,
)
if __name__ == "__main__":
color = (0.9, 0.85, 0.7)
vu = VUMeter(border_visible=True, border_width=2, bgcolor=color)
demo = Demo(vu=vu)
demo.configure_traits()
|
<commit_before>
from traits.api import HasTraits, Instance
from traitsui.api import View, UItem, Item, RangeEditor, Group, VGroup, HGroup
from enable.api import ComponentEditor
from enable.gadgets.vu_meter import VUMeter
class Demo(HasTraits):
vu = Instance(VUMeter)
traits_view = \
View(
HGroup(
VGroup(
VGroup(
Group(
UItem('vu', editor=ComponentEditor(size=(60, 60)),
style='custom'),
),
Item('object.vu.percent',
editor=RangeEditor(low=0.0, high=200.0,
mode='slider')),
),
'_',
VGroup(
Item('object.vu.angle', label="angle",
editor=RangeEditor(low=0.0, high=89.0,
mode='slider')),
Item('object.vu._beta',
editor=RangeEditor(low=0.0, high=1.0,
mode='slider')),
),
),
),
width=450,
height=380,
title="VU Meter",
resizable=True,
)
if __name__ == "__main__":
color = (0.9, 0.85, 0.7)
vu = VUMeter(border_visible=True, border_width=2, bgcolor=color)
demo = Demo(vu=vu)
demo.configure_traits()
<commit_msg>Remove extraneous Groups from the View in the VU Meter demo.<commit_after>
|
from traits.api import HasTraits, Instance
from traitsui.api import View, UItem, Item, RangeEditor, Group, VGroup
from enable.api import ComponentEditor
from enable.gadgets.vu_meter import VUMeter
class Demo(HasTraits):
vu = Instance(VUMeter)
traits_view = \
View(
VGroup(
Group(
UItem('vu', editor=ComponentEditor(size=(60, 60)),
style='custom'),
),
Item('object.vu.percent',
editor=RangeEditor(low=0.0, high=200.0,
mode='slider')),
),
'_',
VGroup(
Item('object.vu.angle', label="angle",
editor=RangeEditor(low=0.0, high=89.0,
mode='slider')),
Item('object.vu._beta',
editor=RangeEditor(low=0.0, high=1.0,
mode='slider')),
),
width=450,
height=380,
title="VU Meter",
resizable=True,
)
if __name__ == "__main__":
color = (0.9, 0.85, 0.7)
vu = VUMeter(border_visible=True, border_width=2, bgcolor=color)
demo = Demo(vu=vu)
demo.configure_traits()
|
from traits.api import HasTraits, Instance
from traitsui.api import View, UItem, Item, RangeEditor, Group, VGroup, HGroup
from enable.api import ComponentEditor
from enable.gadgets.vu_meter import VUMeter
class Demo(HasTraits):
vu = Instance(VUMeter)
traits_view = \
View(
HGroup(
VGroup(
VGroup(
Group(
UItem('vu', editor=ComponentEditor(size=(60, 60)),
style='custom'),
),
Item('object.vu.percent',
editor=RangeEditor(low=0.0, high=200.0,
mode='slider')),
),
'_',
VGroup(
Item('object.vu.angle', label="angle",
editor=RangeEditor(low=0.0, high=89.0,
mode='slider')),
Item('object.vu._beta',
editor=RangeEditor(low=0.0, high=1.0,
mode='slider')),
),
),
),
width=450,
height=380,
title="VU Meter",
resizable=True,
)
if __name__ == "__main__":
color = (0.9, 0.85, 0.7)
vu = VUMeter(border_visible=True, border_width=2, bgcolor=color)
demo = Demo(vu=vu)
demo.configure_traits()
Remove extraneous Groups from the View in the VU Meter demo.
from traits.api import HasTraits, Instance
from traitsui.api import View, UItem, Item, RangeEditor, Group, VGroup
from enable.api import ComponentEditor
from enable.gadgets.vu_meter import VUMeter
class Demo(HasTraits):
vu = Instance(VUMeter)
traits_view = \
View(
VGroup(
Group(
UItem('vu', editor=ComponentEditor(size=(60, 60)),
style='custom'),
),
Item('object.vu.percent',
editor=RangeEditor(low=0.0, high=200.0,
mode='slider')),
),
'_',
VGroup(
Item('object.vu.angle', label="angle",
editor=RangeEditor(low=0.0, high=89.0,
mode='slider')),
Item('object.vu._beta',
editor=RangeEditor(low=0.0, high=1.0,
mode='slider')),
),
width=450,
height=380,
title="VU Meter",
resizable=True,
)
if __name__ == "__main__":
color = (0.9, 0.85, 0.7)
vu = VUMeter(border_visible=True, border_width=2, bgcolor=color)
demo = Demo(vu=vu)
demo.configure_traits()
|
<commit_before>
from traits.api import HasTraits, Instance
from traitsui.api import View, UItem, Item, RangeEditor, Group, VGroup, HGroup
from enable.api import ComponentEditor
from enable.gadgets.vu_meter import VUMeter
class Demo(HasTraits):
vu = Instance(VUMeter)
traits_view = \
View(
HGroup(
VGroup(
VGroup(
Group(
UItem('vu', editor=ComponentEditor(size=(60, 60)),
style='custom'),
),
Item('object.vu.percent',
editor=RangeEditor(low=0.0, high=200.0,
mode='slider')),
),
'_',
VGroup(
Item('object.vu.angle', label="angle",
editor=RangeEditor(low=0.0, high=89.0,
mode='slider')),
Item('object.vu._beta',
editor=RangeEditor(low=0.0, high=1.0,
mode='slider')),
),
),
),
width=450,
height=380,
title="VU Meter",
resizable=True,
)
if __name__ == "__main__":
color = (0.9, 0.85, 0.7)
vu = VUMeter(border_visible=True, border_width=2, bgcolor=color)
demo = Demo(vu=vu)
demo.configure_traits()
<commit_msg>Remove extraneous Groups from the View in the VU Meter demo.<commit_after>
from traits.api import HasTraits, Instance
from traitsui.api import View, UItem, Item, RangeEditor, Group, VGroup
from enable.api import ComponentEditor
from enable.gadgets.vu_meter import VUMeter
class Demo(HasTraits):
vu = Instance(VUMeter)
traits_view = \
View(
VGroup(
Group(
UItem('vu', editor=ComponentEditor(size=(60, 60)),
style='custom'),
),
Item('object.vu.percent',
editor=RangeEditor(low=0.0, high=200.0,
mode='slider')),
),
'_',
VGroup(
Item('object.vu.angle', label="angle",
editor=RangeEditor(low=0.0, high=89.0,
mode='slider')),
Item('object.vu._beta',
editor=RangeEditor(low=0.0, high=1.0,
mode='slider')),
),
width=450,
height=380,
title="VU Meter",
resizable=True,
)
if __name__ == "__main__":
color = (0.9, 0.85, 0.7)
vu = VUMeter(border_visible=True, border_width=2, bgcolor=color)
demo = Demo(vu=vu)
demo.configure_traits()
|
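The essence of the cleanup above is that a container holding exactly one child contributes nothing to the layout. The toy flatten helper below is a hypothetical illustration unrelated to TraitsUI itself; groups are modelled as (name, children) tuples and items as strings.

def flatten(node):
    """Collapse containers that wrap exactly one child."""
    if isinstance(node, str):
        return node  # a leaf item
    name, children = node
    children = [flatten(child) for child in children]
    if len(children) == 1:
        return children[0]  # the wrapper adds no information
    return (name, children)

nested = ("HGroup", [("VGroup", [("VGroup", ["vu", "percent"])])])
print(flatten(nested))  # -> ('VGroup', ['vu', 'percent'])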
8e8986a17b7fa38417fe39ec8fbf4e1d3ee43f64
|
arduino_flasher/reset_arduino.py
|
arduino_flasher/reset_arduino.py
|
#!/usr/local/bin/python
import mraa
import time
resetPin = mraa.Gpio(8)
resetPin.dir(mraa.DIR_OUT)
resetPin.write(0)
time.sleep(0.2)
resetPin.write(1)
|
#!/usr/local/bin/python
import mraa
import time
resetPin = mraa.Gpio(8)
resetPin.dir(mraa.DIR_OUT)
resetPin.write(1)
time.sleep(0.2)
resetPin.write(0)
time.sleep(0.2)
resetPin.write(1)
|
Reset script first pulls pin high
|
Reset script first pulls pin high
|
Python
|
bsd-3-clause
|
Pavlos1/SensoringJMSS,Pavlos1/SensoringJMSS,Pavlos1/SensoringJMSS,Pavlos1/SensoringJMSS
|
#!/usr/local/bin/python
import mraa
import time
resetPin = mraa.Gpio(8)
resetPin.dir(mraa.DIR_OUT)
resetPin.write(0)
time.sleep(0.2)
resetPin.write(1)
Reset script first pulls pin high
|
#!/usr/local/bin/python
import mraa
import time
resetPin = mraa.Gpio(8)
resetPin.dir(mraa.DIR_OUT)
resetPin.write(1)
time.sleep(0.2)
resetPin.write(0)
time.sleep(0.2)
resetPin.write(1)
|
<commit_before>#!/usr/local/bin/python
import mraa
import time
resetPin = mraa.Gpio(8)
resetPin.dir(mraa.DIR_OUT)
resetPin.write(0)
time.sleep(0.2)
resetPin.write(1)
<commit_msg>Reset script first pulls pin high<commit_after>
|
#!/usr/local/bin/python
import mraa
import time
resetPin = mraa.Gpio(8)
resetPin.dir(mraa.DIR_OUT)
resetPin.write(1)
time.sleep(0.2)
resetPin.write(0)
time.sleep(0.2)
resetPin.write(1)
|
#!/usr/local/bin/python
import mraa
import time
resetPin = mraa.Gpio(8)
resetPin.dir(mraa.DIR_OUT)
resetPin.write(0)
time.sleep(0.2)
resetPin.write(1)
Reset script first pulls pin high#!/usr/local/bin/python
import mraa
import time
resetPin = mraa.Gpio(8)
resetPin.dir(mraa.DIR_OUT)
resetPin.write(1)
time.sleep(0.2)
resetPin.write(0)
time.sleep(0.2)
resetPin.write(1)
|
<commit_before>#!/usr/local/bin/python
import mraa
import time
resetPin = mraa.Gpio(8)
resetPin.dir(mraa.DIR_OUT)
resetPin.write(0)
time.sleep(0.2)
resetPin.write(1)
<commit_msg>Reset script first pulls pin high<commit_after>#!/usr/local/bin/python
import mraa
import time
resetPin = mraa.Gpio(8)
resetPin.dir(mraa.DIR_OUT)
resetPin.write(1)
time.sleep(0.2)
resetPin.write(0)
time.sleep(0.2)
resetPin.write(1)
|
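The timing pattern above generalises to any active-low reset line: start high so the falling edge is well defined, hold low, then release. The helper below is a hypothetical sketch; FakePin stands in for mraa.Gpio so it runs without hardware.

import time

def pulse_reset(pin, settle=0.2):
    """Drive high, low, then high again on an active-low reset pin."""
    for level in (1, 0, 1):
        pin.write(level)
        time.sleep(settle)

class FakePin(object):
    def write(self, level):
        print("pin ->", level)

pulse_reset(FakePin())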
1371f1a1b2914a7f2e328f69bdc599c1eada54db
|
Python-practice/fy_print_seq_len_in_fasta.py
|
Python-practice/fy_print_seq_len_in_fasta.py
|
#!/usr/bin/env python
'''
Script: fy_print_seq_len_in_fasta.py
Function: Print sequence length to STDOUT in fasta file
Note: Python3 is not default installed for most computer,
and the extra-installed module like Biopython could
not be directly used by python3. So, it's not the
righ time to use Python3 now.
Date: 2014/11/11
'''
import sys
if len(sys.argv) < 2:
print('Usage: ' + sys.argv[0] + ' <FASTA>')
sys.exit()
from Bio import SeqIO
for record in SeqIO.parse(sys.argv[1], 'fasta'):
print("%s %i" % (record.id, len(record)))
|
#!/usr/bin/env python
'''
Script: fy_print_seq_len_in_fasta.py
Function: Print sequence length to STDOUT in fasta file
Note: Python3 is not default installed for most computer,
and the extra-installed module like Biopython could
not be directly used by python3. So, it's not the
righ time to use Python3 now.
Date: 2014/11/11
'''
import sys
if len(sys.argv) < 2:
print('Usage: ' + sys.argv[0] + ' <FASTA>')
sys.exit()
from Bio import SeqIO
seqlen = []
num_of_seq = 0
total_len = 0
for record in SeqIO.parse(sys.argv[1], 'fasta'):
print("%s %i" % (record.id, len(record)))
num_of_seq += 1
total_len += len(record)
seqlen.append(len(record))
seqlen.sort()
min_len = seqlen[0]
max_len = seqlen[-1]
print("Number of sequences: " + str(num_of_seq))
print("Total length: " + str(total_len))
print("Max length: " + str(max_len))
print("Min length: " + str(min_len))
|
Add the statistics information of sequence length
|
Add the statistics information of sequence length
Include number of sequences, total length, maximum length, and minimum
length
|
Python
|
bsd-2-clause
|
lileiting/gfat
|
#!/usr/bin/env python
'''
Script: fy_print_seq_len_in_fasta.py
Function: Print sequence length to STDOUT in fasta file
Note: Python3 is not default installed for most computer,
and the extra-installed module like Biopython could
not be directly used by python3. So, it's not the
righ time to use Python3 now.
Date: 2014/11/11
'''
import sys
if len(sys.argv) < 2:
print('Usage: ' + sys.argv[0] + ' <FASTA>')
sys.exit()
from Bio import SeqIO
for record in SeqIO.parse(sys.argv[1], 'fasta'):
print("%s %i" % (record.id, len(record)))
Add the statistics information of sequence length
Include number of sequences, total length, maximum length, and minimum
length
|
#!/usr/bin/env python
'''
Script: fy_print_seq_len_in_fasta.py
Function: Print sequence length to STDOUT in fasta file
Note: Python3 is not default installed for most computer,
and the extra-installed module like Biopython could
not be directly used by python3. So, it's not the
righ time to use Python3 now.
Date: 2014/11/11
'''
import sys
if len(sys.argv) < 2:
print('Usage: ' + sys.argv[0] + ' <FASTA>')
sys.exit()
from Bio import SeqIO
seqlen = []
num_of_seq = 0
total_len = 0
for record in SeqIO.parse(sys.argv[1], 'fasta'):
print("%s %i" % (record.id, len(record)))
num_of_seq += 1
total_len += len(record)
seqlen.append(len(record))
seqlen.sort()
min_len = seqlen[0]
max_len = seqlen[-1]
print("Number of sequences: " + str(num_of_seq))
print("Total length: " + str(total_len))
print("Max length: " + str(max_len))
print("Min length: " + str(min_len))
|
<commit_before>#!/usr/bin/env python
'''
Script: fy_print_seq_len_in_fasta.py
Function: Print sequence length to STDOUT in fasta file
Note: Python3 is not default installed for most computer,
and the extra-installed module like Biopython could
not be directly used by python3. So, it's not the
righ time to use Python3 now.
Date: 2014/11/11
'''
import sys
if len(sys.argv) < 2:
print('Usage: ' + sys.argv[0] + ' <FASTA>')
sys.exit()
from Bio import SeqIO
for record in SeqIO.parse(sys.argv[1], 'fasta'):
print("%s %i" % (record.id, len(record)))
<commit_msg>Add the statistics information of sequence length
Include number of sequences, total length, maximum length, and minimum
length<commit_after>
|
#!/usr/bin/env python
'''
Script: fy_print_seq_len_in_fasta.py
Function: Print sequence length to STDOUT in fasta file
Note: Python3 is not default installed for most computer,
and the extra-installed module like Biopython could
not be directly used by python3. So, it's not the
righ time to use Python3 now.
Date: 2014/11/11
'''
import sys
if len(sys.argv) < 2:
print('Usage: ' + sys.argv[0] + ' <FASTA>')
sys.exit()
from Bio import SeqIO
seqlen = []
num_of_seq = 0
total_len = 0
for record in SeqIO.parse(sys.argv[1], 'fasta'):
print("%s %i" % (record.id, len(record)))
num_of_seq += 1
total_len += len(record)
seqlen.append(len(record))
seqlen.sort()
min_len = seqlen[0]
max_len = seqlen[-1]
print("Number of sequences: " + str(num_of_seq))
print("Total length: " + str(total_len))
print("Max length: " + str(max_len))
print("Min length: " + str(min_len))
|
#!/usr/bin/env python
'''
Script: fy_print_seq_len_in_fasta.py
Function: Print sequence length to STDOUT in fasta file
Note: Python3 is not default installed for most computer,
and the extra-installed module like Biopython could
not be directly used by python3. So, it's not the
righ time to use Python3 now.
Date: 2014/11/11
'''
import sys
if len(sys.argv) < 2:
print('Usage: ' + sys.argv[0] + ' <FASTA>')
sys.exit()
from Bio import SeqIO
for record in SeqIO.parse(sys.argv[1], 'fasta'):
print("%s %i" % (record.id, len(record)))
Add the statistics information of sequence length
Include number of sequences, total length, maximum length, and minimum
length#!/usr/bin/env python
'''
Script: fy_print_seq_len_in_fasta.py
Function: Print sequence length to STDOUT in fasta file
Note: Python3 is not default installed for most computer,
and the extra-installed module like Biopython could
not be directly used by python3. So, it's not the
righ time to use Python3 now.
Date: 2014/11/11
'''
import sys
if len(sys.argv) < 2:
print('Usage: ' + sys.argv[0] + ' <FASTA>')
sys.exit()
from Bio import SeqIO
seqlen = []
num_of_seq = 0
total_len = 0
for record in SeqIO.parse(sys.argv[1], 'fasta'):
print("%s %i" % (record.id, len(record)))
num_of_seq += 1
total_len += len(record)
seqlen.append(len(record))
seqlen.sort()
min_len = seqlen[0]
max_len = seqlen[-1]
print("Number of sequences: " + str(num_of_seq))
print("Total length: " + str(total_len))
print("Max length: " + str(max_len))
print("Min length: " + str(min_len))
|
<commit_before>#!/usr/bin/env python
'''
Script: fy_print_seq_len_in_fasta.py
Function: Print sequence length to STDOUT in fasta file
Note: Python3 is not default installed for most computer,
and the extra-installed module like Biopython could
not be directly used by python3. So, it's not the
righ time to use Python3 now.
Date: 2014/11/11
'''
import sys
if len(sys.argv) < 2:
print('Usage: ' + sys.argv[0] + ' <FASTA>')
sys.exit()
from Bio import SeqIO
for record in SeqIO.parse(sys.argv[1], 'fasta'):
print("%s %i" % (record.id, len(record)))
<commit_msg>Add the statistics information of sequence length
Include number of sequences, total length, maximum length, and minimum
length<commit_after>#!/usr/bin/env python
'''
Script: fy_print_seq_len_in_fasta.py
Function: Print sequence length to STDOUT in fasta file
Note: Python3 is not default installed for most computer,
and the extra-installed module like Biopython could
not be directly used by python3. So, it's not the
righ time to use Python3 now.
Date: 2014/11/11
'''
import sys
if len(sys.argv) < 2:
print('Usage: ' + sys.argv[0] + ' <FASTA>')
sys.exit()
from Bio import SeqIO
seqlen = []
num_of_seq = 0
total_len = 0
for record in SeqIO.parse(sys.argv[1], 'fasta'):
print("%s %i" % (record.id, len(record)))
num_of_seq += 1
total_len += len(record)
seqlen.append(len(record))
seqlen.sort()
min_len = seqlen[0]
max_len = seqlen[-1]
print("Number of sequences: " + str(num_of_seq))
print("Total length: " + str(total_len))
print("Max length: " + str(max_len))
print("Min length: " + str(min_len))
|
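For what it's worth, the sort-based min/max above can also be expressed with builtins; this hedged alternative assumes the length list is non-empty, just as indexing a sorted list does.

def length_stats(lengths):
    """Summarise sequence lengths without sorting the list in place."""
    return {
        "count": len(lengths),
        "total": sum(lengths),
        "max": max(lengths),
        "min": min(lengths),
    }

print(length_stats([150, 300, 75]))
# {'count': 3, 'total': 525, 'max': 300, 'min': 75}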
01a60e0a0ab1e3b178cf7a56b4250ddee965e742
|
01_todo_list/test.py
|
01_todo_list/test.py
|
#!/usr/bin/env python
# coding=utf-8
"Clase de Test"
import unittest
import json
from app import app
class BasicTestCase(unittest.TestCase):
"Test Class"
def setUp(self):
"""
Setup function
"""
self.tester = app.test_client(self)
def test_empty_list_taks(self):
"""
Test empty taks list
"""
response = self.tester.get('/todo/api/tasks',
content_type='application/json')
self.assertEqual(response.status_code, 200)
data = json.loads(response.get_data(as_text=True))
self.assertEqual(data['tasks'], [])
if __name__ == '__main__':
unittest.main()
|
#!/usr/bin/env python
# coding=utf-8
"Clase de Test"
import unittest
import json
from app import app
class BasicTestCase(unittest.TestCase):
"Test Class"
def setUp(self):
"""
Setup function
"""
self.tester = app.test_client(self)
def test_empty_list_taks(self):
"""
Test empty taks list
"""
response = self.tester.get('/todo/api/tasks',
content_type='application/json')
self.assertEqual(response.status_code, 200)
data = json.loads(response.get_data(as_text=True))
self.assertEqual(data['tasks'], [])
def test_get_a_non_existing_taks(self):
"""
Get a non existing tasks
"""
response = self.tester.get('/todo/api/tasks/99',
content_type='application/json')
self.assertEqual(response.status_code, 404)
data = json.loads(response.get_data(as_text=True))
self.assertEqual(data['error'], 'Not Found')
if __name__ == '__main__':
unittest.main()
|
Test for getting a task that does not exist
|
Test for getting a task that does not exist
|
Python
|
apache-2.0
|
kamaxeon/learning-flask,kamaxeon/learning-flask
|
#!/usr/bin/env python
# coding=utf-8
"Clase de Test"
import unittest
import json
from app import app
class BasicTestCase(unittest.TestCase):
"Test Class"
def setUp(self):
"""
Setup function
"""
self.tester = app.test_client(self)
def test_empty_list_taks(self):
"""
Test empty taks list
"""
response = self.tester.get('/todo/api/tasks',
content_type='application/json')
self.assertEqual(response.status_code, 200)
data = json.loads(response.get_data(as_text=True))
self.assertEqual(data['tasks'], [])
if __name__ == '__main__':
unittest.main()
Test for getting a task that does not exist
|
#!/usr/bin/env python
# coding=utf-8
"Clase de Test"
import unittest
import json
from app import app
class BasicTestCase(unittest.TestCase):
"Test Class"
def setUp(self):
"""
Setup function
"""
self.tester = app.test_client(self)
def test_empty_list_taks(self):
"""
Test empty taks list
"""
response = self.tester.get('/todo/api/tasks',
content_type='application/json')
self.assertEqual(response.status_code, 200)
data = json.loads(response.get_data(as_text=True))
self.assertEqual(data['tasks'], [])
def test_get_a_non_existing_taks(self):
"""
Get a non existing tasks
"""
response = self.tester.get('/todo/api/tasks/99',
content_type='application/json')
self.assertEqual(response.status_code, 404)
data = json.loads(response.get_data(as_text=True))
self.assertEqual(data['error'], 'Not Found')
if __name__ == '__main__':
unittest.main()
|
<commit_before>#!/usr/bin/env python
# coding=utf-8
"Clase de Test"
import unittest
import json
from app import app
class BasicTestCase(unittest.TestCase):
"Test Class"
def setUp(self):
"""
Setup function
"""
self.tester = app.test_client(self)
def test_empty_list_taks(self):
"""
Test empty taks list
"""
response = self.tester.get('/todo/api/tasks',
content_type='application/json')
self.assertEqual(response.status_code, 200)
data = json.loads(response.get_data(as_text=True))
self.assertEqual(data['tasks'], [])
if __name__ == '__main__':
unittest.main()
<commit_msg>Test para obtener una tarea que no existe<commit_after>
|
#!/usr/bin/env python
# coding=utf-8
"Clase de Test"
import unittest
import json
from app import app
class BasicTestCase(unittest.TestCase):
"Test Class"
def setUp(self):
"""
Setup function
"""
self.tester = app.test_client(self)
def test_empty_list_taks(self):
"""
Test empty taks list
"""
response = self.tester.get('/todo/api/tasks',
content_type='application/json')
self.assertEqual(response.status_code, 200)
data = json.loads(response.get_data(as_text=True))
self.assertEqual(data['tasks'], [])
def test_get_a_non_existing_taks(self):
"""
Get a non existing tasks
"""
response = self.tester.get('/todo/api/tasks/99',
content_type='application/json')
self.assertEqual(response.status_code, 404)
data = json.loads(response.get_data(as_text=True))
self.assertEqual(data['error'], 'Not Found')
if __name__ == '__main__':
unittest.main()
|
#!/usr/bin/env python
# coding=utf-8
"Clase de Test"
import unittest
import json
from app import app
class BasicTestCase(unittest.TestCase):
"Test Class"
def setUp(self):
"""
Setup function
"""
self.tester = app.test_client(self)
def test_empty_list_taks(self):
"""
Test empty taks list
"""
response = self.tester.get('/todo/api/tasks',
content_type='application/json')
self.assertEqual(response.status_code, 200)
data = json.loads(response.get_data(as_text=True))
self.assertEqual(data['tasks'], [])
if __name__ == '__main__':
unittest.main()
Test for getting a task that does not exist#!/usr/bin/env python
# coding=utf-8
"Clase de Test"
import unittest
import json
from app import app
class BasicTestCase(unittest.TestCase):
"Test Class"
def setUp(self):
"""
Setup function
"""
self.tester = app.test_client(self)
def test_empty_list_taks(self):
"""
Test empty taks list
"""
response = self.tester.get('/todo/api/tasks',
content_type='application/json')
self.assertEqual(response.status_code, 200)
data = json.loads(response.get_data(as_text=True))
self.assertEqual(data['tasks'], [])
def test_get_a_non_existing_taks(self):
"""
Get a non existing tasks
"""
response = self.tester.get('/todo/api/tasks/99',
content_type='application/json')
self.assertEqual(response.status_code, 404)
data = json.loads(response.get_data(as_text=True))
self.assertEqual(data['error'], 'Not Found')
if __name__ == '__main__':
unittest.main()
|
<commit_before>#!/usr/bin/env python
# coding=utf-8
"Clase de Test"
import unittest
import json
from app import app
class BasicTestCase(unittest.TestCase):
"Test Class"
def setUp(self):
"""
Setup function
"""
self.tester = app.test_client(self)
def test_empty_list_taks(self):
"""
Test empty taks list
"""
response = self.tester.get('/todo/api/tasks',
content_type='application/json')
self.assertEqual(response.status_code, 200)
data = json.loads(response.get_data(as_text=True))
self.assertEqual(data['tasks'], [])
if __name__ == '__main__':
unittest.main()
<commit_msg>Test para obtener una tarea que no existe<commit_after>#!/usr/bin/env python
# coding=utf-8
"Clase de Test"
import unittest
import json
from app import app
class BasicTestCase(unittest.TestCase):
"Test Class"
def setUp(self):
"""
Setup function
"""
self.tester = app.test_client(self)
def test_empty_list_taks(self):
"""
Test empty taks list
"""
response = self.tester.get('/todo/api/tasks',
content_type='application/json')
self.assertEqual(response.status_code, 200)
data = json.loads(response.get_data(as_text=True))
self.assertEqual(data['tasks'], [])
def test_get_a_non_existing_taks(self):
"""
Get a non existing tasks
"""
response = self.tester.get('/todo/api/tasks/99',
content_type='application/json')
self.assertEqual(response.status_code, 404)
data = json.loads(response.get_data(as_text=True))
self.assertEqual(data['error'], 'Not Found')
if __name__ == '__main__':
unittest.main()
|
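The new test asserts that a 404 carries a JSON body with an 'error' key, which implies the application registers a JSON error handler. The app module under test is not shown, so the snippet below is one plausible shape, not the project's actual code.

from flask import Flask, jsonify

app = Flask(__name__)

@app.errorhandler(404)
def not_found(error):
    # Replace Flask's default HTML 404 page with JSON so the test's
    # json.loads(response.get_data()) sees {'error': 'Not Found'}.
    return jsonify({'error': 'Not Found'}), 404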
24c6e2852e319ec0d0f4e8d0539bc69a9915c3e7
|
tests/server/test_server.py
|
tests/server/test_server.py
|
import logging
import mock
import oauth2u
import oauth2u.server.log
def test_should_have_optional_port():
server = oauth2u.Server()
assert 8000 == server.port
def test_should_accept_custom_port():
server = oauth2u.Server(8888)
assert 8888 == server.port
def test_should_configure_log_with_default_configurations(monkeypatch):
log_mock = mock.Mock()
monkeypatch.setattr(oauth2u.server, 'log', log_mock)
server = oauth2u.Server()
assert 1 == log_mock.configure.call_count
log_mock.configure.assert_called_with()
def test_should_override_default_log_parameters(monkeypatch):
log_mock = mock.Mock()
monkeypatch.setattr(oauth2u.server, 'log', log_mock)
server = oauth2u.Server(log_config={'format': '%(message)s'})
assert 1 == log_mock.configure.call_count
log_mock.configure.assert_called_with(format='%(message)s')
|
import logging
import mock
import oauth2u
import oauth2u.server.log
def teardown_function(func):
logging.disable(logging.INFO)
def test_should_have_optional_port():
server = oauth2u.Server()
assert 8000 == server.port
def test_should_accept_custom_port():
server = oauth2u.Server(8888)
assert 8888 == server.port
def test_should_configure_log_with_default_configurations(monkeypatch):
log_mock = mock.Mock()
monkeypatch.setattr(oauth2u.server, 'log', log_mock)
server = oauth2u.Server()
assert 1 == log_mock.configure.call_count
log_mock.configure.assert_called_with()
def test_should_override_default_log_parameters(monkeypatch):
log_mock = mock.Mock()
monkeypatch.setattr(oauth2u.server, 'log', log_mock)
server = oauth2u.Server(log_config={'format': '%(message)s'})
assert 1 == log_mock.configure.call_count
log_mock.configure.assert_called_with(format='%(message)s')
|
Disable logging on test runner process
|
Disable logging on test runner process
|
Python
|
mit
|
globocom/oauth2u,globocom/oauth2u
|
import logging
import mock
import oauth2u
import oauth2u.server.log
def test_should_have_optional_port():
server = oauth2u.Server()
assert 8000 == server.port
def test_should_accept_custom_port():
server = oauth2u.Server(8888)
assert 8888 == server.port
def test_should_configure_log_with_default_configurations(monkeypatch):
log_mock = mock.Mock()
monkeypatch.setattr(oauth2u.server, 'log', log_mock)
server = oauth2u.Server()
assert 1 == log_mock.configure.call_count
log_mock.configure.assert_called_with()
def test_should_override_default_log_parameters(monkeypatch):
log_mock = mock.Mock()
monkeypatch.setattr(oauth2u.server, 'log', log_mock)
server = oauth2u.Server(log_config={'format': '%(message)s'})
assert 1 == log_mock.configure.call_count
log_mock.configure.assert_called_with(format='%(message)s')
Disable logging on test runner process
|
import logging
import mock
import oauth2u
import oauth2u.server.log
def teardown_function(func):
logging.disable(logging.INFO)
def test_should_have_optional_port():
server = oauth2u.Server()
assert 8000 == server.port
def test_should_accept_custom_port():
server = oauth2u.Server(8888)
assert 8888 == server.port
def test_should_configure_log_with_default_configurations(monkeypatch):
log_mock = mock.Mock()
monkeypatch.setattr(oauth2u.server, 'log', log_mock)
server = oauth2u.Server()
assert 1 == log_mock.configure.call_count
log_mock.configure.assert_called_with()
def test_should_override_default_log_parameters(monkeypatch):
log_mock = mock.Mock()
monkeypatch.setattr(oauth2u.server, 'log', log_mock)
server = oauth2u.Server(log_config={'format': '%(message)s'})
assert 1 == log_mock.configure.call_count
log_mock.configure.assert_called_with(format='%(message)s')
|
<commit_before>import logging
import mock
import oauth2u
import oauth2u.server.log
def test_should_have_optional_port():
server = oauth2u.Server()
assert 8000 == server.port
def test_should_accept_custom_port():
server = oauth2u.Server(8888)
assert 8888 == server.port
def test_should_configure_log_with_default_configurations(monkeypatch):
log_mock = mock.Mock()
monkeypatch.setattr(oauth2u.server, 'log', log_mock)
server = oauth2u.Server()
assert 1 == log_mock.configure.call_count
log_mock.configure.assert_called_with()
def test_should_override_default_log_parameters(monkeypatch):
log_mock = mock.Mock()
monkeypatch.setattr(oauth2u.server, 'log', log_mock)
server = oauth2u.Server(log_config={'format': '%(message)s'})
assert 1 == log_mock.configure.call_count
log_mock.configure.assert_called_with(format='%(message)s')
<commit_msg>Disable logging on test runner process<commit_after>
|
import logging
import mock
import oauth2u
import oauth2u.server.log
def teardown_function(func):
logging.disable(logging.INFO)
def test_should_have_optional_port():
server = oauth2u.Server()
assert 8000 == server.port
def test_should_accept_custom_port():
server = oauth2u.Server(8888)
assert 8888 == server.port
def test_should_configure_log_with_default_configurations(monkeypatch):
log_mock = mock.Mock()
monkeypatch.setattr(oauth2u.server, 'log', log_mock)
server = oauth2u.Server()
assert 1 == log_mock.configure.call_count
log_mock.configure.assert_called_with()
def test_should_override_default_log_parameters(monkeypatch):
log_mock = mock.Mock()
monkeypatch.setattr(oauth2u.server, 'log', log_mock)
server = oauth2u.Server(log_config={'format': '%(message)s'})
assert 1 == log_mock.configure.call_count
log_mock.configure.assert_called_with(format='%(message)s')
|
import logging
import mock
import oauth2u
import oauth2u.server.log
def test_should_have_optional_port():
server = oauth2u.Server()
assert 8000 == server.port
def test_should_accept_custom_port():
server = oauth2u.Server(8888)
assert 8888 == server.port
def test_should_configure_log_with_default_configurations(monkeypatch):
log_mock = mock.Mock()
monkeypatch.setattr(oauth2u.server, 'log', log_mock)
server = oauth2u.Server()
assert 1 == log_mock.configure.call_count
log_mock.configure.assert_called_with()
def test_should_override_default_log_parameters(monkeypatch):
log_mock = mock.Mock()
monkeypatch.setattr(oauth2u.server, 'log', log_mock)
server = oauth2u.Server(log_config={'format': '%(message)s'})
assert 1 == log_mock.configure.call_count
log_mock.configure.assert_called_with(format='%(message)s')
Disable logging on test runner processimport logging
import mock
import oauth2u
import oauth2u.server.log
def teardown_function(func):
logging.disable(logging.INFO)
def test_should_have_optional_port():
server = oauth2u.Server()
assert 8000 == server.port
def test_should_accept_custom_port():
server = oauth2u.Server(8888)
assert 8888 == server.port
def test_should_configure_log_with_default_configurations(monkeypatch):
log_mock = mock.Mock()
monkeypatch.setattr(oauth2u.server, 'log', log_mock)
server = oauth2u.Server()
assert 1 == log_mock.configure.call_count
log_mock.configure.assert_called_with()
def test_should_override_default_log_parameters(monkeypatch):
log_mock = mock.Mock()
monkeypatch.setattr(oauth2u.server, 'log', log_mock)
server = oauth2u.Server(log_config={'format': '%(message)s'})
assert 1 == log_mock.configure.call_count
log_mock.configure.assert_called_with(format='%(message)s')
|
<commit_before>import logging
import mock
import oauth2u
import oauth2u.server.log
def test_should_have_optional_port():
server = oauth2u.Server()
assert 8000 == server.port
def test_should_accept_custom_port():
server = oauth2u.Server(8888)
assert 8888 == server.port
def test_should_configure_log_with_default_configurations(monkeypatch):
log_mock = mock.Mock()
monkeypatch.setattr(oauth2u.server, 'log', log_mock)
server = oauth2u.Server()
assert 1 == log_mock.configure.call_count
log_mock.configure.assert_called_with()
def test_should_override_default_log_parameters(monkeypatch):
log_mock = mock.Mock()
monkeypatch.setattr(oauth2u.server, 'log', log_mock)
server = oauth2u.Server(log_config={'format': '%(message)s'})
assert 1 == log_mock.configure.call_count
log_mock.configure.assert_called_with(format='%(message)s')
<commit_msg>Disable logging on test runner process<commit_after>import logging
import mock
import oauth2u
import oauth2u.server.log
def teardown_function(func):
logging.disable(logging.INFO)
def test_should_have_optional_port():
server = oauth2u.Server()
assert 8000 == server.port
def test_should_accept_custom_port():
server = oauth2u.Server(8888)
assert 8888 == server.port
def test_should_configure_log_with_default_configurations(monkeypatch):
log_mock = mock.Mock()
monkeypatch.setattr(oauth2u.server, 'log', log_mock)
server = oauth2u.Server()
assert 1 == log_mock.configure.call_count
log_mock.configure.assert_called_with()
def test_should_override_default_log_parameters(monkeypatch):
log_mock = mock.Mock()
monkeypatch.setattr(oauth2u.server, 'log', log_mock)
server = oauth2u.Server(log_config={'format': '%(message)s'})
assert 1 == log_mock.configure.call_count
log_mock.configure.assert_called_with(format='%(message)s')
|
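logging.disable(level) mutes every record at or below the given severity for the whole process, which is why calling it in a teardown hook quiets the server's chatter during test runs. A small sketch of the behaviour:

import logging

logging.basicConfig(level=logging.DEBUG)
logging.info("visible")           # emitted normally

logging.disable(logging.INFO)     # suppress INFO and everything below
logging.info("suppressed")        # swallowed
logging.warning("still visible")  # WARNING sits above the threshold

logging.disable(logging.NOTSET)   # lift the suppression again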
1e29540ee08ca8faaed5e3a8ab1ac9def290155b
|
fileconversions/file_converter.py
|
fileconversions/file_converter.py
|
from . import conversions
from .file_formats import FileFormats
class FileConverter(object):
def get_conversion(self, source_format, target_format):
return {
'application/pdf': conversions.NoOp,
'image/jpeg': conversions.JpegToPdf,
'image/png': conversions.PngToPdf,
'image/gif': conversions.GifToPdf,
'image/tiff': conversions.TiffToPdf,
'text/plain': conversions.TextToPdf,
'application/vnd.openxmlformats-officedocument.wordprocessingml.document': conversions.DocxToPdf,
'application/msword': conversions.DocToPdf,
'application/vnd.openxmlformats-officedocument.presentationml.presentation': conversions.PptxToPdf,
'application/vnd.ms-powerpoint': conversions.PptToPdf,
'application/vnd.oasis.opendocument.text': conversions.OdtToPdf,
'application/rtf': conversions.RtfToPdf,
}[source_format]()
|
from . import conversions
from .file_formats import FileFormats
class FileConverter(object):
def get_conversion(self, source_format, target_format):
return {
FileFormats.PDF: conversions.NoOp,
FileFormats.JPEG: conversions.JpegToPdf,
FileFormats.PNG: conversions.PngToPdf,
FileFormats.GIF: conversions.GifToPdf,
FileFormats.TIFF: conversions.TiffToPdf,
FileFormats.TXT: conversions.TextToPdf,
FileFormats.DOCX: conversions.DocxToPdf,
FileFormats.DOC: conversions.DocToPdf,
FileFormats.PPTX: conversions.PptxToPdf,
FileFormats.PPT: conversions.PptToPdf,
FileFormats.ODT: conversions.OdtToPdf,
FileFormats.RTF: conversions.RtfToPdf,
}[source_format]()
|
Fix how we find conversion to use file formats
|
Fix how we find conversion to use file formats
|
Python
|
mit
|
wilbertom/fileconversions
|
from . import conversions
from .file_formats import FileFormats
class FileConverter(object):
def get_conversion(self, source_format, target_format):
return {
'application/pdf': conversions.NoOp,
'image/jpeg': conversions.JpegToPdf,
'image/png': conversions.PngToPdf,
'image/gif': conversions.GifToPdf,
'image/tiff': conversions.TiffToPdf,
'text/plain': conversions.TextToPdf,
'application/vnd.openxmlformats-officedocument.wordprocessingml.document': conversions.DocxToPdf,
'application/msword': conversions.DocToPdf,
'application/vnd.openxmlformats-officedocument.presentationml.presentation': conversions.PptxToPdf,
'application/vnd.ms-powerpoint': conversions.PptToPdf,
'application/vnd.oasis.opendocument.text': conversions.OdtToPdf,
'application/rtf': conversions.RtfToPdf,
}[source_format]()
Fix how we find conversion to use file formats
|
from . import conversions
from .file_formats import FileFormats
class FileConverter(object):
def get_conversion(self, source_format, target_format):
return {
FileFormats.PDF: conversions.NoOp,
FileFormats.JPEG: conversions.JpegToPdf,
FileFormats.PNG: conversions.PngToPdf,
FileFormats.GIF: conversions.GifToPdf,
FileFormats.TIFF: conversions.TiffToPdf,
FileFormats.TXT: conversions.TextToPdf,
FileFormats.DOCX: conversions.DocxToPdf,
FileFormats.DOC: conversions.DocToPdf,
FileFormats.PPTX: conversions.PptxToPdf,
FileFormats.PPT: conversions.PptToPdf,
FileFormats.ODT: conversions.OdtToPdf,
FileFormats.RTF: conversions.RtfToPdf,
}[source_format]()
|
<commit_before>from . import conversions
from .file_formats import FileFormats
class FileConverter(object):
def get_conversion(self, source_format, target_format):
return {
'application/pdf': conversions.NoOp,
'image/jpeg': conversions.JpegToPdf,
'image/png': conversions.PngToPdf,
'image/gif': conversions.GifToPdf,
'image/tiff': conversions.TiffToPdf,
'text/plain': conversions.TextToPdf,
'application/vnd.openxmlformats-officedocument.wordprocessingml.document': conversions.DocxToPdf,
'application/msword': conversions.DocToPdf,
'application/vnd.openxmlformats-officedocument.presentationml.presentation': conversions.PptxToPdf,
'application/vnd.ms-powerpoint': conversions.PptToPdf,
'application/vnd.oasis.opendocument.text': conversions.OdtToPdf,
'application/rtf': conversions.RtfToPdf,
}[source_format]()
<commit_msg>Fix how we find conversion to use file formats<commit_after>
|
from . import conversions
from .file_formats import FileFormats
class FileConverter(object):
def get_conversion(self, source_format, target_format):
return {
FileFormats.PDF: conversions.NoOp,
FileFormats.JPEG: conversions.JpegToPdf,
FileFormats.PNG: conversions.PngToPdf,
FileFormats.GIF: conversions.GifToPdf,
FileFormats.TIFF: conversions.TiffToPdf,
FileFormats.TXT: conversions.TextToPdf,
FileFormats.DOCX: conversions.DocxToPdf,
FileFormats.DOC: conversions.DocToPdf,
FileFormats.PPTX: conversions.PptxToPdf,
FileFormats.PPT: conversions.PptToPdf,
FileFormats.ODT: conversions.OdtToPdf,
FileFormats.RTF: conversions.RtfToPdf,
}[source_format]()
|
from . import conversions
from .file_formats import FileFormats
class FileConverter(object):
def get_conversion(self, source_format, target_format):
return {
'application/pdf': conversions.NoOp,
'image/jpeg': conversions.JpegToPdf,
'image/png': conversions.PngToPdf,
'image/gif': conversions.GifToPdf,
'image/tiff': conversions.TiffToPdf,
'text/plain': conversions.TextToPdf,
'application/vnd.openxmlformats-officedocument.wordprocessingml.document': conversions.DocxToPdf,
'application/msword': conversions.DocToPdf,
'application/vnd.openxmlformats-officedocument.presentationml.presentation': conversions.PptxToPdf,
'application/vnd.ms-powerpoint': conversions.PptToPdf,
'application/vnd.oasis.opendocument.text': conversions.OdtToPdf,
'application/rtf': conversions.RtfToPdf,
}[source_format]()
Fix how we find conversion to use file formatsfrom . import conversions
from .file_formats import FileFormats
class FileConverter(object):
def get_conversion(self, source_format, target_format):
return {
FileFormats.PDF: conversions.NoOp,
FileFormats.JPEG: conversions.JpegToPdf,
FileFormats.PNG: conversions.PngToPdf,
FileFormats.GIF: conversions.GifToPdf,
FileFormats.TIFF: conversions.TiffToPdf,
FileFormats.TXT: conversions.TextToPdf,
FileFormats.DOCX: conversions.DocxToPdf,
FileFormats.DOC: conversions.DocToPdf,
FileFormats.PPTX: conversions.PptxToPdf,
FileFormats.PPT: conversions.PptToPdf,
FileFormats.ODT: conversions.OdtToPdf,
FileFormats.RTF: conversions.RtfToPdf,
}[source_format]()
|
<commit_before>from . import conversions
from .file_formats import FileFormats
class FileConverter(object):
def get_conversion(self, source_format, target_format):
return {
'application/pdf': conversions.NoOp,
'image/jpeg': conversions.JpegToPdf,
'image/png': conversions.PngToPdf,
'image/gif': conversions.GifToPdf,
'image/tiff': conversions.TiffToPdf,
'text/plain': conversions.TextToPdf,
'application/vnd.openxmlformats-officedocument.wordprocessingml.document': conversions.DocxToPdf,
'application/msword': conversions.DocToPdf,
'application/vnd.openxmlformats-officedocument.presentationml.presentation': conversions.PptxToPdf,
'application/vnd.ms-powerpoint': conversions.PptToPdf,
'application/vnd.oasis.opendocument.text': conversions.OdtToPdf,
'application/rtf': conversions.RtfToPdf,
}[source_format]()
<commit_msg>Fix how we find conversion to use file formats<commit_after>from . import conversions
from .file_formats import FileFormats
class FileConverter(object):
def get_conversion(self, source_format, target_format):
return {
FileFormats.PDF: conversions.NoOp,
FileFormats.JPEG: conversions.JpegToPdf,
FileFormats.PNG: conversions.PngToPdf,
FileFormats.GIF: conversions.GifToPdf,
FileFormats.TIFF: conversions.TiffToPdf,
FileFormats.TXT: conversions.TextToPdf,
FileFormats.DOCX: conversions.DocxToPdf,
FileFormats.DOC: conversions.DocToPdf,
FileFormats.PPTX: conversions.PptxToPdf,
FileFormats.PPT: conversions.PptToPdf,
FileFormats.ODT: conversions.OdtToPdf,
FileFormats.RTF: conversions.RtfToPdf,
}[source_format]()
|
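The commit assumes a FileFormats namespace whose constants match the MIME strings from the old mapping. That module is not shown here, but one plausible shape, inferred from the replaced literals, would be:

class FileFormats(object):
    PDF = 'application/pdf'
    JPEG = 'image/jpeg'
    PNG = 'image/png'
    GIF = 'image/gif'
    TIFF = 'image/tiff'
    TXT = 'text/plain'
    DOCX = ('application/vnd.openxmlformats-officedocument.'
            'wordprocessingml.document')
    DOC = 'application/msword'
    PPTX = ('application/vnd.openxmlformats-officedocument.'
            'presentationml.presentation')
    PPT = 'application/vnd.ms-powerpoint'
    ODT = 'application/vnd.oasis.opendocument.text'
    RTF = 'application/rtf'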
5dbd9019744621f84c3775de12b895021c3b8eb8
|
senlin/drivers/openstack/__init__.py
|
senlin/drivers/openstack/__init__.py
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from senlin.drivers.openstack import heat_v1
from senlin.drivers.openstack import lbaas
from senlin.drivers.openstack import neutron_v2
from senlin.drivers.openstack import nova_v2
def ComputeClient(params):
return nova_v2.NovaClient(params)
def OrchestrationClient(params):
return heat_v1.HeatClient(params)
def NetworkClient(params):
return neutron_v2.NeutronClient(params)
def LoadBlanacingClient(params):
return lbaas.LoadBalancerDriver(params)
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from senlin.drivers.openstack import heat_v1
from senlin.drivers.openstack import lbaas
from senlin.drivers.openstack import neutron_v2
from senlin.drivers.openstack import nova_v2
def ComputeClient(params):
return nova_v2.NovaClient(params)
def OrchestrationClient(params):
return heat_v1.HeatClient(params)
def NetworkClient(params):
return neutron_v2.NeutronClient(params)
def LoadBalancingClient(params):
return lbaas.LoadBalancerDriver(params)
|
Fix a typo in lbaas driver plugin
|
Fix a typo in lbaas driver plugin
Change-Id: I0cfbd95d41ea1d6c798a78595ac63ea049a87dbc
|
Python
|
apache-2.0
|
stackforge/senlin,openstack/senlin,Alzon/senlin,openstack/senlin,openstack/senlin,tengqm/senlin-container,stackforge/senlin,Alzon/senlin,tengqm/senlin-container
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from senlin.drivers.openstack import heat_v1
from senlin.drivers.openstack import lbaas
from senlin.drivers.openstack import neutron_v2
from senlin.drivers.openstack import nova_v2
def ComputeClient(params):
return nova_v2.NovaClient(params)
def OrchestrationClient(params):
return heat_v1.HeatClient(params)
def NetworkClient(params):
return neutron_v2.NeutronClient(params)
def LoadBlanacingClient(params):
return lbaas.LoadBalancerDriver(params)
Fix a typo in lbaas driver plugin
Change-Id: I0cfbd95d41ea1d6c798a78595ac63ea049a87dbc
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from senlin.drivers.openstack import heat_v1
from senlin.drivers.openstack import lbaas
from senlin.drivers.openstack import neutron_v2
from senlin.drivers.openstack import nova_v2
def ComputeClient(params):
return nova_v2.NovaClient(params)
def OrchestrationClient(params):
return heat_v1.HeatClient(params)
def NetworkClient(params):
return neutron_v2.NeutronClient(params)
def LoadBalancingClient(params):
return lbaas.LoadBalancerDriver(params)
|
<commit_before># Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from senlin.drivers.openstack import heat_v1
from senlin.drivers.openstack import lbaas
from senlin.drivers.openstack import neutron_v2
from senlin.drivers.openstack import nova_v2
def ComputeClient(params):
return nova_v2.NovaClient(params)
def OrchestrationClient(params):
return heat_v1.HeatClient(params)
def NetworkClient(params):
return neutron_v2.NeutronClient(params)
def LoadBlanacingClient(params):
return lbaas.LoadBalancerDriver(params)
<commit_msg>Fix a typo in lbaas driver plugin
Change-Id: I0cfbd95d41ea1d6c798a78595ac63ea049a87dbc<commit_after>
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from senlin.drivers.openstack import heat_v1
from senlin.drivers.openstack import lbaas
from senlin.drivers.openstack import neutron_v2
from senlin.drivers.openstack import nova_v2
def ComputeClient(params):
return nova_v2.NovaClient(params)
def OrchestrationClient(params):
return heat_v1.HeatClient(params)
def NetworkClient(params):
return neutron_v2.NeutronClient(params)
def LoadBalancingClient(params):
return lbaas.LoadBalancerDriver(params)
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from senlin.drivers.openstack import heat_v1
from senlin.drivers.openstack import lbaas
from senlin.drivers.openstack import neutron_v2
from senlin.drivers.openstack import nova_v2
def ComputeClient(params):
return nova_v2.NovaClient(params)
def OrchestrationClient(params):
return heat_v1.HeatClient(params)
def NetworkClient(params):
return neutron_v2.NeutronClient(params)
def LoadBlanacingClient(params):
return lbaas.LoadBalancerDriver(params)
Fix a typo in lbaas driver plugin
Change-Id: I0cfbd95d41ea1d6c798a78595ac63ea049a87dbc# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from senlin.drivers.openstack import heat_v1
from senlin.drivers.openstack import lbaas
from senlin.drivers.openstack import neutron_v2
from senlin.drivers.openstack import nova_v2
def ComputeClient(params):
return nova_v2.NovaClient(params)
def OrchestrationClient(params):
return heat_v1.HeatClient(params)
def NetworkClient(params):
return neutron_v2.NeutronClient(params)
def LoadBalancingClient(params):
return lbaas.LoadBalancerDriver(params)
|
<commit_before># Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from senlin.drivers.openstack import heat_v1
from senlin.drivers.openstack import lbaas
from senlin.drivers.openstack import neutron_v2
from senlin.drivers.openstack import nova_v2
def ComputeClient(params):
return nova_v2.NovaClient(params)
def OrchestrationClient(params):
return heat_v1.HeatClient(params)
def NetworkClient(params):
return neutron_v2.NeutronClient(params)
def LoadBlanacingClient(params):
return lbaas.LoadBalancerDriver(params)
<commit_msg>Fix a typo in lbaas driver plugin
Change-Id: I0cfbd95d41ea1d6c798a78595ac63ea049a87dbc<commit_after># Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from senlin.drivers.openstack import heat_v1
from senlin.drivers.openstack import lbaas
from senlin.drivers.openstack import neutron_v2
from senlin.drivers.openstack import nova_v2
def ComputeClient(params):
return nova_v2.NovaClient(params)
def OrchestrationClient(params):
return heat_v1.HeatClient(params)
def NetworkClient(params):
return neutron_v2.NeutronClient(params)
def LoadBalancingClient(params):
return lbaas.LoadBalancerDriver(params)
|
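A misspelling like LoadBlanacingClient only surfaces when a caller looks the name up. A tiny, hypothetical regression test (assuming senlin is importable) would catch renamed or misspelled factory functions at test time:

from senlin.drivers import openstack as drivers  # assumes senlin installed

def test_driver_factories_exist():
    for name in ('ComputeClient', 'OrchestrationClient',
                 'NetworkClient', 'LoadBalancingClient'):
        assert hasattr(drivers, name), name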
db80c8f857b0c4ff3a5ad02a59e6442629599914
|
setuptools/tests/test_upload_docs.py
|
setuptools/tests/test_upload_docs.py
|
import os
import zipfile
import pytest
from setuptools.command.upload_docs import upload_docs
from setuptools.dist import Distribution
from .textwrap import DALS
from . import contexts
SETUP_PY = DALS(
"""
from setuptools import setup
setup(name='foo')
""")
@pytest.fixture
def sample_project(tmpdir_cwd):
# setup.py
with open('setup.py', 'wt') as f:
f.write(SETUP_PY)
os.mkdir('build')
# A test document.
with open('build/index.html', 'w') as f:
f.write("Hello world.")
# An empty folder.
os.mkdir('build/empty')
@pytest.mark.usefixtures('sample_project')
@pytest.mark.usefixtures('user_override')
class TestUploadDocsTest:
def test_create_zipfile(self):
"""
Ensure zipfile creation handles common cases, including a folder
containing an empty folder.
"""
dist = Distribution()
cmd = upload_docs(dist)
cmd.target_dir = cmd.upload_dir = 'build'
with contexts.tempdir() as tmp_dir:
tmp_file = os.path.join(tmp_dir, 'foo.zip')
zip_file = cmd.create_zipfile(tmp_file)
assert zipfile.is_zipfile(tmp_file)
with zipfile.ZipFile(tmp_file) as zip_file:
assert zip_file.namelist() == ['index.html']
|
import os
import zipfile
import contextlib
import pytest
from setuptools.command.upload_docs import upload_docs
from setuptools.dist import Distribution
from .textwrap import DALS
from . import contexts
SETUP_PY = DALS(
"""
from setuptools import setup
setup(name='foo')
""")
@pytest.fixture
def sample_project(tmpdir_cwd):
# setup.py
with open('setup.py', 'wt') as f:
f.write(SETUP_PY)
os.mkdir('build')
# A test document.
with open('build/index.html', 'w') as f:
f.write("Hello world.")
# An empty folder.
os.mkdir('build/empty')
@pytest.mark.usefixtures('sample_project')
@pytest.mark.usefixtures('user_override')
class TestUploadDocsTest:
def test_create_zipfile(self):
"""
Ensure zipfile creation handles common cases, including a folder
containing an empty folder.
"""
dist = Distribution()
cmd = upload_docs(dist)
cmd.target_dir = cmd.upload_dir = 'build'
with contexts.tempdir() as tmp_dir:
tmp_file = os.path.join(tmp_dir, 'foo.zip')
zip_file = cmd.create_zipfile(tmp_file)
assert zipfile.is_zipfile(tmp_file)
with contextlib.closing(zipfile.ZipFile(tmp_file)) as zip_file:
assert zip_file.namelist() == ['index.html']
|
Use closing for Python 2.6 compatibility
|
Use closing for Python 2.6 compatibility
|
Python
|
mit
|
pypa/setuptools,pypa/setuptools,pypa/setuptools
|
import os
import zipfile
import pytest
from setuptools.command.upload_docs import upload_docs
from setuptools.dist import Distribution
from .textwrap import DALS
from . import contexts
SETUP_PY = DALS(
"""
from setuptools import setup
setup(name='foo')
""")
@pytest.fixture
def sample_project(tmpdir_cwd):
# setup.py
with open('setup.py', 'wt') as f:
f.write(SETUP_PY)
os.mkdir('build')
# A test document.
with open('build/index.html', 'w') as f:
f.write("Hello world.")
# An empty folder.
os.mkdir('build/empty')
@pytest.mark.usefixtures('sample_project')
@pytest.mark.usefixtures('user_override')
class TestUploadDocsTest:
def test_create_zipfile(self):
"""
Ensure zipfile creation handles common cases, including a folder
containing an empty folder.
"""
dist = Distribution()
cmd = upload_docs(dist)
cmd.target_dir = cmd.upload_dir = 'build'
with contexts.tempdir() as tmp_dir:
tmp_file = os.path.join(tmp_dir, 'foo.zip')
zip_file = cmd.create_zipfile(tmp_file)
assert zipfile.is_zipfile(tmp_file)
with zipfile.ZipFile(tmp_file) as zip_file:
assert zip_file.namelist() == ['index.html']
Use closing for Python 2.6 compatibility
|
import os
import zipfile
import contextlib
import pytest
from setuptools.command.upload_docs import upload_docs
from setuptools.dist import Distribution
from .textwrap import DALS
from . import contexts
SETUP_PY = DALS(
"""
from setuptools import setup
setup(name='foo')
""")
@pytest.fixture
def sample_project(tmpdir_cwd):
# setup.py
with open('setup.py', 'wt') as f:
f.write(SETUP_PY)
os.mkdir('build')
# A test document.
with open('build/index.html', 'w') as f:
f.write("Hello world.")
# An empty folder.
os.mkdir('build/empty')
@pytest.mark.usefixtures('sample_project')
@pytest.mark.usefixtures('user_override')
class TestUploadDocsTest:
def test_create_zipfile(self):
"""
Ensure zipfile creation handles common cases, including a folder
containing an empty folder.
"""
dist = Distribution()
cmd = upload_docs(dist)
cmd.target_dir = cmd.upload_dir = 'build'
with contexts.tempdir() as tmp_dir:
tmp_file = os.path.join(tmp_dir, 'foo.zip')
zip_file = cmd.create_zipfile(tmp_file)
assert zipfile.is_zipfile(tmp_file)
with contextlib.closing(zipfile.ZipFile(tmp_file)) as zip_file:
assert zip_file.namelist() == ['index.html']
|
<commit_before>import os
import zipfile
import pytest
from setuptools.command.upload_docs import upload_docs
from setuptools.dist import Distribution
from .textwrap import DALS
from . import contexts
SETUP_PY = DALS(
"""
from setuptools import setup
setup(name='foo')
""")
@pytest.fixture
def sample_project(tmpdir_cwd):
# setup.py
with open('setup.py', 'wt') as f:
f.write(SETUP_PY)
os.mkdir('build')
# A test document.
with open('build/index.html', 'w') as f:
f.write("Hello world.")
# An empty folder.
os.mkdir('build/empty')
@pytest.mark.usefixtures('sample_project')
@pytest.mark.usefixtures('user_override')
class TestUploadDocsTest:
def test_create_zipfile(self):
"""
Ensure zipfile creation handles common cases, including a folder
containing an empty folder.
"""
dist = Distribution()
cmd = upload_docs(dist)
cmd.target_dir = cmd.upload_dir = 'build'
with contexts.tempdir() as tmp_dir:
tmp_file = os.path.join(tmp_dir, 'foo.zip')
zip_file = cmd.create_zipfile(tmp_file)
assert zipfile.is_zipfile(tmp_file)
with zipfile.ZipFile(tmp_file) as zip_file:
assert zip_file.namelist() == ['index.html']
<commit_msg>Use closing for Python 2.6 compatibility<commit_after>
|
import os
import zipfile
import contextlib
import pytest
from setuptools.command.upload_docs import upload_docs
from setuptools.dist import Distribution
from .textwrap import DALS
from . import contexts
SETUP_PY = DALS(
"""
from setuptools import setup
setup(name='foo')
""")
@pytest.fixture
def sample_project(tmpdir_cwd):
# setup.py
with open('setup.py', 'wt') as f:
f.write(SETUP_PY)
os.mkdir('build')
# A test document.
with open('build/index.html', 'w') as f:
f.write("Hello world.")
# An empty folder.
os.mkdir('build/empty')
@pytest.mark.usefixtures('sample_project')
@pytest.mark.usefixtures('user_override')
class TestUploadDocsTest:
def test_create_zipfile(self):
"""
Ensure zipfile creation handles common cases, including a folder
containing an empty folder.
"""
dist = Distribution()
cmd = upload_docs(dist)
cmd.target_dir = cmd.upload_dir = 'build'
with contexts.tempdir() as tmp_dir:
tmp_file = os.path.join(tmp_dir, 'foo.zip')
zip_file = cmd.create_zipfile(tmp_file)
assert zipfile.is_zipfile(tmp_file)
with contextlib.closing(zipfile.ZipFile(tmp_file)) as zip_file:
assert zip_file.namelist() == ['index.html']
|
import os
import zipfile
import pytest
from setuptools.command.upload_docs import upload_docs
from setuptools.dist import Distribution
from .textwrap import DALS
from . import contexts
SETUP_PY = DALS(
"""
from setuptools import setup
setup(name='foo')
""")
@pytest.fixture
def sample_project(tmpdir_cwd):
# setup.py
with open('setup.py', 'wt') as f:
f.write(SETUP_PY)
os.mkdir('build')
# A test document.
with open('build/index.html', 'w') as f:
f.write("Hello world.")
# An empty folder.
os.mkdir('build/empty')
@pytest.mark.usefixtures('sample_project')
@pytest.mark.usefixtures('user_override')
class TestUploadDocsTest:
def test_create_zipfile(self):
"""
Ensure zipfile creation handles common cases, including a folder
containing an empty folder.
"""
dist = Distribution()
cmd = upload_docs(dist)
cmd.target_dir = cmd.upload_dir = 'build'
with contexts.tempdir() as tmp_dir:
tmp_file = os.path.join(tmp_dir, 'foo.zip')
zip_file = cmd.create_zipfile(tmp_file)
assert zipfile.is_zipfile(tmp_file)
with zipfile.ZipFile(tmp_file) as zip_file:
assert zip_file.namelist() == ['index.html']
Use closing for Python 2.6 compatibilityimport os
import zipfile
import contextlib
import pytest
from setuptools.command.upload_docs import upload_docs
from setuptools.dist import Distribution
from .textwrap import DALS
from . import contexts
SETUP_PY = DALS(
"""
from setuptools import setup
setup(name='foo')
""")
@pytest.fixture
def sample_project(tmpdir_cwd):
# setup.py
with open('setup.py', 'wt') as f:
f.write(SETUP_PY)
os.mkdir('build')
# A test document.
with open('build/index.html', 'w') as f:
f.write("Hello world.")
# An empty folder.
os.mkdir('build/empty')
@pytest.mark.usefixtures('sample_project')
@pytest.mark.usefixtures('user_override')
class TestUploadDocsTest:
def test_create_zipfile(self):
"""
Ensure zipfile creation handles common cases, including a folder
containing an empty folder.
"""
dist = Distribution()
cmd = upload_docs(dist)
cmd.target_dir = cmd.upload_dir = 'build'
with contexts.tempdir() as tmp_dir:
tmp_file = os.path.join(tmp_dir, 'foo.zip')
zip_file = cmd.create_zipfile(tmp_file)
assert zipfile.is_zipfile(tmp_file)
with contextlib.closing(zipfile.ZipFile(tmp_file)) as zip_file:
assert zip_file.namelist() == ['index.html']
|
<commit_before>import os
import zipfile
import pytest
from setuptools.command.upload_docs import upload_docs
from setuptools.dist import Distribution
from .textwrap import DALS
from . import contexts
SETUP_PY = DALS(
"""
from setuptools import setup
setup(name='foo')
""")
@pytest.fixture
def sample_project(tmpdir_cwd):
# setup.py
with open('setup.py', 'wt') as f:
f.write(SETUP_PY)
os.mkdir('build')
# A test document.
with open('build/index.html', 'w') as f:
f.write("Hello world.")
# An empty folder.
os.mkdir('build/empty')
@pytest.mark.usefixtures('sample_project')
@pytest.mark.usefixtures('user_override')
class TestUploadDocsTest:
def test_create_zipfile(self):
"""
Ensure zipfile creation handles common cases, including a folder
containing an empty folder.
"""
dist = Distribution()
cmd = upload_docs(dist)
cmd.target_dir = cmd.upload_dir = 'build'
with contexts.tempdir() as tmp_dir:
tmp_file = os.path.join(tmp_dir, 'foo.zip')
zip_file = cmd.create_zipfile(tmp_file)
assert zipfile.is_zipfile(tmp_file)
with zipfile.ZipFile(tmp_file) as zip_file:
assert zip_file.namelist() == ['index.html']
<commit_msg>Use closing for Python 2.6 compatibility<commit_after>import os
import zipfile
import contextlib
import pytest
from setuptools.command.upload_docs import upload_docs
from setuptools.dist import Distribution
from .textwrap import DALS
from . import contexts
SETUP_PY = DALS(
"""
from setuptools import setup
setup(name='foo')
""")
@pytest.fixture
def sample_project(tmpdir_cwd):
# setup.py
with open('setup.py', 'wt') as f:
f.write(SETUP_PY)
os.mkdir('build')
# A test document.
with open('build/index.html', 'w') as f:
f.write("Hello world.")
# An empty folder.
os.mkdir('build/empty')
@pytest.mark.usefixtures('sample_project')
@pytest.mark.usefixtures('user_override')
class TestUploadDocsTest:
def test_create_zipfile(self):
"""
Ensure zipfile creation handles common cases, including a folder
containing an empty folder.
"""
dist = Distribution()
cmd = upload_docs(dist)
cmd.target_dir = cmd.upload_dir = 'build'
with contexts.tempdir() as tmp_dir:
tmp_file = os.path.join(tmp_dir, 'foo.zip')
zip_file = cmd.create_zipfile(tmp_file)
assert zipfile.is_zipfile(tmp_file)
with contextlib.closing(zipfile.ZipFile(tmp_file)) as zip_file:
assert zip_file.namelist() == ['index.html']
|
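Editor's note on the record above: the change swaps a bare `with zipfile.ZipFile(...)` for `contextlib.closing(...)` because ZipFile only gained context-manager support in Python 2.7; on 2.6 the original line raised AttributeError. A minimal sketch of the pattern -- 'archive.zip' is a placeholder path, not a file from the record:

import contextlib
import zipfile

# contextlib.closing adapts any object with a close() method into a
# context manager, which is exactly what zipfile.ZipFile lacked before 2.7.
with contextlib.closing(zipfile.ZipFile('archive.zip')) as zf:  # placeholder path
    print(zf.namelist())
# zf.close() has run here, even if namelist() raised.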
5aad2212340a5eba4bbf4615d58ed6b3c205bc7f
|
fabtools/tests/fabfiles/python.py
|
fabtools/tests/fabfiles/python.py
|
from __future__ import with_statement
from fabric.api import *
from fabtools import require
import fabtools
@task
def python():
"""
Check Python package installation
"""
require.python.virtualenv('/tmp/venv')
assert fabtools.files.is_dir('/tmp/venv')
assert fabtools.files.is_file('/tmp/venv/bin/python')
with fabtools.python.virtualenv('/tmp/venv'):
require.python.package('fabric')
assert fabtools.files.is_file('/tmp/venv/bin/fab')
|
from __future__ import with_statement
from fabric.api import task
@task
def python_virtualenv():
"""
Test Python virtualenv creation
"""
from fabtools import require
import fabtools
require.python.virtualenv('/tmp/venv')
assert fabtools.files.is_dir('/tmp/venv')
assert fabtools.files.is_file('/tmp/venv/bin/python')
@task
def python_package():
"""
Test Python package installation
"""
from fabtools import require
import fabtools
require.python.virtualenv('/tmp/venv')
with fabtools.python.virtualenv('/tmp/venv'):
require.python.package('fabric', download_cache='/var/cache/pip')
assert fabtools.files.is_file('/tmp/venv/bin/fab')
|
Speed up Python tests by caching pip downloads
|
Speed up Python tests by caching pip downloads
|
Python
|
bsd-2-clause
|
prologic/fabtools,ahnjungho/fabtools,ronnix/fabtools,davidcaste/fabtools,pombredanne/fabtools,pahaz/fabtools,badele/fabtools,AMOSoft/fabtools,wagigi/fabtools-python,fabtools/fabtools,hagai26/fabtools,n0n0x/fabtools-python,sociateru/fabtools,bitmonk/fabtools
|
from __future__ import with_statement
from fabric.api import *
from fabtools import require
import fabtools
@task
def python():
"""
Check Python package installation
"""
require.python.virtualenv('/tmp/venv')
assert fabtools.files.is_dir('/tmp/venv')
assert fabtools.files.is_file('/tmp/venv/bin/python')
with fabtools.python.virtualenv('/tmp/venv'):
require.python.package('fabric')
assert fabtools.files.is_file('/tmp/venv/bin/fab')
Speed up Python tests by caching pip downloads
|
from __future__ import with_statement
from fabric.api import task
@task
def python_virtualenv():
"""
Test Python virtualenv creation
"""
from fabtools import require
import fabtools
require.python.virtualenv('/tmp/venv')
assert fabtools.files.is_dir('/tmp/venv')
assert fabtools.files.is_file('/tmp/venv/bin/python')
@task
def python_package():
"""
Test Python package installation
"""
from fabtools import require
import fabtools
require.python.virtualenv('/tmp/venv')
with fabtools.python.virtualenv('/tmp/venv'):
require.python.package('fabric', download_cache='/var/cache/pip')
assert fabtools.files.is_file('/tmp/venv/bin/fab')
|
<commit_before>from __future__ import with_statement
from fabric.api import *
from fabtools import require
import fabtools
@task
def python():
"""
Check Python package installation
"""
require.python.virtualenv('/tmp/venv')
assert fabtools.files.is_dir('/tmp/venv')
assert fabtools.files.is_file('/tmp/venv/bin/python')
with fabtools.python.virtualenv('/tmp/venv'):
require.python.package('fabric')
assert fabtools.files.is_file('/tmp/venv/bin/fab')
<commit_msg>Speed up Python tests by caching pip downloads<commit_after>
|
from __future__ import with_statement
from fabric.api import task
@task
def python_virtualenv():
"""
Test Python virtualenv creation
"""
from fabtools import require
import fabtools
require.python.virtualenv('/tmp/venv')
assert fabtools.files.is_dir('/tmp/venv')
assert fabtools.files.is_file('/tmp/venv/bin/python')
@task
def python_package():
"""
Test Python package installation
"""
from fabtools import require
import fabtools
require.python.virtualenv('/tmp/venv')
with fabtools.python.virtualenv('/tmp/venv'):
require.python.package('fabric', download_cache='/var/cache/pip')
assert fabtools.files.is_file('/tmp/venv/bin/fab')
|
from __future__ import with_statement
from fabric.api import *
from fabtools import require
import fabtools
@task
def python():
"""
Check Python package installation
"""
require.python.virtualenv('/tmp/venv')
assert fabtools.files.is_dir('/tmp/venv')
assert fabtools.files.is_file('/tmp/venv/bin/python')
with fabtools.python.virtualenv('/tmp/venv'):
require.python.package('fabric')
assert fabtools.files.is_file('/tmp/venv/bin/fab')
Speed up Python tests by caching pip downloadsfrom __future__ import with_statement
from fabric.api import task
@task
def python_virtualenv():
"""
Test Python virtualenv creation
"""
from fabtools import require
import fabtools
require.python.virtualenv('/tmp/venv')
assert fabtools.files.is_dir('/tmp/venv')
assert fabtools.files.is_file('/tmp/venv/bin/python')
@task
def python_package():
"""
Test Python package installation
"""
from fabtools import require
import fabtools
require.python.virtualenv('/tmp/venv')
with fabtools.python.virtualenv('/tmp/venv'):
require.python.package('fabric', download_cache='/var/cache/pip')
assert fabtools.files.is_file('/tmp/venv/bin/fab')
|
<commit_before>from __future__ import with_statement
from fabric.api import *
from fabtools import require
import fabtools
@task
def python():
"""
Check Python package installation
"""
require.python.virtualenv('/tmp/venv')
assert fabtools.files.is_dir('/tmp/venv')
assert fabtools.files.is_file('/tmp/venv/bin/python')
with fabtools.python.virtualenv('/tmp/venv'):
require.python.package('fabric')
assert fabtools.files.is_file('/tmp/venv/bin/fab')
<commit_msg>Speed up Python tests by caching pip downloads<commit_after>from __future__ import with_statement
from fabric.api import task
@task
def python_virtualenv():
"""
Test Python virtualenv creation
"""
from fabtools import require
import fabtools
require.python.virtualenv('/tmp/venv')
assert fabtools.files.is_dir('/tmp/venv')
assert fabtools.files.is_file('/tmp/venv/bin/python')
@task
def python_package():
"""
Test Python package installation
"""
from fabtools import require
import fabtools
require.python.virtualenv('/tmp/venv')
with fabtools.python.virtualenv('/tmp/venv'):
require.python.package('fabric', download_cache='/var/cache/pip')
assert fabtools.files.is_file('/tmp/venv/bin/fab')
|
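Editor's note on the record above: the test run gets faster because `download_cache='/var/cache/pip'` lets repeated virtualenv installs reuse downloaded archives. Below is a hypothetical sketch of how such a keyword could map onto the pip command line of that era; fabtools' real implementation may differ, and modern pip has dropped the flag in favor of its built-in HTTP cache.

def pip_install_command(package, download_cache=None):
    # Hypothetical translation of the kwarg into a pip invocation.
    parts = ['pip', 'install']
    if download_cache is not None:
        # --download-cache existed in pip releases contemporary with this
        # commit; later pip versions cache automatically and removed it.
        parts.append('--download-cache=' + download_cache)
    parts.append(package)
    return ' '.join(parts)

print(pip_install_command('fabric', download_cache='/var/cache/pip'))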
c27a1fc4c0251b896667e21a0a88fb44a403242f
|
cistern/migrations.py
|
cistern/migrations.py
|
import os
from playhouse.migrate import *
cistern_folder = os.getenv('CISTERNHOME', os.path.join(os.environ['HOME'], '.cistern'))
db = SqliteDatabase(os.path.join(cistern_folder, 'cistern.db'))
migrator = SqliteMigrator(db)
date_added = DateTimeField(default=None)
migrate(
migrator.add_column('torrent', 'date_added', date_added)
)
|
import datetime
import os
from playhouse.migrate import *
def update():
cistern_folder = os.getenv('CISTERNHOME', os.path.join(os.environ['HOME'], '.cistern'))
db = SqliteDatabase(os.path.join(cistern_folder, 'cistern.db'))
migrator = SqliteMigrator(db)
date_added = DateTimeField(default=datetime.datetime.now)
migrate(
migrator.add_column('torrent', 'date_added', date_added)
)
|
Move migration to a function
|
Move migration to a function
|
Python
|
mit
|
archangelic/cistern
|
import os
from playhouse.migrate import *
cistern_folder = os.getenv('CISTERNHOME', os.path.join(os.environ['HOME'], '.cistern'))
db = SqliteDatabase(os.path.join(cistern_folder, 'cistern.db'))
migrator = SqliteMigrator(db)
date_added = DateTimeField(default=None)
migrate(
migrator.add_column('torrent', 'date_added', date_added)
)
Move migration to a function
|
import datetime
import os
from playhouse.migrate import *
def update():
cistern_folder = os.getenv('CISTERNHOME', os.path.join(os.environ['HOME'], '.cistern'))
db = SqliteDatabase(os.path.join(cistern_folder, 'cistern.db'))
migrator = SqliteMigrator(db)
date_added = DateTimeField(default=datetime.datetime.now)
migrate(
migrator.add_column('torrent', 'date_added', date_added)
)
|
<commit_before>import os
from playhouse.migrate import *
cistern_folder = os.getenv('CISTERNHOME', os.path.join(os.environ['HOME'], '.cistern'))
db = SqliteDatabase(os.path.join(cistern_folder, 'cistern.db'))
migrator = SqliteMigrator(db)
date_added = DateTimeField(default=None)
migrate(
migrator.add_column('torrent', 'date_added', date_added)
)
<commit_msg>Move migration to a function<commit_after>
|
import datetime
import os
from playhouse.migrate import *
def update():
cistern_folder = os.getenv('CISTERNHOME', os.path.join(os.environ['HOME'], '.cistern'))
db = SqliteDatabase(os.path.join(cistern_folder, 'cistern.db'))
migrator = SqliteMigrator(db)
date_added = DateTimeField(default=datetime.datetime.now)
migrate(
migrator.add_column('torrent', 'date_added', date_added)
)
|
import os
from playhouse.migrate import *
cistern_folder = os.getenv('CISTERNHOME', os.path.join(os.environ['HOME'], '.cistern'))
db = SqliteDatabase(os.path.join(cistern_folder, 'cistern.db'))
migrator = SqliteMigrator(db)
date_added = DateTimeField(default=None)
migrate(
migrator.add_column('torrent', 'date_added', date_added)
)
Move migration to a functionimport datetime
import os
from playhouse.migrate import *
def update():
cistern_folder = os.getenv('CISTERNHOME', os.path.join(os.environ['HOME'], '.cistern'))
db = SqliteDatabase(os.path.join(cistern_folder, 'cistern.db'))
migrator = SqliteMigrator(db)
date_added = DateTimeField(default=datetime.datetime.now)
migrate(
migrator.add_column('torrent', 'date_added', date_added)
)
|
<commit_before>import os
from playhouse.migrate import *
cistern_folder = os.getenv('CISTERNHOME', os.path.join(os.environ['HOME'], '.cistern'))
db = SqliteDatabase(os.path.join(cistern_folder, 'cistern.db'))
migrator = SqliteMigrator(db)
date_added = DateTimeField(default=None)
migrate(
migrator.add_column('torrent', 'date_added', date_added)
)
<commit_msg>Move migration to a function<commit_after>import datetime
import os
from playhouse.migrate import *
def update():
cistern_folder = os.getenv('CISTERNHOME', os.path.join(os.environ['HOME'], '.cistern'))
db = SqliteDatabase(os.path.join(cistern_folder, 'cistern.db'))
migrator = SqliteMigrator(db)
date_added = DateTimeField(default=datetime.datetime.now)
migrate(
migrator.add_column('torrent', 'date_added', date_added)
)
|
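Editor's note on the record above: two things changed. Moving the body into `update()` means importing the module no longer fires ALTER TABLE as a side effect, and the field default went from `None` to the callable `datetime.datetime.now`. Passing the callable without parentheses defers evaluation to row-creation time. A short sketch, assuming peewee is installed:

import datetime
from peewee import DateTimeField

# The callable is invoked per row, so each record gets a fresh timestamp.
per_row = DateTimeField(default=datetime.datetime.now)

# Calling it here instead would freeze one import-time value into every row.
frozen = DateTimeField(default=datetime.datetime.now())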
5e6d52277e34c254bad6b386cf05f490baf6a6f2
|
webapp-django/accounts/models.py
|
webapp-django/accounts/models.py
|
from django.contrib.auth.models import User
from django.db import models
from django.db.models.signals import post_save
from django.dispatch import receiver
class UserProfile(models.Model):
user = models.OneToOneField(User)
bio = models.TextField(max_length=256, blank=True)
solvedChallenges=models.CharField(solved=[],max_length=256)
solvedQuestions=models.CharField(solved=[],max_length=256)
score = models.IntegerField(default=0)
def __str__(self):
return str(self.user.username)
# Method to link the User and UserProfile models
@receiver(post_save, sender=User)
def create_user_profile(sender, instance, created, **kwargs):
if created:
UserProfile.objects.create(user=instance)
instance.userprofile.save()
|
from django.contrib.auth.models import User
from django.db import models
from django.db.models.signals import post_save
from django.dispatch import receiver
from challenges.models import Challenge
from questionnaire.models import Question
class UserProfile(models.Model):
user = models.OneToOneField(User)
bio = models.TextField(max_length=256, blank=True)
solved_challenges = models.ManyToManyField(Challenge)
solved_questions = models.ManyToManyField(Question)
score = models.IntegerField(default=0, editable=False)
def __str__(self):
return str(self.user.username)
def calculate_score(self):
score = 0
for chal in self.solved_challenges.all():
score = score + chal.score
for ques in self.solved_questions.all():
score = score + ques.score
return score
def save(self, *args, **kwargs):
'''On save, update score '''
self.score = self.calculate_score()
return super(UserProfile, self).save(*args, **kwargs)
# Method to link the User and UserProfile models
@receiver(post_save, sender=User)
def create_user_profile(sender, instance, created, **kwargs):
if created:
UserProfile.objects.create(user=instance)
instance.userprofile.save()
|
Update accounts model with scoring system
|
Update accounts model with scoring system
|
Python
|
mit
|
super1337/Super1337-CTF,super1337/Super1337-CTF,super1337/Super1337-CTF
|
from django.contrib.auth.models import User
from django.db import models
from django.db.models.signals import post_save
from django.dispatch import receiver
class UserProfile(models.Model):
user = models.OneToOneField(User)
bio = models.TextField(max_length=256, blank=True)
solvedChallenges=models.CharField(solved=[],max_length=256)
solvedQuestions=models.CharField(solved=[],max_length=256)
score = models.IntegerField(default=0)
def __str__(self):
return str(self.user.username)
# Method to link the User and UserProfile models
@receiver(post_save, sender=User)
def create_user_profile(sender, instance, created, **kwargs):
if created:
UserProfile.objects.create(user=instance)
instance.userprofile.save()
Update accounts model with scoring system
|
from django.contrib.auth.models import User
from django.db import models
from django.db.models.signals import post_save
from django.dispatch import receiver
from challenges.models import Challenge
from questionnaire.models import Question
class UserProfile(models.Model):
user = models.OneToOneField(User)
bio = models.TextField(max_length=256, blank=True)
solved_challenges = models.ManyToManyField(Challenge)
solved_questions = models.ManyToManyField(Question)
score = models.IntegerField(default=0, editable=False)
def __str__(self):
return str(self.user.username)
def calculate_score(self):
score = 0
for chal in self.solved_challenges.all():
score = score + chal.score
for ques in self.solved_questions.all():
score = score + ques.score
return score
def save(self, *args, **kwargs):
'''On save, update score '''
self.score = self.calculate_score()
return super(UserProfile, self).save(*args, **kwargs)
# Method to link the User and UserProfile models
@receiver(post_save, sender=User)
def create_user_profile(sender, instance, created, **kwargs):
if created:
UserProfile.objects.create(user=instance)
instance.userprofile.save()
|
<commit_before>from django.contrib.auth.models import User
from django.db import models
from django.db.models.signals import post_save
from django.dispatch import receiver
class UserProfile(models.Model):
user = models.OneToOneField(User)
bio = models.TextField(max_length=256, blank=True)
solvedChallenges=models.CharField(solved=[],max_length=256)
solvedQuestions=models.CharField(solved=[],max_length=256)
score = models.IntegerField(default=0)
def __str__(self):
return str(self.user.username)
# Method to link the User and UserProfile models
@receiver(post_save, sender=User)
def create_user_profile(sender, instance, created, **kwargs):
if created:
UserProfile.objects.create(user=instance)
instance.userprofile.save()
<commit_msg>Update accounts model with scoring system<commit_after>
|
from django.contrib.auth.models import User
from django.db import models
from django.db.models.signals import post_save
from django.dispatch import receiver
from challenges.models import Challenge
from questionnaire.models import Question
class UserProfile(models.Model):
user = models.OneToOneField(User)
bio = models.TextField(max_length=256, blank=True)
solved_challenges = models.ManyToManyField(Challenge)
solved_questions = models.ManyToManyField(Question)
score = models.IntegerField(default=0, editable=False)
def __str__(self):
return str(self.user.username)
def calculate_score(self):
score = 0
for chal in self.solved_challenges.all():
score = score + chal.score
for ques in self.solved_questions.all():
score = score + ques.score
return score
def save(self, *args, **kwargs):
'''On save, update score '''
self.score = self.calculate_score()
return super(UserProfile, self).save(*args, **kwargs)
# Method to link the User and UserProfile models
@receiver(post_save, sender=User)
def create_user_profile(sender, instance, created, **kwargs):
if created:
UserProfile.objects.create(user=instance)
instance.userprofile.save()
|
from django.contrib.auth.models import User
from django.db import models
from django.db.models.signals import post_save
from django.dispatch import receiver
class UserProfile(models.Model):
user = models.OneToOneField(User)
bio = models.TextField(max_length=256, blank=True)
solvedChallenges=models.CharField(solved=[],max_length=256)
solvedQuestions=models.CharField(solved=[],max_length=256)
score = models.IntegerField(default=0)
def __str__(self):
return str(self.user.username)
# Method to link the User and UserProfile models
@receiver(post_save, sender=User)
def create_user_profile(sender, instance, created, **kwargs):
if created:
UserProfile.objects.create(user=instance)
instance.userprofile.save()
Update accounts model with scoring systemfrom django.contrib.auth.models import User
from django.db import models
from django.db.models.signals import post_save
from django.dispatch import receiver
from challenges.models import Challenge
from questionnaire.models import Question
class UserProfile(models.Model):
user = models.OneToOneField(User)
bio = models.TextField(max_length=256, blank=True)
solved_challenges = models.ManyToManyField(Challenge)
solved_questions = models.ManyToManyField(Question)
score = models.IntegerField(default=0, editable=False)
def __str__(self):
return str(self.user.username)
def calculate_score(self):
score = 0
for chal in self.solved_challenges.all():
score = score + chal.score
for ques in self.solved_questions.all():
score = score + ques.score
return score
def save(self, *args, **kwargs):
'''On save, update score '''
self.score = self.calculate_score()
return super(UserProfile, self).save(*args, **kwargs)
# Method to link the User and UserProfile models
@receiver(post_save, sender=User)
def create_user_profile(sender, instance, created, **kwargs):
if created:
UserProfile.objects.create(user=instance)
instance.userprofile.save()
|
<commit_before>from django.contrib.auth.models import User
from django.db import models
from django.db.models.signals import post_save
from django.dispatch import receiver
class UserProfile(models.Model):
user = models.OneToOneField(User)
bio = models.TextField(max_length=256, blank=True)
solvedChallenges=models.CharField(solved=[],max_length=256)
solvedQuestions=models.CharField(solved=[],max_length=256)
score = models.IntegerField(default=0)
def __str__(self):
return str(self.user.username)
# Method to link the User and UserProfile models
@receiver(post_save, sender=User)
def create_user_profile(sender, instance, created, **kwargs):
if created:
UserProfile.objects.create(user=instance)
instance.userprofile.save()
<commit_msg>Update accounts model with scoring system<commit_after>from django.contrib.auth.models import User
from django.db import models
from django.db.models.signals import post_save
from django.dispatch import receiver
from challenges.models import Challenge
from questionnaire.models import Question
class UserProfile(models.Model):
user = models.OneToOneField(User)
bio = models.TextField(max_length=256, blank=True)
solved_challenges = models.ManyToManyField(Challenge)
solved_questions = models.ManyToManyField(Question)
score = models.IntegerField(default=0, editable=False)
def __str__(self):
return str(self.user.username)
def calculate_score(self):
score = 0
for chal in self.solved_challenges.all():
score = score + chal.score
for ques in self.solved_questions.all():
score = score + ques.score
return score
def save(self, *args, **kwargs):
'''On save, update score '''
self.score = self.calculate_score()
return super(UserProfile, self).save(*args, **kwargs)
# Method to link the User and UserProfile models
@receiver(post_save, sender=User)
def create_user_profile(sender, instance, created, **kwargs):
if created:
UserProfile.objects.create(user=instance)
instance.userprofile.save()
|
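Editor's note on the record above: the overridden save() recomputes the score, but adding to a ManyToMany field never calls save(), so the stored score can go stale after `profile.solved_challenges.add(...)`. A hedged complement, assuming the UserProfile model from the record is importable; field and model names follow the record:

from django.db.models.signals import m2m_changed
from django.dispatch import receiver

@receiver(m2m_changed, sender=UserProfile.solved_challenges.through)
@receiver(m2m_changed, sender=UserProfile.solved_questions.through)
def resync_score(sender, instance, action, **kwargs):
    # Re-run the overridden save() whenever either M2M set changes,
    # so calculate_score() sees the new related rows.
    if action in ('post_add', 'post_remove', 'post_clear'):
        instance.save()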
e95f2c986f407e0c6f65ef1cd37bdefa98a01213
|
coffeedomain/setup.py
|
coffeedomain/setup.py
|
from setuptools import setup, find_packages
if __name__ == '__main__':
setup(name='sphinxcontrib-coffee',
version='0.1.0',
license='BSD',
author="Stephen Sugden",
author_email="glurgle@gmail.com",
description='Sphinx extension to add CoffeeScript support',
platforms='any',
packages=find_packages(),
namespace_packages=['sphinxcontrib'])
|
from setuptools import setup, find_packages
if __name__ == '__main__':
setup(name='sphinxcontrib-coffee',
version='0.1.1',
license='BSD',
author="Stephen Sugden",
author_email="glurgle@gmail.com",
description='Sphinx extension to add CoffeeScript support',
platforms='any',
packages=find_packages(),
namespace_packages=['sphinxcontrib'])
|
Bump version for requirejs support
|
Bump version for requirejs support
--HG--
extra : rebase_source : cf5a3545b3e1e2e70a8fe0607461564432a55464
|
Python
|
bsd-2-clause
|
sphinx-contrib/spelling,sphinx-contrib/spelling
|
from setuptools import setup, find_packages
if __name__ == '__main__':
setup(name='sphinxcontrib-coffee',
version='0.1.0',
license='BSD',
author="Stephen Sugden",
author_email="glurgle@gmail.com",
description='Sphinx extension to add CoffeeScript support',
platforms='any',
packages=find_packages(),
namespace_packages=['sphinxcontrib'])
Bump version for requirejs support
--HG--
extra : rebase_source : cf5a3545b3e1e2e70a8fe0607461564432a55464
|
from setuptools import setup, find_packages
if __name__ == '__main__':
setup(name='sphinxcontrib-coffee',
version='0.1.1',
license='BSD',
author="Stephen Sugden",
author_email="glurgle@gmail.com",
description='Sphinx extension to add CoffeeScript support',
platforms='any',
packages=find_packages(),
namespace_packages=['sphinxcontrib'])
|
<commit_before>from setuptools import setup, find_packages
if __name__ == '__main__':
setup(name='sphinxcontrib-coffee',
version='0.1.0',
license='BSD',
author="Stephen Sugden",
author_email="glurgle@gmail.com",
description='Sphinx extension to add CoffeeScript support',
platforms='any',
packages=find_packages(),
namespace_packages=['sphinxcontrib'])
<commit_msg>Bump version for requirejs support
--HG--
extra : rebase_source : cf5a3545b3e1e2e70a8fe0607461564432a55464<commit_after>
|
from setuptools import setup, find_packages
if __name__ == '__main__':
setup(name='sphinxcontrib-coffee',
version='0.1.1',
license='BSD',
author="Stephen Sugden",
author_email="glurgle@gmail.com",
description='Sphinx extension to add CoffeeScript support',
platforms='any',
packages=find_packages(),
namespace_packages=['sphinxcontrib'])
|
from setuptools import setup, find_packages
if __name__ == '__main__':
setup(name='sphinxcontrib-coffee',
version='0.1.0',
license='BSD',
author="Stephen Sugden",
author_email="glurgle@gmail.com",
description='Sphinx extension to add CoffeeScript support',
platforms='any',
packages=find_packages(),
namespace_packages=['sphinxcontrib'])
Bump version for requirejs support
--HG--
extra : rebase_source : cf5a3545b3e1e2e70a8fe0607461564432a55464from setuptools import setup, find_packages
if __name__ == '__main__':
setup(name='sphinxcontrib-coffee',
version='0.1.1',
license='BSD',
author="Stephen Sugden",
author_email="glurgle@gmail.com",
description='Sphinx extension to add CoffeeScript support',
platforms='any',
packages=find_packages(),
namespace_packages=['sphinxcontrib'])
|
<commit_before>from setuptools import setup, find_packages
if __name__ == '__main__':
setup(name='sphinxcontrib-coffee',
version='0.1.0',
license='BSD',
author="Stephen Sugden",
author_email="glurgle@gmail.com",
description='Sphinx extension to add CoffeeScript support',
platforms='any',
packages=find_packages(),
namespace_packages=['sphinxcontrib'])
<commit_msg>Bump version for requirejs support
--HG--
extra : rebase_source : cf5a3545b3e1e2e70a8fe0607461564432a55464<commit_after>from setuptools import setup, find_packages
if __name__ == '__main__':
setup(name='sphinxcontrib-coffee',
version='0.1.1',
license='BSD',
author="Stephen Sugden",
author_email="glurgle@gmail.com",
description='Sphinx extension to add CoffeeScript support',
platforms='any',
packages=find_packages(),
namespace_packages=['sphinxcontrib'])
|
39e03951ec882f4dbff1ef4c42a71339d2a5d4fa
|
gaphor/UML/tests/test_activity.py
|
gaphor/UML/tests/test_activity.py
|
import pytest
from gaphor import UML
from gaphor.ui.diagrampage import tooliter
from gaphor.UML.toolbox import uml_toolbox_actions
@pytest.fixture
def action_factory():
return next(
t for t in tooliter(uml_toolbox_actions) if t.id == "toolbox-action"
).item_factory
def test_create_action_should_create_an_activity(diagram, action_factory):
action = action_factory(diagram)
assert action.subject.activity
assert action.subject.owner is action.subject.activity
def test_create_action_should_add_to_existing_activity(
diagram, action_factory, element_factory
):
activity = element_factory.create(UML.Activity)
action = action_factory(diagram)
assert action.subject.activity is activity
def test_create_action_should_add_to_existing_activity_in_package(
diagram, action_factory, element_factory
):
package = element_factory.create(UML.Package)
diagram.package = package
activity = element_factory.create(UML.Activity)
activity.package = package
action = action_factory(diagram)
assert action.subject.activity is activity
|
import pytest
from gaphor import UML
from gaphor.ui.diagrampage import tooliter
from gaphor.UML.toolbox import uml_toolbox_actions
activity_node_names = [
"action",
"initial-node",
"activity-final-node",
"flow-final-node",
"decision-node",
"fork-node",
"object-node",
"send-signal-action",
"accept-event-action",
]
@pytest.fixture
def item_factory(request):
return next(
t for t in tooliter(uml_toolbox_actions) if t.id == f"toolbox-{request.param}"
).item_factory
@pytest.mark.parametrize("item_factory", activity_node_names, indirect=True)
def test_create_action_should_create_an_activity(diagram, item_factory):
action = item_factory(diagram)
assert action.subject.activity
assert action.subject.owner is action.subject.activity
@pytest.mark.parametrize("item_factory", activity_node_names, indirect=True)
def test_create_action_should_add_to_existing_activity(
diagram, item_factory, element_factory
):
activity = element_factory.create(UML.Activity)
action = item_factory(diagram)
assert action.subject.activity is activity
@pytest.mark.parametrize("item_factory", activity_node_names, indirect=True)
def test_create_action_should_add_to_existing_activity_in_package(
diagram, item_factory, element_factory
):
package = element_factory.create(UML.Package)
diagram.package = package
activity = element_factory.create(UML.Activity)
activity.package = package
action = item_factory(diagram)
assert action.subject.activity is activity
|
Test all activity nodes for namespacing
|
Test all activity nodes for namespacing
|
Python
|
lgpl-2.1
|
amolenaar/gaphor,amolenaar/gaphor
|
import pytest
from gaphor import UML
from gaphor.ui.diagrampage import tooliter
from gaphor.UML.toolbox import uml_toolbox_actions
@pytest.fixture
def action_factory():
return next(
t for t in tooliter(uml_toolbox_actions) if t.id == "toolbox-action"
).item_factory
def test_create_action_should_create_an_activity(diagram, action_factory):
action = action_factory(diagram)
assert action.subject.activity
assert action.subject.owner is action.subject.activity
def test_create_action_should_add_to_existing_activity(
diagram, action_factory, element_factory
):
activity = element_factory.create(UML.Activity)
action = action_factory(diagram)
assert action.subject.activity is activity
def test_create_action_should_add_to_existing_activity_in_package(
diagram, action_factory, element_factory
):
package = element_factory.create(UML.Package)
diagram.package = package
activity = element_factory.create(UML.Activity)
activity.package = package
action = action_factory(diagram)
assert action.subject.activity is activity
Test all activity nodes for namespacing
|
import pytest
from gaphor import UML
from gaphor.ui.diagrampage import tooliter
from gaphor.UML.toolbox import uml_toolbox_actions
activity_node_names = [
"action",
"initial-node",
"activity-final-node",
"flow-final-node",
"decision-node",
"fork-node",
"object-node",
"send-signal-action",
"accept-event-action",
]
@pytest.fixture
def item_factory(request):
return next(
t for t in tooliter(uml_toolbox_actions) if t.id == f"toolbox-{request.param}"
).item_factory
@pytest.mark.parametrize("item_factory", activity_node_names, indirect=True)
def test_create_action_should_create_an_activity(diagram, item_factory):
action = item_factory(diagram)
assert action.subject.activity
assert action.subject.owner is action.subject.activity
@pytest.mark.parametrize("item_factory", activity_node_names, indirect=True)
def test_create_action_should_add_to_existing_activity(
diagram, item_factory, element_factory
):
activity = element_factory.create(UML.Activity)
action = item_factory(diagram)
assert action.subject.activity is activity
@pytest.mark.parametrize("item_factory", activity_node_names, indirect=True)
def test_create_action_should_add_to_existing_activity_in_package(
diagram, item_factory, element_factory
):
package = element_factory.create(UML.Package)
diagram.package = package
activity = element_factory.create(UML.Activity)
activity.package = package
action = item_factory(diagram)
assert action.subject.activity is activity
|
<commit_before>import pytest
from gaphor import UML
from gaphor.ui.diagrampage import tooliter
from gaphor.UML.toolbox import uml_toolbox_actions
@pytest.fixture
def action_factory():
return next(
t for t in tooliter(uml_toolbox_actions) if t.id == "toolbox-action"
).item_factory
def test_create_action_should_create_an_activity(diagram, action_factory):
action = action_factory(diagram)
assert action.subject.activity
assert action.subject.owner is action.subject.activity
def test_create_action_should_add_to_existing_activity(
diagram, action_factory, element_factory
):
activity = element_factory.create(UML.Activity)
action = action_factory(diagram)
assert action.subject.activity is activity
def test_create_action_should_add_to_existing_activity_in_package(
diagram, action_factory, element_factory
):
package = element_factory.create(UML.Package)
diagram.package = package
activity = element_factory.create(UML.Activity)
activity.package = package
action = action_factory(diagram)
assert action.subject.activity is activity
<commit_msg>Test all activity nodes for namespacing<commit_after>
|
import pytest
from gaphor import UML
from gaphor.ui.diagrampage import tooliter
from gaphor.UML.toolbox import uml_toolbox_actions
activity_node_names = [
"action",
"initial-node",
"activity-final-node",
"flow-final-node",
"decision-node",
"fork-node",
"object-node",
"send-signal-action",
"accept-event-action",
]
@pytest.fixture
def item_factory(request):
return next(
t for t in tooliter(uml_toolbox_actions) if t.id == f"toolbox-{request.param}"
).item_factory
@pytest.mark.parametrize("item_factory", activity_node_names, indirect=True)
def test_create_action_should_create_an_activity(diagram, item_factory):
action = item_factory(diagram)
assert action.subject.activity
assert action.subject.owner is action.subject.activity
@pytest.mark.parametrize("item_factory", activity_node_names, indirect=True)
def test_create_action_should_add_to_existing_activity(
diagram, item_factory, element_factory
):
activity = element_factory.create(UML.Activity)
action = item_factory(diagram)
assert action.subject.activity is activity
@pytest.mark.parametrize("item_factory", activity_node_names, indirect=True)
def test_create_action_should_add_to_existing_activity_in_package(
diagram, item_factory, element_factory
):
package = element_factory.create(UML.Package)
diagram.package = package
activity = element_factory.create(UML.Activity)
activity.package = package
action = item_factory(diagram)
assert action.subject.activity is activity
|
import pytest
from gaphor import UML
from gaphor.ui.diagrampage import tooliter
from gaphor.UML.toolbox import uml_toolbox_actions
@pytest.fixture
def action_factory():
return next(
t for t in tooliter(uml_toolbox_actions) if t.id == "toolbox-action"
).item_factory
def test_create_action_should_create_an_activity(diagram, action_factory):
action = action_factory(diagram)
assert action.subject.activity
assert action.subject.owner is action.subject.activity
def test_create_action_should_add_to_existing_activity(
diagram, action_factory, element_factory
):
activity = element_factory.create(UML.Activity)
action = action_factory(diagram)
assert action.subject.activity is activity
def test_create_action_should_add_to_existing_activity_in_package(
diagram, action_factory, element_factory
):
package = element_factory.create(UML.Package)
diagram.package = package
activity = element_factory.create(UML.Activity)
activity.package = package
action = action_factory(diagram)
assert action.subject.activity is activity
Test all activity nodes for namespacingimport pytest
from gaphor import UML
from gaphor.ui.diagrampage import tooliter
from gaphor.UML.toolbox import uml_toolbox_actions
activity_node_names = [
"action",
"initial-node",
"activity-final-node",
"flow-final-node",
"decision-node",
"fork-node",
"object-node",
"send-signal-action",
"accept-event-action",
]
@pytest.fixture
def item_factory(request):
return next(
t for t in tooliter(uml_toolbox_actions) if t.id == f"toolbox-{request.param}"
).item_factory
@pytest.mark.parametrize("item_factory", activity_node_names, indirect=True)
def test_create_action_should_create_an_activity(diagram, item_factory):
action = item_factory(diagram)
assert action.subject.activity
assert action.subject.owner is action.subject.activity
@pytest.mark.parametrize("item_factory", activity_node_names, indirect=True)
def test_create_action_should_add_to_existing_activity(
diagram, item_factory, element_factory
):
activity = element_factory.create(UML.Activity)
action = item_factory(diagram)
assert action.subject.activity is activity
@pytest.mark.parametrize("item_factory", activity_node_names, indirect=True)
def test_create_action_should_add_to_existing_activity_in_package(
diagram, item_factory, element_factory
):
package = element_factory.create(UML.Package)
diagram.package = package
activity = element_factory.create(UML.Activity)
activity.package = package
action = item_factory(diagram)
assert action.subject.activity is activity
|
<commit_before>import pytest
from gaphor import UML
from gaphor.ui.diagrampage import tooliter
from gaphor.UML.toolbox import uml_toolbox_actions
@pytest.fixture
def action_factory():
return next(
t for t in tooliter(uml_toolbox_actions) if t.id == "toolbox-action"
).item_factory
def test_create_action_should_create_an_activity(diagram, action_factory):
action = action_factory(diagram)
assert action.subject.activity
assert action.subject.owner is action.subject.activity
def test_create_action_should_add_to_existing_activity(
diagram, action_factory, element_factory
):
activity = element_factory.create(UML.Activity)
action = action_factory(diagram)
assert action.subject.activity is activity
def test_create_action_should_add_to_existing_activity_in_package(
diagram, action_factory, element_factory
):
package = element_factory.create(UML.Package)
diagram.package = package
activity = element_factory.create(UML.Activity)
activity.package = package
action = action_factory(diagram)
assert action.subject.activity is activity
<commit_msg>Test all activity nodes for namespacing<commit_after>import pytest
from gaphor import UML
from gaphor.ui.diagrampage import tooliter
from gaphor.UML.toolbox import uml_toolbox_actions
activity_node_names = [
"action",
"initial-node",
"activity-final-node",
"flow-final-node",
"decision-node",
"fork-node",
"object-node",
"send-signal-action",
"accept-event-action",
]
@pytest.fixture
def item_factory(request):
return next(
t for t in tooliter(uml_toolbox_actions) if t.id == f"toolbox-{request.param}"
).item_factory
@pytest.mark.parametrize("item_factory", activity_node_names, indirect=True)
def test_create_action_should_create_an_activity(diagram, item_factory):
action = item_factory(diagram)
assert action.subject.activity
assert action.subject.owner is action.subject.activity
@pytest.mark.parametrize("item_factory", activity_node_names, indirect=True)
def test_create_action_should_add_to_existing_activity(
diagram, item_factory, element_factory
):
activity = element_factory.create(UML.Activity)
action = item_factory(diagram)
assert action.subject.activity is activity
@pytest.mark.parametrize("item_factory", activity_node_names, indirect=True)
def test_create_action_should_add_to_existing_activity_in_package(
diagram, item_factory, element_factory
):
package = element_factory.create(UML.Package)
diagram.package = package
activity = element_factory.create(UML.Activity)
activity.package = package
action = item_factory(diagram)
assert action.subject.activity is activity
|
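Editor's note on the record above: the load-bearing detail is `indirect=True`, which routes each parametrized value into the fixture as `request.param` instead of straight into the test -- that is how one test body fans out across nine toolbox item factories. A minimal standalone sketch of the mechanism, with invented names:

import pytest

@pytest.fixture
def greeting(request):
    # With indirect=True the parametrize value lands here first,
    # letting the fixture transform it before the test sees it.
    return 'hello, ' + request.param

@pytest.mark.parametrize('greeting', ['world', 'gaphor'], indirect=True)
def test_greeting(greeting):
    assert greeting.startswith('hello, ')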
862301e319be09d3c163c8248f18ed23c3b1fab5
|
mla_game/apps/transcript/urls.py
|
mla_game/apps/transcript/urls.py
|
from django.conf.urls import url
urlpatterns = [
url(r'^upload-batch/', 'mla_game.apps.transcript.views.upload_batch', name='upload-batch'),
]
|
from django.conf.urls import url
urlpatterns = [
]
|
Remove unused URL, will revisit later
|
Remove unused URL, will revisit later
|
Python
|
mit
|
WGBH/FixIt,WGBH/FixIt,WGBH/FixIt
|
from django.conf.urls import url
urlpatterns = [
url(r'^upload-batch/', 'mla_game.apps.transcript.views.upload_batch', name='upload-batch'),
]
Remove unused URL, will revisit later
|
from django.conf.urls import url
urlpatterns = [
]
|
<commit_before>from django.conf.urls import url
urlpatterns = [
url(r'^upload-batch/', 'mla_game.apps.transcript.views.upload_batch', name='upload-batch'),
]
<commit_msg>Remove unused URL, will revisit later<commit_after>
|
from django.conf.urls import url
urlpatterns = [
]
|
from django.conf.urls import url
urlpatterns = [
url(r'^upload-batch/', 'mla_game.apps.transcript.views.upload_batch', name='upload-batch'),
]
Remove unused URL, will revisit laterfrom django.conf.urls import url
urlpatterns = [
]
|
<commit_before>from django.conf.urls import url
urlpatterns = [
url(r'^upload-batch/', 'mla_game.apps.transcript.views.upload_batch', name='upload-batch'),
]
<commit_msg>Remove unused URL, will revisit later<commit_after>from django.conf.urls import url
urlpatterns = [
]
|
2a843e46fabf616517847a304170fbce75afd167
|
zeus/api/resources/auth_index.py
|
zeus/api/resources/auth_index.py
|
import json
from zeus import auth
from zeus.api import client
from zeus.exceptions import ApiError
from zeus.models import Email, Identity
from .base import Resource
from ..schemas import EmailSchema, IdentitySchema, UserSchema
emails_schema = EmailSchema(many=True, strict=True)
identities_schema = IdentitySchema(many=True, strict=True)
user_schema = UserSchema(strict=True)
class AuthIndexResource(Resource):
auth_required = False
def get(self):
"""
Return information on the currently authenticated user.
"""
try:
user_response = client.get("/users/me")
except ApiError as exc:
if exc.code == 401:
return {"isAuthenticated": False}
user = json.loads(user_response.data)
identity_list = list(Identity.query.filter(Identity.user_id == user["id"]))
email_list = list(Email.query.filter(Email.user_id == user["id"]))
return {
"isAuthenticated": True,
"user": user,
"emails": emails_schema.dump(email_list).data,
"identities": identities_schema.dump(identity_list).data,
}
def delete(self):
"""
Logout.
"""
auth.logout()
return {"isAuthenticated": False, "user": None}
|
import json
from zeus import auth
from zeus.api import client
from zeus.exceptions import ApiError
from zeus.models import Email, Identity
from .base import Resource
from ..schemas import EmailSchema, IdentitySchema, UserSchema
emails_schema = EmailSchema(many=True, strict=True)
identities_schema = IdentitySchema(many=True, strict=True)
user_schema = UserSchema(strict=True)
class AuthIndexResource(Resource):
auth_required = False
def get(self):
"""
Return information on the currently authenticated user.
"""
try:
user_response = client.get("/users/me")
except ApiError as exc:
if exc.code == 401:
return {"isAuthenticated": False}
raise
user = json.loads(user_response.data)
identity_list = list(Identity.query.filter(Identity.user_id == user["id"]))
email_list = list(Email.query.filter(Email.user_id == user["id"]))
return {
"isAuthenticated": True,
"user": user,
"emails": emails_schema.dump(email_list).data,
"identities": identities_schema.dump(identity_list).data,
}
def delete(self):
"""
Logout.
"""
auth.logout()
return {"isAuthenticated": False, "user": None}
|
Raise non-auth errors from GitHub
|
fix: Raise non-auth errors from GitHub
|
Python
|
apache-2.0
|
getsentry/zeus,getsentry/zeus,getsentry/zeus,getsentry/zeus
|
import json
from zeus import auth
from zeus.api import client
from zeus.exceptions import ApiError
from zeus.models import Email, Identity
from .base import Resource
from ..schemas import EmailSchema, IdentitySchema, UserSchema
emails_schema = EmailSchema(many=True, strict=True)
identities_schema = IdentitySchema(many=True, strict=True)
user_schema = UserSchema(strict=True)
class AuthIndexResource(Resource):
auth_required = False
def get(self):
"""
Return information on the currently authenticated user.
"""
try:
user_response = client.get("/users/me")
except ApiError as exc:
if exc.code == 401:
return {"isAuthenticated": False}
user = json.loads(user_response.data)
identity_list = list(Identity.query.filter(Identity.user_id == user["id"]))
email_list = list(Email.query.filter(Email.user_id == user["id"]))
return {
"isAuthenticated": True,
"user": user,
"emails": emails_schema.dump(email_list).data,
"identities": identities_schema.dump(identity_list).data,
}
def delete(self):
"""
Logout.
"""
auth.logout()
return {"isAuthenticated": False, "user": None}
fix: Raise non-auth errors from GitHub
|
import json
from zeus import auth
from zeus.api import client
from zeus.exceptions import ApiError
from zeus.models import Email, Identity
from .base import Resource
from ..schemas import EmailSchema, IdentitySchema, UserSchema
emails_schema = EmailSchema(many=True, strict=True)
identities_schema = IdentitySchema(many=True, strict=True)
user_schema = UserSchema(strict=True)
class AuthIndexResource(Resource):
auth_required = False
def get(self):
"""
Return information on the currently authenticated user.
"""
try:
user_response = client.get("/users/me")
except ApiError as exc:
if exc.code == 401:
return {"isAuthenticated": False}
raise
user = json.loads(user_response.data)
identity_list = list(Identity.query.filter(Identity.user_id == user["id"]))
email_list = list(Email.query.filter(Email.user_id == user["id"]))
return {
"isAuthenticated": True,
"user": user,
"emails": emails_schema.dump(email_list).data,
"identities": identities_schema.dump(identity_list).data,
}
def delete(self):
"""
Logout.
"""
auth.logout()
return {"isAuthenticated": False, "user": None}
|
<commit_before>import json
from zeus import auth
from zeus.api import client
from zeus.exceptions import ApiError
from zeus.models import Email, Identity
from .base import Resource
from ..schemas import EmailSchema, IdentitySchema, UserSchema
emails_schema = EmailSchema(many=True, strict=True)
identities_schema = IdentitySchema(many=True, strict=True)
user_schema = UserSchema(strict=True)
class AuthIndexResource(Resource):
auth_required = False
def get(self):
"""
Return information on the currently authenticated user.
"""
try:
user_response = client.get("/users/me")
except ApiError as exc:
if exc.code == 401:
return {"isAuthenticated": False}
user = json.loads(user_response.data)
identity_list = list(Identity.query.filter(Identity.user_id == user["id"]))
email_list = list(Email.query.filter(Email.user_id == user["id"]))
return {
"isAuthenticated": True,
"user": user,
"emails": emails_schema.dump(email_list).data,
"identities": identities_schema.dump(identity_list).data,
}
def delete(self):
"""
Logout.
"""
auth.logout()
return {"isAuthenticated": False, "user": None}
<commit_msg>fix: Raise non-auth errors from GitHub<commit_after>
|
import json
from zeus import auth
from zeus.api import client
from zeus.exceptions import ApiError
from zeus.models import Email, Identity
from .base import Resource
from ..schemas import EmailSchema, IdentitySchema, UserSchema
emails_schema = EmailSchema(many=True, strict=True)
identities_schema = IdentitySchema(many=True, strict=True)
user_schema = UserSchema(strict=True)
class AuthIndexResource(Resource):
auth_required = False
def get(self):
"""
Return information on the currently authenticated user.
"""
try:
user_response = client.get("/users/me")
except ApiError as exc:
if exc.code == 401:
return {"isAuthenticated": False}
raise
user = json.loads(user_response.data)
identity_list = list(Identity.query.filter(Identity.user_id == user["id"]))
email_list = list(Email.query.filter(Email.user_id == user["id"]))
return {
"isAuthenticated": True,
"user": user,
"emails": emails_schema.dump(email_list).data,
"identities": identities_schema.dump(identity_list).data,
}
def delete(self):
"""
Logout.
"""
auth.logout()
return {"isAuthenticated": False, "user": None}
|
import json
from zeus import auth
from zeus.api import client
from zeus.exceptions import ApiError
from zeus.models import Email, Identity
from .base import Resource
from ..schemas import EmailSchema, IdentitySchema, UserSchema
emails_schema = EmailSchema(many=True, strict=True)
identities_schema = IdentitySchema(many=True, strict=True)
user_schema = UserSchema(strict=True)
class AuthIndexResource(Resource):
auth_required = False
def get(self):
"""
Return information on the currently authenticated user.
"""
try:
user_response = client.get("/users/me")
except ApiError as exc:
if exc.code == 401:
return {"isAuthenticated": False}
user = json.loads(user_response.data)
identity_list = list(Identity.query.filter(Identity.user_id == user["id"]))
email_list = list(Email.query.filter(Email.user_id == user["id"]))
return {
"isAuthenticated": True,
"user": user,
"emails": emails_schema.dump(email_list).data,
"identities": identities_schema.dump(identity_list).data,
}
def delete(self):
"""
Logout.
"""
auth.logout()
return {"isAuthenticated": False, "user": None}
fix: Raise non-auth errors from GitHubimport json
from zeus import auth
from zeus.api import client
from zeus.exceptions import ApiError
from zeus.models import Email, Identity
from .base import Resource
from ..schemas import EmailSchema, IdentitySchema, UserSchema
emails_schema = EmailSchema(many=True, strict=True)
identities_schema = IdentitySchema(many=True, strict=True)
user_schema = UserSchema(strict=True)
class AuthIndexResource(Resource):
auth_required = False
def get(self):
"""
Return information on the currently authenticated user.
"""
try:
user_response = client.get("/users/me")
except ApiError as exc:
if exc.code == 401:
return {"isAuthenticated": False}
raise
user = json.loads(user_response.data)
identity_list = list(Identity.query.filter(Identity.user_id == user["id"]))
email_list = list(Email.query.filter(Email.user_id == user["id"]))
return {
"isAuthenticated": True,
"user": user,
"emails": emails_schema.dump(email_list).data,
"identities": identities_schema.dump(identity_list).data,
}
def delete(self):
"""
Logout.
"""
auth.logout()
return {"isAuthenticated": False, "user": None}
|
<commit_before>import json
from zeus import auth
from zeus.api import client
from zeus.exceptions import ApiError
from zeus.models import Email, Identity
from .base import Resource
from ..schemas import EmailSchema, IdentitySchema, UserSchema
emails_schema = EmailSchema(many=True, strict=True)
identities_schema = IdentitySchema(many=True, strict=True)
user_schema = UserSchema(strict=True)
class AuthIndexResource(Resource):
auth_required = False
def get(self):
"""
Return information on the currently authenticated user.
"""
try:
user_response = client.get("/users/me")
except ApiError as exc:
if exc.code == 401:
return {"isAuthenticated": False}
user = json.loads(user_response.data)
identity_list = list(Identity.query.filter(Identity.user_id == user["id"]))
email_list = list(Email.query.filter(Email.user_id == user["id"]))
return {
"isAuthenticated": True,
"user": user,
"emails": emails_schema.dump(email_list).data,
"identities": identities_schema.dump(identity_list).data,
}
def delete(self):
"""
Logout.
"""
auth.logout()
return {"isAuthenticated": False, "user": None}
<commit_msg>fix: Raise non-auth errors from GitHub<commit_after>import json
from zeus import auth
from zeus.api import client
from zeus.exceptions import ApiError
from zeus.models import Email, Identity
from .base import Resource
from ..schemas import EmailSchema, IdentitySchema, UserSchema
emails_schema = EmailSchema(many=True, strict=True)
identities_schema = IdentitySchema(many=True, strict=True)
user_schema = UserSchema(strict=True)
class AuthIndexResource(Resource):
auth_required = False
def get(self):
"""
Return information on the currently authenticated user.
"""
try:
user_response = client.get("/users/me")
except ApiError as exc:
if exc.code == 401:
return {"isAuthenticated": False}
raise
user = json.loads(user_response.data)
identity_list = list(Identity.query.filter(Identity.user_id == user["id"]))
email_list = list(Email.query.filter(Email.user_id == user["id"]))
return {
"isAuthenticated": True,
"user": user,
"emails": emails_schema.dump(email_list).data,
"identities": identities_schema.dump(identity_list).data,
}
def delete(self):
"""
Logout.
"""
auth.logout()
return {"isAuthenticated": False, "user": None}
|
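Side note on the zeus fix above: the whole change is the single added `raise`, a bare re-raise that lets a 401 keep mapping to an anonymous response while every other API error propagates with its original traceback. A minimal, self-contained sketch of that pattern — the `ApiError`/`client` names mirror the zeus code, while `fetch_current_user` is illustrative:

class ApiError(Exception):
    """Stand-in for zeus.exceptions.ApiError; carries an HTTP-style code."""
    def __init__(self, code):
        super().__init__(code)
        self.code = code

def fetch_current_user(client):
    try:
        return client.get("/users/me")
    except ApiError as exc:
        if exc.code == 401:
            return None  # unauthenticated is an expected, handled outcome
        raise  # bare raise: re-raises the in-flight exception unchanged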
9174de810bc4be3376521eecdb82a84486591e73
|
oslo_utils/_i18n.py
|
oslo_utils/_i18n.py
|
# Copyright 2014 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""oslo.i18n integration module.
See http://docs.openstack.org/developer/oslo.i18n/usage.html .
"""
from oslo import i18n
_translators = i18n.TranslatorFactory(domain='oslo.utils')
# The primary translation function using the well-known name "_"
_ = _translators.primary
# Translators for log levels.
#
# The abbreviated names are meant to reflect the usual use of a short
# name like '_'. The "L" is for "log" and the other letter comes from
# the level.
_LI = _translators.log_info
_LW = _translators.log_warning
_LE = _translators.log_error
_LC = _translators.log_critical
|
# Copyright 2014 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""oslo.i18n integration module.
See http://docs.openstack.org/developer/oslo.i18n/usage.html .
"""
import oslo_i18n
_translators = oslo_i18n.TranslatorFactory(domain='oslo.utils')
# The primary translation function using the well-known name "_"
_ = _translators.primary
# Translators for log levels.
#
# The abbreviated names are meant to reflect the usual use of a short
# name like '_'. The "L" is for "log" and the other letter comes from
# the level.
_LI = _translators.log_info
_LW = _translators.log_warning
_LE = _translators.log_error
_LC = _translators.log_critical
|
Update Oslo imports to remove namespace package
|
Update Oslo imports to remove namespace package
Change-Id: I4ec9b2a310471e4e07867073e9577731ac34027d
Blueprint: drop-namespace-packages
|
Python
|
apache-2.0
|
magic0704/oslo.utils,varunarya10/oslo.utils,openstack/oslo.utils
|
# Copyright 2014 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""oslo.i18n integration module.
See http://docs.openstack.org/developer/oslo.i18n/usage.html .
"""
from oslo import i18n
_translators = i18n.TranslatorFactory(domain='oslo.utils')
# The primary translation function using the well-known name "_"
_ = _translators.primary
# Translators for log levels.
#
# The abbreviated names are meant to reflect the usual use of a short
# name like '_'. The "L" is for "log" and the other letter comes from
# the level.
_LI = _translators.log_info
_LW = _translators.log_warning
_LE = _translators.log_error
_LC = _translators.log_critical
Update Oslo imports to remove namespace package
Change-Id: I4ec9b2a310471e4e07867073e9577731ac34027d
Blueprint: drop-namespace-packages
|
# Copyright 2014 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""oslo.i18n integration module.
See http://docs.openstack.org/developer/oslo.i18n/usage.html .
"""
import oslo_i18n
_translators = oslo_i18n.TranslatorFactory(domain='oslo.utils')
# The primary translation function using the well-known name "_"
_ = _translators.primary
# Translators for log levels.
#
# The abbreviated names are meant to reflect the usual use of a short
# name like '_'. The "L" is for "log" and the other letter comes from
# the level.
_LI = _translators.log_info
_LW = _translators.log_warning
_LE = _translators.log_error
_LC = _translators.log_critical
|
<commit_before># Copyright 2014 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""oslo.i18n integration module.
See http://docs.openstack.org/developer/oslo.i18n/usage.html .
"""
from oslo import i18n
_translators = i18n.TranslatorFactory(domain='oslo.utils')
# The primary translation function using the well-known name "_"
_ = _translators.primary
# Translators for log levels.
#
# The abbreviated names are meant to reflect the usual use of a short
# name like '_'. The "L" is for "log" and the other letter comes from
# the level.
_LI = _translators.log_info
_LW = _translators.log_warning
_LE = _translators.log_error
_LC = _translators.log_critical
<commit_msg>Update Oslo imports to remove namespace package
Change-Id: I4ec9b2a310471e4e07867073e9577731ac34027d
Blueprint: drop-namespace-packages<commit_after>
|
# Copyright 2014 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""oslo.i18n integration module.
See http://docs.openstack.org/developer/oslo.i18n/usage.html .
"""
import oslo_i18n
_translators = oslo_i18n.TranslatorFactory(domain='oslo.utils')
# The primary translation function using the well-known name "_"
_ = _translators.primary
# Translators for log levels.
#
# The abbreviated names are meant to reflect the usual use of a short
# name like '_'. The "L" is for "log" and the other letter comes from
# the level.
_LI = _translators.log_info
_LW = _translators.log_warning
_LE = _translators.log_error
_LC = _translators.log_critical
|
# Copyright 2014 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""oslo.i18n integration module.
See http://docs.openstack.org/developer/oslo.i18n/usage.html .
"""
from oslo import i18n
_translators = i18n.TranslatorFactory(domain='oslo.utils')
# The primary translation function using the well-known name "_"
_ = _translators.primary
# Translators for log levels.
#
# The abbreviated names are meant to reflect the usual use of a short
# name like '_'. The "L" is for "log" and the other letter comes from
# the level.
_LI = _translators.log_info
_LW = _translators.log_warning
_LE = _translators.log_error
_LC = _translators.log_critical
Update Oslo imports to remove namespace package
Change-Id: I4ec9b2a310471e4e07867073e9577731ac34027d
Blueprint: drop-namespace-packages
# Copyright 2014 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""oslo.i18n integration module.
See http://docs.openstack.org/developer/oslo.i18n/usage.html .
"""
import oslo_i18n
_translators = oslo_i18n.TranslatorFactory(domain='oslo.utils')
# The primary translation function using the well-known name "_"
_ = _translators.primary
# Translators for log levels.
#
# The abbreviated names are meant to reflect the usual use of a short
# name like '_'. The "L" is for "log" and the other letter comes from
# the level.
_LI = _translators.log_info
_LW = _translators.log_warning
_LE = _translators.log_error
_LC = _translators.log_critical
|
<commit_before># Copyright 2014 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""oslo.i18n integration module.
See http://docs.openstack.org/developer/oslo.i18n/usage.html .
"""
from oslo import i18n
_translators = i18n.TranslatorFactory(domain='oslo.utils')
# The primary translation function using the well-known name "_"
_ = _translators.primary
# Translators for log levels.
#
# The abbreviated names are meant to reflect the usual use of a short
# name like '_'. The "L" is for "log" and the other letter comes from
# the level.
_LI = _translators.log_info
_LW = _translators.log_warning
_LE = _translators.log_error
_LC = _translators.log_critical
<commit_msg>Update Oslo imports to remove namespace package
Change-Id: I4ec9b2a310471e4e07867073e9577731ac34027d
Blueprint: drop-namespace-packages<commit_after># Copyright 2014 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""oslo.i18n integration module.
See http://docs.openstack.org/developer/oslo.i18n/usage.html .
"""
import oslo_i18n
_translators = oslo_i18n.TranslatorFactory(domain='oslo.utils')
# The primary translation function using the well-known name "_"
_ = _translators.primary
# Translators for log levels.
#
# The abbreviated names are meant to reflect the usual use of a short
# name like '_'. The "L" is for "log" and the other letter comes from
# the level.
_LI = _translators.log_info
_LW = _translators.log_warning
_LE = _translators.log_error
_LC = _translators.log_critical
|
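Side note on the oslo change above: it is purely mechanical, replacing the deprecated `oslo` namespace-package import with the flat `oslo_i18n` distribution. For code that must straddle both layouts during a migration, a guarded import keeps a single call site working; a hedged sketch (the fallback branch and the domain name are illustrative, not part of any project above):

try:
    import oslo_i18n  # flat package layout used by newer oslo.i18n releases
except ImportError:
    from oslo import i18n as oslo_i18n  # legacy namespace-package layout

_translators = oslo_i18n.TranslatorFactory(domain='example.project')
_ = _translators.primary        # conventional short alias for translation
_LW = _translators.log_warning  # log-level translator, as in the module above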
47eddebccf80bd066fd12f07316a2d283ff21774
|
numba/tests/compile_with_pycc.py
|
numba/tests/compile_with_pycc.py
|
from numba import exportmany, export
from numba.pycc import CC
# New API
cc = CC('pycc_test_output')
@cc.export('multf', 'f4(f4, f4)')
@cc.export('multi', 'i4(i4, i4)')
def mult(a, b):
return a * b
_two = 2
# This one can't be compiled by the legacy API as it doesn't execute
# the script in a proper module.
@cc.export('square', 'i8(i8)')
def square(u):
return u * _two
# Fails because it needs _helperlib
#@cc.export('power', 'i8(i8, i8)')
def power(u, v):
return u ** 2
# Legacy API
exportmany(['multf f4(f4,f4)', 'multi i4(i4,i4)'])(mult)
# Needs to link to helperlib to deal with complex arguments
# export('multc c16(c16,c16)')(mult)
export('mult f8(f8, f8)')(mult)
|
from numba import exportmany, export
from numba.pycc import CC
# New API
cc = CC('pycc_test_output')
@cc.export('multf', 'f4(f4, f4)')
@cc.export('multi', 'i4(i4, i4)')
def mult(a, b):
return a * b
_two = 2
# This one can't be compiled by the legacy API as it doesn't execute
# the script in a proper module.
@cc.export('square', 'i8(i8)')
def square(u):
return u * _two
# Fails because it needs _helperlib
#@cc.export('power', 'i8(i8, i8)')
def power(u, v):
return u ** v
# Legacy API
exportmany(['multf f4(f4,f4)', 'multi i4(i4,i4)'])(mult)
# Needs to link to helperlib to deal with complex arguments
# export('multc c16(c16,c16)')(mult)
export('mult f8(f8, f8)')(mult)
|
Fix function definition to actually fail
|
Fix function definition to actually fail
|
Python
|
bsd-2-clause
|
jriehl/numba,seibert/numba,ssarangi/numba,gmarkall/numba,stonebig/numba,stuartarchibald/numba,pitrou/numba,pombredanne/numba,gmarkall/numba,IntelLabs/numba,sklam/numba,gmarkall/numba,pitrou/numba,stonebig/numba,stuartarchibald/numba,numba/numba,sklam/numba,pombredanne/numba,pombredanne/numba,sklam/numba,cpcloud/numba,seibert/numba,stefanseefeld/numba,IntelLabs/numba,pombredanne/numba,ssarangi/numba,sklam/numba,stuartarchibald/numba,IntelLabs/numba,jriehl/numba,stonebig/numba,seibert/numba,numba/numba,jriehl/numba,pitrou/numba,jriehl/numba,seibert/numba,cpcloud/numba,numba/numba,IntelLabs/numba,pitrou/numba,stonebig/numba,cpcloud/numba,stonebig/numba,gmarkall/numba,stuartarchibald/numba,numba/numba,stefanseefeld/numba,stuartarchibald/numba,stefanseefeld/numba,pitrou/numba,IntelLabs/numba,cpcloud/numba,seibert/numba,gmarkall/numba,jriehl/numba,numba/numba,ssarangi/numba,ssarangi/numba,stefanseefeld/numba,cpcloud/numba,pombredanne/numba,ssarangi/numba,stefanseefeld/numba,sklam/numba
|
from numba import exportmany, export
from numba.pycc import CC
# New API
cc = CC('pycc_test_output')
@cc.export('multf', 'f4(f4, f4)')
@cc.export('multi', 'i4(i4, i4)')
def mult(a, b):
return a * b
_two = 2
# This one can't be compiled by the legacy API as it doesn't execute
# the script in a proper module.
@cc.export('square', 'i8(i8)')
def square(u):
return u * _two
# Fails because it needs _helperlib
#@cc.export('power', 'i8(i8, i8)')
def power(u, v):
return u ** 2
# Legacy API
exportmany(['multf f4(f4,f4)', 'multi i4(i4,i4)'])(mult)
# Needs to link to helperlib to deal with complex arguments
# export('multc c16(c16,c16)')(mult)
export('mult f8(f8, f8)')(mult)
Fix function definition to actually fail
|
from numba import exportmany, export
from numba.pycc import CC
# New API
cc = CC('pycc_test_output')
@cc.export('multf', 'f4(f4, f4)')
@cc.export('multi', 'i4(i4, i4)')
def mult(a, b):
return a * b
_two = 2
# This one can't be compiled by the legacy API as it doesn't execute
# the script in a proper module.
@cc.export('square', 'i8(i8)')
def square(u):
return u * _two
# Fails because it needs _helperlib
#@cc.export('power', 'i8(i8, i8)')
def power(u, v):
return u ** v
# Legacy API
exportmany(['multf f4(f4,f4)', 'multi i4(i4,i4)'])(mult)
# Needs to link to helperlib to deal with complex arguments
# export('multc c16(c16,c16)')(mult)
export('mult f8(f8, f8)')(mult)
|
<commit_before>from numba import exportmany, export
from numba.pycc import CC
# New API
cc = CC('pycc_test_output')
@cc.export('multf', 'f4(f4, f4)')
@cc.export('multi', 'i4(i4, i4)')
def mult(a, b):
return a * b
_two = 2
# This one can't be compiled by the legacy API as it doesn't execute
# the script in a proper module.
@cc.export('square', 'i8(i8)')
def square(u):
return u * _two
# Fails because it needs _helperlib
#@cc.export('power', 'i8(i8, i8)')
def power(u, v):
return u ** 2
# Legacy API
exportmany(['multf f4(f4,f4)', 'multi i4(i4,i4)'])(mult)
# Needs to link to helperlib to deal with complex arguments
# export('multc c16(c16,c16)')(mult)
export('mult f8(f8, f8)')(mult)
<commit_msg>Fix function definition to actually fail<commit_after>
|
from numba import exportmany, export
from numba.pycc import CC
# New API
cc = CC('pycc_test_output')
@cc.export('multf', 'f4(f4, f4)')
@cc.export('multi', 'i4(i4, i4)')
def mult(a, b):
return a * b
_two = 2
# This one can't be compiled by the legacy API as it doesn't execute
# the script in a proper module.
@cc.export('square', 'i8(i8)')
def square(u):
return u * _two
# Fails because it needs _helperlib
#@cc.export('power', 'i8(i8, i8)')
def power(u, v):
return u ** v
# Legacy API
exportmany(['multf f4(f4,f4)', 'multi i4(i4,i4)'])(mult)
# Needs to link to helperlib to deal with complex arguments
# export('multc c16(c16,c16)')(mult)
export('mult f8(f8, f8)')(mult)
|
from numba import exportmany, export
from numba.pycc import CC
# New API
cc = CC('pycc_test_output')
@cc.export('multf', 'f4(f4, f4)')
@cc.export('multi', 'i4(i4, i4)')
def mult(a, b):
return a * b
_two = 2
# This one can't be compiled by the legacy API as it doesn't execute
# the script in a proper module.
@cc.export('square', 'i8(i8)')
def square(u):
return u * _two
# Fails because it needs _helperlib
#@cc.export('power', 'i8(i8, i8)')
def power(u, v):
return u ** 2
# Legacy API
exportmany(['multf f4(f4,f4)', 'multi i4(i4,i4)'])(mult)
# Needs to link to helperlib to deal with complex arguments
# export('multc c16(c16,c16)')(mult)
export('mult f8(f8, f8)')(mult)
Fix function definition to actually fail
from numba import exportmany, export
from numba.pycc import CC
# New API
cc = CC('pycc_test_output')
@cc.export('multf', 'f4(f4, f4)')
@cc.export('multi', 'i4(i4, i4)')
def mult(a, b):
return a * b
_two = 2
# This one can't be compiled by the legacy API as it doesn't execute
# the script in a proper module.
@cc.export('square', 'i8(i8)')
def square(u):
return u * _two
# Fails because it needs _helperlib
#@cc.export('power', 'i8(i8, i8)')
def power(u, v):
return u ** v
# Legacy API
exportmany(['multf f4(f4,f4)', 'multi i4(i4,i4)'])(mult)
# Needs to link to helperlib to deal with complex arguments
# export('multc c16(c16,c16)')(mult)
export('mult f8(f8, f8)')(mult)
|
<commit_before>from numba import exportmany, export
from numba.pycc import CC
# New API
cc = CC('pycc_test_output')
@cc.export('multf', 'f4(f4, f4)')
@cc.export('multi', 'i4(i4, i4)')
def mult(a, b):
return a * b
_two = 2
# This one can't be compiled by the legacy API as it doesn't execute
# the script in a proper module.
@cc.export('square', 'i8(i8)')
def square(u):
return u * _two
# Fails because it needs _helperlib
#@cc.export('power', 'i8(i8, i8)')
def power(u, v):
return u ** 2
# Legacy API
exportmany(['multf f4(f4,f4)', 'multi i4(i4,i4)'])(mult)
# Needs to link to helperlib to deal with complex arguments
# export('multc c16(c16,c16)')(mult)
export('mult f8(f8, f8)')(mult)
<commit_msg>Fix function definition to actually fail<commit_after>from numba import exportmany, export
from numba.pycc import CC
# New API
cc = CC('pycc_test_output')
@cc.export('multf', 'f4(f4, f4)')
@cc.export('multi', 'i4(i4, i4)')
def mult(a, b):
return a * b
_two = 2
# This one can't be compiled by the legacy API as it doesn't execute
# the script in a proper module.
@cc.export('square', 'i8(i8)')
def square(u):
return u * _two
# Fails because it needs _helperlib
#@cc.export('power', 'i8(i8, i8)')
def power(u, v):
return u ** v
# Legacy API
exportmany(['multf f4(f4,f4)', 'multi i4(i4,i4)'])(mult)
# Needs to link to helperlib to deal with complex arguments
# export('multc c16(c16,c16)')(mult)
export('mult f8(f8, f8)')(mult)
|
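Side note on the numba fix above: with `return u ** 2` the second parameter of `power` was dead, so a compiled `'i8(i8, i8)'` export would never have exercised a genuine two-argument path. A minimal pycc sketch in the same style, assuming a numba version that still ships the pycc API (`demo_output` is an illustrative module name):

from numba.pycc import CC

cc = CC('demo_output')  # name of the extension module to produce

@cc.export('power', 'i8(i8, i8)')
def power(u, v):
    return u ** v  # uses both arguments, unlike the earlier `u ** 2`

if __name__ == '__main__':
    cc.compile()  # builds the demo_output extension next to this script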
6a15b33d69d8d66643bb8886f9916fa28ecaedea
|
molo/yourwords/templatetags/competition_tag.py
|
molo/yourwords/templatetags/competition_tag.py
|
from django import template
from copy import copy
from molo.yourwords.models import (YourWordsCompetition, ThankYou,
YourWordsCompetitionIndexPage)
from molo.core.templatetags.core_tags import get_pages
register = template.Library()
@register.inclusion_tag(
'yourwords/your_words_competition_tag.html',
takes_context=True
)
def your_words_competition(context):
context = copy(context)
locale_code = context.get('locale_code')
page = YourWordsCompetitionIndexPage.objects.live().all().first()
if page:
competitions = (
YourWordsCompetition.objects.child_of(page).filter(
languages__language__is_main_language=True).specific())
else:
competitions = []
context.update({
'competitions': get_pages(context, competitions, locale_code)
})
return context
@register.assignment_tag(takes_context=True)
def load_thank_you_page_for_competition(context, competition):
page = competition.get_main_language_page()
locale = context.get('locale_code')
qs = ThankYou.objects.child_of(page).filter(
languages__language__is_main_language=True)
if not locale:
return qs
if qs:
return get_pages(context, qs, locale)
else:
return []
|
from django import template
from copy import copy
from molo.yourwords.models import (YourWordsCompetition, ThankYou,
YourWordsCompetitionIndexPage)
from molo.core.templatetags.core_tags import get_pages
register = template.Library()
@register.inclusion_tag(
'yourwords/your_words_competition_tag.html',
takes_context=True
)
def your_words_competition(context):
context = copy(context)
locale_code = context.get('locale_code')
page = YourWordsCompetitionIndexPage.objects.live().all().first()
if page:
competitions = (
YourWordsCompetition.objects.child_of(page).filter(
languages__language__is_main_language=True).specific())
else:
competitions = YourWordsCompetition.objects.none()
context.update({
'competitions': get_pages(context, competitions, locale_code)
})
return context
@register.assignment_tag(takes_context=True)
def load_thank_you_page_for_competition(context, competition):
page = competition.get_main_language_page()
locale = context.get('locale_code')
qs = ThankYou.objects.child_of(page).filter(
languages__language__is_main_language=True)
if not locale:
return qs
if qs:
return get_pages(context, qs, locale)
else:
return []
|
Return None if there is no competition
|
Return None if there is no competition
|
Python
|
bsd-2-clause
|
praekelt/molo.yourwords,praekelt/molo.yourwords
|
from django import template
from copy import copy
from molo.yourwords.models import (YourWordsCompetition, ThankYou,
YourWordsCompetitionIndexPage)
from molo.core.templatetags.core_tags import get_pages
register = template.Library()
@register.inclusion_tag(
'yourwords/your_words_competition_tag.html',
takes_context=True
)
def your_words_competition(context):
context = copy(context)
locale_code = context.get('locale_code')
page = YourWordsCompetitionIndexPage.objects.live().all().first()
if page:
competitions = (
YourWordsCompetition.objects.child_of(page).filter(
languages__language__is_main_language=True).specific())
else:
competitions = []
context.update({
'competitions': get_pages(context, competitions, locale_code)
})
return context
@register.assignment_tag(takes_context=True)
def load_thank_you_page_for_competition(context, competition):
page = competition.get_main_language_page()
locale = context.get('locale_code')
qs = ThankYou.objects.child_of(page).filter(
languages__language__is_main_language=True)
if not locale:
return qs
if qs:
return get_pages(context, qs, locale)
else:
return []
Return None if there is no competition
|
from django import template
from copy import copy
from molo.yourwords.models import (YourWordsCompetition, ThankYou,
YourWordsCompetitionIndexPage)
from molo.core.templatetags.core_tags import get_pages
register = template.Library()
@register.inclusion_tag(
'yourwords/your_words_competition_tag.html',
takes_context=True
)
def your_words_competition(context):
context = copy(context)
locale_code = context.get('locale_code')
page = YourWordsCompetitionIndexPage.objects.live().all().first()
if page:
competitions = (
YourWordsCompetition.objects.child_of(page).filter(
languages__language__is_main_language=True).specific())
else:
competitions = YourWordsCompetition.objects.none()
context.update({
'competitions': get_pages(context, competitions, locale_code)
})
return context
@register.assignment_tag(takes_context=True)
def load_thank_you_page_for_competition(context, competition):
page = competition.get_main_language_page()
locale = context.get('locale_code')
qs = ThankYou.objects.child_of(page).filter(
languages__language__is_main_language=True)
if not locale:
return qs
if qs:
return get_pages(context, qs, locale)
else:
return []
|
<commit_before>from django import template
from copy import copy
from molo.yourwords.models import (YourWordsCompetition, ThankYou,
YourWordsCompetitionIndexPage)
from molo.core.templatetags.core_tags import get_pages
register = template.Library()
@register.inclusion_tag(
'yourwords/your_words_competition_tag.html',
takes_context=True
)
def your_words_competition(context):
context = copy(context)
locale_code = context.get('locale_code')
page = YourWordsCompetitionIndexPage.objects.live().all().first()
if page:
competitions = (
YourWordsCompetition.objects.child_of(page).filter(
languages__language__is_main_language=True).specific())
else:
competitions = []
context.update({
'competitions': get_pages(context, competitions, locale_code)
})
return context
@register.assignment_tag(takes_context=True)
def load_thank_you_page_for_competition(context, competition):
page = competition.get_main_language_page()
locale = context.get('locale_code')
qs = ThankYou.objects.child_of(page).filter(
languages__language__is_main_language=True)
if not locale:
return qs
if qs:
return get_pages(context, qs, locale)
else:
return []
<commit_msg>Return None if there is no competition<commit_after>
|
from django import template
from copy import copy
from molo.yourwords.models import (YourWordsCompetition, ThankYou,
YourWordsCompetitionIndexPage)
from molo.core.templatetags.core_tags import get_pages
register = template.Library()
@register.inclusion_tag(
'yourwords/your_words_competition_tag.html',
takes_context=True
)
def your_words_competition(context):
context = copy(context)
locale_code = context.get('locale_code')
page = YourWordsCompetitionIndexPage.objects.live().all().first()
if page:
competitions = (
YourWordsCompetition.objects.child_of(page).filter(
languages__language__is_main_language=True).specific())
else:
competitions = YourWordsCompetition.objects.none()
context.update({
'competitions': get_pages(context, competitions, locale_code)
})
return context
@register.assignment_tag(takes_context=True)
def load_thank_you_page_for_competition(context, competition):
page = competition.get_main_language_page()
locale = context.get('locale_code')
qs = ThankYou.objects.child_of(page).filter(
languages__language__is_main_language=True)
if not locale:
return qs
if qs:
return get_pages(context, qs, locale)
else:
return []
|
from django import template
from copy import copy
from molo.yourwords.models import (YourWordsCompetition, ThankYou,
YourWordsCompetitionIndexPage)
from molo.core.templatetags.core_tags import get_pages
register = template.Library()
@register.inclusion_tag(
'yourwords/your_words_competition_tag.html',
takes_context=True
)
def your_words_competition(context):
context = copy(context)
locale_code = context.get('locale_code')
page = YourWordsCompetitionIndexPage.objects.live().all().first()
if page:
competitions = (
YourWordsCompetition.objects.child_of(page).filter(
languages__language__is_main_language=True).specific())
else:
competitions = []
context.update({
'competitions': get_pages(context, competitions, locale_code)
})
return context
@register.assignment_tag(takes_context=True)
def load_thank_you_page_for_competition(context, competition):
page = competition.get_main_language_page()
locale = context.get('locale_code')
qs = ThankYou.objects.child_of(page).filter(
languages__language__is_main_language=True)
if not locale:
return qs
if qs:
return get_pages(context, qs, locale)
else:
return []
Return None if there is no competition
from django import template
from copy import copy
from molo.yourwords.models import (YourWordsCompetition, ThankYou,
YourWordsCompetitionIndexPage)
from molo.core.templatetags.core_tags import get_pages
register = template.Library()
@register.inclusion_tag(
'yourwords/your_words_competition_tag.html',
takes_context=True
)
def your_words_competition(context):
context = copy(context)
locale_code = context.get('locale_code')
page = YourWordsCompetitionIndexPage.objects.live().all().first()
if page:
competitions = (
YourWordsCompetition.objects.child_of(page).filter(
languages__language__is_main_language=True).specific())
else:
competitions = YourWordsCompetition.objects.none()
context.update({
'competitions': get_pages(context, competitions, locale_code)
})
return context
@register.assignment_tag(takes_context=True)
def load_thank_you_page_for_competition(context, competition):
page = competition.get_main_language_page()
locale = context.get('locale_code')
qs = ThankYou.objects.child_of(page).filter(
languages__language__is_main_language=True)
if not locale:
return qs
if qs:
return get_pages(context, qs, locale)
else:
return []
|
<commit_before>from django import template
from copy import copy
from molo.yourwords.models import (YourWordsCompetition, ThankYou,
YourWordsCompetitionIndexPage)
from molo.core.templatetags.core_tags import get_pages
register = template.Library()
@register.inclusion_tag(
'yourwords/your_words_competition_tag.html',
takes_context=True
)
def your_words_competition(context):
context = copy(context)
locale_code = context.get('locale_code')
page = YourWordsCompetitionIndexPage.objects.live().all().first()
if page:
competitions = (
YourWordsCompetition.objects.child_of(page).filter(
languages__language__is_main_language=True).specific())
else:
competitions = []
context.update({
'competitions': get_pages(context, competitions, locale_code)
})
return context
@register.assignment_tag(takes_context=True)
def load_thank_you_page_for_competition(context, competition):
page = competition.get_main_language_page()
locale = context.get('locale_code')
qs = ThankYou.objects.child_of(page).filter(
languages__language__is_main_language=True)
if not locale:
return qs
if qs:
return get_pages(context, qs, locale)
else:
return []
<commit_msg>Return None if there is no competition<commit_after>from django import template
from copy import copy
from molo.yourwords.models import (YourWordsCompetition, ThankYou,
YourWordsCompetitionIndexPage)
from molo.core.templatetags.core_tags import get_pages
register = template.Library()
@register.inclusion_tag(
'yourwords/your_words_competition_tag.html',
takes_context=True
)
def your_words_competition(context):
context = copy(context)
locale_code = context.get('locale_code')
page = YourWordsCompetitionIndexPage.objects.live().all().first()
if page:
competitions = (
YourWordsCompetition.objects.child_of(page).filter(
languages__language__is_main_language=True).specific())
else:
competitions = YourWordsCompetition.objects.none()
context.update({
'competitions': get_pages(context, competitions, locale_code)
})
return context
@register.assignment_tag(takes_context=True)
def load_thank_you_page_for_competition(context, competition):
page = competition.get_main_language_page()
locale = context.get('locale_code')
qs = ThankYou.objects.child_of(page).filter(
languages__language__is_main_language=True)
if not locale:
return qs
if qs:
return get_pages(context, qs, locale)
else:
return []
|
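Side note on the molo fix above: swapping `[]` for `YourWordsCompetition.objects.none()` matters because an empty QuerySet still answers QuerySet-only calls (`filter`, wagtail's `specific`, whatever `get_pages` chains internally), while a plain list raises on the first such call. A hedged sketch of the shape, assuming a configured Django/wagtail project; `visible_competitions` is an illustrative helper, not molo API:

def visible_competitions(index_page, model):
    """Return a QuerySet on both branches so callers can keep chaining."""
    if index_page is None:
        return model.objects.none()  # empty, but still a real QuerySet
    return model.objects.child_of(index_page).filter(
        languages__language__is_main_language=True).specific()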
8c07012e423d592a4638d6dac58ca5e67d9dd5a6
|
apps/cowry/views.py
|
apps/cowry/views.py
|
from rest_framework import generics
from .permissions import IsOrderCreator
from .serializers import PaymentSerializer
from .models import Payment
class PaymentDetail(generics.RetrieveUpdateAPIView):
"""
    View for working with Payments. Payments can be retrieved and the payment method and submethod can be updated.
"""
model = Payment
serializer_class = PaymentSerializer
permission_classes = (IsOrderCreator,)
|
from rest_framework import generics
from rest_framework import response
from rest_framework import status
from . import payments
from .exceptions import PaymentException
from .models import Payment
from .permissions import IsOrderCreator
from .serializers import PaymentSerializer
class PaymentDetail(generics.RetrieveUpdateDestroyAPIView):
"""
    View for working with Payments. Payments can be retrieved (GET), the payment method and submethod can be updated (PUT)
and a payment can be cancelled (DELETE).
"""
model = Payment
serializer_class = PaymentSerializer
permission_classes = (IsOrderCreator,)
def destroy(self, request, *args, **kwargs):
payment = self.get_object()
try:
payments.cancel_payment(payment)
except (NotImplementedError, PaymentException) as e:
return response.Response(data=e, status=status.HTTP_400_BAD_REQUEST)
else:
return response.Response(status=status.HTTP_202_ACCEPTED)
|
Add payment cancel (delete) to Payment REST API.
|
Add payment cancel (delete) to Payment REST API.
|
Python
|
bsd-3-clause
|
onepercentclub/onepercentclub-site,onepercentclub/onepercentclub-site,onepercentclub/onepercentclub-site,onepercentclub/onepercentclub-site,onepercentclub/onepercentclub-site
|
from rest_framework import generics
from .permissions import IsOrderCreator
from .serializers import PaymentSerializer
from .models import Payment
class PaymentDetail(generics.RetrieveUpdateAPIView):
"""
    View for working with Payments. Payments can be retrieved and the payment method and submethod can be updated.
"""
model = Payment
serializer_class = PaymentSerializer
permission_classes = (IsOrderCreator,)
Add payment cancel (delete) to Payment REST API.
|
from rest_framework import generics
from rest_framework import response
from rest_framework import status
from . import payments
from .exceptions import PaymentException
from .models import Payment
from .permissions import IsOrderCreator
from .serializers import PaymentSerializer
class PaymentDetail(generics.RetrieveUpdateDestroyAPIView):
"""
    View for working with Payments. Payments can be retrieved (GET), the payment method and submethod can be updated (PUT)
and a payment can be cancelled (DELETE).
"""
model = Payment
serializer_class = PaymentSerializer
permission_classes = (IsOrderCreator,)
def destroy(self, request, *args, **kwargs):
payment = self.get_object()
try:
payments.cancel_payment(payment)
except (NotImplementedError, PaymentException) as e:
return response.Response(data=e, status=status.HTTP_400_BAD_REQUEST)
else:
return response.Response(status=status.HTTP_202_ACCEPTED)
|
<commit_before>from rest_framework import generics
from .permissions import IsOrderCreator
from .serializers import PaymentSerializer
from .models import Payment
class PaymentDetail(generics.RetrieveUpdateAPIView):
"""
    View for working with Payments. Payments can be retrieved and the payment method and submethod can be updated.
"""
model = Payment
serializer_class = PaymentSerializer
permission_classes = (IsOrderCreator,)
<commit_msg>Add payment cancel (delete) to Payment REST API.<commit_after>
|
from rest_framework import generics
from rest_framework import response
from rest_framework import status
from . import payments
from .exceptions import PaymentException
from .models import Payment
from .permissions import IsOrderCreator
from .serializers import PaymentSerializer
class PaymentDetail(generics.RetrieveUpdateDestroyAPIView):
"""
    View for working with Payments. Payments can be retrieved (GET), the payment method and submethod can be updated (PUT)
and a payment can be cancelled (DELETE).
"""
model = Payment
serializer_class = PaymentSerializer
permission_classes = (IsOrderCreator,)
def destroy(self, request, *args, **kwargs):
payment = self.get_object()
try:
payments.cancel_payment(payment)
except (NotImplementedError, PaymentException) as e:
return response.Response(data=e, status=status.HTTP_400_BAD_REQUEST)
else:
return response.Response(status=status.HTTP_202_ACCEPTED)
|
from rest_framework import generics
from .permissions import IsOrderCreator
from .serializers import PaymentSerializer
from .models import Payment
class PaymentDetail(generics.RetrieveUpdateAPIView):
"""
    View for working with Payments. Payments can be retrieved and the payment method and submethod can be updated.
"""
model = Payment
serializer_class = PaymentSerializer
permission_classes = (IsOrderCreator,)
Add payment cancel (delete) to Payment REST API.
from rest_framework import generics
from rest_framework import response
from rest_framework import status
from . import payments
from .exceptions import PaymentException
from .models import Payment
from .permissions import IsOrderCreator
from .serializers import PaymentSerializer
class PaymentDetail(generics.RetrieveUpdateDestroyAPIView):
"""
    View for working with Payments. Payments can be retrieved (GET), the payment method and submethod can be updated (PUT)
and a payment can be cancelled (DELETE).
"""
model = Payment
serializer_class = PaymentSerializer
permission_classes = (IsOrderCreator,)
def destroy(self, request, *args, **kwargs):
payment = self.get_object()
try:
payments.cancel_payment(payment)
except (NotImplementedError, PaymentException) as e:
return response.Response(data=e, status=status.HTTP_400_BAD_REQUEST)
else:
return response.Response(status=status.HTTP_202_ACCEPTED)
|
<commit_before>from rest_framework import generics
from .permissions import IsOrderCreator
from .serializers import PaymentSerializer
from .models import Payment
class PaymentDetail(generics.RetrieveUpdateAPIView):
"""
    View for working with Payments. Payments can be retrieved and the payment method and submethod can be updated.
"""
model = Payment
serializer_class = PaymentSerializer
permission_classes = (IsOrderCreator,)
<commit_msg>Add payment cancel (delete) to Payment REST API.<commit_after>from rest_framework import generics
from rest_framework import response
from rest_framework import status
from . import payments
from .exceptions import PaymentException
from .models import Payment
from .permissions import IsOrderCreator
from .serializers import PaymentSerializer
class PaymentDetail(generics.RetrieveUpdateDestroyAPIView):
"""
    View for working with Payments. Payments can be retrieved (GET), the payment method and submethod can be updated (PUT)
and a payment can be cancelled (DELETE).
"""
model = Payment
serializer_class = PaymentSerializer
permission_classes = (IsOrderCreator,)
def destroy(self, request, *args, **kwargs):
payment = self.get_object()
try:
payments.cancel_payment(payment)
except (NotImplementedError, PaymentException) as e:
return response.Response(data=e, status=status.HTTP_400_BAD_REQUEST)
else:
return response.Response(status=status.HTTP_202_ACCEPTED)
|
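Side note on the cowry change above: mapping "cancel" onto HTTP DELETE via a destroy() override keeps the URL surface flat, and 202 Accepted signals that the cancellation was taken on board rather than that the resource vanished. A generic sketch of the same shape using stock Django REST framework; `cancel` is an illustrative hook, and passing `str(e)` (rather than the exception object, as the commit does) keeps the 400 body serializable:

from rest_framework import generics, response, status

class CancellableDetail(generics.RetrieveUpdateDestroyAPIView):
    def cancel(self, obj):
        raise NotImplementedError  # subclasses plug in their domain call

    def destroy(self, request, *args, **kwargs):
        obj = self.get_object()
        try:
            self.cancel(obj)  # cowry calls payments.cancel_payment(payment) here
        except (NotImplementedError, ValueError) as e:
            return response.Response(data=str(e),
                                     status=status.HTTP_400_BAD_REQUEST)
        return response.Response(status=status.HTTP_202_ACCEPTED)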
b434c8dd697b4aa5c2d6daa345c0d9de27e7c05a
|
apps/survey/urls.py
|
apps/survey/urls.py
|
from django.conf.urls.defaults import *
from . import views
urlpatterns = patterns('',
url(r'^profile/$', views.profile_index, name='survey_profile'),
url(r'^profile/electric/$', views.profile_electric, name='survey_profile_electric'),
url(r'^profile/surveys/$', views.survey_management, name='survey_management'),
url(r'^main/$', views.main_index),
url(r'^survey_management/$', views.survey_management, name='survey_management'),
url(r'^intake/view/$', views.survey_intake_view, name='survey_intake_view'),
url(r'^intake/update/$', views.survey_intake_update, name='survey_intake_update'),
    url(r'^monthly/(?P<id>\d+)/$', views.survey_monthly, name='survey_monthly'),
    url(r'^monthly/(?P<id>\d+)/update/$', views.survey_monthly_update, name='survey_monthly_update'),
url(r'^thanks_profile/$', views.thanks_profile, name='profile_thanks'),
#url(r'^select/$', views.select_user, name='survey_select_user'),
url(r'^$', views.index, name='survey_index'),
)
|
from django.conf.urls.defaults import *
from . import views
urlpatterns = patterns('',
url(r'^profile/$', views.profile_index, name='survey_profile'),
url(r'^profile/electric/$', views.profile_electric, name='survey_profile_electric'),
#url(r'^profile/intake/$', views.survey_intake, name='survey_profile_intake'),
url(r'^profile/surveys/$', views.survey_management, name='survey_management'),
url(r'^main/$', views.main_index),
url(r'^survey_management/$', views.survey_management, name='survey_management'),
#url(r'^survey_data/(?P<survey_shortname>.+)/(?P<id>\d+)/$', views.survey_data, name='survey_data'),
url(r'^intake/$', views.survey_data, name='survey_data'),
    url(r'^monthly/(?P<id>\d+)/$', views.survey_data_monthly, name='survey_data_monthly'),
    url(r'^monthly/(?P<id>\d+)/update/$', views.survey_update_monthly, name='survey_update_monthly'),
url(r'^thanks_profile/$', views.thanks_profile, name='profile_thanks'),
#url(r'^select/$', views.select_user, name='survey_select_user'),
url(r'^$', views.index, name='survey_index'),
)
|
Add new decorator for survey_data
|
Add new decorator for survey_data
|
Python
|
agpl-3.0
|
chispita/epiwork,chispita/epiwork,chispita/epiwork,chispita/epiwork,chispita/epiwork,chispita/epiwork,chispita/epiwork
|
from django.conf.urls.defaults import *
from . import views
urlpatterns = patterns('',
url(r'^profile/$', views.profile_index, name='survey_profile'),
url(r'^profile/electric/$', views.profile_electric, name='survey_profile_electric'),
url(r'^profile/surveys/$', views.survey_management, name='survey_management'),
url(r'^main/$', views.main_index),
url(r'^survey_management/$', views.survey_management, name='survey_management'),
url(r'^intake/view/$', views.survey_intake_view, name='survey_intake_view'),
url(r'^intake/update/$', views.survey_intake_update, name='survey_intake_update'),
    url(r'^monthly/(?P<id>\d+)/$', views.survey_monthly, name='survey_monthly'),
    url(r'^monthly/(?P<id>\d+)/update/$', views.survey_monthly_update, name='survey_monthly_update'),
url(r'^thanks_profile/$', views.thanks_profile, name='profile_thanks'),
#url(r'^select/$', views.select_user, name='survey_select_user'),
url(r'^$', views.index, name='survey_index'),
)
Add new decorator for survey_data
|
from django.conf.urls.defaults import *
from . import views
urlpatterns = patterns('',
url(r'^profile/$', views.profile_index, name='survey_profile'),
url(r'^profile/electric/$', views.profile_electric, name='survey_profile_electric'),
#url(r'^profile/intake/$', views.survey_intake, name='survey_profile_intake'),
url(r'^profile/surveys/$', views.survey_management, name='survey_management'),
url(r'^main/$', views.main_index),
url(r'^survey_management/$', views.survey_management, name='survey_management'),
#url(r'^survey_data/(?P<survey_shortname>.+)/(?P<id>\d+)/$', views.survey_data, name='survey_data'),
url(r'^intake/$', views.survey_data, name='survey_data'),
    url(r'^monthly/(?P<id>\d+)/$', views.survey_data_monthly, name='survey_data_monthly'),
    url(r'^monthly/(?P<id>\d+)/update/$', views.survey_update_monthly, name='survey_update_monthly'),
url(r'^thanks_profile/$', views.thanks_profile, name='profile_thanks'),
#url(r'^select/$', views.select_user, name='survey_select_user'),
url(r'^$', views.index, name='survey_index'),
)
|
<commit_before>from django.conf.urls.defaults import *
from . import views
urlpatterns = patterns('',
url(r'^profile/$', views.profile_index, name='survey_profile'),
url(r'^profile/electric/$', views.profile_electric, name='survey_profile_electric'),
url(r'^profile/surveys/$', views.survey_management, name='survey_management'),
url(r'^main/$', views.main_index),
url(r'^survey_management/$', views.survey_management, name='survey_management'),
url(r'^intake/view/$', views.survey_intake_view, name='survey_intake_view'),
url(r'^intake/update/$', views.survey_intake_update, name='survey_intake_update'),
    url(r'^monthly/(?P<id>\d+)/$', views.survey_monthly, name='survey_monthly'),
    url(r'^monthly/(?P<id>\d+)/update/$', views.survey_monthly_update, name='survey_monthly_update'),
url(r'^thanks_profile/$', views.thanks_profile, name='profile_thanks'),
#url(r'^select/$', views.select_user, name='survey_select_user'),
url(r'^$', views.index, name='survey_index'),
)
<commit_msg>Add new decorator for survey_data<commit_after>
|
from django.conf.urls.defaults import *
from . import views
urlpatterns = patterns('',
url(r'^profile/$', views.profile_index, name='survey_profile'),
url(r'^profile/electric/$', views.profile_electric, name='survey_profile_electric'),
#url(r'^profile/intake/$', views.survey_intake, name='survey_profile_intake'),
url(r'^profile/surveys/$', views.survey_management, name='survey_management'),
url(r'^main/$', views.main_index),
url(r'^survey_management/$', views.survey_management, name='survey_management'),
#url(r'^survey_data/(?P<survey_shortname>.+)/(?P<id>\d+)/$', views.survey_data, name='survey_data'),
url(r'^intake/$', views.survey_data, name='survey_data'),
    url(r'^monthly/(?P<id>\d+)/$', views.survey_data_monthly, name='survey_data_monthly'),
    url(r'^monthly/(?P<id>\d+)/update/$', views.survey_update_monthly, name='survey_update_monthly'),
url(r'^thanks_profile/$', views.thanks_profile, name='profile_thanks'),
#url(r'^select/$', views.select_user, name='survey_select_user'),
url(r'^$', views.index, name='survey_index'),
)
|
from django.conf.urls.defaults import *
from . import views
urlpatterns = patterns('',
url(r'^profile/$', views.profile_index, name='survey_profile'),
url(r'^profile/electric/$', views.profile_electric, name='survey_profile_electric'),
url(r'^profile/surveys/$', views.survey_management, name='survey_management'),
url(r'^main/$', views.main_index),
url(r'^survey_management/$', views.survey_management, name='survey_management'),
url(r'^intake/view/$', views.survey_intake_view, name='survey_intake_view'),
url(r'^intake/update/$', views.survey_intake_update, name='survey_intake_update'),
    url(r'^monthly/(?P<id>\d+)/$', views.survey_monthly, name='survey_monthly'),
    url(r'^monthly/(?P<id>\d+)/update/$', views.survey_monthly_update, name='survey_monthly_update'),
url(r'^thanks_profile/$', views.thanks_profile, name='profile_thanks'),
#url(r'^select/$', views.select_user, name='survey_select_user'),
url(r'^$', views.index, name='survey_index'),
)
Add new decorator for survey_data
from django.conf.urls.defaults import *
from . import views
urlpatterns = patterns('',
url(r'^profile/$', views.profile_index, name='survey_profile'),
url(r'^profile/electric/$', views.profile_electric, name='survey_profile_electric'),
#url(r'^profile/intake/$', views.survey_intake, name='survey_profile_intake'),
url(r'^profile/surveys/$', views.survey_management, name='survey_management'),
url(r'^main/$', views.main_index),
url(r'^survey_management/$', views.survey_management, name='survey_management'),
#url(r'^survey_data/(?P<survey_shortname>.+)/(?P<id>\d+)/$', views.survey_data, name='survey_data'),
url(r'^intake/$', views.survey_data, name='survey_data'),
    url(r'^monthly/(?P<id>\d+)/$', views.survey_data_monthly, name='survey_data_monthly'),
    url(r'^monthly/(?P<id>\d+)/update/$', views.survey_update_monthly, name='survey_update_monthly'),
url(r'^thanks_profile/$', views.thanks_profile, name='profile_thanks'),
#url(r'^select/$', views.select_user, name='survey_select_user'),
url(r'^$', views.index, name='survey_index'),
)
|
<commit_before>from django.conf.urls.defaults import *
from . import views
urlpatterns = patterns('',
url(r'^profile/$', views.profile_index, name='survey_profile'),
url(r'^profile/electric/$', views.profile_electric, name='survey_profile_electric'),
url(r'^profile/surveys/$', views.survey_management, name='survey_management'),
url(r'^main/$', views.main_index),
url(r'^survey_management/$', views.survey_management, name='survey_management'),
url(r'^intake/view/$', views.survey_intake_view, name='survey_intake_view'),
url(r'^intake/update/$', views.survey_intake_update, name='survey_intake_update'),
    url(r'^monthly/(?P<id>\d+)/$', views.survey_monthly, name='survey_monthly'),
    url(r'^monthly/(?P<id>\d+)/update/$', views.survey_monthly_update, name='survey_monthly_update'),
url(r'^thanks_profile/$', views.thanks_profile, name='profile_thanks'),
#url(r'^select/$', views.select_user, name='survey_select_user'),
url(r'^$', views.index, name='survey_index'),
)
<commit_msg>Add new decorator for survey_data<commit_after>from django.conf.urls.defaults import *
from . import views
urlpatterns = patterns('',
url(r'^profile/$', views.profile_index, name='survey_profile'),
url(r'^profile/electric/$', views.profile_electric, name='survey_profile_electric'),
#url(r'^profile/intake/$', views.survey_intake, name='survey_profile_intake'),
url(r'^profile/surveys/$', views.survey_management, name='survey_management'),
url(r'^main/$', views.main_index),
url(r'^survey_management/$', views.survey_management, name='survey_management'),
#url(r'^survey_data/(?P<survey_shortname>.+)/(?P<id>\d+)/$', views.survey_data, name='survey_data'),
url(r'^intake/$', views.survey_data, name='survey_data'),
    url(r'^monthly/(?P<id>\d+)/$', views.survey_data_monthly, name='survey_data_monthly'),
    url(r'^monthly/(?P<id>\d+)/update/$', views.survey_update_monthly, name='survey_update_monthly'),
url(r'^thanks_profile/$', views.thanks_profile, name='profile_thanks'),
#url(r'^select/$', views.select_user, name='survey_select_user'),
url(r'^$', views.index, name='survey_index'),
)
|
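Side note on the survey routes above: each `(?P<id>\d+)` named group reaches the view as an `id` keyword argument, and the `name=` kwarg is what reverse() and {% url %} resolve. The patterns()/django.conf.urls.defaults API shown is from the old Django 1.x era; the same monthly route in the modern API would look roughly like this (view and route names illustrative):

from django.urls import re_path

def survey_data_monthly(request, id):
    ...  # `id` arrives as a keyword argument captured from the URL

urlpatterns = [
    re_path(r'^monthly/(?P<id>\d+)/$', survey_data_monthly,
            name='survey_data_monthly'),
]
# reverse('survey_data_monthly', kwargs={'id': 7}) would then yield '/monthly/7/'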
1f6c1a4f596222b424d9f51ca30f5eb4d80f9942
|
mopidy_alsamixer/__init__.py
|
mopidy_alsamixer/__init__.py
|
import os
from mopidy import config, ext
__version__ = "1.1.1"
class Extension(ext.Extension):
dist_name = "Mopidy-ALSAMixer"
ext_name = "alsamixer"
version = __version__
def get_default_config(self):
conf_file = os.path.join(os.path.dirname(__file__), "ext.conf")
return config.read(conf_file)
def get_config_schema(self):
schema = super().get_config_schema()
schema["card"] = config.Integer(minimum=0)
schema["control"] = config.String()
schema["min_volume"] = config.Integer(minimum=0, maximum=100)
schema["max_volume"] = config.Integer(minimum=0, maximum=100)
schema["volume_scale"] = config.String(
choices=("linear", "cubic", "log")
)
return schema
def setup(self, registry):
from mopidy_alsamixer.mixer import AlsaMixer
registry.add("mixer", AlsaMixer)
|
import pathlib
from mopidy import config, ext
__version__ = "1.1.1"
class Extension(ext.Extension):
dist_name = "Mopidy-ALSAMixer"
ext_name = "alsamixer"
version = __version__
def get_default_config(self):
return config.read(pathlib.Path(__file__).parent / "ext.conf")
def get_config_schema(self):
schema = super().get_config_schema()
schema["card"] = config.Integer(minimum=0)
schema["control"] = config.String()
schema["min_volume"] = config.Integer(minimum=0, maximum=100)
schema["max_volume"] = config.Integer(minimum=0, maximum=100)
schema["volume_scale"] = config.String(
choices=("linear", "cubic", "log")
)
return schema
def setup(self, registry):
from mopidy_alsamixer.mixer import AlsaMixer
registry.add("mixer", AlsaMixer)
|
Use pathlib to read ext.conf
|
Use pathlib to read ext.conf
|
Python
|
apache-2.0
|
mopidy/mopidy-alsamixer
|
import os
from mopidy import config, ext
__version__ = "1.1.1"
class Extension(ext.Extension):
dist_name = "Mopidy-ALSAMixer"
ext_name = "alsamixer"
version = __version__
def get_default_config(self):
conf_file = os.path.join(os.path.dirname(__file__), "ext.conf")
return config.read(conf_file)
def get_config_schema(self):
schema = super().get_config_schema()
schema["card"] = config.Integer(minimum=0)
schema["control"] = config.String()
schema["min_volume"] = config.Integer(minimum=0, maximum=100)
schema["max_volume"] = config.Integer(minimum=0, maximum=100)
schema["volume_scale"] = config.String(
choices=("linear", "cubic", "log")
)
return schema
def setup(self, registry):
from mopidy_alsamixer.mixer import AlsaMixer
registry.add("mixer", AlsaMixer)
Use pathlib to read ext.conf
|
import pathlib
from mopidy import config, ext
__version__ = "1.1.1"
class Extension(ext.Extension):
dist_name = "Mopidy-ALSAMixer"
ext_name = "alsamixer"
version = __version__
def get_default_config(self):
return config.read(pathlib.Path(__file__).parent / "ext.conf")
def get_config_schema(self):
schema = super().get_config_schema()
schema["card"] = config.Integer(minimum=0)
schema["control"] = config.String()
schema["min_volume"] = config.Integer(minimum=0, maximum=100)
schema["max_volume"] = config.Integer(minimum=0, maximum=100)
schema["volume_scale"] = config.String(
choices=("linear", "cubic", "log")
)
return schema
def setup(self, registry):
from mopidy_alsamixer.mixer import AlsaMixer
registry.add("mixer", AlsaMixer)
|
<commit_before>import os
from mopidy import config, ext
__version__ = "1.1.1"
class Extension(ext.Extension):
dist_name = "Mopidy-ALSAMixer"
ext_name = "alsamixer"
version = __version__
def get_default_config(self):
conf_file = os.path.join(os.path.dirname(__file__), "ext.conf")
return config.read(conf_file)
def get_config_schema(self):
schema = super().get_config_schema()
schema["card"] = config.Integer(minimum=0)
schema["control"] = config.String()
schema["min_volume"] = config.Integer(minimum=0, maximum=100)
schema["max_volume"] = config.Integer(minimum=0, maximum=100)
schema["volume_scale"] = config.String(
choices=("linear", "cubic", "log")
)
return schema
def setup(self, registry):
from mopidy_alsamixer.mixer import AlsaMixer
registry.add("mixer", AlsaMixer)
<commit_msg>Use pathlib to read ext.conf<commit_after>
|
import pathlib
from mopidy import config, ext
__version__ = "1.1.1"
class Extension(ext.Extension):
dist_name = "Mopidy-ALSAMixer"
ext_name = "alsamixer"
version = __version__
def get_default_config(self):
return config.read(pathlib.Path(__file__).parent / "ext.conf")
def get_config_schema(self):
schema = super().get_config_schema()
schema["card"] = config.Integer(minimum=0)
schema["control"] = config.String()
schema["min_volume"] = config.Integer(minimum=0, maximum=100)
schema["max_volume"] = config.Integer(minimum=0, maximum=100)
schema["volume_scale"] = config.String(
choices=("linear", "cubic", "log")
)
return schema
def setup(self, registry):
from mopidy_alsamixer.mixer import AlsaMixer
registry.add("mixer", AlsaMixer)
|
import os
from mopidy import config, ext
__version__ = "1.1.1"
class Extension(ext.Extension):
dist_name = "Mopidy-ALSAMixer"
ext_name = "alsamixer"
version = __version__
def get_default_config(self):
conf_file = os.path.join(os.path.dirname(__file__), "ext.conf")
return config.read(conf_file)
def get_config_schema(self):
schema = super().get_config_schema()
schema["card"] = config.Integer(minimum=0)
schema["control"] = config.String()
schema["min_volume"] = config.Integer(minimum=0, maximum=100)
schema["max_volume"] = config.Integer(minimum=0, maximum=100)
schema["volume_scale"] = config.String(
choices=("linear", "cubic", "log")
)
return schema
def setup(self, registry):
from mopidy_alsamixer.mixer import AlsaMixer
registry.add("mixer", AlsaMixer)
Use pathlib to read ext.confimport pathlib
from mopidy import config, ext
__version__ = "1.1.1"
class Extension(ext.Extension):
dist_name = "Mopidy-ALSAMixer"
ext_name = "alsamixer"
version = __version__
def get_default_config(self):
return config.read(pathlib.Path(__file__).parent / "ext.conf")
def get_config_schema(self):
schema = super().get_config_schema()
schema["card"] = config.Integer(minimum=0)
schema["control"] = config.String()
schema["min_volume"] = config.Integer(minimum=0, maximum=100)
schema["max_volume"] = config.Integer(minimum=0, maximum=100)
schema["volume_scale"] = config.String(
choices=("linear", "cubic", "log")
)
return schema
def setup(self, registry):
from mopidy_alsamixer.mixer import AlsaMixer
registry.add("mixer", AlsaMixer)
|
<commit_before>import os
from mopidy import config, ext
__version__ = "1.1.1"
class Extension(ext.Extension):
dist_name = "Mopidy-ALSAMixer"
ext_name = "alsamixer"
version = __version__
def get_default_config(self):
conf_file = os.path.join(os.path.dirname(__file__), "ext.conf")
return config.read(conf_file)
def get_config_schema(self):
schema = super().get_config_schema()
schema["card"] = config.Integer(minimum=0)
schema["control"] = config.String()
schema["min_volume"] = config.Integer(minimum=0, maximum=100)
schema["max_volume"] = config.Integer(minimum=0, maximum=100)
schema["volume_scale"] = config.String(
choices=("linear", "cubic", "log")
)
return schema
def setup(self, registry):
from mopidy_alsamixer.mixer import AlsaMixer
registry.add("mixer", AlsaMixer)
<commit_msg>Use pathlib to read ext.conf<commit_after>import pathlib
from mopidy import config, ext
__version__ = "1.1.1"
class Extension(ext.Extension):
dist_name = "Mopidy-ALSAMixer"
ext_name = "alsamixer"
version = __version__
def get_default_config(self):
return config.read(pathlib.Path(__file__).parent / "ext.conf")
def get_config_schema(self):
schema = super().get_config_schema()
schema["card"] = config.Integer(minimum=0)
schema["control"] = config.String()
schema["min_volume"] = config.Integer(minimum=0, maximum=100)
schema["max_volume"] = config.Integer(minimum=0, maximum=100)
schema["volume_scale"] = config.String(
choices=("linear", "cubic", "log")
)
return schema
def setup(self, registry):
from mopidy_alsamixer.mixer import AlsaMixer
registry.add("mixer", AlsaMixer)
|
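A minimal sketch, assuming only the standard library, of the two path-building styles the mopidy-alsamixer commit above swaps between; ext.conf is taken from the record, the assertion is only illustrative:
import os
import pathlib

# Legacy style: string manipulation via os.path.
conf_file = os.path.join(os.path.dirname(__file__), "ext.conf")

# pathlib style: Path.parent plus the / operator returns a Path object,
# which config.read() in the commit above consumes directly.
conf_path = pathlib.Path(__file__).parent / "ext.conf"

# Both resolve to the same location; pathlib just keeps it as an object.
assert str(conf_path) == conf_file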
641b2d07a4250a779ad6ff31f579968f69362cc0
|
numscons/numdist/__init__.py
|
numscons/numdist/__init__.py
|
from numdist_copy import default_lib_dirs, default_include_dirs, \
default_src_dirs, get_standard_file
from numdist_copy import msvc_runtime_library
from conv_template import process_file as process_c_file
from from_template import process_file as process_f_file
|
from numdist_copy import default_lib_dirs, default_include_dirs, \
default_src_dirs, get_standard_file
from numdist_copy import msvc_runtime_library
from conv_template import process_file as process_c_file, process_str as process_c_str
from from_template import process_file as process_f_file
|
Add process_c_str function to the numdist API
|
Add process_c_str function to the numdist API
|
Python
|
bsd-3-clause
|
cournape/numscons,cournape/numscons,cournape/numscons
|
from numdist_copy import default_lib_dirs, default_include_dirs, \
default_src_dirs, get_standard_file
from numdist_copy import msvc_runtime_library
from conv_template import process_file as process_c_file
from from_template import process_file as process_f_file
Add process_c_str function to the numdist API
|
from numdist_copy import default_lib_dirs, default_include_dirs, \
default_src_dirs, get_standard_file
from numdist_copy import msvc_runtime_library
from conv_template import process_file as process_c_file, process_str as process_c_str
from from_template import process_file as process_f_file
|
<commit_before>from numdist_copy import default_lib_dirs, default_include_dirs, \
default_src_dirs, get_standard_file
from numdist_copy import msvc_runtime_library
from conv_template import process_file as process_c_file
from from_template import process_file as process_f_file
<commit_msg>Add process_c_str function to the numdist API<commit_after>
|
from numdist_copy import default_lib_dirs, default_include_dirs, \
default_src_dirs, get_standard_file
from numdist_copy import msvc_runtime_library
from conv_template import process_file as process_c_file, process_str as process_c_str
from from_template import process_file as process_f_file
|
from numdist_copy import default_lib_dirs, default_include_dirs, \
default_src_dirs, get_standard_file
from numdist_copy import msvc_runtime_library
from conv_template import process_file as process_c_file
from from_template import process_file as process_f_file
Add process_c_str function to the numdist APIfrom numdist_copy import default_lib_dirs, default_include_dirs, \
default_src_dirs, get_standard_file
from numdist_copy import msvc_runtime_library
from conv_template import process_file as process_c_file, process_str as process_c_str
from from_template import process_file as process_f_file
|
<commit_before>from numdist_copy import default_lib_dirs, default_include_dirs, \
default_src_dirs, get_standard_file
from numdist_copy import msvc_runtime_library
from conv_template import process_file as process_c_file
from from_template import process_file as process_f_file
<commit_msg>Add process_c_str function to the numdist API<commit_after>from numdist_copy import default_lib_dirs, default_include_dirs, \
default_src_dirs, get_standard_file
from numdist_copy import msvc_runtime_library
from conv_template import process_file as process_c_file, process_str as process_c_str
from from_template import process_file as process_f_file
|
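A hedged, self-contained sketch of the aliased re-export pattern in the numscons change above; the template logic below is a made-up stand-in, and only the naming convention mirrors the commit:
def process_str(text):
    # Hypothetical template expansion working on an in-memory string.
    return text.replace("@VERSION@", "1.0")

def process_file(path):
    # File-based entry point layered over the string-based one.
    with open(path) as f:
        return process_str(f.read())

# Re-export both under C-template-specific names, as the commit's
# "import ... as process_c_str" does for the package API.
process_c_file = process_file
process_c_str = process_str

print(process_c_str("version @VERSION@"))  # -> version 1.0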
22e3ed1791698e2f2ffa7b8ce9b62a9f4c533b7b
|
open_budget_data_api/main.py
|
open_budget_data_api/main.py
|
import logging
import os
from flask import Flask
from flask_cors import CORS
from apisql import apisql_blueprint
app = Flask(__name__)
log = logging.getLogger(__name__)
log.setLevel(logging.INFO)
MAX_ROWS = int(os.environ.get('MAX_ROWS', 1000))
app.register_blueprint(
apisql_blueprint(connection_string=os.environ['DATABASE_URL'], max_rows=MAX_ROWS, debug=False),
url_prefix='/api/'
)
CORS(app)
@app.after_request
def add_header(response):
response.cache_control.max_age = 3600
return response
|
import logging
import os
from flask import Flask
from flask_cors import CORS
from apisql import apisql_blueprint
app = Flask(__name__)
log = logging.getLogger(__name__)
log.setLevel(logging.INFO)
MAX_ROWS = int(os.environ.get('MAX_ROWS', 1000))
app.register_blueprint(
apisql_blueprint(connection_string=os.environ['DATABASE_URL'], max_rows=MAX_ROWS, debug=False),
url_prefix='/api/'
)
CORS(app)
@app.after_request
def add_header(response):
response.cache_control.max_age = 600
return response
|
Reduce cache time on db queries
|
Reduce cache time on db queries
|
Python
|
mit
|
OpenBudget/open-budget-data-api,OpenBudget/open-budget-data-api
|
import logging
import os
from flask import Flask
from flask_cors import CORS
from apisql import apisql_blueprint
app = Flask(__name__)
log = logging.getLogger(__name__)
log.setLevel(logging.INFO)
MAX_ROWS = int(os.environ.get('MAX_ROWS', 1000))
app.register_blueprint(
apisql_blueprint(connection_string=os.environ['DATABASE_URL'], max_rows=MAX_ROWS, debug=False),
url_prefix='/api/'
)
CORS(app)
@app.after_request
def add_header(response):
response.cache_control.max_age = 3600
return response
Reduce cache time on db queries
|
import logging
import os
from flask import Flask
from flask_cors import CORS
from apisql import apisql_blueprint
app = Flask(__name__)
log = logging.getLogger(__name__)
log.setLevel(logging.INFO)
MAX_ROWS = int(os.environ.get('MAX_ROWS', 1000))
app.register_blueprint(
apisql_blueprint(connection_string=os.environ['DATABASE_URL'], max_rows=MAX_ROWS, debug=False),
url_prefix='/api/'
)
CORS(app)
@app.after_request
def add_header(response):
response.cache_control.max_age = 600
return response
|
<commit_before>import logging
import os
from flask import Flask
from flask_cors import CORS
from apisql import apisql_blueprint
app = Flask(__name__)
log = logging.getLogger(__name__)
log.setLevel(logging.INFO)
MAX_ROWS = int(os.environ.get('MAX_ROWS', 1000))
app.register_blueprint(
apisql_blueprint(connection_string=os.environ['DATABASE_URL'], max_rows=MAX_ROWS, debug=False),
url_prefix='/api/'
)
CORS(app)
@app.after_request
def add_header(response):
response.cache_control.max_age = 3600
return response
<commit_msg>Reduce cache time on db queries<commit_after>
|
import logging
import os
from flask import Flask
from flask_cors import CORS
from apisql import apisql_blueprint
app = Flask(__name__)
log = logging.getLogger(__name__)
log.setLevel(logging.INFO)
MAX_ROWS = int(os.environ.get('MAX_ROWS', 1000))
app.register_blueprint(
apisql_blueprint(connection_string=os.environ['DATABASE_URL'], max_rows=MAX_ROWS, debug=False),
url_prefix='/api/'
)
CORS(app)
@app.after_request
def add_header(response):
response.cache_control.max_age = 600
return response
|
import logging
import os
from flask import Flask
from flask_cors import CORS
from apisql import apisql_blueprint
app = Flask(__name__)
log = logging.getLogger(__name__)
log.setLevel(logging.INFO)
MAX_ROWS = int(os.environ.get('MAX_ROWS', 1000))
app.register_blueprint(
apisql_blueprint(connection_string=os.environ['DATABASE_URL'], max_rows=MAX_ROWS, debug=False),
url_prefix='/api/'
)
CORS(app)
@app.after_request
def add_header(response):
response.cache_control.max_age = 3600
return response
Reduce cache time on db queriesimport logging
import os
from flask import Flask
from flask_cors import CORS
from apisql import apisql_blueprint
app = Flask(__name__)
log = logging.getLogger(__name__)
log.setLevel(logging.INFO)
MAX_ROWS = int(os.environ.get('MAX_ROWS', 1000))
app.register_blueprint(
apisql_blueprint(connection_string=os.environ['DATABASE_URL'], max_rows=MAX_ROWS, debug=False),
url_prefix='/api/'
)
CORS(app)
@app.after_request
def add_header(response):
response.cache_control.max_age = 600
return response
|
<commit_before>import logging
import os
from flask import Flask
from flask_cors import CORS
from apisql import apisql_blueprint
app = Flask(__name__)
log = logging.getLogger(__name__)
log.setLevel(logging.INFO)
MAX_ROWS = int(os.environ.get('MAX_ROWS', 1000))
app.register_blueprint(
apisql_blueprint(connection_string=os.environ['DATABASE_URL'], max_rows=MAX_ROWS, debug=False),
url_prefix='/api/'
)
CORS(app)
@app.after_request
def add_header(response):
response.cache_control.max_age = 3600
return response
<commit_msg>Reduce cache time on db queries<commit_after>import logging
import os
from flask import Flask
from flask_cors import CORS
from apisql import apisql_blueprint
app = Flask(__name__)
log = logging.getLogger(__name__)
log.setLevel(logging.INFO)
MAX_ROWS = int(os.environ.get('MAX_ROWS', 1000))
app.register_blueprint(
apisql_blueprint(connection_string=os.environ['DATABASE_URL'], max_rows=MAX_ROWS, debug=False),
url_prefix='/api/'
)
CORS(app)
@app.after_request
def add_header(response):
response.cache_control.max_age = 600
return response
|
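A runnable sketch, using only public Flask APIs, of what the max_age change above does to the HTTP response; the route is invented for the demo:
from flask import Flask

app = Flask(__name__)

@app.route("/")
def index():
    return "ok"

@app.after_request
def add_header(response):
    # Werkzeug serialises this as "Cache-Control: max-age=600", telling
    # clients and proxies the response is reusable for up to 10 minutes.
    response.cache_control.max_age = 600
    return response

with app.test_client() as client:
    print(client.get("/").headers["Cache-Control"])  # max-age=600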
d792679461357fa17b1f852d7a72921aed2fe271
|
bermann/rdd_test.py
|
bermann/rdd_test.py
|
import unittest
from bermann import RDD
class TestRDD(unittest.TestCase):
def test_cache_is_noop(self):
rdd = RDD([1, 2, 3])
cached = rdd.cache()
self.assertEqual(rdd, cached)
# collect
# count
# countByKey
# countByValue
# distinct
# filter
# first
# flatMap
# flatMapValues
# foreach
# groupBy
# groupByKey
# isEmpty
# keyBy
# keys
# map
# mapValues
# max
# min
# name
if __name__ == '__main__':
unittest.main()
|
import unittest
from bermann import RDD
class TestRDD(unittest.TestCase):
def test_cache_is_noop(self):
rdd = RDD([1, 2, 3])
cached = rdd.cache()
self.assertEqual(rdd, cached)
def test_collect_empty_rdd_returns_empty_list(self):
rdd = RDD()
self.assertEqual([], rdd.collect())
def test_collect_non_empty_rdd_returns_contents(self):
rdd = RDD([1, 2, 3])
self.assertEqual(rdd.contents, rdd.collect())
def test_count_empty_rdd_returns_zero(self):
rdd = RDD()
self.assertEqual(0, rdd.count())
def test_count_non_empty_rdd_returns_length(self):
rdd = RDD([1, 2, 3])
self.assertEqual(3, rdd.count())
# countByKey
# countByValue
# distinct
# filter
# first
# flatMap
# flatMapValues
# foreach
# groupBy
# groupByKey
# isEmpty
# keyBy
# keys
# map
# mapValues
# max
# min
# name
if __name__ == '__main__':
unittest.main()
|
Add tests for count and collect
|
Add tests for count and collect
|
Python
|
mit
|
oli-hall/bermann
|
import unittest
from bermann import RDD
class TestRDD(unittest.TestCase):
def test_cache_is_noop(self):
rdd = RDD([1, 2, 3])
cached = rdd.cache()
self.assertEqual(rdd, cached)
# collect
# count
# countByKey
# countByValue
# distinct
# filter
# first
# flatMap
# flatMapValues
# foreach
# groupBy
# groupByKey
# isEmpty
# keyBy
# keys
# map
# mapValues
# max
# min
# name
if __name__ == '__main__':
unittest.main()
Add tests for count and collect
|
import unittest
from bermann import RDD
class TestRDD(unittest.TestCase):
def test_cache_is_noop(self):
rdd = RDD([1, 2, 3])
cached = rdd.cache()
self.assertEqual(rdd, cached)
def test_collect_empty_rdd_returns_empty_list(self):
rdd = RDD()
self.assertEqual([], rdd.collect())
def test_collect_non_empty_rdd_returns_contents(self):
rdd = RDD([1, 2, 3])
self.assertEqual(rdd.contents, rdd.collect())
def test_count_empty_rdd_returns_zero(self):
rdd = RDD()
self.assertEqual(0, rdd.count())
def test_count_non_empty_rdd_returns_length(self):
rdd = RDD([1, 2, 3])
self.assertEqual(3, rdd.count())
# countByKey
# countByValue
# distinct
# filter
# first
# flatMap
# flatMapValues
# foreach
# groupBy
# groupByKey
# isEmpty
# keyBy
# keys
# map
# mapValues
# max
# min
# name
if __name__ == '__main__':
unittest.main()
|
<commit_before>import unittest
from bermann import RDD
class TestRDD(unittest.TestCase):
def test_cache_is_noop(self):
rdd = RDD([1, 2, 3])
cached = rdd.cache()
self.assertEqual(rdd, cached)
# collect
# count
# countByKey
# countByValue
# distinct
# filter
# first
# flatMap
# flatMapValues
# foreach
# groupBy
# groupByKey
# isEmpty
# keyBy
# keys
# map
# mapValues
# max
# min
# name
if __name__ == '__main__':
unittest.main()
<commit_msg>Add tests for count and collect<commit_after>
|
import unittest
from bermann import RDD
class TestRDD(unittest.TestCase):
def test_cache_is_noop(self):
rdd = RDD([1, 2, 3])
cached = rdd.cache()
self.assertEqual(rdd, cached)
def test_collect_empty_rdd_returns_empty_list(self):
rdd = RDD()
self.assertEqual([], rdd.collect())
def test_collect_non_empty_rdd_returns_contents(self):
rdd = RDD([1, 2, 3])
self.assertEqual(rdd.contents, rdd.collect())
def test_count_empty_rdd_returns_zero(self):
rdd = RDD()
self.assertEqual(0, rdd.count())
def test_count_non_empty_rdd_returns_length(self):
rdd = RDD([1, 2, 3])
self.assertEqual(3, rdd.count())
# countByKey
# countByValue
# distinct
# filter
# first
# flatMap
# flatMapValues
# foreach
# groupBy
# groupByKey
# isEmpty
# keyBy
# keys
# map
# mapValues
# max
# min
# name
if __name__ == '__main__':
unittest.main()
|
import unittest
from bermann import RDD
class TestRDD(unittest.TestCase):
def test_cache_is_noop(self):
rdd = RDD([1, 2, 3])
cached = rdd.cache()
self.assertEqual(rdd, cached)
# collect
# count
# countByKey
# countByValue
# distinct
# filter
# first
# flatMap
# flatMapValues
# foreach
# groupBy
# groupByKey
# isEmpty
# keyBy
# keys
# map
# mapValues
# max
# min
# name
if __name__ == '__main__':
unittest.main()
Add tests for count and collectimport unittest
from bermann import RDD
class TestRDD(unittest.TestCase):
def test_cache_is_noop(self):
rdd = RDD([1, 2, 3])
cached = rdd.cache()
self.assertEqual(rdd, cached)
def test_collect_empty_rdd_returns_empty_list(self):
rdd = RDD()
self.assertEqual([], rdd.collect())
def test_collect_non_empty_rdd_returns_contents(self):
rdd = RDD([1, 2, 3])
self.assertEqual(rdd.contents, rdd.collect())
def test_count_empty_rdd_returns_zero(self):
rdd = RDD()
self.assertEqual(0, rdd.count())
def test_count_non_empty_rdd_returns_length(self):
rdd = RDD([1, 2, 3])
self.assertEqual(3, rdd.count())
# countByKey
# countByValue
# distinct
# filter
# first
# flatMap
# flatMapValues
# foreach
# groupBy
# groupByKey
# isEmpty
# keyBy
# keys
# map
# mapValues
# max
# min
# name
if __name__ == '__main__':
unittest.main()
|
<commit_before>import unittest
from bermann import RDD
class TestRDD(unittest.TestCase):
def test_cache_is_noop(self):
rdd = RDD([1, 2, 3])
cached = rdd.cache()
self.assertEqual(rdd, cached)
# collect
# count
# countByKey
# countByValue
# distinct
# filter
# first
# flatMap
# flatMapValues
# foreach
# groupBy
# groupByKey
# isEmpty
# keyBy
# keys
# map
# mapValues
# max
# min
# name
if __name__ == '__main__':
unittest.main()
<commit_msg>Add tests for count and collect<commit_after>import unittest
from bermann import RDD
class TestRDD(unittest.TestCase):
def test_cache_is_noop(self):
rdd = RDD([1, 2, 3])
cached = rdd.cache()
self.assertEqual(rdd, cached)
def test_collect_empty_rdd_returns_empty_list(self):
rdd = RDD()
self.assertEqual([], rdd.collect())
def test_collect_non_empty_rdd_returns_contents(self):
rdd = RDD([1, 2, 3])
self.assertEqual(rdd.contents, rdd.collect())
def test_count_empty_rdd_returns_zero(self):
rdd = RDD()
self.assertEqual(0, rdd.count())
def test_count_non_empty_rdd_returns_length(self):
rdd = RDD([1, 2, 3])
self.assertEqual(3, rdd.count())
# countByKey
# countByValue
# distinct
# filter
# first
# flatMap
# flatMapValues
# foreach
# groupBy
# groupByKey
# isEmpty
# keyBy
# keys
# map
# mapValues
# max
# min
# name
if __name__ == '__main__':
unittest.main()
|
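The tests above rely on an RDD class exposing a contents attribute; a minimal sketch of such a class, written here only to make the tested behaviour concrete (the real bermann implementation may differ):
class RDD(object):
    def __init__(self, contents=None):
        # Back the RDD with a plain list; no argument means an empty RDD.
        self.contents = contents if contents is not None else []

    def cache(self):
        # Caching is a no-op for a local mock, so return self unchanged.
        return self

    def collect(self):
        return self.contents

    def count(self):
        return len(self.contents)

assert RDD().count() == 0
assert RDD([1, 2, 3]).collect() == [1, 2, 3]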
8dd1164979ae8fce031fea153178ab7940e21069
|
zproject/jinja2/__init__.py
|
zproject/jinja2/__init__.py
|
from typing import Any
from django.conf import settings
from django.contrib.staticfiles.storage import staticfiles_storage
from django.template.defaultfilters import pluralize, slugify
from django.urls import reverse
from django.utils import translation
from django.utils.timesince import timesince
from jinja2 import Environment
from two_factor.templatetags.two_factor import device_action
from zerver.templatetags.app_filters import display_list, render_markdown_path
def environment(**options: Any) -> Environment:
env = Environment(**options)
env.globals.update(
default_page_params={
"debug_mode": False,
"webpack_public_path": staticfiles_storage.url(
settings.WEBPACK_LOADER["DEFAULT"]["BUNDLE_DIR_NAME"],
),
},
static=staticfiles_storage.url,
url=reverse,
render_markdown_path=render_markdown_path,
)
env.install_gettext_translations(translation, True)
env.filters["slugify"] = slugify
env.filters["pluralize"] = pluralize
env.filters["display_list"] = display_list
env.filters["device_action"] = device_action
env.filters["timesince"] = timesince
return env
|
from typing import Any
from django.conf import settings
from django.contrib.staticfiles.storage import staticfiles_storage
from django.template.defaultfilters import pluralize, slugify
from django.urls import reverse
from django.utils import translation
from django.utils.timesince import timesince
from jinja2 import Environment
from two_factor.templatetags.two_factor import device_action
from zerver.templatetags.app_filters import display_list, render_markdown_path
def environment(**options: Any) -> Environment:
env = Environment(**options)
env.globals.update(
default_page_params={
"debug_mode": settings.DEBUG,
"webpack_public_path": staticfiles_storage.url(
settings.WEBPACK_LOADER["DEFAULT"]["BUNDLE_DIR_NAME"],
),
},
static=staticfiles_storage.url,
url=reverse,
render_markdown_path=render_markdown_path,
)
env.install_gettext_translations(translation, True)
env.filters["slugify"] = slugify
env.filters["pluralize"] = pluralize
env.filters["display_list"] = display_list
env.filters["device_action"] = device_action
env.filters["timesince"] = timesince
return env
|
Make debug_mode for default_page_params follow the setting.
|
static: Make debug_mode for default_page_params follow the setting.
For pages that don't have page_params, the default_page_params now
ensures that debug_mode will correctly follow settings.DEBUG.
This allows blueslip exception popups to work on portico pages in the
development environment.
Fixes: #17540.
|
Python
|
apache-2.0
|
punchagan/zulip,andersk/zulip,andersk/zulip,zulip/zulip,hackerkid/zulip,rht/zulip,eeshangarg/zulip,eeshangarg/zulip,punchagan/zulip,punchagan/zulip,hackerkid/zulip,hackerkid/zulip,eeshangarg/zulip,rht/zulip,zulip/zulip,rht/zulip,rht/zulip,eeshangarg/zulip,kou/zulip,eeshangarg/zulip,zulip/zulip,andersk/zulip,kou/zulip,hackerkid/zulip,andersk/zulip,andersk/zulip,hackerkid/zulip,rht/zulip,zulip/zulip,kou/zulip,zulip/zulip,andersk/zulip,kou/zulip,hackerkid/zulip,hackerkid/zulip,punchagan/zulip,zulip/zulip,eeshangarg/zulip,punchagan/zulip,rht/zulip,rht/zulip,zulip/zulip,kou/zulip,kou/zulip,punchagan/zulip,andersk/zulip,punchagan/zulip,eeshangarg/zulip,kou/zulip
|
from typing import Any
from django.conf import settings
from django.contrib.staticfiles.storage import staticfiles_storage
from django.template.defaultfilters import pluralize, slugify
from django.urls import reverse
from django.utils import translation
from django.utils.timesince import timesince
from jinja2 import Environment
from two_factor.templatetags.two_factor import device_action
from zerver.templatetags.app_filters import display_list, render_markdown_path
def environment(**options: Any) -> Environment:
env = Environment(**options)
env.globals.update(
default_page_params={
"debug_mode": False,
"webpack_public_path": staticfiles_storage.url(
settings.WEBPACK_LOADER["DEFAULT"]["BUNDLE_DIR_NAME"],
),
},
static=staticfiles_storage.url,
url=reverse,
render_markdown_path=render_markdown_path,
)
env.install_gettext_translations(translation, True)
env.filters["slugify"] = slugify
env.filters["pluralize"] = pluralize
env.filters["display_list"] = display_list
env.filters["device_action"] = device_action
env.filters["timesince"] = timesince
return env
static: Make debug_mode for default_page_params follow the setting.
For pages that don't have page_params, the default_page_params now
ensures that debug_mode will correctly follow settings.DEBUG.
This allows blueslip exception popups to work on portico pages in the
development environment.
Fixes: #17540.
|
from typing import Any
from django.conf import settings
from django.contrib.staticfiles.storage import staticfiles_storage
from django.template.defaultfilters import pluralize, slugify
from django.urls import reverse
from django.utils import translation
from django.utils.timesince import timesince
from jinja2 import Environment
from two_factor.templatetags.two_factor import device_action
from zerver.templatetags.app_filters import display_list, render_markdown_path
def environment(**options: Any) -> Environment:
env = Environment(**options)
env.globals.update(
default_page_params={
"debug_mode": settings.DEBUG,
"webpack_public_path": staticfiles_storage.url(
settings.WEBPACK_LOADER["DEFAULT"]["BUNDLE_DIR_NAME"],
),
},
static=staticfiles_storage.url,
url=reverse,
render_markdown_path=render_markdown_path,
)
env.install_gettext_translations(translation, True)
env.filters["slugify"] = slugify
env.filters["pluralize"] = pluralize
env.filters["display_list"] = display_list
env.filters["device_action"] = device_action
env.filters["timesince"] = timesince
return env
|
<commit_before>from typing import Any
from django.conf import settings
from django.contrib.staticfiles.storage import staticfiles_storage
from django.template.defaultfilters import pluralize, slugify
from django.urls import reverse
from django.utils import translation
from django.utils.timesince import timesince
from jinja2 import Environment
from two_factor.templatetags.two_factor import device_action
from zerver.templatetags.app_filters import display_list, render_markdown_path
def environment(**options: Any) -> Environment:
env = Environment(**options)
env.globals.update(
default_page_params={
"debug_mode": False,
"webpack_public_path": staticfiles_storage.url(
settings.WEBPACK_LOADER["DEFAULT"]["BUNDLE_DIR_NAME"],
),
},
static=staticfiles_storage.url,
url=reverse,
render_markdown_path=render_markdown_path,
)
env.install_gettext_translations(translation, True)
env.filters["slugify"] = slugify
env.filters["pluralize"] = pluralize
env.filters["display_list"] = display_list
env.filters["device_action"] = device_action
env.filters["timesince"] = timesince
return env
<commit_msg>static: Make debug_mode for default_page_params follow the setting.
For pages that don't have page_params, the default_page_params now
ensures that debug_mode will correctly follow settings.DEBUG.
This allows blueslip exception popups to work on portico pages in the
development environment.
Fixes: #17540.<commit_after>
|
from typing import Any
from django.conf import settings
from django.contrib.staticfiles.storage import staticfiles_storage
from django.template.defaultfilters import pluralize, slugify
from django.urls import reverse
from django.utils import translation
from django.utils.timesince import timesince
from jinja2 import Environment
from two_factor.templatetags.two_factor import device_action
from zerver.templatetags.app_filters import display_list, render_markdown_path
def environment(**options: Any) -> Environment:
env = Environment(**options)
env.globals.update(
default_page_params={
"debug_mode": settings.DEBUG,
"webpack_public_path": staticfiles_storage.url(
settings.WEBPACK_LOADER["DEFAULT"]["BUNDLE_DIR_NAME"],
),
},
static=staticfiles_storage.url,
url=reverse,
render_markdown_path=render_markdown_path,
)
env.install_gettext_translations(translation, True)
env.filters["slugify"] = slugify
env.filters["pluralize"] = pluralize
env.filters["display_list"] = display_list
env.filters["device_action"] = device_action
env.filters["timesince"] = timesince
return env
|
from typing import Any
from django.conf import settings
from django.contrib.staticfiles.storage import staticfiles_storage
from django.template.defaultfilters import pluralize, slugify
from django.urls import reverse
from django.utils import translation
from django.utils.timesince import timesince
from jinja2 import Environment
from two_factor.templatetags.two_factor import device_action
from zerver.templatetags.app_filters import display_list, render_markdown_path
def environment(**options: Any) -> Environment:
env = Environment(**options)
env.globals.update(
default_page_params={
"debug_mode": False,
"webpack_public_path": staticfiles_storage.url(
settings.WEBPACK_LOADER["DEFAULT"]["BUNDLE_DIR_NAME"],
),
},
static=staticfiles_storage.url,
url=reverse,
render_markdown_path=render_markdown_path,
)
env.install_gettext_translations(translation, True)
env.filters["slugify"] = slugify
env.filters["pluralize"] = pluralize
env.filters["display_list"] = display_list
env.filters["device_action"] = device_action
env.filters["timesince"] = timesince
return env
static: Make debug_mode for default_page_params follow the setting.
For pages that don't have page_params, the default_page_params now
ensures that debug_mode will correctly follow settings.DEBUG.
This allows blueslip exception popups to work on portico pages in the
development environment.
Fixes: #17540.from typing import Any
from django.conf import settings
from django.contrib.staticfiles.storage import staticfiles_storage
from django.template.defaultfilters import pluralize, slugify
from django.urls import reverse
from django.utils import translation
from django.utils.timesince import timesince
from jinja2 import Environment
from two_factor.templatetags.two_factor import device_action
from zerver.templatetags.app_filters import display_list, render_markdown_path
def environment(**options: Any) -> Environment:
env = Environment(**options)
env.globals.update(
default_page_params={
"debug_mode": settings.DEBUG,
"webpack_public_path": staticfiles_storage.url(
settings.WEBPACK_LOADER["DEFAULT"]["BUNDLE_DIR_NAME"],
),
},
static=staticfiles_storage.url,
url=reverse,
render_markdown_path=render_markdown_path,
)
env.install_gettext_translations(translation, True)
env.filters["slugify"] = slugify
env.filters["pluralize"] = pluralize
env.filters["display_list"] = display_list
env.filters["device_action"] = device_action
env.filters["timesince"] = timesince
return env
|
<commit_before>from typing import Any
from django.conf import settings
from django.contrib.staticfiles.storage import staticfiles_storage
from django.template.defaultfilters import pluralize, slugify
from django.urls import reverse
from django.utils import translation
from django.utils.timesince import timesince
from jinja2 import Environment
from two_factor.templatetags.two_factor import device_action
from zerver.templatetags.app_filters import display_list, render_markdown_path
def environment(**options: Any) -> Environment:
env = Environment(**options)
env.globals.update(
default_page_params={
"debug_mode": False,
"webpack_public_path": staticfiles_storage.url(
settings.WEBPACK_LOADER["DEFAULT"]["BUNDLE_DIR_NAME"],
),
},
static=staticfiles_storage.url,
url=reverse,
render_markdown_path=render_markdown_path,
)
env.install_gettext_translations(translation, True)
env.filters["slugify"] = slugify
env.filters["pluralize"] = pluralize
env.filters["display_list"] = display_list
env.filters["device_action"] = device_action
env.filters["timesince"] = timesince
return env
<commit_msg>static: Make debug_mode for default_page_params follow the setting.
For pages that don't have page_params, the default_page_params now
ensures that debug_mode will correctly follow settings.DEBUG.
This allows blueslip exception popups to work on portico pages in the
development environment.
Fixes: #17540.<commit_after>from typing import Any
from django.conf import settings
from django.contrib.staticfiles.storage import staticfiles_storage
from django.template.defaultfilters import pluralize, slugify
from django.urls import reverse
from django.utils import translation
from django.utils.timesince import timesince
from jinja2 import Environment
from two_factor.templatetags.two_factor import device_action
from zerver.templatetags.app_filters import display_list, render_markdown_path
def environment(**options: Any) -> Environment:
env = Environment(**options)
env.globals.update(
default_page_params={
"debug_mode": settings.DEBUG,
"webpack_public_path": staticfiles_storage.url(
settings.WEBPACK_LOADER["DEFAULT"]["BUNDLE_DIR_NAME"],
),
},
static=staticfiles_storage.url,
url=reverse,
render_markdown_path=render_markdown_path,
)
env.install_gettext_translations(translation, True)
env.filters["slugify"] = slugify
env.filters["pluralize"] = pluralize
env.filters["display_list"] = display_list
env.filters["device_action"] = device_action
env.filters["timesince"] = timesince
return env
|
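A small sketch of the Jinja2 mechanism the commit above relies on: values placed in Environment.globals are visible to every template without being passed explicitly. The debug flag below is a stand-in for settings.DEBUG:
from jinja2 import Environment

env = Environment()
env.globals.update(default_page_params={"debug_mode": True})

# No variables are passed to render(); the global is still resolvable.
tmpl = env.from_string("debug={{ default_page_params['debug_mode'] }}")
print(tmpl.render())  # -> debug=True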
889a2fc46567bffb4034de5785b03b8b87289c15
|
trac/versioncontrol/web_ui/__init__.py
|
trac/versioncontrol/web_ui/__init__.py
|
from trac.versioncontrol.web_ui.browser import *
from trac.versioncontrol.web_ui.changeset import *
from trac.versioncontrol.web_ui.log import *
|
from trac.versioncontrol.web_ui.browser import *
from trac.versioncontrol.web_ui.changeset import *
from trac.versioncontrol.web_ui.log import *
|
Add missing `svn:eol-style : native` prop, which prevented making clean patches against the early 0.9b1 archives (now both the .zip and the .tar.gz have CRLFs for this file)
|
Add missing `svn:eol-style : native` prop, which prevented making clean patches against the early 0.9b1 archives (now both the .zip and the .tar.gz have CRLFs for this file)
|
Python
|
bsd-3-clause
|
pkdevbox/trac,pkdevbox/trac,pkdevbox/trac,pkdevbox/trac
|
from trac.versioncontrol.web_ui.browser import *
from trac.versioncontrol.web_ui.changeset import *
from trac.versioncontrol.web_ui.log import *
Add missing `svn:eol-style : native` prop, which prevented making clean patches against the early 0.9b1 archives (now both the .zip and the .tar.gz have CRLFs for this file)
|
from trac.versioncontrol.web_ui.browser import *
from trac.versioncontrol.web_ui.changeset import *
from trac.versioncontrol.web_ui.log import *
|
<commit_before>from trac.versioncontrol.web_ui.browser import *
from trac.versioncontrol.web_ui.changeset import *
from trac.versioncontrol.web_ui.log import *
<commit_msg>Add missing `svn:eol-style : native` prop, which prevented making clean patches against the early 0.9b1 archives (now both the .zip and the .tar.gz have CRLFs for this file)<commit_after>
|
from trac.versioncontrol.web_ui.browser import *
from trac.versioncontrol.web_ui.changeset import *
from trac.versioncontrol.web_ui.log import *
|
from trac.versioncontrol.web_ui.browser import *
from trac.versioncontrol.web_ui.changeset import *
from trac.versioncontrol.web_ui.log import *
Add missing `svn:eol-style : native` prop, which prevented making clean patches against the early 0.9b1 archives (now both the .zip and the .tar.gz have CRLFs for this file)from trac.versioncontrol.web_ui.browser import *
from trac.versioncontrol.web_ui.changeset import *
from trac.versioncontrol.web_ui.log import *
|
<commit_before>from trac.versioncontrol.web_ui.browser import *
from trac.versioncontrol.web_ui.changeset import *
from trac.versioncontrol.web_ui.log import *
<commit_msg>Add missing `svn:eol-style : native` prop, which prevented making clean patches against the early 0.9b1 archives (now both the .zip and the .tar.gz have CRLFs for this file)<commit_after>from trac.versioncontrol.web_ui.browser import *
from trac.versioncontrol.web_ui.changeset import *
from trac.versioncontrol.web_ui.log import *
|
bcc79588e5e49c928210d6830fbe1a7386fcf5bb
|
apps/search/tasks.py
|
apps/search/tasks.py
|
import logging
from django.conf import settings
from django.db.models.signals import pre_delete
from elasticutils.contrib.django.tasks import index_objects, unindex_objects
from wiki.signals import render_done
def render_done_handler(**kwargs):
if not settings.ES_LIVE_INDEX or 'instance' not in kwargs:
return
instance = kwargs['instance']
try:
index_objects.delay(instance.get_mapping_type(), [instance.id])
except:
logging.error('Search indexing task failed',
exc_info=True)
def pre_delete_handler(**kwargs):
if not settings.ES_LIVE_INDEX or 'instance' not in kwargs:
return
instance = kwargs['instance']
unindex_objects.delay(instance.get_mapping_type(), [instance.id])
def register_live_index(model_cls):
"""Register a model and index for auto indexing."""
uid = str(model_cls) + 'live_indexing'
render_done.connect(render_done_handler, model_cls, dispatch_uid=uid)
pre_delete.connect(pre_delete_handler, model_cls, dispatch_uid=uid)
# Enable this to be used as decorator.
return model_cls
|
import logging
import warnings
from django.conf import settings
from django.db.models.signals import pre_delete
# ignore a deprecation warning from elasticutils until the fix is released
# refs https://github.com/mozilla/elasticutils/pull/160
warnings.filterwarnings("ignore",
category=DeprecationWarning,
module='celery.decorators')
from elasticutils.contrib.django.tasks import index_objects, unindex_objects
from wiki.signals import render_done
def render_done_handler(**kwargs):
if not settings.ES_LIVE_INDEX or 'instance' not in kwargs:
return
instance = kwargs['instance']
try:
index_objects.delay(instance.get_mapping_type(), [instance.id])
except:
logging.error('Search indexing task failed',
exc_info=True)
def pre_delete_handler(**kwargs):
if not settings.ES_LIVE_INDEX or 'instance' not in kwargs:
return
instance = kwargs['instance']
unindex_objects.delay(instance.get_mapping_type(), [instance.id])
def register_live_index(model_cls):
"""Register a model and index for auto indexing."""
uid = str(model_cls) + 'live_indexing'
render_done.connect(render_done_handler, model_cls, dispatch_uid=uid)
pre_delete.connect(pre_delete_handler, model_cls, dispatch_uid=uid)
# Enable this to be used as decorator.
return model_cls
|
Stop a deprecation warning that is thrown in elasticutils.
|
Stop a deprecation warning that is thrown in elasticutils.
This is not going to be needed once https://github.com/mozilla/elasticutils/pull/160
has been released.
|
Python
|
mpl-2.0
|
jezdez/kuma,whip112/Whip112,FrankBian/kuma,YOTOV-LIMITED/kuma,SphinxKnight/kuma,jgmize/kuma,RanadeepPolavarapu/kuma,ollie314/kuma,nhenezi/kuma,FrankBian/kuma,surajssd/kuma,YOTOV-LIMITED/kuma,yfdyh000/kuma,cindyyu/kuma,SphinxKnight/kuma,openjck/kuma,MenZil/kuma,RanadeepPolavarapu/kuma,yfdyh000/kuma,whip112/Whip112,carnell69/kuma,YOTOV-LIMITED/kuma,ollie314/kuma,RanadeepPolavarapu/kuma,SphinxKnight/kuma,surajssd/kuma,safwanrahman/kuma,utkbansal/kuma,groovecoder/kuma,chirilo/kuma,openjck/kuma,cindyyu/kuma,ronakkhunt/kuma,safwanrahman/kuma,robhudson/kuma,Elchi3/kuma,Elchi3/kuma,biswajitsahu/kuma,robhudson/kuma,yfdyh000/kuma,robhudson/kuma,biswajitsahu/kuma,hoosteeno/kuma,chirilo/kuma,groovecoder/kuma,scrollback/kuma,ronakkhunt/kuma,Elchi3/kuma,davehunt/kuma,ollie314/kuma,jwhitlock/kuma,tximikel/kuma,Elchi3/kuma,carnell69/kuma,hoosteeno/kuma,utkbansal/kuma,davehunt/kuma,anaran/kuma,mastizada/kuma,carnell69/kuma,bluemini/kuma,jwhitlock/kuma,SphinxKnight/kuma,scrollback/kuma,jgmize/kuma,chirilo/kuma,cindyyu/kuma,biswajitsahu/kuma,mozilla/kuma,a2sheppy/kuma,a2sheppy/kuma,nhenezi/kuma,MenZil/kuma,ollie314/kuma,tximikel/kuma,davidyezsetz/kuma,a2sheppy/kuma,surajssd/kuma,davehunt/kuma,yfdyh000/kuma,biswajitsahu/kuma,darkwing/kuma,RanadeepPolavarapu/kuma,tximikel/kuma,jezdez/kuma,bluemini/kuma,whip112/Whip112,surajssd/kuma,nhenezi/kuma,mozilla/kuma,openjck/kuma,nhenezi/kuma,davidyezsetz/kuma,darkwing/kuma,carnell69/kuma,scrollback/kuma,MenZil/kuma,MenZil/kuma,jgmize/kuma,varunkamra/kuma,darkwing/kuma,hoosteeno/kuma,cindyyu/kuma,groovecoder/kuma,YOTOV-LIMITED/kuma,darkwing/kuma,openjck/kuma,groovecoder/kuma,robhudson/kuma,openjck/kuma,ollie314/kuma,utkbansal/kuma,davehunt/kuma,escattone/kuma,groovecoder/kuma,bluemini/kuma,ronakkhunt/kuma,ollie314/kuma,jgmize/kuma,surajssd/kuma,a2sheppy/kuma,hoosteeno/kuma,jezdez/kuma,YOTOV-LIMITED/kuma,jwhitlock/kuma,utkbansal/kuma,a2sheppy/kuma,cindyyu/kuma,varunkamra/kuma,jwhitlock/kuma,jezdez/kuma,varunkamra/kuma,carnell69/kuma,carnell69/kuma,mozilla/kuma,biswajitsahu/kuma,anaran/kuma,yfdyh000/kuma,YOTOV-LIMITED/kuma,escattone/kuma,scrollback/kuma,varunkamra/kuma,utkbansal/kuma,RanadeepPolavarapu/kuma,MenZil/kuma,SphinxKnight/kuma,nhenezi/kuma,davehunt/kuma,whip112/Whip112,hoosteeno/kuma,chirilo/kuma,biswajitsahu/kuma,mastizada/kuma,safwanrahman/kuma,davidyezsetz/kuma,anaran/kuma,Elchi3/kuma,bluemini/kuma,whip112/Whip112,FrankBian/kuma,utkbansal/kuma,varunkamra/kuma,safwanrahman/kuma,ronakkhunt/kuma,tximikel/kuma,ronakkhunt/kuma,davehunt/kuma,tximikel/kuma,anaran/kuma,chirilo/kuma,darkwing/kuma,openjck/kuma,FrankBian/kuma,mastizada/kuma,anaran/kuma,varunkamra/kuma,groovecoder/kuma,davidyezsetz/kuma,SphinxKnight/kuma,bluemini/kuma,anaran/kuma,ronakkhunt/kuma,robhudson/kuma,MenZil/kuma,jezdez/kuma,bluemini/kuma,mozilla/kuma,chirilo/kuma,yfdyh000/kuma,scrollback/kuma,cindyyu/kuma,jgmize/kuma,safwanrahman/kuma,safwanrahman/kuma,whip112/Whip112,darkwing/kuma,jwhitlock/kuma,FrankBian/kuma,jgmize/kuma,davidyezsetz/kuma,RanadeepPolavarapu/kuma,hoosteeno/kuma,mastizada/kuma,surajssd/kuma,mozilla/kuma,escattone/kuma,robhudson/kuma,tximikel/kuma,jezdez/kuma
|
import logging
from django.conf import settings
from django.db.models.signals import pre_delete
from elasticutils.contrib.django.tasks import index_objects, unindex_objects
from wiki.signals import render_done
def render_done_handler(**kwargs):
if not settings.ES_LIVE_INDEX or 'instance' not in kwargs:
return
instance = kwargs['instance']
try:
index_objects.delay(instance.get_mapping_type(), [instance.id])
except:
logging.error('Search indexing task failed',
exc_info=True)
def pre_delete_handler(**kwargs):
if not settings.ES_LIVE_INDEX or 'instance' not in kwargs:
return
instance = kwargs['instance']
unindex_objects.delay(instance.get_mapping_type(), [instance.id])
def register_live_index(model_cls):
"""Register a model and index for auto indexing."""
uid = str(model_cls) + 'live_indexing'
render_done.connect(render_done_handler, model_cls, dispatch_uid=uid)
pre_delete.connect(pre_delete_handler, model_cls, dispatch_uid=uid)
# Enable this to be used as decorator.
return model_cls
Stop a deprecation warning that is thrown in elasticutils.
This is not going to be needed once https://github.com/mozilla/elasticutils/pull/160
has been released.
|
import logging
import warnings
from django.conf import settings
from django.db.models.signals import pre_delete
# ignore a deprecation warning from elasticutils until the fix is released
# refs https://github.com/mozilla/elasticutils/pull/160
warnings.filterwarnings("ignore",
category=DeprecationWarning,
module='celery.decorators')
from elasticutils.contrib.django.tasks import index_objects, unindex_objects
from wiki.signals import render_done
def render_done_handler(**kwargs):
if not settings.ES_LIVE_INDEX or 'instance' not in kwargs:
return
instance = kwargs['instance']
try:
index_objects.delay(instance.get_mapping_type(), [instance.id])
except:
logging.error('Search indexing task failed',
exc_info=True)
def pre_delete_handler(**kwargs):
if not settings.ES_LIVE_INDEX or 'instance' not in kwargs:
return
instance = kwargs['instance']
unindex_objects.delay(instance.get_mapping_type(), [instance.id])
def register_live_index(model_cls):
"""Register a model and index for auto indexing."""
uid = str(model_cls) + 'live_indexing'
render_done.connect(render_done_handler, model_cls, dispatch_uid=uid)
pre_delete.connect(pre_delete_handler, model_cls, dispatch_uid=uid)
# Enable this to be used as decorator.
return model_cls
|
<commit_before>import logging
from django.conf import settings
from django.db.models.signals import pre_delete
from elasticutils.contrib.django.tasks import index_objects, unindex_objects
from wiki.signals import render_done
def render_done_handler(**kwargs):
if not settings.ES_LIVE_INDEX or 'instance' not in kwargs:
return
instance = kwargs['instance']
try:
index_objects.delay(instance.get_mapping_type(), [instance.id])
except:
logging.error('Search indexing task failed',
exc_info=True)
def pre_delete_handler(**kwargs):
if not settings.ES_LIVE_INDEX or 'instance' not in kwargs:
return
instance = kwargs['instance']
unindex_objects.delay(instance.get_mapping_type(), [instance.id])
def register_live_index(model_cls):
"""Register a model and index for auto indexing."""
uid = str(model_cls) + 'live_indexing'
render_done.connect(render_done_handler, model_cls, dispatch_uid=uid)
pre_delete.connect(pre_delete_handler, model_cls, dispatch_uid=uid)
# Enable this to be used as decorator.
return model_cls
<commit_msg>Stop a deprecation warning that is thrown in elasticutils.
This is not going to be needed once https://github.com/mozilla/elasticutils/pull/160
has been released.<commit_after>
|
import logging
import warnings
from django.conf import settings
from django.db.models.signals import pre_delete
# ignore a deprecation warning from elasticutils until the fix is released
# refs https://github.com/mozilla/elasticutils/pull/160
warnings.filterwarnings("ignore",
category=DeprecationWarning,
module='celery.decorators')
from elasticutils.contrib.django.tasks import index_objects, unindex_objects
from wiki.signals import render_done
def render_done_handler(**kwargs):
if not settings.ES_LIVE_INDEX or 'instance' not in kwargs:
return
instance = kwargs['instance']
try:
index_objects.delay(instance.get_mapping_type(), [instance.id])
except:
logging.error('Search indexing task failed',
exc_info=True)
def pre_delete_handler(**kwargs):
if not settings.ES_LIVE_INDEX or 'instance' not in kwargs:
return
instance = kwargs['instance']
unindex_objects.delay(instance.get_mapping_type(), [instance.id])
def register_live_index(model_cls):
"""Register a model and index for auto indexing."""
uid = str(model_cls) + 'live_indexing'
render_done.connect(render_done_handler, model_cls, dispatch_uid=uid)
pre_delete.connect(pre_delete_handler, model_cls, dispatch_uid=uid)
# Enable this to be used as decorator.
return model_cls
|
import logging
from django.conf import settings
from django.db.models.signals import pre_delete
from elasticutils.contrib.django.tasks import index_objects, unindex_objects
from wiki.signals import render_done
def render_done_handler(**kwargs):
if not settings.ES_LIVE_INDEX or 'instance' not in kwargs:
return
instance = kwargs['instance']
try:
index_objects.delay(instance.get_mapping_type(), [instance.id])
except:
logging.error('Search indexing task failed',
exc_info=True)
def pre_delete_handler(**kwargs):
if not settings.ES_LIVE_INDEX or 'instance' not in kwargs:
return
instance = kwargs['instance']
unindex_objects.delay(instance.get_mapping_type(), [instance.id])
def register_live_index(model_cls):
"""Register a model and index for auto indexing."""
uid = str(model_cls) + 'live_indexing'
render_done.connect(render_done_handler, model_cls, dispatch_uid=uid)
pre_delete.connect(pre_delete_handler, model_cls, dispatch_uid=uid)
# Enable this to be used as decorator.
return model_cls
Stop a deprecation warning that is thrown in elasticutils.
This is not going to be needed once https://github.com/mozilla/elasticutils/pull/160
has been released.import logging
import warnings
from django.conf import settings
from django.db.models.signals import pre_delete
# ignore a deprecation warning from elasticutils until the fix is released
# refs https://github.com/mozilla/elasticutils/pull/160
warnings.filterwarnings("ignore",
category=DeprecationWarning,
module='celery.decorators')
from elasticutils.contrib.django.tasks import index_objects, unindex_objects
from wiki.signals import render_done
def render_done_handler(**kwargs):
if not settings.ES_LIVE_INDEX or 'instance' not in kwargs:
return
instance = kwargs['instance']
try:
index_objects.delay(instance.get_mapping_type(), [instance.id])
except:
logging.error('Search indexing task failed',
exc_info=True)
def pre_delete_handler(**kwargs):
if not settings.ES_LIVE_INDEX or 'instance' not in kwargs:
return
instance = kwargs['instance']
unindex_objects.delay(instance.get_mapping_type(), [instance.id])
def register_live_index(model_cls):
"""Register a model and index for auto indexing."""
uid = str(model_cls) + 'live_indexing'
render_done.connect(render_done_handler, model_cls, dispatch_uid=uid)
pre_delete.connect(pre_delete_handler, model_cls, dispatch_uid=uid)
# Enable this to be used as decorator.
return model_cls
|
<commit_before>import logging
from django.conf import settings
from django.db.models.signals import pre_delete
from elasticutils.contrib.django.tasks import index_objects, unindex_objects
from wiki.signals import render_done
def render_done_handler(**kwargs):
if not settings.ES_LIVE_INDEX or 'instance' not in kwargs:
return
instance = kwargs['instance']
try:
index_objects.delay(instance.get_mapping_type(), [instance.id])
except:
logging.error('Search indexing task failed',
exc_info=True)
def pre_delete_handler(**kwargs):
if not settings.ES_LIVE_INDEX or 'instance' not in kwargs:
return
instance = kwargs['instance']
unindex_objects.delay(instance.get_mapping_type(), [instance.id])
def register_live_index(model_cls):
"""Register a model and index for auto indexing."""
uid = str(model_cls) + 'live_indexing'
render_done.connect(render_done_handler, model_cls, dispatch_uid=uid)
pre_delete.connect(pre_delete_handler, model_cls, dispatch_uid=uid)
# Enable this to be used as decorator.
return model_cls
<commit_msg>Stop a deprecation warning that is thrown in elasticutils.
This is not going to be needed once https://github.com/mozilla/elasticutils/pull/160
has been released.<commit_after>import logging
import warnings
from django.conf import settings
from django.db.models.signals import pre_delete
# ignore a deprecation warning from elasticutils until the fix is released
# refs https://github.com/mozilla/elasticutils/pull/160
warnings.filterwarnings("ignore",
category=DeprecationWarning,
module='celery.decorators')
from elasticutils.contrib.django.tasks import index_objects, unindex_objects
from wiki.signals import render_done
def render_done_handler(**kwargs):
if not settings.ES_LIVE_INDEX or 'instance' not in kwargs:
return
instance = kwargs['instance']
try:
index_objects.delay(instance.get_mapping_type(), [instance.id])
except:
logging.error('Search indexing task failed',
exc_info=True)
def pre_delete_handler(**kwargs):
if not settings.ES_LIVE_INDEX or 'instance' not in kwargs:
return
instance = kwargs['instance']
unindex_objects.delay(instance.get_mapping_type(), [instance.id])
def register_live_index(model_cls):
"""Register a model and index for auto indexing."""
uid = str(model_cls) + 'live_indexing'
render_done.connect(render_done_handler, model_cls, dispatch_uid=uid)
pre_delete.connect(pre_delete_handler, model_cls, dispatch_uid=uid)
# Enable this to be used as decorator.
return model_cls
|
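A hedged sketch of the warnings filter used in the kuma commit above; the module regex below is a hypothetical placeholder, and the filter matches the module in which the warning is raised:
import warnings

# Ignore DeprecationWarnings originating from modules matching the regex,
# the same mechanism as module='celery.decorators' in the commit above.
warnings.filterwarnings("ignore",
                        category=DeprecationWarning,
                        module=r"legacy_pkg\.shims")  # hypothetical module

# Warnings raised from other modules (here, __main__) still pass through,
# subject to the interpreter's default filters.
warnings.warn("unfiltered deprecation", DeprecationWarning)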
6155cfa0d16bfde8b412a3b2c68983ef939d518c
|
synapse/tests/test_init.py
|
synapse/tests/test_init.py
|
import os
import imp
import synapse
from synapse.tests.common import *
class InitTest(SynTest):
def test_init_modules(self):
os.environ['SYN_MODULES'] = 'fakenotrealmod , badnothere,math'
msg = 'SYN_MODULES failed: badnothere (NoSuchDyn: name=\'badnothere\')'
with self.getLoggerStream('synapse', msg) as stream:
imp.reload(synapse)
self.true(stream.wait(10))
stream.seek(0)
self.isin(msg, stream.read())
self.isin(('math', 2.0, None), synapse.lib.modules.call('sqrt', 4))
|
import os
import imp
import synapse
from synapse.tests.common import *
class InitTest(SynTest):
pass
'''
def test_init_modules(self):
os.environ['SYN_MODULES'] = 'fakenotrealmod , badnothere,math'
msg = 'SYN_MODULES failed: badnothere (NoSuchDyn: name=\'badnothere\')'
with self.getLoggerStream('synapse', msg) as stream:
imp.reload(synapse)
self.true(stream.wait(10))
stream.seek(0)
self.isin(msg, stream.read())
self.isin(('math', 2.0, None), synapse.lib.modules.call('sqrt', 4))
'''
|
Comment out broken init test
|
Comment out broken init test
|
Python
|
apache-2.0
|
vertexproject/synapse,vertexproject/synapse,vivisect/synapse,vertexproject/synapse
|
import os
import imp
import synapse
from synapse.tests.common import *
class InitTest(SynTest):
def test_init_modules(self):
os.environ['SYN_MODULES'] = 'fakenotrealmod , badnothere,math'
msg = 'SYN_MODULES failed: badnothere (NoSuchDyn: name=\'badnothere\')'
with self.getLoggerStream('synapse', msg) as stream:
imp.reload(synapse)
self.true(stream.wait(10))
stream.seek(0)
self.isin(msg, stream.read())
self.isin(('math', 2.0, None), synapse.lib.modules.call('sqrt', 4))
Comment out broken init test
|
import os
import imp
import synapse
from synapse.tests.common import *
class InitTest(SynTest):
pass
'''
def test_init_modules(self):
os.environ['SYN_MODULES'] = 'fakenotrealmod , badnothere,math'
msg = 'SYN_MODULES failed: badnothere (NoSuchDyn: name=\'badnothere\')'
with self.getLoggerStream('synapse', msg) as stream:
imp.reload(synapse)
self.true(stream.wait(10))
stream.seek(0)
self.isin(msg, stream.read())
self.isin(('math', 2.0, None), synapse.lib.modules.call('sqrt', 4))
'''
|
<commit_before>import os
import imp
import synapse
from synapse.tests.common import *
class InitTest(SynTest):
def test_init_modules(self):
os.environ['SYN_MODULES'] = 'fakenotrealmod , badnothere,math'
msg = 'SYN_MODULES failed: badnothere (NoSuchDyn: name=\'badnothere\')'
with self.getLoggerStream('synapse', msg) as stream:
imp.reload(synapse)
self.true(stream.wait(10))
stream.seek(0)
self.isin(msg, stream.read())
self.isin(('math', 2.0, None), synapse.lib.modules.call('sqrt', 4))
<commit_msg>Comment out broken init test<commit_after>
|
import os
import imp
import synapse
from synapse.tests.common import *
class InitTest(SynTest):
pass
'''
def test_init_modules(self):
os.environ['SYN_MODULES'] = 'fakenotrealmod , badnothere,math'
msg = 'SYN_MODULES failed: badnothere (NoSuchDyn: name=\'badnothere\')'
with self.getLoggerStream('synapse', msg) as stream:
imp.reload(synapse)
self.true(stream.wait(10))
stream.seek(0)
self.isin(msg, stream.read())
self.isin(('math', 2.0, None), synapse.lib.modules.call('sqrt', 4))
'''
|
import os
import imp
import synapse
from synapse.tests.common import *
class InitTest(SynTest):
def test_init_modules(self):
os.environ['SYN_MODULES'] = 'fakenotrealmod , badnothere,math'
msg = 'SYN_MODULES failed: badnothere (NoSuchDyn: name=\'badnothere\')'
with self.getLoggerStream('synapse', msg) as stream:
imp.reload(synapse)
self.true(stream.wait(10))
stream.seek(0)
self.isin(msg, stream.read())
self.isin(('math', 2.0, None), synapse.lib.modules.call('sqrt', 4))
Comment out broken init testimport os
import imp
import synapse
from synapse.tests.common import *
class InitTest(SynTest):
pass
'''
def test_init_modules(self):
os.environ['SYN_MODULES'] = 'fakenotrealmod , badnothere,math'
msg = 'SYN_MODULES failed: badnothere (NoSuchDyn: name=\'badnothere\')'
with self.getLoggerStream('synapse', msg) as stream:
imp.reload(synapse)
self.true(stream.wait(10))
stream.seek(0)
self.isin(msg, stream.read())
self.isin(('math', 2.0, None), synapse.lib.modules.call('sqrt', 4))
'''
|
<commit_before>import os
import imp
import synapse
from synapse.tests.common import *
class InitTest(SynTest):
def test_init_modules(self):
os.environ['SYN_MODULES'] = 'fakenotrealmod , badnothere,math'
msg = 'SYN_MODULES failed: badnothere (NoSuchDyn: name=\'badnothere\')'
with self.getLoggerStream('synapse', msg) as stream:
imp.reload(synapse)
self.true(stream.wait(10))
stream.seek(0)
self.isin(msg, stream.read())
self.isin(('math', 2.0, None), synapse.lib.modules.call('sqrt', 4))
<commit_msg>Comment out broken init test<commit_after>import os
import imp
import synapse
from synapse.tests.common import *
class InitTest(SynTest):
pass
'''
def test_init_modules(self):
os.environ['SYN_MODULES'] = 'fakenotrealmod , badnothere,math'
msg = 'SYN_MODULES failed: badnothere (NoSuchDyn: name=\'badnothere\')'
with self.getLoggerStream('synapse', msg) as stream:
imp.reload(synapse)
self.true(stream.wait(10))
stream.seek(0)
self.isin(msg, stream.read())
self.isin(('math', 2.0, None), synapse.lib.modules.call('sqrt', 4))
'''
|
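A runnable sketch of parsing a comma-separated module list like the SYN_MODULES value in the test above, tolerating the stray whitespace in 'fakenotrealmod , badnothere,math'; the loader logic is an illustration, not the synapse implementation:
import os
import importlib

os.environ['SYN_MODULES'] = 'fakenotrealmod , badnothere,math'

# Split on commas and strip whitespace so ' badnothere' parses cleanly.
names = [n.strip() for n in os.environ['SYN_MODULES'].split(',') if n.strip()]

for name in names:
    try:
        module = importlib.import_module(name)
        print('loaded', module.__name__)       # only 'math' succeeds here
    except ImportError as exc:
        # Log and continue, as the synapse loader's error handling implies.
        print('failed:', name, exc)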
5f70d83408d177e803ce8edfb0ebd2b909722a64
|
troposphere/certificatemanager.py
|
troposphere/certificatemanager.py
|
# Copyright (c) 2012-2019, Mark Peek <mark@peek.org>
# All rights reserved.
#
# See LICENSE file for full license.
#
# *** Do not modify - this file is autogenerated ***
# Resource specification version: 15.1.0
from . import AWSObject
from . import AWSProperty
from troposphere import Tags
class DomainValidationOption(AWSProperty):
props = {
'DomainName': (basestring, True),
'HostedZoneId': (basestring, False),
'ValidationDomain': (basestring, False),
}
class Certificate(AWSObject):
resource_type = "AWS::CertificateManager::Certificate"
props = {
'CertificateAuthorityArn': (basestring, False),
'CertificateTransparencyLoggingPreference': (basestring, False),
'DomainName': (basestring, True),
'DomainValidationOptions': ([DomainValidationOption], False),
'SubjectAlternativeNames': ([basestring], False),
'Tags': ((Tags, list), False),
'ValidationMethod': (basestring, False),
}
|
# Copyright (c) 2012-2021, Mark Peek <mark@peek.org>
# All rights reserved.
#
# See LICENSE file for full license.
#
# *** Do not modify - this file is autogenerated ***
# Resource specification version: 31.1.0
from . import AWSObject
from . import AWSProperty
from troposphere import Tags
from .validators import integer
class ExpiryEventsConfiguration(AWSProperty):
props = {
'DaysBeforeExpiry': (integer, False),
}
class Account(AWSObject):
resource_type = "AWS::CertificateManager::Account"
props = {
'ExpiryEventsConfiguration': (ExpiryEventsConfiguration, True),
}
class DomainValidationOption(AWSProperty):
props = {
'DomainName': (basestring, True),
'HostedZoneId': (basestring, False),
'ValidationDomain': (basestring, False),
}
class Certificate(AWSObject):
resource_type = "AWS::CertificateManager::Certificate"
props = {
'CertificateAuthorityArn': (basestring, False),
'CertificateTransparencyLoggingPreference': (basestring, False),
'DomainName': (basestring, True),
'DomainValidationOptions': ([DomainValidationOption], False),
'SubjectAlternativeNames': ([basestring], False),
'Tags': ((Tags, list), False),
'ValidationMethod': (basestring, False),
}
|
Update CertificateManager per 2021-03-11 changes
|
Update CertificateManager per 2021-03-11 changes
|
Python
|
bsd-2-clause
|
cloudtools/troposphere,cloudtools/troposphere
|
# Copyright (c) 2012-2019, Mark Peek <mark@peek.org>
# All rights reserved.
#
# See LICENSE file for full license.
#
# *** Do not modify - this file is autogenerated ***
# Resource specification version: 15.1.0
from . import AWSObject
from . import AWSProperty
from troposphere import Tags
class DomainValidationOption(AWSProperty):
props = {
'DomainName': (basestring, True),
'HostedZoneId': (basestring, False),
'ValidationDomain': (basestring, False),
}
class Certificate(AWSObject):
resource_type = "AWS::CertificateManager::Certificate"
props = {
'CertificateAuthorityArn': (basestring, False),
'CertificateTransparencyLoggingPreference': (basestring, False),
'DomainName': (basestring, True),
'DomainValidationOptions': ([DomainValidationOption], False),
'SubjectAlternativeNames': ([basestring], False),
'Tags': ((Tags, list), False),
'ValidationMethod': (basestring, False),
}
Update CertificateManager per 2021-03-11 changes
|
# Copyright (c) 2012-2021, Mark Peek <mark@peek.org>
# All rights reserved.
#
# See LICENSE file for full license.
#
# *** Do not modify - this file is autogenerated ***
# Resource specification version: 31.1.0
from . import AWSObject
from . import AWSProperty
from troposphere import Tags
from .validators import integer
class ExpiryEventsConfiguration(AWSProperty):
props = {
'DaysBeforeExpiry': (integer, False),
}
class Account(AWSObject):
resource_type = "AWS::CertificateManager::Account"
props = {
'ExpiryEventsConfiguration': (ExpiryEventsConfiguration, True),
}
class DomainValidationOption(AWSProperty):
props = {
'DomainName': (basestring, True),
'HostedZoneId': (basestring, False),
'ValidationDomain': (basestring, False),
}
class Certificate(AWSObject):
resource_type = "AWS::CertificateManager::Certificate"
props = {
'CertificateAuthorityArn': (basestring, False),
'CertificateTransparencyLoggingPreference': (basestring, False),
'DomainName': (basestring, True),
'DomainValidationOptions': ([DomainValidationOption], False),
'SubjectAlternativeNames': ([basestring], False),
'Tags': ((Tags, list), False),
'ValidationMethod': (basestring, False),
}
|
<commit_before># Copyright (c) 2012-2019, Mark Peek <mark@peek.org>
# All rights reserved.
#
# See LICENSE file for full license.
#
# *** Do not modify - this file is autogenerated ***
# Resource specification version: 15.1.0
from . import AWSObject
from . import AWSProperty
from troposphere import Tags
class DomainValidationOption(AWSProperty):
props = {
'DomainName': (basestring, True),
'HostedZoneId': (basestring, False),
'ValidationDomain': (basestring, False),
}
class Certificate(AWSObject):
resource_type = "AWS::CertificateManager::Certificate"
props = {
'CertificateAuthorityArn': (basestring, False),
'CertificateTransparencyLoggingPreference': (basestring, False),
'DomainName': (basestring, True),
'DomainValidationOptions': ([DomainValidationOption], False),
'SubjectAlternativeNames': ([basestring], False),
'Tags': ((Tags, list), False),
'ValidationMethod': (basestring, False),
}
<commit_msg>Update CertificateManager per 2021-03-11 changes<commit_after>
|
# Copyright (c) 2012-2021, Mark Peek <mark@peek.org>
# All rights reserved.
#
# See LICENSE file for full license.
#
# *** Do not modify - this file is autogenerated ***
# Resource specification version: 31.1.0
from . import AWSObject
from . import AWSProperty
from troposphere import Tags
from .validators import integer
class ExpiryEventsConfiguration(AWSProperty):
props = {
'DaysBeforeExpiry': (integer, False),
}
class Account(AWSObject):
resource_type = "AWS::CertificateManager::Account"
props = {
'ExpiryEventsConfiguration': (ExpiryEventsConfiguration, True),
}
class DomainValidationOption(AWSProperty):
props = {
'DomainName': (basestring, True),
'HostedZoneId': (basestring, False),
'ValidationDomain': (basestring, False),
}
class Certificate(AWSObject):
resource_type = "AWS::CertificateManager::Certificate"
props = {
'CertificateAuthorityArn': (basestring, False),
'CertificateTransparencyLoggingPreference': (basestring, False),
'DomainName': (basestring, True),
'DomainValidationOptions': ([DomainValidationOption], False),
'SubjectAlternativeNames': ([basestring], False),
'Tags': ((Tags, list), False),
'ValidationMethod': (basestring, False),
}
|
# Copyright (c) 2012-2019, Mark Peek <mark@peek.org>
# All rights reserved.
#
# See LICENSE file for full license.
#
# *** Do not modify - this file is autogenerated ***
# Resource specification version: 15.1.0
from . import AWSObject
from . import AWSProperty
from troposphere import Tags
class DomainValidationOption(AWSProperty):
props = {
'DomainName': (basestring, True),
'HostedZoneId': (basestring, False),
'ValidationDomain': (basestring, False),
}
class Certificate(AWSObject):
resource_type = "AWS::CertificateManager::Certificate"
props = {
'CertificateAuthorityArn': (basestring, False),
'CertificateTransparencyLoggingPreference': (basestring, False),
'DomainName': (basestring, True),
'DomainValidationOptions': ([DomainValidationOption], False),
'SubjectAlternativeNames': ([basestring], False),
'Tags': ((Tags, list), False),
'ValidationMethod': (basestring, False),
}
Update CertificateManager per 2021-03-11 changes# Copyright (c) 2012-2021, Mark Peek <mark@peek.org>
# All rights reserved.
#
# See LICENSE file for full license.
#
# *** Do not modify - this file is autogenerated ***
# Resource specification version: 31.1.0
from . import AWSObject
from . import AWSProperty
from troposphere import Tags
from .validators import integer
class ExpiryEventsConfiguration(AWSProperty):
props = {
'DaysBeforeExpiry': (integer, False),
}
class Account(AWSObject):
resource_type = "AWS::CertificateManager::Account"
props = {
'ExpiryEventsConfiguration': (ExpiryEventsConfiguration, True),
}
class DomainValidationOption(AWSProperty):
props = {
'DomainName': (basestring, True),
'HostedZoneId': (basestring, False),
'ValidationDomain': (basestring, False),
}
class Certificate(AWSObject):
resource_type = "AWS::CertificateManager::Certificate"
props = {
'CertificateAuthorityArn': (basestring, False),
'CertificateTransparencyLoggingPreference': (basestring, False),
'DomainName': (basestring, True),
'DomainValidationOptions': ([DomainValidationOption], False),
'SubjectAlternativeNames': ([basestring], False),
'Tags': ((Tags, list), False),
'ValidationMethod': (basestring, False),
}
|
<commit_before># Copyright (c) 2012-2019, Mark Peek <mark@peek.org>
# All rights reserved.
#
# See LICENSE file for full license.
#
# *** Do not modify - this file is autogenerated ***
# Resource specification version: 15.1.0
from . import AWSObject
from . import AWSProperty
from troposphere import Tags
class DomainValidationOption(AWSProperty):
props = {
'DomainName': (basestring, True),
'HostedZoneId': (basestring, False),
'ValidationDomain': (basestring, False),
}
class Certificate(AWSObject):
resource_type = "AWS::CertificateManager::Certificate"
props = {
'CertificateAuthorityArn': (basestring, False),
'CertificateTransparencyLoggingPreference': (basestring, False),
'DomainName': (basestring, True),
'DomainValidationOptions': ([DomainValidationOption], False),
'SubjectAlternativeNames': ([basestring], False),
'Tags': ((Tags, list), False),
'ValidationMethod': (basestring, False),
}
<commit_msg>Update CertificateManager per 2021-03-11 changes<commit_after># Copyright (c) 2012-2021, Mark Peek <mark@peek.org>
# All rights reserved.
#
# See LICENSE file for full license.
#
# *** Do not modify - this file is autogenerated ***
# Resource specification version: 31.1.0
from . import AWSObject
from . import AWSProperty
from troposphere import Tags
from .validators import integer
class ExpiryEventsConfiguration(AWSProperty):
props = {
'DaysBeforeExpiry': (integer, False),
}
class Account(AWSObject):
resource_type = "AWS::CertificateManager::Account"
props = {
'ExpiryEventsConfiguration': (ExpiryEventsConfiguration, True),
}
class DomainValidationOption(AWSProperty):
props = {
'DomainName': (basestring, True),
'HostedZoneId': (basestring, False),
'ValidationDomain': (basestring, False),
}
class Certificate(AWSObject):
resource_type = "AWS::CertificateManager::Certificate"
props = {
'CertificateAuthorityArn': (basestring, False),
'CertificateTransparencyLoggingPreference': (basestring, False),
'DomainName': (basestring, True),
'DomainValidationOptions': ([DomainValidationOption], False),
'SubjectAlternativeNames': ([basestring], False),
'Tags': ((Tags, list), False),
'ValidationMethod': (basestring, False),
}
|
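For reference, the newly added Account resource slots into a template roughly like this (a sketch assuming a troposphere release generated from the 31.1.0 spec; the logical name and expiry window are illustrative):

from troposphere import Template
from troposphere.certificatemanager import Account, ExpiryEventsConfiguration

t = Template()
t.add_resource(Account(
    "CertAccount",  # illustrative logical name
    ExpiryEventsConfiguration=ExpiryEventsConfiguration(DaysBeforeExpiry=30),
))
print(t.to_json())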
c947ecffd771117ce531f5058356a17b7db82fdb
|
mockaioredis/commands/__init__.py
|
mockaioredis/commands/__init__.py
|
from mockredis import MockRedis as _MockRedis
from .generic import GenericCommandsMixin
from .hash import HashCommandsMixin
from .list import ListCommandsMixin
from .set import SetCommandsMixin
__all__ = ['MockRedis']
class MockRedis(GenericCommandsMixin, HashCommandsMixin, ListCommandsMixin, SetCommandsMixin):
"""Fake high-level aioredis.Redis interface"""
def __init__(self, connection=None, encoding=None, **kwargs):
# Just for API compatibility
self._conn = connection
self._redis = _MockRedis(**kwargs)
self._encoding = encoding
async def create_redis(address, *, db=None, password=None, ssl=None,
encoding=None, commands_factory=MockRedis,
loop=None):
'''Create a fake high-level MockRedis interface
This function is a coroutine
'''
return commands_factory(None, encoding=encoding)
|
from mockredis import MockRedis as _MockRedis
from .generic import GenericCommandsMixin
from .hash import HashCommandsMixin
from .list import ListCommandsMixin
from .set import SetCommandsMixin
__all__ = ['MockRedis']
class MockRedis(GenericCommandsMixin, HashCommandsMixin, ListCommandsMixin, SetCommandsMixin):
"""Fake high-level aioredis.Redis interface"""
def __init__(self, connection=None, encoding=None, **kwargs):
# Just for API compatibility
self._conn = connection
self._redis = _MockRedis(**kwargs)
self._encoding = encoding
async def wait_closed(self):
if self._conn:
await self._conn.wait_closed()
def close(self):
if self._conn:
self._conn.close()
async def create_redis(address, *, db=None, password=None, ssl=None,
encoding=None, commands_factory=MockRedis,
loop=None):
'''Create a fake high-level MockRedis interface
This function is a coroutine
'''
return commands_factory(None, encoding=encoding)
|
Add close commands to MockRedis class
|
chore: Add close commands to MockRedis class
This closes #15
Signed-off-by: Kai Blin <ad3597797f6179d503c382b2627cc19939309418@biosustain.dtu.dk>
|
Python
|
apache-2.0
|
kblin/mockaioredis,kblin/mockaioredis
|
from mockredis import MockRedis as _MockRedis
from .generic import GenericCommandsMixin
from .hash import HashCommandsMixin
from .list import ListCommandsMixin
from .set import SetCommandsMixin
__all__ = ['MockRedis']
class MockRedis(GenericCommandsMixin, HashCommandsMixin, ListCommandsMixin, SetCommandsMixin):
"""Fake high-level aioredis.Redis interface"""
def __init__(self, connection=None, encoding=None, **kwargs):
# Just for API compatibility
self._conn = connection
self._redis = _MockRedis(**kwargs)
self._encoding = encoding
async def create_redis(address, *, db=None, password=None, ssl=None,
encoding=None, commands_factory=MockRedis,
loop=None):
'''Create a fake high-level MockRedis interface
This function is a coroutine
'''
return commands_factory(None, encoding=encoding)
chore: Add close commands to MockRedis class
This closes #15
Signed-off-by: Kai Blin <ad3597797f6179d503c382b2627cc19939309418@biosustain.dtu.dk>
|
from mockredis import MockRedis as _MockRedis
from .generic import GenericCommandsMixin
from .hash import HashCommandsMixin
from .list import ListCommandsMixin
from .set import SetCommandsMixin
__all__ = ['MockRedis']
class MockRedis(GenericCommandsMixin, HashCommandsMixin, ListCommandsMixin, SetCommandsMixin):
"""Fake high-level aioredis.Redis interface"""
def __init__(self, connection=None, encoding=None, **kwargs):
# Just for API compatibility
self._conn = connection
self._redis = _MockRedis(**kwargs)
self._encoding = encoding
async def wait_closed(self):
if self._conn:
await self._conn.wait_closed()
def close(self):
if self._conn:
self._conn.close()
async def create_redis(address, *, db=None, password=None, ssl=None,
encoding=None, commands_factory=MockRedis,
loop=None):
'''Create a fake high-level MockRedis interface
This function is a coroutine
'''
return commands_factory(None, encoding=encoding)
|
<commit_before>from mockredis import MockRedis as _MockRedis
from .generic import GenericCommandsMixin
from .hash import HashCommandsMixin
from .list import ListCommandsMixin
from .set import SetCommandsMixin
__all__ = ['MockRedis']
class MockRedis(GenericCommandsMixin, HashCommandsMixin, ListCommandsMixin, SetCommandsMixin):
"""Fake high-level aioredis.Redis interface"""
def __init__(self, connection=None, encoding=None, **kwargs):
# Just for API compatibility
self._conn = connection
self._redis = _MockRedis(**kwargs)
self._encoding = encoding
async def create_redis(address, *, db=None, password=None, ssl=None,
encoding=None, commands_factory=MockRedis,
loop=None):
'''Create a fake high-level MockRedis interface
This function is a coroutine
'''
return commands_factory(None, encoding=encoding)
<commit_msg>chore: Add close commands to MockRedis class
This closes #15
Signed-off-by: Kai Blin <ad3597797f6179d503c382b2627cc19939309418@biosustain.dtu.dk><commit_after>
|
from mockredis import MockRedis as _MockRedis
from .generic import GenericCommandsMixin
from .hash import HashCommandsMixin
from .list import ListCommandsMixin
from .set import SetCommandsMixin
__all__ = ['MockRedis']
class MockRedis(GenericCommandsMixin, HashCommandsMixin, ListCommandsMixin, SetCommandsMixin):
"""Fake high-level aioredis.Redis interface"""
def __init__(self, connection=None, encoding=None, **kwargs):
# Just for API compatibility
self._conn = connection
self._redis = _MockRedis(**kwargs)
self._encoding = encoding
async def wait_closed(self):
if self._conn:
await self._conn.wait_closed()
def close(self):
if self._conn:
self._conn.close()
async def create_redis(address, *, db=None, password=None, ssl=None,
encoding=None, commands_factory=MockRedis,
loop=None):
'''Create a fake high-level MockRedis interface
This function is a coroutine
'''
return commands_factory(None, encoding=encoding)
|
from mockredis import MockRedis as _MockRedis
from .generic import GenericCommandsMixin
from .hash import HashCommandsMixin
from .list import ListCommandsMixin
from .set import SetCommandsMixin
__all__ = ['MockRedis']
class MockRedis(GenericCommandsMixin, HashCommandsMixin, ListCommandsMixin, SetCommandsMixin):
"""Fake high-level aioredis.Redis interface"""
def __init__(self, connection=None, encoding=None, **kwargs):
# Just for API compatibility
self._conn = connection
self._redis = _MockRedis(**kwargs)
self._encoding = encoding
async def create_redis(address, *, db=None, password=None, ssl=None,
encoding=None, commands_factory=MockRedis,
loop=None):
'''Create a fake high-level MockRedis interface
This function is a coroutine
'''
return commands_factory(None, encoding=encoding)
chore: Add close commands to MockRedis class
This closes #15
Signed-off-by: Kai Blin <ad3597797f6179d503c382b2627cc19939309418@biosustain.dtu.dk>from mockredis import MockRedis as _MockRedis
from .generic import GenericCommandsMixin
from .hash import HashCommandsMixin
from .list import ListCommandsMixin
from .set import SetCommandsMixin
__all__ = ['MockRedis']
class MockRedis(GenericCommandsMixin, HashCommandsMixin, ListCommandsMixin, SetCommandsMixin):
"""Fake high-level aioredis.Redis interface"""
def __init__(self, connection=None, encoding=None, **kwargs):
# Just for API compatibility
self._conn = connection
self._redis = _MockRedis(**kwargs)
self._encoding = encoding
async def wait_closed(self):
if self._conn:
await self._conn.wait_closed()
def close(self):
if self._conn:
self._conn.close()
async def create_redis(address, *, db=None, password=None, ssl=None,
encoding=None, commands_factory=MockRedis,
loop=None):
'''Create a fake high-level MockRedis interface
This function is a coroutine
'''
return commands_factory(None, encoding=encoding)
|
<commit_before>from mockredis import MockRedis as _MockRedis
from .generic import GenericCommandsMixin
from .hash import HashCommandsMixin
from .list import ListCommandsMixin
from .set import SetCommandsMixin
__all__ = ['MockRedis']
class MockRedis(GenericCommandsMixin, HashCommandsMixin, ListCommandsMixin, SetCommandsMixin):
"""Fake high-level aioredis.Redis interface"""
def __init__(self, connection=None, encoding=None, **kwargs):
# Just for API compatibility
self._conn = connection
self._redis = _MockRedis(**kwargs)
self._encoding = encoding
async def create_redis(address, *, db=None, password=None, ssl=None,
encoding=None, commands_factory=MockRedis,
loop=None):
'''Create a fake high-level MockRedis interface
This function is a coroutine
'''
return commands_factory(None, encoding=encoding)
<commit_msg>chore: Add close commands to MockRedis class
This closes #15
Signed-off-by: Kai Blin <ad3597797f6179d503c382b2627cc19939309418@biosustain.dtu.dk><commit_after>from mockredis import MockRedis as _MockRedis
from .generic import GenericCommandsMixin
from .hash import HashCommandsMixin
from .list import ListCommandsMixin
from .set import SetCommandsMixin
__all__ = ['MockRedis']
class MockRedis(GenericCommandsMixin, HashCommandsMixin, ListCommandsMixin, SetCommandsMixin):
"""Fake high-level aioredis.Redis interface"""
def __init__(self, connection=None, encoding=None, **kwargs):
# Just for API compatibility
self._conn = connection
self._redis = _MockRedis(**kwargs)
self._encoding = encoding
async def wait_closed(self):
if self._conn:
await self._conn.wait_closed()
def close(self):
if self._conn:
self._conn.close()
async def create_redis(address, *, db=None, password=None, ssl=None,
encoding=None, commands_factory=MockRedis,
loop=None):
'''Create a fake high-level MockRedis interface
This function is a coroutine
'''
return commands_factory(None, encoding=encoding)
|
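A caller-side sketch of why the no-op close methods matter: teardown code written against aioredis can now run unchanged on the mock (lpush is assumed to be provided by ListCommandsMixin):

import asyncio
from mockaioredis.commands import create_redis

async def main():
    redis = await create_redis(('localhost', 6379))
    try:
        await redis.lpush('jobs', 'job-1')  # assumed ListCommandsMixin method
    finally:
        # The default connection is None, so both calls are no-ops here.
        redis.close()
        await redis.wait_closed()

asyncio.run(main())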
022d8b992a88fd4c489c068ba57b4b2fcf6dde98
|
cloudsizzle/studyplanner/completedstudies/models.py
|
cloudsizzle/studyplanner/completedstudies/models.py
|
from django.db import models
from django.contrib.auth.models import User
# Create your models here.
class CompletedCourse(models.Model):
"""
Model for completed studies
"""
student = models.ForeignKey(User, related_name='completed_courses')
code = models.CharField(max_length=11)
name = models.CharField(max_length=100)
cr = models.IntegerField()
ocr = models.IntegerField()
grade = models.CharField(max_length=5)
date = models.DateField()
teacher = models.CharField(max_length=60)
class Teacher(models.Model):
"""
should be updated for the teachers to combine them with course information
"""
name = models.CharField(max_length = 30)
|
from django.db import models
from django.contrib.auth.models import User
# Create your models here.
class CompletedCourse(models.Model):
"""
Model for completed studies
"""
student = models.ForeignKey(User, related_name='completed_courses')
code = models.CharField(max_length=11)
name = models.CharField(max_length=100)
cr = models.IntegerField(null=True)
ocr = models.IntegerField(null=True)
grade = models.CharField(max_length=5)
date = models.DateField()
teacher = models.CharField(max_length=60)
class Teacher(models.Model):
"""
should be updated for the teachers to combine them with course information
"""
name = models.CharField(max_length = 30)
|
Allow null values for ocr and cr fields of CompletedCourse
|
Allow null values for ocr and cr fields of CompletedCourse
|
Python
|
mit
|
jpvanhal/cloudsizzle,jpvanhal/cloudsizzle
|
from django.db import models
from django.contrib.auth.models import User
# Create your models here.
class CompletedCourse(models.Model):
"""
Model for completed studies
"""
student = models.ForeignKey(User, related_name='completed_courses')
code = models.CharField(max_length=11)
name = models.CharField(max_length=100)
cr = models.IntegerField()
ocr = models.IntegerField()
grade = models.CharField(max_length=5)
date = models.DateField()
teacher = models.CharField(max_length=60)
class Teacher(models.Model):
"""
should be updated for the teachers to combine them with course information
"""
name = models.CharField(max_length = 30)
Allow null values for ocr and cr fields of CompletedCourse
|
from django.db import models
from django.contrib.auth.models import User
# Create your models here.
class CompletedCourse(models.Model):
"""
Model for completed studies
"""
student = models.ForeignKey(User, related_name='completed_courses')
code = models.CharField(max_length=11)
name = models.CharField(max_length=100)
cr = models.IntegerField(null=True)
ocr = models.IntegerField(null=True)
grade = models.CharField(max_length=5)
date = models.DateField()
teacher = models.CharField(max_length=60)
class Teacher(models.Model):
"""
should be updated for the teachers to combine them with course information
"""
name = models.CharField(max_length = 30)
|
<commit_before>from django.db import models
from django.contrib.auth.models import User
# Create your models here.
class CompletedCourse(models.Model):
"""
Model for completed studies
"""
student = models.ForeignKey(User, related_name='completed_courses')
code = models.CharField(max_length=11)
name = models.CharField(max_length=100)
cr = models.IntegerField()
ocr = models.IntegerField()
grade = models.CharField(max_length=5)
date = models.DateField()
teacher = models.CharField(max_length=60)
class Teacher(models.Model):
"""
should be updated for the teachers to combine them with course information
"""
name = models.CharField(max_length = 30)
<commit_msg>Allow null values for ocr and cr fields of CompletedCourse<commit_after>
|
from django.db import models
from django.contrib.auth.models import User
# Create your models here.
class CompletedCourse(models.Model):
"""
Model for completed studies
"""
student = models.ForeignKey(User, related_name='completed_courses')
code = models.CharField(max_length=11)
name = models.CharField(max_length=100)
cr = models.IntegerField(null=True)
ocr = models.IntegerField(null=True)
grade = models.CharField(max_length=5)
date = models.DateField()
teacher = models.CharField(max_length=60)
class Teacher(models.Model):
"""
should be updated for the teachers to combine them with course information
"""
name = models.CharField(max_length = 30)
|
from django.db import models
from django.contrib.auth.models import User
# Create your models here.
class CompletedCourse(models.Model):
"""
Model for completed studies
"""
student = models.ForeignKey(User, related_name='completed_courses')
code = models.CharField(max_length=11)
name = models.CharField(max_length=100)
cr = models.IntegerField()
ocr = models.IntegerField()
grade = models.CharField(max_length=5)
date = models.DateField()
teacher = models.CharField(max_length=60)
class Teacher(models.Model):
"""
should be updated for the teachers to combine them with course information
"""
name = models.CharField(max_length = 30)
Allow null values for ocr and cr fields of CompletedCoursefrom django.db import models
from django.contrib.auth.models import User
# Create your models here.
class CompletedCourse(models.Model):
"""
Model for completed studies
"""
student = models.ForeignKey(User, related_name='completed_courses')
code = models.CharField(max_length=11)
name = models.CharField(max_length=100)
cr = models.IntegerField(null=True)
ocr = models.IntegerField(null=True)
grade = models.CharField(max_length=5)
date = models.DateField()
teacher = models.CharField(max_length=60)
class Teacher(models.Model):
"""
should be updated for the teachers to combine them with course information
"""
name = models.CharField(max_length = 30)
|
<commit_before>from django.db import models
from django.contrib.auth.models import User
# Create your models here.
class CompletedCourse(models.Model):
"""
Model for completed studies
"""
student = models.ForeignKey(User, related_name='completed_courses')
code = models.CharField(max_length=11)
name = models.CharField(max_length=100)
cr = models.IntegerField()
ocr = models.IntegerField()
grade = models.CharField(max_length=5)
date = models.DateField()
teacher = models.CharField(max_length=60)
class Teacher(models.Model):
"""
should be updated for the teachers to combine them with course information
"""
name = models.CharField(max_length = 30)
<commit_msg>Allow null values for ocr and cr fields of CompletedCourse<commit_after>from django.db import models
from django.contrib.auth.models import User
# Create your models here.
class CompletedCourse(models.Model):
"""
Model for completed studies
"""
student = models.ForeignKey(User, related_name='completed_courses')
code = models.CharField(max_length=11)
name = models.CharField(max_length=100)
cr = models.IntegerField(null=True)
ocr = models.IntegerField(null=True)
grade = models.CharField(max_length=5)
date = models.DateField()
teacher = models.CharField(max_length=60)
class Teacher(models.Model):
"""
should be updated for the teachers to combine them with course information
"""
name = models.CharField(max_length = 30)
|
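Loosening cr and ocr to nullable is also a schema change. This codebase predates Django's built-in migrations, so the columns would need a manual ALTER TABLE (or a syncdb from scratch); on a modern project the equivalent generated migration would read roughly as follows (app and dependency names illustrative):

from django.db import migrations, models

class Migration(migrations.Migration):
    dependencies = [('completedstudies', '0001_initial')]  # illustrative
    operations = [
        migrations.AlterField('completedcourse', 'cr',
                              models.IntegerField(null=True)),
        migrations.AlterField('completedcourse', 'ocr',
                              models.IntegerField(null=True)),
    ]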
8dbcd07e0db34d39ad8f79067282d4359d79439d
|
usr/bin/graphical-app-launcher.py
|
usr/bin/graphical-app-launcher.py
|
#!/usr/bin/env python
import os
import subprocess
if __name__ == '__main__':
if os.environ.has_key('APP'):
graphical_app = os.environ['APP']
process = subprocess.Popen(graphical_app, shell=True,
stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
stdoutdata, stderrordata = process.communicate()
print(stdoutdata)
subprocess.call(['sudo', 'supervisorctl', 'shutdown'],
stdout=subprocess.PIPE)
|
#!/usr/bin/env python
import os
import subprocess
if __name__ == '__main__':
if os.environ.has_key('APP'):
graphical_app = os.environ['APP']
if os.environ.has_key('ARGS'):
extra_args = os.environ['ARGS']
command = graphical_app + ' ' + extra_args
else:
command = graphical_app
process = subprocess.Popen(command, shell=True,
stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
stdoutdata, stderrordata = process.communicate()
print(stdoutdata)
subprocess.call(['sudo', 'supervisorctl', 'shutdown'],
stdout=subprocess.PIPE)
|
Support an ARGS environmental variable for extra command arguments.
|
Support an ARGS environmental variable for extra command arguments.
|
Python
|
apache-2.0
|
thewtex/docker-opengl,thewtex/docker-opengl
|
#!/usr/bin/env python
import os
import subprocess
if __name__ == '__main__':
if os.environ.has_key('APP'):
graphical_app = os.environ['APP']
process = subprocess.Popen(graphical_app, shell=True,
stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
stdoutdata, stderrordata = process.communicate()
print(stdoutdata)
subprocess.call(['sudo', 'supervisorctl', 'shutdown'],
stdout=subprocess.PIPE)
Support an ARGS environmental variable for extra command arguments.
|
#!/usr/bin/env python
import os
import subprocess
if __name__ == '__main__':
if os.environ.has_key('APP'):
graphical_app = os.environ['APP']
if os.environ.has_key('ARGS'):
extra_args = os.environ['ARGS']
command = graphical_app + ' ' + extra_args
else:
command = graphical_app
process = subprocess.Popen(command, shell=True,
stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
stdoutdata, stderrordata = process.communicate()
print(stdoutdata)
subprocess.call(['sudo', 'supervisorctl', 'shutdown'],
stdout=subprocess.PIPE)
|
<commit_before>#!/usr/bin/env python
import os
import subprocess
if __name__ == '__main__':
if os.environ.has_key('APP'):
graphical_app = os.environ['APP']
process = subprocess.Popen(graphical_app, shell=True,
stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
stdoutdata, stderrordata = process.communicate()
print(stdoutdata)
subprocess.call(['sudo', 'supervisorctl', 'shutdown'],
stdout=subprocess.PIPE)
<commit_msg>Support an ARGS environmental variable for extra command arguments.<commit_after>
|
#!/usr/bin/env python
import os
import subprocess
if __name__ == '__main__':
if os.environ.has_key('APP'):
graphical_app = os.environ['APP']
if os.environ.has_key('ARGS'):
extra_args = os.environ['ARGS']
command = graphical_app + ' ' + extra_args
else:
command = graphical_app
process = subprocess.Popen(command, shell=True,
stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
stdoutdata, stderrordata = process.communicate()
print(stdoutdata)
subprocess.call(['sudo', 'supervisorctl', 'shutdown'],
stdout=subprocess.PIPE)
|
#!/usr/bin/env python
import os
import subprocess
if __name__ == '__main__':
if os.environ.has_key('APP'):
graphical_app = os.environ['APP']
process = subprocess.Popen(graphical_app, shell=True,
stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
stdoutdata, stderrordata = process.communicate()
print(stdoutdata)
subprocess.call(['sudo', 'supervisorctl', 'shutdown'],
stdout=subprocess.PIPE)
Support an ARGS environmental variable for extra command arguments.#!/usr/bin/env python
import os
import subprocess
if __name__ == '__main__':
if os.environ.has_key('APP'):
graphical_app = os.environ['APP']
if os.environ.has_key('ARGS'):
extra_args = os.environ['ARGS']
command = graphical_app + ' ' + extra_args
else:
command = graphical_app
process = subprocess.Popen(command, shell=True,
stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
stdoutdata, stderrordata = process.communicate()
print(stdoutdata)
subprocess.call(['sudo', 'supervisorctl', 'shutdown'],
stdout=subprocess.PIPE)
|
<commit_before>#!/usr/bin/env python
import os
import subprocess
if __name__ == '__main__':
if os.environ.has_key('APP'):
graphical_app = os.environ['APP']
process = subprocess.Popen(graphical_app, shell=True,
stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
stdoutdata, stderrordata = process.communicate()
print(stdoutdata)
subprocess.call(['sudo', 'supervisorctl', 'shutdown'],
stdout=subprocess.PIPE)
<commit_msg>Support an ARGS environmental variable for extra command arguments.<commit_after>#!/usr/bin/env python
import os
import subprocess
if __name__ == '__main__':
if os.environ.has_key('APP'):
graphical_app = os.environ['APP']
if os.environ.has_key('ARGS'):
extra_args = os.environ['ARGS']
command = graphical_app + ' ' + extra_args
else:
command = graphical_app
process = subprocess.Popen(command, shell=True,
stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
stdoutdata, stderrordata = process.communicate()
print(stdoutdata)
subprocess.call(['sudo', 'supervisorctl', 'shutdown'],
stdout=subprocess.PIPE)
|
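The has_key calls pin this launcher to Python 2. A rough Python 3 rendering of the same APP/ARGS plumbing, sketched with os.environ.get and shlex in place of shell string concatenation:

import os
import shlex
import subprocess

app = os.environ.get('APP')
if app:
    # shlex.split keeps quoted arguments intact without shell=True.
    command = shlex.split(app) + shlex.split(os.environ.get('ARGS', ''))
    result = subprocess.run(command, stdout=subprocess.PIPE,
                            stderr=subprocess.STDOUT)
    print(result.stdout.decode())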
830c73beafa359e01fb839901bcb91360c1de365
|
web/thesaurus_template_generators.py
|
web/thesaurus_template_generators.py
|
import json
from web.MetaInfo import MetaInfo
def generate_language_template(language_id, structure_id, version=None):
meta_info = MetaInfo()
if structure_id not in meta_info.data_structures:
raise ValueError
language_name = meta_info.languages.get(
language_id,
{'name': 'Human-Readable Language Name'}
)['name']
meta = {
'language': language_id,
'language_name': language_name,
'structure': structure_id,
}
if version:
meta['language_version'] = version
concepts = {
id: {'code': [""]}
for category in meta_info.structure(structure_id).categories.values()
for (id, name) in category.items()
}
return json.dumps({'meta': meta, 'concepts': concepts}, indent=2)
def generate_meta_template(structure_id, structure_name):
meta = {
'structure': structure_id,
'structure_name': structure_name,
}
categories = {
'First Category Name': {
'concept_id1': 'Name of Concept 1',
'concept_id2': 'Name of Concept 2'
},
'Second Category Name': {
'concept_id3': 'Name of Concept 3',
'concept_id4': 'Name of Concept 4'
}
}
return json.dumps({'meta': meta, 'categories': categories})
|
"""Generator functions for thesaurus files"""
import json
from web.MetaInfo import MetaInfo
def generate_language_template(language_id, structure_id, version=None):
"""Generate a template for the given language and structure"""
meta_info = MetaInfo()
if structure_id not in meta_info.data_structures:
raise ValueError
language_name = meta_info.languages.get(
language_id,
{'name': 'Human-Readable Language Name'}
)['name']
meta = {
'language': language_id,
'language_name': language_name,
'structure': structure_id,
}
if version:
meta['language_version'] = version
concepts = {
id: {
'name': name,
'code': [""],
}
for category in meta_info.structure(structure_id).categories.values()
for (id, name) in category.items()
}
return json.dumps({'meta': meta, 'concepts': concepts}, indent=2)
def generate_meta_template(structure_id, structure_name):
"""Generate a template for a `meta file`"""
meta = {
'structure': structure_id,
'structure_name': structure_name,
}
categories = {
'First Category Name': {
'concept_id1': 'Name of Concept 1',
'concept_id2': 'Name of Concept 2'
},
'Second Category Name': {
'concept_id3': 'Name of Concept 3',
'concept_id4': 'Name of Concept 4'
}
}
return json.dumps({'meta': meta, 'categories': categories})
|
Add name fields to generated templates
|
Add name fields to generated templates
|
Python
|
agpl-3.0
|
codethesaurus/codethesaur.us,codethesaurus/codethesaur.us
|
import json
from web.MetaInfo import MetaInfo
def generate_language_template(language_id, structure_id, version=None):
meta_info = MetaInfo()
if structure_id not in meta_info.data_structures:
raise ValueError
language_name = meta_info.languages.get(
language_id,
{'name': 'Human-Readable Language Name'}
)['name']
meta = {
'language': language_id,
'language_name': language_name,
'structure': structure_id,
}
if version:
meta['language_version'] = version
concepts = {
id: {'code': [""]}
for category in meta_info.structure(structure_id).categories.values()
for (id, name) in category.items()
}
return json.dumps({'meta': meta, 'concepts': concepts}, indent=2)
def generate_meta_template(structure_id, structure_name):
meta = {
'structure': structure_id,
'structure_name': structure_name,
}
categories = {
'First Category Name': {
'concept_id1': 'Name of Concept 1',
'concept_id2': 'Name of Concept 2'
},
'Second Category Name': {
'concept_id3': 'Name of Concept 3',
'concept_id4': 'Name of Concept 4'
}
}
return json.dumps({'meta': meta, 'categories': categories})
Add name fields to generated templates
|
"""Generator functions for thesaurus files"""
import json
from web.MetaInfo import MetaInfo
def generate_language_template(language_id, structure_id, version=None):
"""Generate a template for the given language and structure"""
meta_info = MetaInfo()
if structure_id not in meta_info.data_structures:
raise ValueError
language_name = meta_info.languages.get(
language_id,
{'name': 'Human-Readable Language Name'}
)['name']
meta = {
'language': language_id,
'language_name': language_name,
'structure': structure_id,
}
if version:
meta['language_version'] = version
concepts = {
id: {
'name': name,
'code': [""],
}
for category in meta_info.structure(structure_id).categories.values()
for (id, name) in category.items()
}
return json.dumps({'meta': meta, 'concepts': concepts}, indent=2)
def generate_meta_template(structure_id, structure_name):
"""Generate a template for a `meta file`"""
meta = {
'structure': structure_id,
'structure_name': structure_name,
}
categories = {
'First Category Name': {
'concept_id1': 'Name of Concept 1',
'concept_id2': 'Name of Concept 2'
},
'Second Category Name': {
'concept_id3': 'Name of Concept 3',
'concept_id4': 'Name of Concept 4'
}
}
return json.dumps({'meta': meta, 'categories': categories})
|
<commit_before>import json
from web.MetaInfo import MetaInfo
def generate_language_template(language_id, structure_id, version=None):
meta_info = MetaInfo()
if structure_id not in meta_info.data_structures:
raise ValueError
language_name = meta_info.languages.get(
language_id,
{'name': 'Human-Readable Language Name'}
)['name']
meta = {
'language': language_id,
'language_name': language_name,
'structure': structure_id,
}
if version:
meta['language_version'] = version
concepts = {
id: {'code': [""]}
for category in meta_info.structure(structure_id).categories.values()
for (id, name) in category.items()
}
return json.dumps({'meta': meta, 'concepts': concepts}, indent=2)
def generate_meta_template(structure_id, structure_name):
meta = {
'structure': structure_id,
'structure_name': structure_name,
}
categories = {
'First Category Name': {
'concept_id1': 'Name of Concept 1',
'concept_id2': 'Name of Concept 2'
},
'Second Category Name': {
'concept_id3': 'Name of Concept 3',
'concept_id4': 'Name of Concept 4'
}
}
return json.dumps({'meta': meta, 'categories': categories})
<commit_msg>Add name fields to generated templates<commit_after>
|
"""Generator functions for thesaurus files"""
import json
from web.MetaInfo import MetaInfo
def generate_language_template(language_id, structure_id, version=None):
"""Generate a template for the given language and structure"""
meta_info = MetaInfo()
if structure_id not in meta_info.data_structures:
raise ValueError
language_name = meta_info.languages.get(
language_id,
{'name': 'Human-Readable Language Name'}
)['name']
meta = {
'language': language_id,
'language_name': language_name,
'structure': structure_id,
}
if version:
meta['language_version'] = version
concepts = {
id: {
'name': name,
'code': [""],
}
for category in meta_info.structure(structure_id).categories.values()
for (id, name) in category.items()
}
return json.dumps({'meta': meta, 'concepts': concepts}, indent=2)
def generate_meta_template(structure_id, structure_name):
"""Generate a template for a `meta file`"""
meta = {
'structure': structure_id,
'structure_name': structure_name,
}
categories = {
'First Category Name': {
'concept_id1': 'Name of Concept 1',
'concept_id2': 'Name of Concept 2'
},
'Second Category Name': {
'concept_id3': 'Name of Concept 3',
'concept_id4': 'Name of Concept 4'
}
}
return json.dumps({'meta': meta, 'categories': categories})
|
import json
from web.MetaInfo import MetaInfo
def generate_language_template(language_id, structure_id, version=None):
meta_info = MetaInfo()
if structure_id not in meta_info.data_structures:
raise ValueError
language_name = meta_info.languages.get(
language_id,
{'name': 'Human-Readable Language Name'}
)['name']
meta = {
'language': language_id,
'language_name': language_name,
'structure': structure_id,
}
if version:
meta['language_version'] = version
concepts = {
id: {'code': [""]}
for category in meta_info.structure(structure_id).categories.values()
for (id, name) in category.items()
}
return json.dumps({'meta': meta, 'concepts': concepts}, indent=2)
def generate_meta_template(structure_id, structure_name):
meta = {
'structure': structure_id,
'structure_name': structure_name,
}
categories = {
'First Category Name': {
'concept_id1': 'Name of Concept 1',
'concept_id2': 'Name of Concept 2'
},
'Second Category Name': {
'concept_id3': 'Name of Concept 3',
'concept_id4': 'Name of Concept 4'
}
}
return json.dumps({'meta': meta, 'categories': categories})
Add name fields to generated templates"""Generator functions for thesaurus files"""
import json
from web.MetaInfo import MetaInfo
def generate_language_template(language_id, structure_id, version=None):
"""Generate a template for the given language and structure"""
meta_info = MetaInfo()
if structure_id not in meta_info.data_structures:
raise ValueError
language_name = meta_info.languages.get(
language_id,
{'name': 'Human-Readable Language Name'}
)['name']
meta = {
'language': language_id,
'language_name': language_name,
'structure': structure_id,
}
if version:
meta['language_version'] = version
concepts = {
id: {
'name': name,
'code': [""],
}
for category in meta_info.structure(structure_id).categories.values()
for (id, name) in category.items()
}
return json.dumps({'meta': meta, 'concepts': concepts}, indent=2)
def generate_meta_template(structure_id, structure_name):
"""Generate a template for a `meta file`"""
meta = {
'structure': structure_id,
'structure_name': structure_name,
}
categories = {
'First Category Name': {
'concept_id1': 'Name of Concept 1',
'concept_id2': 'Name of Concept 2'
},
'Second Category Name': {
'concept_id3': 'Name of Concept 3',
'concept_id4': 'Name of Concept 4'
}
}
return json.dumps({'meta': meta, 'categories': categories})
|
<commit_before>import json
from web.MetaInfo import MetaInfo
def generate_language_template(language_id, structure_id, version=None):
meta_info = MetaInfo()
if structure_id not in meta_info.data_structures:
raise ValueError
language_name = meta_info.languages.get(
language_id,
{'name': 'Human-Readable Language Name'}
)['name']
meta = {
'language': language_id,
'language_name': language_name,
'structure': structure_id,
}
if version:
meta['language_version'] = version
concepts = {
id: {'code': [""]}
for category in meta_info.structure(structure_id).categories.values()
for (id, name) in category.items()
}
return json.dumps({'meta': meta, 'concepts': concepts}, indent=2)
def generate_meta_template(structure_id, structure_name):
meta = {
'structure': structure_id,
'structure_name': structure_name,
}
categories = {
'First Category Name': {
'concept_id1': 'Name of Concept 1',
'concept_id2': 'Name of Concept 2'
},
'Second Category Name': {
'concept_id3': 'Name of Concept 3',
'concept_id4': 'Name of Concept 4'
}
}
return json.dumps({'meta': meta, 'categories': categories})
<commit_msg>Add name fields to generated templates<commit_after>"""Generator functions for thesaurus files"""
import json
from web.MetaInfo import MetaInfo
def generate_language_template(language_id, structure_id, version=None):
"""Generate a template for the given language and structure"""
meta_info = MetaInfo()
if structure_id not in meta_info.data_structures:
raise ValueError
language_name = meta_info.languages.get(
language_id,
{'name': 'Human-Readable Language Name'}
)['name']
meta = {
'language': language_id,
'language_name': language_name,
'structure': structure_id,
}
if version:
meta['language_version'] = version
concepts = {
id: {
'name': name,
'code': [""],
}
for category in meta_info.structure(structure_id).categories.values()
for (id, name) in category.items()
}
return json.dumps({'meta': meta, 'concepts': concepts}, indent=2)
def generate_meta_template(structure_id, structure_name):
"""Generate a template for a `meta file`"""
meta = {
'structure': structure_id,
'structure_name': structure_name,
}
categories = {
'First Category Name': {
'concept_id1': 'Name of Concept 1',
'concept_id2': 'Name of Concept 2'
},
'Second Category Name': {
'concept_id3': 'Name of Concept 3',
'concept_id4': 'Name of Concept 4'
}
}
return json.dumps({'meta': meta, 'categories': categories})
|
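Of the two generators, generate_meta_template needs no MetaInfo data, so its output can be sanity-checked in isolation (structure id and name are illustrative):

import json
from web.thesaurus_template_generators import generate_meta_template

template = json.loads(generate_meta_template('hash_map', 'Hash Map'))
assert template['meta']['structure_name'] == 'Hash Map'
print(sorted(template['categories']))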
7039dd833186ba8430aae55de3e856ac0426f90c
|
examples/rust_with_cffi/setup.py
|
examples/rust_with_cffi/setup.py
|
#!/usr/bin/env python
import platform
import sys
from setuptools import setup
from setuptools_rust import RustExtension
setup(
name="rust-with-cffi",
version="0.1.0",
classifiers=[
"License :: OSI Approved :: MIT License",
"Development Status :: 3 - Alpha",
"Intended Audience :: Developers",
"Programming Language :: Python",
"Programming Language :: Rust",
"Operating System :: POSIX",
"Operating System :: MacOS :: MacOS X",
],
packages=["rust_with_cffi"],
rust_extensions=[
RustExtension("rust_with_cffi.rust"),
],
cffi_modules=["cffi_module.py:ffi"],
install_requires=["cffi"],
setup_requires=["cffi"],
include_package_data=True,
zip_safe=False,
)
|
#!/usr/bin/env python
import platform
import sys
from setuptools import setup
from setuptools_rust import RustExtension
setup(
name="rust-with-cffi",
version="0.1.0",
classifiers=[
"License :: OSI Approved :: MIT License",
"Development Status :: 3 - Alpha",
"Intended Audience :: Developers",
"Programming Language :: Python",
"Programming Language :: Rust",
"Operating System :: POSIX",
"Operating System :: MacOS :: MacOS X",
],
packages=["rust_with_cffi"],
rust_extensions=[
RustExtension("rust_with_cffi.rust", py_limited_api="auto"),
],
cffi_modules=["cffi_module.py:ffi"],
install_requires=["cffi"],
setup_requires=["cffi"],
include_package_data=True,
zip_safe=False,
)
|
Use py_limited_api="auto" in rust_with_cffi example
|
Use py_limited_api="auto" in rust_with_cffi example
|
Python
|
mit
|
PyO3/setuptools-rust,PyO3/setuptools-rust
|
#!/usr/bin/env python
import platform
import sys
from setuptools import setup
from setuptools_rust import RustExtension
setup(
name="rust-with-cffi",
version="0.1.0",
classifiers=[
"License :: OSI Approved :: MIT License",
"Development Status :: 3 - Alpha",
"Intended Audience :: Developers",
"Programming Language :: Python",
"Programming Language :: Rust",
"Operating System :: POSIX",
"Operating System :: MacOS :: MacOS X",
],
packages=["rust_with_cffi"],
rust_extensions=[
RustExtension("rust_with_cffi.rust"),
],
cffi_modules=["cffi_module.py:ffi"],
install_requires=["cffi"],
setup_requires=["cffi"],
include_package_data=True,
zip_safe=False,
)
Use py_limited_api="auto" in rust_with_cffi example
|
#!/usr/bin/env python
import platform
import sys
from setuptools import setup
from setuptools_rust import RustExtension
setup(
name="rust-with-cffi",
version="0.1.0",
classifiers=[
"License :: OSI Approved :: MIT License",
"Development Status :: 3 - Alpha",
"Intended Audience :: Developers",
"Programming Language :: Python",
"Programming Language :: Rust",
"Operating System :: POSIX",
"Operating System :: MacOS :: MacOS X",
],
packages=["rust_with_cffi"],
rust_extensions=[
RustExtension("rust_with_cffi.rust", py_limited_api="auto"),
],
cffi_modules=["cffi_module.py:ffi"],
install_requires=["cffi"],
setup_requires=["cffi"],
include_package_data=True,
zip_safe=False,
)
|
<commit_before>#!/usr/bin/env python
import platform
import sys
from setuptools import setup
from setuptools_rust import RustExtension
setup(
name="rust-with-cffi",
version="0.1.0",
classifiers=[
"License :: OSI Approved :: MIT License",
"Development Status :: 3 - Alpha",
"Intended Audience :: Developers",
"Programming Language :: Python",
"Programming Language :: Rust",
"Operating System :: POSIX",
"Operating System :: MacOS :: MacOS X",
],
packages=["rust_with_cffi"],
rust_extensions=[
RustExtension("rust_with_cffi.rust"),
],
cffi_modules=["cffi_module.py:ffi"],
install_requires=["cffi"],
setup_requires=["cffi"],
include_package_data=True,
zip_safe=False,
)
<commit_msg>Use py_limited_api="auto" in rust_with_cffi example<commit_after>
|
#!/usr/bin/env python
import platform
import sys
from setuptools import setup
from setuptools_rust import RustExtension
setup(
name="rust-with-cffi",
version="0.1.0",
classifiers=[
"License :: OSI Approved :: MIT License",
"Development Status :: 3 - Alpha",
"Intended Audience :: Developers",
"Programming Language :: Python",
"Programming Language :: Rust",
"Operating System :: POSIX",
"Operating System :: MacOS :: MacOS X",
],
packages=["rust_with_cffi"],
rust_extensions=[
RustExtension("rust_with_cffi.rust", py_limited_api="auto"),
],
cffi_modules=["cffi_module.py:ffi"],
install_requires=["cffi"],
setup_requires=["cffi"],
include_package_data=True,
zip_safe=False,
)
|
#!/usr/bin/env python
import platform
import sys
from setuptools import setup
from setuptools_rust import RustExtension
setup(
name="rust-with-cffi",
version="0.1.0",
classifiers=[
"License :: OSI Approved :: MIT License",
"Development Status :: 3 - Alpha",
"Intended Audience :: Developers",
"Programming Language :: Python",
"Programming Language :: Rust",
"Operating System :: POSIX",
"Operating System :: MacOS :: MacOS X",
],
packages=["rust_with_cffi"],
rust_extensions=[
RustExtension("rust_with_cffi.rust"),
],
cffi_modules=["cffi_module.py:ffi"],
install_requires=["cffi"],
setup_requires=["cffi"],
include_package_data=True,
zip_safe=False,
)
Use py_limited_api="auto" in rust_with_cffi example#!/usr/bin/env python
import platform
import sys
from setuptools import setup
from setuptools_rust import RustExtension
setup(
name="rust-with-cffi",
version="0.1.0",
classifiers=[
"License :: OSI Approved :: MIT License",
"Development Status :: 3 - Alpha",
"Intended Audience :: Developers",
"Programming Language :: Python",
"Programming Language :: Rust",
"Operating System :: POSIX",
"Operating System :: MacOS :: MacOS X",
],
packages=["rust_with_cffi"],
rust_extensions=[
RustExtension("rust_with_cffi.rust", py_limited_api="auto"),
],
cffi_modules=["cffi_module.py:ffi"],
install_requires=["cffi"],
setup_requires=["cffi"],
include_package_data=True,
zip_safe=False,
)
|
<commit_before>#!/usr/bin/env python
import platform
import sys
from setuptools import setup
from setuptools_rust import RustExtension
setup(
name="rust-with-cffi",
version="0.1.0",
classifiers=[
"License :: OSI Approved :: MIT License",
"Development Status :: 3 - Alpha",
"Intended Audience :: Developers",
"Programming Language :: Python",
"Programming Language :: Rust",
"Operating System :: POSIX",
"Operating System :: MacOS :: MacOS X",
],
packages=["rust_with_cffi"],
rust_extensions=[
RustExtension("rust_with_cffi.rust"),
],
cffi_modules=["cffi_module.py:ffi"],
install_requires=["cffi"],
setup_requires=["cffi"],
include_package_data=True,
zip_safe=False,
)
<commit_msg>Use py_limited_api="auto" in rust_with_cffi example<commit_after>#!/usr/bin/env python
import platform
import sys
from setuptools import setup
from setuptools_rust import RustExtension
setup(
name="rust-with-cffi",
version="0.1.0",
classifiers=[
"License :: OSI Approved :: MIT License",
"Development Status :: 3 - Alpha",
"Intended Audience :: Developers",
"Programming Language :: Python",
"Programming Language :: Rust",
"Operating System :: POSIX",
"Operating System :: MacOS :: MacOS X",
],
packages=["rust_with_cffi"],
rust_extensions=[
RustExtension("rust_with_cffi.rust", py_limited_api="auto"),
],
cffi_modules=["cffi_module.py:ffi"],
install_requires=["cffi"],
setup_requires=["cffi"],
include_package_data=True,
zip_safe=False,
)
|
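As I read it, "auto" defers the ABI choice to the wheel build: abi3 when bdist_wheel is configured for the limited API, a version-specific extension otherwise. A standalone sketch under that assumption (package and module names illustrative):

from setuptools import setup
from setuptools_rust import RustExtension

setup(
    name="example-abi3",  # illustrative
    rust_extensions=[
        # With py_limited_api="auto", setuptools-rust is assumed to target
        # abi3 when the build enables the limited API, e.g. via
        #   [bdist_wheel]
        #   py_limited_api = cp37
        # in setup.cfg, and to build a normal extension otherwise.
        RustExtension("example_abi3.rust", py_limited_api="auto"),
    ],
    zip_safe=False,
)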
4c799fe2c14d5c602f1eda83a3ab226eb2e7060e
|
octane/tests/test_upgrade_node.py
|
octane/tests/test_upgrade_node.py
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
def test_parser(mocker, octane_app):
m = mocker.patch('octane.commands.upgrade_node.upgrade_node')
octane_app.run(["upgrade-node", "--isolated", "1", "2", "3"])
assert not octane_app.stdout.getvalue()
assert not octane_app.stderr.getvalue()
m.assert_called_once_with(1, [2, 3], isolated=True, template=None)
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
def test_parser(mocker, octane_app):
m = mocker.patch('octane.commands.upgrade_node.upgrade_node')
octane_app.run(["upgrade-node", "--isolated", "1", "2", "3"])
assert not octane_app.stdout.getvalue()
assert not octane_app.stderr.getvalue()
m.assert_called_once_with(1, [2, 3], isolated=True, network_template=None)
|
Fix test for upgrade-node command parser
|
Fix test for upgrade-node command parser
Change-Id: I87746a3806f7dada04f5e650e17796fd4e65f55b
|
Python
|
apache-2.0
|
stackforge/fuel-octane,Mirantis/octane,stackforge/fuel-octane,Mirantis/octane
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
def test_parser(mocker, octane_app):
m = mocker.patch('octane.commands.upgrade_node.upgrade_node')
octane_app.run(["upgrade-node", "--isolated", "1", "2", "3"])
assert not octane_app.stdout.getvalue()
assert not octane_app.stderr.getvalue()
m.assert_called_once_with(1, [2, 3], isolated=True, template=None)
Fix test for upgrade-node command parser
Change-Id: I87746a3806f7dada04f5e650e17796fd4e65f55b
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
def test_parser(mocker, octane_app):
m = mocker.patch('octane.commands.upgrade_node.upgrade_node')
octane_app.run(["upgrade-node", "--isolated", "1", "2", "3"])
assert not octane_app.stdout.getvalue()
assert not octane_app.stderr.getvalue()
m.assert_called_once_with(1, [2, 3], isolated=True, network_template=None)
|
<commit_before># Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
def test_parser(mocker, octane_app):
m = mocker.patch('octane.commands.upgrade_node.upgrade_node')
octane_app.run(["upgrade-node", "--isolated", "1", "2", "3"])
assert not octane_app.stdout.getvalue()
assert not octane_app.stderr.getvalue()
m.assert_called_once_with(1, [2, 3], isolated=True, template=None)
<commit_msg>Fix test for upgrade-node command parser
Change-Id: I87746a3806f7dada04f5e650e17796fd4e65f55b<commit_after>
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
def test_parser(mocker, octane_app):
m = mocker.patch('octane.commands.upgrade_node.upgrade_node')
octane_app.run(["upgrade-node", "--isolated", "1", "2", "3"])
assert not octane_app.stdout.getvalue()
assert not octane_app.stderr.getvalue()
m.assert_called_once_with(1, [2, 3], isolated=True, network_template=None)
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
def test_parser(mocker, octane_app):
m = mocker.patch('octane.commands.upgrade_node.upgrade_node')
octane_app.run(["upgrade-node", "--isolated", "1", "2", "3"])
assert not octane_app.stdout.getvalue()
assert not octane_app.stderr.getvalue()
m.assert_called_once_with(1, [2, 3], isolated=True, template=None)
Fix test for upgrade-node command parser
Change-Id: I87746a3806f7dada04f5e650e17796fd4e65f55b# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
def test_parser(mocker, octane_app):
m = mocker.patch('octane.commands.upgrade_node.upgrade_node')
octane_app.run(["upgrade-node", "--isolated", "1", "2", "3"])
assert not octane_app.stdout.getvalue()
assert not octane_app.stderr.getvalue()
m.assert_called_once_with(1, [2, 3], isolated=True, network_template=None)
|
<commit_before># Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
def test_parser(mocker, octane_app):
m = mocker.patch('octane.commands.upgrade_node.upgrade_node')
octane_app.run(["upgrade-node", "--isolated", "1", "2", "3"])
assert not octane_app.stdout.getvalue()
assert not octane_app.stderr.getvalue()
m.assert_called_once_with(1, [2, 3], isolated=True, template=None)
<commit_msg>Fix test for upgrade-node command parser
Change-Id: I87746a3806f7dada04f5e650e17796fd4e65f55b<commit_after># Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
def test_parser(mocker, octane_app):
m = mocker.patch('octane.commands.upgrade_node.upgrade_node')
octane_app.run(["upgrade-node", "--isolated", "1", "2", "3"])
assert not octane_app.stdout.getvalue()
assert not octane_app.stderr.getvalue()
m.assert_called_once_with(1, [2, 3], isolated=True, network_template=None)
|
fe9e11af28e2ffe2b3da5ebb0971cd712136284c
|
nodeconductor/iaas/migrations/0011_cloudprojectmembership_availability_zone.py
|
nodeconductor/iaas/migrations/0011_cloudprojectmembership_availability_zone.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('iaas', '0010_auto_20150118_1834'),
]
operations = [
migrations.AddField(
model_name='cloudprojectmembership',
name='availability_zone',
field=models.CharField(max_length=100, blank=True),
preserve_default=True,
),
]
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('iaas', '0010_auto_20150118_1834'),
]
operations = [
migrations.AddField(
model_name='cloudprojectmembership',
name='availability_zone',
field=models.CharField(help_text='Optional availability group. Will be used for all instances provisioned in this tenant', max_length=100, blank=True),
preserve_default=True,
),
]
|
Add help_text to availability_zone field (nc-327)
|
Add help_text to availability_zone field (nc-327)
|
Python
|
mit
|
opennode/nodeconductor,opennode/nodeconductor,opennode/nodeconductor
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('iaas', '0010_auto_20150118_1834'),
]
operations = [
migrations.AddField(
model_name='cloudprojectmembership',
name='availability_zone',
field=models.CharField(max_length=100, blank=True),
preserve_default=True,
),
]
Add help_text to availability_zone field (nc-327)
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('iaas', '0010_auto_20150118_1834'),
]
operations = [
migrations.AddField(
model_name='cloudprojectmembership',
name='availability_zone',
field=models.CharField(help_text='Optional availability group. Will be used for all instances provisioned in this tenant', max_length=100, blank=True),
preserve_default=True,
),
]
|
<commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('iaas', '0010_auto_20150118_1834'),
]
operations = [
migrations.AddField(
model_name='cloudprojectmembership',
name='availability_zone',
field=models.CharField(max_length=100, blank=True),
preserve_default=True,
),
]
<commit_msg>Add help_text to availability_zone field (nc-327)<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('iaas', '0010_auto_20150118_1834'),
]
operations = [
migrations.AddField(
model_name='cloudprojectmembership',
name='availability_zone',
field=models.CharField(help_text='Optional availability group. Will be used for all instances provisioned in this tenant', max_length=100, blank=True),
preserve_default=True,
),
]
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('iaas', '0010_auto_20150118_1834'),
]
operations = [
migrations.AddField(
model_name='cloudprojectmembership',
name='availability_zone',
field=models.CharField(max_length=100, blank=True),
preserve_default=True,
),
]
Add help_text to availability_zone field (nc-327)# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('iaas', '0010_auto_20150118_1834'),
]
operations = [
migrations.AddField(
model_name='cloudprojectmembership',
name='availability_zone',
field=models.CharField(help_text='Optional availability group. Will be used for all instances provisioned in this tenant', max_length=100, blank=True),
preserve_default=True,
),
]
|
<commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('iaas', '0010_auto_20150118_1834'),
]
operations = [
migrations.AddField(
model_name='cloudprojectmembership',
name='availability_zone',
field=models.CharField(max_length=100, blank=True),
preserve_default=True,
),
]
<commit_msg>Add help_text to availability_zone field (nc-327)<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('iaas', '0010_auto_20150118_1834'),
]
operations = [
migrations.AddField(
model_name='cloudprojectmembership',
name='availability_zone',
field=models.CharField(help_text='Optional availability group. Will be used for all instances provisioned in this tenant', max_length=100, blank=True),
preserve_default=True,
),
]
|
a6606da129674fa47b2d32711a6c81f6bcd7d8ca
|
readthedocs/settings/postgres.py
|
readthedocs/settings/postgres.py
|
from .base import *
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'docs',
'USER': 'postgres', # Not used with sqlite3.
'PASSWORD': '',
'HOST': '10.177.73.97',
'PORT': '',
}
}
DEBUG = False
TEMPLATE_DEBUG = False
CELERY_ALWAYS_EAGER = False
MEDIA_URL = 'http://media.readthedocs.org/'
ADMIN_MEDIA_PREFIX = MEDIA_URL + 'admin/'
CACHE_BACKEND = 'memcached://localhost:11211/'
SESSION_ENGINE = "django.contrib.sessions.backends.cached_db"
HAYSTACK_CONNECTIONS = {
'default': {
'ENGINE': 'haystack.backends.solr_backend.SolrEngine',
'URL': 'http://odin:8983/solr',
}
}
SLUMBER_API_HOST = 'http://readthedocs.org'
WEBSOCKET_HOST = 'websocket.readthedocs.org:8088'
try:
from local_settings import *
except:
pass
|
from .base import *
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'docs',
'USER': 'postgres', # Not used with sqlite3.
'PASSWORD': '',
'HOST': '10.177.73.97',
'PORT': '',
}
}
DEBUG = False
TEMPLATE_DEBUG = False
CELERY_ALWAYS_EAGER = False
MEDIA_URL = '//media.readthedocs.org/'
ADMIN_MEDIA_PREFIX = MEDIA_URL + 'admin/'
CACHE_BACKEND = 'memcached://localhost:11211/'
SESSION_ENGINE = "django.contrib.sessions.backends.cached_db"
HAYSTACK_CONNECTIONS = {
'default': {
'ENGINE': 'haystack.backends.solr_backend.SolrEngine',
'URL': 'http://odin:8983/solr',
}
}
SLUMBER_API_HOST = 'http://readthedocs.org'
WEBSOCKET_HOST = 'websocket.readthedocs.org:8088'
try:
from local_settings import *
except:
pass
|
Use protocol-specific URL for media.
|
Use protocol-specific URL for media.
|
Python
|
mit
|
agjohnson/readthedocs.org,tddv/readthedocs.org,VishvajitP/readthedocs.org,sils1297/readthedocs.org,GovReady/readthedocs.org,jerel/readthedocs.org,soulshake/readthedocs.org,fujita-shintaro/readthedocs.org,raven47git/readthedocs.org,Carreau/readthedocs.org,LukasBoersma/readthedocs.org,gjtorikian/readthedocs.org,Tazer/readthedocs.org,michaelmcandrew/readthedocs.org,kenshinthebattosai/readthedocs.org,asampat3090/readthedocs.org,KamranMackey/readthedocs.org,kenwang76/readthedocs.org,istresearch/readthedocs.org,titiushko/readthedocs.org,stevepiercy/readthedocs.org,pombredanne/readthedocs.org,dirn/readthedocs.org,safwanrahman/readthedocs.org,techtonik/readthedocs.org,tddv/readthedocs.org,attakei/readthedocs-oauth,istresearch/readthedocs.org,fujita-shintaro/readthedocs.org,safwanrahman/readthedocs.org,fujita-shintaro/readthedocs.org,atsuyim/readthedocs.org,royalwang/readthedocs.org,wanghaven/readthedocs.org,emawind84/readthedocs.org,clarkperkins/readthedocs.org,d0ugal/readthedocs.org,clarkperkins/readthedocs.org,rtfd/readthedocs.org,davidfischer/readthedocs.org,michaelmcandrew/readthedocs.org,sils1297/readthedocs.org,sils1297/readthedocs.org,Tazer/readthedocs.org,titiushko/readthedocs.org,stevepiercy/readthedocs.org,mhils/readthedocs.org,raven47git/readthedocs.org,espdev/readthedocs.org,wijerasa/readthedocs.org,wanghaven/readthedocs.org,KamranMackey/readthedocs.org,soulshake/readthedocs.org,LukasBoersma/readthedocs.org,kenshinthebattosai/readthedocs.org,VishvajitP/readthedocs.org,cgourlay/readthedocs.org,nikolas/readthedocs.org,laplaceliu/readthedocs.org,laplaceliu/readthedocs.org,jerel/readthedocs.org,CedarLogic/readthedocs.org,royalwang/readthedocs.org,kdkeyser/readthedocs.org,d0ugal/readthedocs.org,SteveViss/readthedocs.org,emawind84/readthedocs.org,attakei/readthedocs-oauth,nikolas/readthedocs.org,titiushko/readthedocs.org,takluyver/readthedocs.org,safwanrahman/readthedocs.org,espdev/readthedocs.org,royalwang/readthedocs.org,sunnyzwh/readthedocs.org,sid-kap/readthedocs.org,wijerasa/readthedocs.org,asampat3090/readthedocs.org,takluyver/readthedocs.org,LukasBoersma/readthedocs.org,emawind84/readthedocs.org,nyergler/pythonslides,sid-kap/readthedocs.org,takluyver/readthedocs.org,clarkperkins/readthedocs.org,Carreau/readthedocs.org,michaelmcandrew/readthedocs.org,davidfischer/readthedocs.org,kenshinthebattosai/readthedocs.org,nyergler/pythonslides,raven47git/readthedocs.org,cgourlay/readthedocs.org,mrshoki/readthedocs.org,hach-que/readthedocs.org,GovReady/readthedocs.org,royalwang/readthedocs.org,sunnyzwh/readthedocs.org,techtonik/readthedocs.org,istresearch/readthedocs.org,laplaceliu/readthedocs.org,kenshinthebattosai/readthedocs.org,VishvajitP/readthedocs.org,tddv/readthedocs.org,SteveViss/readthedocs.org,pombredanne/readthedocs.org,CedarLogic/readthedocs.org,sils1297/readthedocs.org,istresearch/readthedocs.org,asampat3090/readthedocs.org,sunnyzwh/readthedocs.org,CedarLogic/readthedocs.org,rtfd/readthedocs.org,cgourlay/readthedocs.org,jerel/readthedocs.org,Carreau/readthedocs.org,atsuyim/readthedocs.org,kenwang76/readthedocs.org,mhils/readthedocs.org,espdev/readthedocs.org,kdkeyser/readthedocs.org,kdkeyser/readthedocs.org,jerel/readthedocs.org,CedarLogic/readthedocs.org,nyergler/pythonslides,cgourlay/readthedocs.org,ojii/readthedocs.org,KamranMackey/readthedocs.org,atsuyim/readthedocs.org,techtonik/readthedocs.org,clarkperkins/readthedocs.org,michaelmcandrew/readthedocs.org,dirn/readthedocs.org,SteveViss/readthedocs.org,Tazer/readthedocs.org,rtfd/readthedocs.org,Carreau/readthedocs.org,davidfischer/readthedocs.org,wijerasa/readthedocs.org,emawind84/readthedocs.org,wanghaven/readthedocs.org,kenwang76/readthedocs.org,gjtorikian/readthedocs.org,titiushko/readthedocs.org,wijerasa/readthedocs.org,laplaceliu/readthedocs.org,espdev/readthedocs.org,asampat3090/readthedocs.org,agjohnson/readthedocs.org,fujita-shintaro/readthedocs.org,singingwolfboy/readthedocs.org,raven47git/readthedocs.org,nikolas/readthedocs.org,mhils/readthedocs.org,soulshake/readthedocs.org,davidfischer/readthedocs.org,safwanrahman/readthedocs.org,nikolas/readthedocs.org,agjohnson/readthedocs.org,GovReady/readthedocs.org,ojii/readthedocs.org,sunnyzwh/readthedocs.org,SteveViss/readthedocs.org,LukasBoersma/readthedocs.org,ojii/readthedocs.org,gjtorikian/readthedocs.org,singingwolfboy/readthedocs.org,Tazer/readthedocs.org,espdev/readthedocs.org,takluyver/readthedocs.org,KamranMackey/readthedocs.org,singingwolfboy/readthedocs.org,kenwang76/readthedocs.org,stevepiercy/readthedocs.org,wanghaven/readthedocs.org,dirn/readthedocs.org,kdkeyser/readthedocs.org,nyergler/pythonslides,d0ugal/readthedocs.org,hach-que/readthedocs.org,soulshake/readthedocs.org,GovReady/readthedocs.org,mrshoki/readthedocs.org,d0ugal/readthedocs.org,hach-que/readthedocs.org,atsuyim/readthedocs.org,sid-kap/readthedocs.org,ojii/readthedocs.org
|
from .base import *
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'docs',
'USER': 'postgres', # Not used with sqlite3.
'PASSWORD': '',
'HOST': '10.177.73.97',
'PORT': '',
}
}
DEBUG = False
TEMPLATE_DEBUG = False
CELERY_ALWAYS_EAGER = False
MEDIA_URL = 'http://media.readthedocs.org/'
ADMIN_MEDIA_PREFIX = MEDIA_URL + 'admin/'
CACHE_BACKEND = 'memcached://localhost:11211/'
SESSION_ENGINE = "django.contrib.sessions.backends.cached_db"
HAYSTACK_CONNECTIONS = {
'default': {
'ENGINE': 'haystack.backends.solr_backend.SolrEngine',
'URL': 'http://odin:8983/solr',
}
}
SLUMBER_API_HOST = 'http://readthedocs.org'
WEBSOCKET_HOST = 'websocket.readthedocs.org:8088'
try:
from local_settings import *
except:
pass
Use protocol-specific URL for media.
|
from .base import *
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'docs',
'USER': 'postgres', # Not used with sqlite3.
'PASSWORD': '',
'HOST': '10.177.73.97',
'PORT': '',
}
}
DEBUG = False
TEMPLATE_DEBUG = False
CELERY_ALWAYS_EAGER = False
MEDIA_URL = '//media.readthedocs.org/'
ADMIN_MEDIA_PREFIX = MEDIA_URL + 'admin/'
CACHE_BACKEND = 'memcached://localhost:11211/'
SESSION_ENGINE = "django.contrib.sessions.backends.cached_db"
HAYSTACK_CONNECTIONS = {
'default': {
'ENGINE': 'haystack.backends.solr_backend.SolrEngine',
'URL': 'http://odin:8983/solr',
}
}
SLUMBER_API_HOST = 'http://readthedocs.org'
WEBSOCKET_HOST = 'websocket.readthedocs.org:8088'
try:
from local_settings import *
except:
pass
|
<commit_before>from .base import *
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'docs',
'USER': 'postgres', # Not used with sqlite3.
'PASSWORD': '',
'HOST': '10.177.73.97',
'PORT': '',
}
}
DEBUG = False
TEMPLATE_DEBUG = False
CELERY_ALWAYS_EAGER = False
MEDIA_URL = 'http://media.readthedocs.org/'
ADMIN_MEDIA_PREFIX = MEDIA_URL + 'admin/'
CACHE_BACKEND = 'memcached://localhost:11211/'
SESSION_ENGINE = "django.contrib.sessions.backends.cached_db"
HAYSTACK_CONNECTIONS = {
'default': {
'ENGINE': 'haystack.backends.solr_backend.SolrEngine',
'URL': 'http://odin:8983/solr',
}
}
SLUMBER_API_HOST = 'http://readthedocs.org'
WEBSOCKET_HOST = 'websocket.readthedocs.org:8088'
try:
from local_settings import *
except:
pass
<commit_msg>Use protocol-specific URL for media.<commit_after>
|
from .base import *
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'docs',
'USER': 'postgres', # Not used with sqlite3.
'PASSWORD': '',
'HOST': '10.177.73.97',
'PORT': '',
}
}
DEBUG = False
TEMPLATE_DEBUG = False
CELERY_ALWAYS_EAGER = False
MEDIA_URL = '//media.readthedocs.org/'
ADMIN_MEDIA_PREFIX = MEDIA_URL + 'admin/'
CACHE_BACKEND = 'memcached://localhost:11211/'
SESSION_ENGINE = "django.contrib.sessions.backends.cached_db"
HAYSTACK_CONNECTIONS = {
'default': {
'ENGINE': 'haystack.backends.solr_backend.SolrEngine',
'URL': 'http://odin:8983/solr',
}
}
SLUMBER_API_HOST = 'http://readthedocs.org'
WEBSOCKET_HOST = 'websocket.readthedocs.org:8088'
try:
from local_settings import *
except:
pass
|
from .base import *
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'docs',
'USER': 'postgres', # Not used with sqlite3.
'PASSWORD': '',
'HOST': '10.177.73.97',
'PORT': '',
}
}
DEBUG = False
TEMPLATE_DEBUG = False
CELERY_ALWAYS_EAGER = False
MEDIA_URL = 'http://media.readthedocs.org/'
ADMIN_MEDIA_PREFIX = MEDIA_URL + 'admin/'
CACHE_BACKEND = 'memcached://localhost:11211/'
SESSION_ENGINE = "django.contrib.sessions.backends.cached_db"
HAYSTACK_CONNECTIONS = {
'default': {
'ENGINE': 'haystack.backends.solr_backend.SolrEngine',
'URL': 'http://odin:8983/solr',
}
}
SLUMBER_API_HOST = 'http://readthedocs.org'
WEBSOCKET_HOST = 'websocket.readthedocs.org:8088'
try:
from local_settings import *
except:
pass
Use protocol-specific URL for media.from .base import *
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'docs',
'USER': 'postgres', # Not used with sqlite3.
'PASSWORD': '',
'HOST': '10.177.73.97',
'PORT': '',
}
}
DEBUG = False
TEMPLATE_DEBUG = False
CELERY_ALWAYS_EAGER = False
MEDIA_URL = '//media.readthedocs.org/'
ADMIN_MEDIA_PREFIX = MEDIA_URL + 'admin/'
CACHE_BACKEND = 'memcached://localhost:11211/'
SESSION_ENGINE = "django.contrib.sessions.backends.cached_db"
HAYSTACK_CONNECTIONS = {
'default': {
'ENGINE': 'haystack.backends.solr_backend.SolrEngine',
'URL': 'http://odin:8983/solr',
}
}
SLUMBER_API_HOST = 'http://readthedocs.org'
WEBSOCKET_HOST = 'websocket.readthedocs.org:8088'
try:
from local_settings import *
except:
pass
|
<commit_before>from .base import *
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'docs',
'USER': 'postgres', # Not used with sqlite3.
'PASSWORD': '',
'HOST': '10.177.73.97',
'PORT': '',
}
}
DEBUG = False
TEMPLATE_DEBUG = False
CELERY_ALWAYS_EAGER = False
MEDIA_URL = 'http://media.readthedocs.org/'
ADMIN_MEDIA_PREFIX = MEDIA_URL + 'admin/'
CACHE_BACKEND = 'memcached://localhost:11211/'
SESSION_ENGINE = "django.contrib.sessions.backends.cached_db"
HAYSTACK_CONNECTIONS = {
'default': {
'ENGINE': 'haystack.backends.solr_backend.SolrEngine',
'URL': 'http://odin:8983/solr',
}
}
SLUMBER_API_HOST = 'http://readthedocs.org'
WEBSOCKET_HOST = 'websocket.readthedocs.org:8088'
try:
from local_settings import *
except:
pass
<commit_msg>Use protocol-specific URL for media.<commit_after>from .base import *
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'docs',
'USER': 'postgres', # Not used with sqlite3.
'PASSWORD': '',
'HOST': '10.177.73.97',
'PORT': '',
}
}
DEBUG = False
TEMPLATE_DEBUG = False
CELERY_ALWAYS_EAGER = False
MEDIA_URL = '//media.readthedocs.org/'
ADMIN_MEDIA_PREFIX = MEDIA_URL + 'admin/'
CACHE_BACKEND = 'memcached://localhost:11211/'
SESSION_ENGINE = "django.contrib.sessions.backends.cached_db"
HAYSTACK_CONNECTIONS = {
'default': {
'ENGINE': 'haystack.backends.solr_backend.SolrEngine',
'URL': 'http://odin:8983/solr',
}
}
SLUMBER_API_HOST = 'http://readthedocs.org'
WEBSOCKET_HOST = 'websocket.readthedocs.org:8088'
try:
from local_settings import *
except:
pass
|
1187deb4140ad0c7d66f0f25ae6d019f8ffb6168
|
bluebottle/homepage/views.py
|
bluebottle/homepage/views.py
|
from django.utils import translation
from rest_framework import response
from bluebottle.utils.views import GenericAPIView
from .models import HomePage
from .serializers import HomePageSerializer
# Instead of serving all the objects separately we combine Slide, Quote and Stats into a dummy object
class HomePageDetail(GenericAPIView):
serializer_class = HomePageSerializer
model = HomePage
def get(self, request, language='en'):
# Force requested language
translation.activate(language)
request.LANGUAGE_CODE = translation.get_language()
homepage = HomePage().get(language)
serialized = HomePageSerializer().to_representation(homepage)
return response.Response(serialized)
|
from django.utils import translation
from rest_framework import response
from bluebottle.utils.views import GenericAPIView
from .models import HomePage
from .serializers import HomePageSerializer
# Instead of serving all the objects separately we combine Slide, Quote and Stats into a dummy object
class HomePageDetail(GenericAPIView):
serializer_class = HomePageSerializer
model = HomePage
def get(self, request, language='en'):
# Force requested language
translation.activate(language)
request.LANGUAGE_CODE = translation.get_language()
homepage = HomePage().get(language)
serialized = HomePageSerializer(
context=self.get_serializer_context()
).to_representation(homepage)
return response.Response(serialized)
|
Add proper context to homepage serializer
|
Add proper context to homepage serializer
|
Python
|
bsd-3-clause
|
onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle
|
from django.utils import translation
from rest_framework import response
from bluebottle.utils.views import GenericAPIView
from .models import HomePage
from .serializers import HomePageSerializer
# Instead of serving all the objects separately we combine Slide, Quote and Stats into a dummy object
class HomePageDetail(GenericAPIView):
serializer_class = HomePageSerializer
model = HomePage
def get(self, request, language='en'):
# Force requested language
translation.activate(language)
request.LANGUAGE_CODE = translation.get_language()
homepage = HomePage().get(language)
serialized = HomePageSerializer().to_representation(homepage)
return response.Response(serialized)
Add proper context to homepage serializer
|
from django.utils import translation
from rest_framework import response
from bluebottle.utils.views import GenericAPIView
from .models import HomePage
from .serializers import HomePageSerializer
# Instead of serving all the objects separately we combine Slide, Quote and Stats into a dummy object
class HomePageDetail(GenericAPIView):
serializer_class = HomePageSerializer
model = HomePage
def get(self, request, language='en'):
# Force requested language
translation.activate(language)
request.LANGUAGE_CODE = translation.get_language()
homepage = HomePage().get(language)
serialized = HomePageSerializer(
context=self.get_serializer_context()
).to_representation(homepage)
return response.Response(serialized)
|
<commit_before>from django.utils import translation
from rest_framework import response
from bluebottle.utils.views import GenericAPIView
from .models import HomePage
from .serializers import HomePageSerializer
# Instead of serving all the objects separately we combine Slide, Quote and Stats into a dummy object
class HomePageDetail(GenericAPIView):
serializer_class = HomePageSerializer
model = HomePage
def get(self, request, language='en'):
# Force requested language
translation.activate(language)
request.LANGUAGE_CODE = translation.get_language()
homepage = HomePage().get(language)
serialized = HomePageSerializer().to_representation(homepage)
return response.Response(serialized)
<commit_msg>Add proper context to homepage serializer<commit_after>
|
from django.utils import translation
from rest_framework import response
from bluebottle.utils.views import GenericAPIView
from .models import HomePage
from .serializers import HomePageSerializer
# Instead of serving all the objects separately we combine Slide, Quote and Stats into a dummy object
class HomePageDetail(GenericAPIView):
serializer_class = HomePageSerializer
model = HomePage
def get(self, request, language='en'):
# Force requested language
translation.activate(language)
request.LANGUAGE_CODE = translation.get_language()
homepage = HomePage().get(language)
serialized = HomePageSerializer(
context=self.get_serializer_context()
).to_representation(homepage)
return response.Response(serialized)
|
from django.utils import translation
from rest_framework import response
from bluebottle.utils.views import GenericAPIView
from .models import HomePage
from .serializers import HomePageSerializer
# Instead of serving all the objects separately we combine Slide, Quote and Stats into a dummy object
class HomePageDetail(GenericAPIView):
serializer_class = HomePageSerializer
model = HomePage
def get(self, request, language='en'):
# Force requested language
translation.activate(language)
request.LANGUAGE_CODE = translation.get_language()
homepage = HomePage().get(language)
serialized = HomePageSerializer().to_representation(homepage)
return response.Response(serialized)
Add proper context to homepage serializerfrom django.utils import translation
from rest_framework import response
from bluebottle.utils.views import GenericAPIView
from .models import HomePage
from .serializers import HomePageSerializer
# Instead of serving all the objects separately we combine Slide, Quote and Stats into a dummy object
class HomePageDetail(GenericAPIView):
serializer_class = HomePageSerializer
model = HomePage
def get(self, request, language='en'):
# Force requested language
translation.activate(language)
request.LANGUAGE_CODE = translation.get_language()
homepage = HomePage().get(language)
serialized = HomePageSerializer(
context=self.get_serializer_context()
).to_representation(homepage)
return response.Response(serialized)
|
<commit_before>from django.utils import translation
from rest_framework import response
from bluebottle.utils.views import GenericAPIView
from .models import HomePage
from .serializers import HomePageSerializer
# Instead of serving all the objects separately we combine Slide, Quote and Stats into a dummy object
class HomePageDetail(GenericAPIView):
serializer_class = HomePageSerializer
model = HomePage
def get(self, request, language='en'):
# Force requested language
translation.activate(language)
request.LANGUAGE_CODE = translation.get_language()
homepage = HomePage().get(language)
serialized = HomePageSerializer().to_representation(homepage)
return response.Response(serialized)
<commit_msg>Add proper context to homepage serializer<commit_after>from django.utils import translation
from rest_framework import response
from bluebottle.utils.views import GenericAPIView
from .models import HomePage
from .serializers import HomePageSerializer
# Instead of serving all the objects separately we combine Slide, Quote and Stats into a dummy object
class HomePageDetail(GenericAPIView):
serializer_class = HomePageSerializer
model = HomePage
def get(self, request, language='en'):
# Force requested language
translation.activate(language)
request.LANGUAGE_CODE = translation.get_language()
homepage = HomePage().get(language)
serialized = HomePageSerializer(
context=self.get_serializer_context()
).to_representation(homepage)
return response.Response(serialized)
|
424980a48e451d1b99397843001bd75fa58e474e
|
tests/test_fullqualname.py
|
tests/test_fullqualname.py
|
"""Tests for fullqualname."""
import nose
import sys
from fullqualname import fullqualname
def test_builtin_function():
# Test built-in function object.
obj = len
# Type is 'builtin_function_or_method'.
assert type(obj).__name__ == 'builtin_function_or_method'
# Object is a function.
assert 'built-in function' in repr(obj)
if sys.version_info >= (3, ):
expected = 'builtins.len'
else:
expected = '__builtin__.len'
nose.tools.assert_equals(fullqualname(obj), expected)
|
"""Tests for fullqualname."""
import inspect
import nose
import sys
from fullqualname import fullqualname
def test_builtin_function():
# Test built-in function object.
obj = len
# Type is 'builtin_function_or_method'.
assert type(obj).__name__ == 'builtin_function_or_method'
# Object is a function.
assert 'built-in function' in repr(obj)
if sys.version_info >= (3, ):
expected = 'builtins.len'
else:
expected = '__builtin__.len'
nose.tools.assert_equals(fullqualname(obj), expected)
def test_builtin_method():
# Test built-in method object.
obj = [1, 2, 3].append
# Object type is 'builtin_function_or_method'.
assert type(obj).__name__ == 'builtin_function_or_method'
# Object is a method.
assert 'built-in method' in repr(obj)
# Object __self__ attribute is not a class.
assert not inspect.isclass(obj.__self__)
if sys.version_info >= (3, ):
expected = 'builtins.list.append'
else:
expected = '__builtin__.list.append'
nose.tools.assert_equals(fullqualname(obj), expected)
|
Add built-in method object test
|
Add built-in method object test
|
Python
|
bsd-3-clause
|
etgalloway/fullqualname
|
"""Tests for fullqualname."""
import nose
import sys
from fullqualname import fullqualname
def test_builtin_function():
# Test built-in function object.
obj = len
# Type is 'builtin_function_or_method'.
assert type(obj).__name__ == 'builtin_function_or_method'
# Object is a function.
assert 'built-in function' in repr(obj)
if sys.version_info >= (3, ):
expected = 'builtins.len'
else:
expected = '__builtin__.len'
nose.tools.assert_equals(fullqualname(obj), expected)
Add built-in method object test
|
"""Tests for fullqualname."""
import inspect
import nose
import sys
from fullqualname import fullqualname
def test_builtin_function():
# Test built-in function object.
obj = len
# Type is 'builtin_function_or_method'.
assert type(obj).__name__ == 'builtin_function_or_method'
# Object is a function.
assert 'built-in function' in repr(obj)
if sys.version_info >= (3, ):
expected = 'builtins.len'
else:
expected = '__builtin__.len'
nose.tools.assert_equals(fullqualname(obj), expected)
def test_builtin_method():
# Test built-in method object.
obj = [1, 2, 3].append
# Object type is 'builtin_function_or_method'.
assert type(obj).__name__ == 'builtin_function_or_method'
# Object is a method.
assert 'built-in method' in repr(obj)
# Object __self__ attribute is not a class.
assert not inspect.isclass(obj.__self__)
if sys.version_info >= (3, ):
expected = 'builtins.list.append'
else:
expected = '__builtin__.list.append'
nose.tools.assert_equals(fullqualname(obj), expected)
|
<commit_before>"""Tests for fullqualname."""
import nose
import sys
from fullqualname import fullqualname
def test_builtin_function():
# Test built-in function object.
obj = len
# Type is 'builtin_function_or_method'.
assert type(obj).__name__ == 'builtin_function_or_method'
# Object is a function.
assert 'built-in function' in repr(obj)
if sys.version_info >= (3, ):
expected = 'builtins.len'
else:
expected = '__builtin__.len'
nose.tools.assert_equals(fullqualname(obj), expected)
<commit_msg>Add built-in method object test<commit_after>
|
"""Tests for fullqualname."""
import inspect
import nose
import sys
from fullqualname import fullqualname
def test_builtin_function():
# Test built-in function object.
obj = len
# Type is 'builtin_function_or_method'.
assert type(obj).__name__ == 'builtin_function_or_method'
# Object is a function.
assert 'built-in function' in repr(obj)
if sys.version_info >= (3, ):
expected = 'builtins.len'
else:
expected = '__builtin__.len'
nose.tools.assert_equals(fullqualname(obj), expected)
def test_builtin_method():
# Test built-in method object.
obj = [1, 2, 3].append
# Object type is 'builtin_function_or_method'.
assert type(obj).__name__ == 'builtin_function_or_method'
# Object is a method.
assert 'built-in method' in repr(obj)
# Object __self__ attribute is not a class.
assert not inspect.isclass(obj.__self__)
if sys.version_info >= (3, ):
expected = 'builtins.list.append'
else:
expected = '__builtin__.list.append'
nose.tools.assert_equals(fullqualname(obj), expected)
|
"""Tests for fullqualname."""
import nose
import sys
from fullqualname import fullqualname
def test_builtin_function():
# Test built-in function object.
obj = len
# Type is 'builtin_function_or_method'.
assert type(obj).__name__ == 'builtin_function_or_method'
# Object is a function.
assert 'built-in function' in repr(obj)
if sys.version_info >= (3, ):
expected = 'builtins.len'
else:
expected = '__builtin__.len'
nose.tools.assert_equals(fullqualname(obj), expected)
Add built-in method object test"""Tests for fullqualname."""
import inspect
import nose
import sys
from fullqualname import fullqualname
def test_builtin_function():
# Test built-in function object.
obj = len
# Type is 'builtin_function_or_method'.
assert type(obj).__name__ == 'builtin_function_or_method'
# Object is a function.
assert 'built-in function' in repr(obj)
if sys.version_info >= (3, ):
expected = 'builtins.len'
else:
expected = '__builtin__.len'
nose.tools.assert_equals(fullqualname(obj), expected)
def test_builtin_method():
# Test built-in method object.
obj = [1, 2, 3].append
# Object type is 'builtin_function_or_method'.
assert type(obj).__name__ == 'builtin_function_or_method'
# Object is a method.
assert 'built-in method' in repr(obj)
# Object __self__ attribute is not a class.
assert not inspect.isclass(obj.__self__)
if sys.version_info >= (3, ):
expected = 'builtins.list.append'
else:
expected = '__builtin__.list.append'
nose.tools.assert_equals(fullqualname(obj), expected)
|
<commit_before>"""Tests for fullqualname."""
import nose
import sys
from fullqualname import fullqualname
def test_builtin_function():
# Test built-in function object.
obj = len
# Type is 'builtin_function_or_method'.
assert type(obj).__name__ == 'builtin_function_or_method'
# Object is a function.
assert 'built-in function' in repr(obj)
if sys.version_info >= (3, ):
expected = 'builtins.len'
else:
expected = '__builtin__.len'
nose.tools.assert_equals(fullqualname(obj), expected)
<commit_msg>Add built-in method object test<commit_after>"""Tests for fullqualname."""
import inspect
import nose
import sys
from fullqualname import fullqualname
def test_builtin_function():
# Test built-in function object.
obj = len
# Type is 'builtin_function_or_method'.
assert type(obj).__name__ == 'builtin_function_or_method'
# Object is a function.
assert 'built-in function' in repr(obj)
if sys.version_info >= (3, ):
expected = 'builtins.len'
else:
expected = '__builtin__.len'
nose.tools.assert_equals(fullqualname(obj), expected)
def test_builtin_method():
# Test built-in method object.
obj = [1, 2, 3].append
# Object type is 'builtin_function_or_method'.
assert type(obj).__name__ == 'builtin_function_or_method'
# Object is a method.
assert 'built-in method' in repr(obj)
# Object __self__ attribute is not a class.
assert not inspect.isclass(obj.__self__)
if sys.version_info >= (3, ):
expected = 'builtins.list.append'
else:
expected = '__builtin__.list.append'
nose.tools.assert_equals(fullqualname(obj), expected)
|
2af6e066509689ed14f40b122c600bed39dea543
|
setup.py
|
setup.py
|
#!/usr/bin/env python3
from setuptools import setup, find_packages
setup(
name='ppp_datamodel',
version='0.4',
description='Data model for the Projet Pensées Profondes.',
url='https://github.com/ProjetPP/PPP-datamodel-Python',
author='Valentin Lorentz',
author_email='valentin.lorentz+ppp@ens-lyon.org',
license='MIT',
classifiers=[
'Development Status :: 1 - Planning',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Topic :: Software Development :: Libraries',
],
packages=[
'ppp_datamodel',
],
)
|
#!/usr/bin/env python3
from setuptools import setup, find_packages
setup(
name='ppp_datamodel',
version='0.5',
description='Data model for the Projet Pensées Profondes.',
url='https://github.com/ProjetPP/PPP-datamodel-Python',
author='Valentin Lorentz',
author_email='valentin.lorentz+ppp@ens-lyon.org',
license='MIT',
classifiers=[
'Development Status :: 1 - Planning',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Topic :: Software Development :: Libraries',
],
packages=[
'ppp_datamodel',
],
)
|
Bump version number to 0.5
|
Bump version number to 0.5
|
Python
|
agpl-3.0
|
ProjetPP/PPP-datamodel-Python,ProjetPP/PPP-datamodel-Python
|
#!/usr/bin/env python3
from setuptools import setup, find_packages
setup(
name='ppp_datamodel',
version='0.4',
description='Data model for the Projet Pensées Profondes.',
url='https://github.com/ProjetPP/PPP-datamodel-Python',
author='Valentin Lorentz',
author_email='valentin.lorentz+ppp@ens-lyon.org',
license='MIT',
classifiers=[
'Development Status :: 1 - Planning',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Topic :: Software Development :: Libraries',
],
packages=[
'ppp_datamodel',
],
)
Bump version number to 0.5
|
#!/usr/bin/env python3
from setuptools import setup, find_packages
setup(
name='ppp_datamodel',
version='0.5',
description='Data model for the Projet Pensées Profondes.',
url='https://github.com/ProjetPP/PPP-datamodel-Python',
author='Valentin Lorentz',
author_email='valentin.lorentz+ppp@ens-lyon.org',
license='MIT',
classifiers=[
'Development Status :: 1 - Planning',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Topic :: Software Development :: Libraries',
],
packages=[
'ppp_datamodel',
],
)
|
<commit_before>#!/usr/bin/env python3
from setuptools import setup, find_packages
setup(
name='ppp_datamodel',
version='0.4',
description='Data model for the Projet Pensées Profondes.',
url='https://github.com/ProjetPP/PPP-datamodel-Python',
author='Valentin Lorentz',
author_email='valentin.lorentz+ppp@ens-lyon.org',
license='MIT',
classifiers=[
'Development Status :: 1 - Planning',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Topic :: Software Development :: Libraries',
],
packages=[
'ppp_datamodel',
],
)
<commit_msg>Bump version number to 0.5<commit_after>
|
#!/usr/bin/env python3
from setuptools import setup, find_packages
setup(
name='ppp_datamodel',
version='0.5',
description='Data model for the Projet Pensées Profondes.',
url='https://github.com/ProjetPP/PPP-datamodel-Python',
author='Valentin Lorentz',
author_email='valentin.lorentz+ppp@ens-lyon.org',
license='MIT',
classifiers=[
'Development Status :: 1 - Planning',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Topic :: Software Development :: Libraries',
],
packages=[
'ppp_datamodel',
],
)
|
#!/usr/bin/env python3
from setuptools import setup, find_packages
setup(
name='ppp_datamodel',
version='0.4',
description='Data model for the Projet Pensées Profondes.',
url='https://github.com/ProjetPP/PPP-datamodel-Python',
author='Valentin Lorentz',
author_email='valentin.lorentz+ppp@ens-lyon.org',
license='MIT',
classifiers=[
'Development Status :: 1 - Planning',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Topic :: Software Development :: Libraries',
],
packages=[
'ppp_datamodel',
],
)
Bump version number to 0.5#!/usr/bin/env python3
from setuptools import setup, find_packages
setup(
name='ppp_datamodel',
version='0.5',
description='Data model for the Projet Pensées Profondes.',
url='https://github.com/ProjetPP/PPP-datamodel-Python',
author='Valentin Lorentz',
author_email='valentin.lorentz+ppp@ens-lyon.org',
license='MIT',
classifiers=[
'Development Status :: 1 - Planning',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Topic :: Software Development :: Libraries',
],
packages=[
'ppp_datamodel',
],
)
|
<commit_before>#!/usr/bin/env python3
from setuptools import setup, find_packages
setup(
name='ppp_datamodel',
version='0.4',
description='Data model for the Projet Pensées Profondes.',
url='https://github.com/ProjetPP/PPP-datamodel-Python',
author='Valentin Lorentz',
author_email='valentin.lorentz+ppp@ens-lyon.org',
license='MIT',
classifiers=[
'Development Status :: 1 - Planning',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Topic :: Software Development :: Libraries',
],
packages=[
'ppp_datamodel',
],
)
<commit_msg>Bump version number to 0.5<commit_after>#!/usr/bin/env python3
from setuptools import setup, find_packages
setup(
name='ppp_datamodel',
version='0.5',
description='Data model for the Projet Pensées Profondes.',
url='https://github.com/ProjetPP/PPP-datamodel-Python',
author='Valentin Lorentz',
author_email='valentin.lorentz+ppp@ens-lyon.org',
license='MIT',
classifiers=[
'Development Status :: 1 - Planning',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Topic :: Software Development :: Libraries',
],
packages=[
'ppp_datamodel',
],
)
|
0b05101696989a3b88a4a630a6886c5db142175b
|
setup.py
|
setup.py
|
from setuptools import setup, find_packages
import pydle
setup(
name=pydle.__name__,
version=pydle.__version__,
packages=[
'pydle',
'pydle.features',
'pydle.features.rfc1459',
'pydle.features.ircv3_1',
'pydle.features.ircv3_2',
'pydle.utils'
],
requires=['tornado'],
extras_require={
'SASL': 'pure-sasl >=0.1.6' # for pydle.features.sasl
},
entry_points={
'console_scripts': [
'irccat = pydle.utils.irccat:main',
'pydle = pydle.utils.run:main',
'ipydle = pydle.utils.console:main'
]
},
author='Shiz',
author_email='hi@shiz.me',
url='https://github.com/Shizmob/pydle',
keywords='irc library python3 compact flexible',
description='A compact, flexible and standards-abiding IRC library for Python 3.',
license=pydle.__license__,
zip_safe=True,
test_suite='tests'
)
|
from setuptools import setup, find_packages
import pydle
setup(
name=pydle.__name__,
version=pydle.__version__,
packages=[
'pydle',
'pydle.features',
'pydle.features.rfc1459',
'pydle.features.ircv3_1',
'pydle.features.ircv3_2',
'pydle.utils'
],
requires=['tornado'],
extras_require={
'SASL': 'pure-sasl >=0.1.6' # for pydle.features.sasl
},
entry_points={
'console_scripts': [
'pydle = pydle.utils.run:main',
'ipydle = pydle.utils.console:main',
'pydle-irccat = pydle.utils.irccat:main'
]
},
author='Shiz',
author_email='hi@shiz.me',
url='https://github.com/Shizmob/pydle',
keywords='irc library python3 compact flexible',
description='A compact, flexible and standards-abiding IRC library for Python 3.',
license=pydle.__license__,
zip_safe=True,
test_suite='tests'
)
|
Install irccat as pydle-irccat instead.
|
Install irccat as pydle-irccat instead.
|
Python
|
bsd-3-clause
|
Shizmob/pydle
|
from setuptools import setup, find_packages
import pydle
setup(
name=pydle.__name__,
version=pydle.__version__,
packages=[
'pydle',
'pydle.features',
'pydle.features.rfc1459',
'pydle.features.ircv3_1',
'pydle.features.ircv3_2',
'pydle.utils'
],
requires=['tornado'],
extras_require={
'SASL': 'pure-sasl >=0.1.6' # for pydle.features.sasl
},
entry_points={
'console_scripts': [
'irccat = pydle.utils.irccat:main',
'pydle = pydle.utils.run:main',
'ipydle = pydle.utils.console:main'
]
},
author='Shiz',
author_email='hi@shiz.me',
url='https://github.com/Shizmob/pydle',
keywords='irc library python3 compact flexible',
description='A compact, flexible and standards-abiding IRC library for Python 3.',
license=pydle.__license__,
zip_safe=True,
test_suite='tests'
)
Install irccat as pydle-irccat instead.
|
from setuptools import setup, find_packages
import pydle
setup(
name=pydle.__name__,
version=pydle.__version__,
packages=[
'pydle',
'pydle.features',
'pydle.features.rfc1459',
'pydle.features.ircv3_1',
'pydle.features.ircv3_2',
'pydle.utils'
],
requires=['tornado'],
extras_require={
'SASL': 'pure-sasl >=0.1.6' # for pydle.features.sasl
},
entry_points={
'console_scripts': [
'pydle = pydle.utils.run:main',
'ipydle = pydle.utils.console:main',
'pydle-irccat = pydle.utils.irccat:main'
]
},
author='Shiz',
author_email='hi@shiz.me',
url='https://github.com/Shizmob/pydle',
keywords='irc library python3 compact flexible',
description='A compact, flexible and standards-abiding IRC library for Python 3.',
license=pydle.__license__,
zip_safe=True,
test_suite='tests'
)
|
<commit_before>from setuptools import setup, find_packages
import pydle
setup(
name=pydle.__name__,
version=pydle.__version__,
packages=[
'pydle',
'pydle.features',
'pydle.features.rfc1459',
'pydle.features.ircv3_1',
'pydle.features.ircv3_2',
'pydle.utils'
],
requires=['tornado'],
extras_require={
'SASL': 'pure-sasl >=0.1.6' # for pydle.features.sasl
},
entry_points={
'console_scripts': [
'irccat = pydle.utils.irccat:main',
'pydle = pydle.utils.run:main',
'ipydle = pydle.utils.console:main'
]
},
author='Shiz',
author_email='hi@shiz.me',
url='https://github.com/Shizmob/pydle',
keywords='irc library python3 compact flexible',
description='A compact, flexible and standards-abiding IRC library for Python 3.',
license=pydle.__license__,
zip_safe=True,
test_suite='tests'
)
<commit_msg>Install irccat as pydle-irccat instead.<commit_after>
|
from setuptools import setup, find_packages
import pydle
setup(
name=pydle.__name__,
version=pydle.__version__,
packages=[
'pydle',
'pydle.features',
'pydle.features.rfc1459',
'pydle.features.ircv3_1',
'pydle.features.ircv3_2',
'pydle.utils'
],
requires=['tornado'],
extras_require={
'SASL': 'pure-sasl >=0.1.6' # for pydle.features.sasl
},
entry_points={
'console_scripts': [
'pydle = pydle.utils.run:main',
'ipydle = pydle.utils.console:main',
'pydle-irccat = pydle.utils.irccat:main'
]
},
author='Shiz',
author_email='hi@shiz.me',
url='https://github.com/Shizmob/pydle',
keywords='irc library python3 compact flexible',
description='A compact, flexible and standards-abiding IRC library for Python 3.',
license=pydle.__license__,
zip_safe=True,
test_suite='tests'
)
|
from setuptools import setup, find_packages
import pydle
setup(
name=pydle.__name__,
version=pydle.__version__,
packages=[
'pydle',
'pydle.features',
'pydle.features.rfc1459',
'pydle.features.ircv3_1',
'pydle.features.ircv3_2',
'pydle.utils'
],
requires=['tornado'],
extras_require={
'SASL': 'pure-sasl >=0.1.6' # for pydle.features.sasl
},
entry_points={
'console_scripts': [
'irccat = pydle.utils.irccat:main',
'pydle = pydle.utils.run:main',
'ipydle = pydle.utils.console:main'
]
},
author='Shiz',
author_email='hi@shiz.me',
url='https://github.com/Shizmob/pydle',
keywords='irc library python3 compact flexible',
description='A compact, flexible and standards-abiding IRC library for Python 3.',
license=pydle.__license__,
zip_safe=True,
test_suite='tests'
)
Install irccat as pydle-irccat instead.from setuptools import setup, find_packages
import pydle
setup(
name=pydle.__name__,
version=pydle.__version__,
packages=[
'pydle',
'pydle.features',
'pydle.features.rfc1459',
'pydle.features.ircv3_1',
'pydle.features.ircv3_2',
'pydle.utils'
],
requires=['tornado'],
extras_require={
'SASL': 'pure-sasl >=0.1.6' # for pydle.features.sasl
},
entry_points={
'console_scripts': [
'pydle = pydle.utils.run:main',
'ipydle = pydle.utils.console:main',
'pydle-irccat = pydle.utils.irccat:main'
]
},
author='Shiz',
author_email='hi@shiz.me',
url='https://github.com/Shizmob/pydle',
keywords='irc library python3 compact flexible',
description='A compact, flexible and standards-abiding IRC library for Python 3.',
license=pydle.__license__,
zip_safe=True,
test_suite='tests'
)
|
<commit_before>from setuptools import setup, find_packages
import pydle
setup(
name=pydle.__name__,
version=pydle.__version__,
packages=[
'pydle',
'pydle.features',
'pydle.features.rfc1459',
'pydle.features.ircv3_1',
'pydle.features.ircv3_2',
'pydle.utils'
],
requires=['tornado'],
extras_require={
'SASL': 'pure-sasl >=0.1.6' # for pydle.features.sasl
},
entry_points={
'console_scripts': [
'irccat = pydle.utils.irccat:main',
'pydle = pydle.utils.run:main',
'ipydle = pydle.utils.console:main'
]
},
author='Shiz',
author_email='hi@shiz.me',
url='https://github.com/Shizmob/pydle',
keywords='irc library python3 compact flexible',
description='A compact, flexible and standards-abiding IRC library for Python 3.',
license=pydle.__license__,
zip_safe=True,
test_suite='tests'
)
<commit_msg>Install irccat as pydle-irccat instead.<commit_after>from setuptools import setup, find_packages
import pydle
setup(
name=pydle.__name__,
version=pydle.__version__,
packages=[
'pydle',
'pydle.features',
'pydle.features.rfc1459',
'pydle.features.ircv3_1',
'pydle.features.ircv3_2',
'pydle.utils'
],
requires=['tornado'],
extras_require={
'SASL': 'pure-sasl >=0.1.6' # for pydle.features.sasl
},
entry_points={
'console_scripts': [
'pydle = pydle.utils.run:main',
'ipydle = pydle.utils.console:main',
'pydle-irccat = pydle.utils.irccat:main'
]
},
author='Shiz',
author_email='hi@shiz.me',
url='https://github.com/Shizmob/pydle',
keywords='irc library python3 compact flexible',
description='A compact, flexible and standards-abiding IRC library for Python 3.',
license=pydle.__license__,
zip_safe=True,
test_suite='tests'
)
|
fd79655ff898b715273512cd8a655b262321444a
|
setup.py
|
setup.py
|
from setuptools import setup
setup(
name="django-minio-storage-py-pa",
license="MIT",
use_scm_version=True,
description="Django file storage using the minio python client",
author="Tom Houlé",
author_email="tom@kafunsho.be",
url="https://github.com/py-pa/django-minio-storage",
packages=['minio_storage'],
setup_requires=['setuptools_scm'],
install_requires=[
"django>=1.9",
"minio>=2.2.2",
],
extras_require={
"test": [
"coverage",
"requests",
],
},
classifiers=[
"Development Status :: 4 - Beta",
"Environment :: Web Environment",
"Intended Audience :: Developers",
"License :: OSI Approved :: BSD License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Framework :: Django",
],
)
|
from setuptools import setup
setup(
name="django-minio-storage-py-pa",
license="MIT",
use_scm_version=True,
description="Django file storage using the minio python client",
author="Tom Houlé",
author_email="tom@kafunsho.be",
url="https://github.com/py-pa/django-minio-storage",
packages=['minio_storage'],
setup_requires=['setuptools_scm'],
install_requires=[
"django>=1.8",
"minio>=2.2.2",
],
extras_require={
"test": [
"coverage",
"requests",
],
},
classifiers=[
"Development Status :: 4 - Beta",
"Environment :: Web Environment",
"Intended Audience :: Developers",
"License :: OSI Approved :: BSD License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Framework :: Django",
],
)
|
Fix minimum django version spec
|
Fix minimum django version spec
|
Python
|
apache-2.0
|
tomhoule/django-minio-storage
|
from setuptools import setup
setup(
name="django-minio-storage-py-pa",
license="MIT",
use_scm_version=True,
description="Django file storage using the minio python client",
author="Tom Houlé",
author_email="tom@kafunsho.be",
url="https://github.com/py-pa/django-minio-storage",
packages=['minio_storage'],
setup_requires=['setuptools_scm'],
install_requires=[
"django>=1.9",
"minio>=2.2.2",
],
extras_require={
"test": [
"coverage",
"requests",
],
},
classifiers=[
"Development Status :: 4 - Beta",
"Environment :: Web Environment",
"Intended Audience :: Developers",
"License :: OSI Approved :: BSD License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Framework :: Django",
],
)
Fix minimum django version spec
|
from setuptools import setup
setup(
name="django-minio-storage-py-pa",
license="MIT",
use_scm_version=True,
description="Django file storage using the minio python client",
author="Tom Houlé",
author_email="tom@kafunsho.be",
url="https://github.com/py-pa/django-minio-storage",
packages=['minio_storage'],
setup_requires=['setuptools_scm'],
install_requires=[
"django>=1.8",
"minio>=2.2.2",
],
extras_require={
"test": [
"coverage",
"requests",
],
},
classifiers=[
"Development Status :: 4 - Beta",
"Environment :: Web Environment",
"Intended Audience :: Developers",
"License :: OSI Approved :: BSD License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Framework :: Django",
],
)
|
<commit_before>from setuptools import setup
setup(
name="django-minio-storage-py-pa",
license="MIT",
use_scm_version=True,
description="Django file storage using the minio python client",
author="Tom Houlé",
author_email="tom@kafunsho.be",
url="https://github.com/py-pa/django-minio-storage",
packages=['minio_storage'],
setup_requires=['setuptools_scm'],
install_requires=[
"django>=1.9",
"minio>=2.2.2",
],
extras_require={
"test": [
"coverage",
"requests",
],
},
classifiers=[
"Development Status :: 4 - Beta",
"Environment :: Web Environment",
"Intended Audience :: Developers",
"License :: OSI Approved :: BSD License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Framework :: Django",
],
)
<commit_msg>Fix minimum django version spec<commit_after>
|
from setuptools import setup
setup(
name="django-minio-storage-py-pa",
license="MIT",
use_scm_version=True,
description="Django file storage using the minio python client",
author="Tom Houlé",
author_email="tom@kafunsho.be",
url="https://github.com/py-pa/django-minio-storage",
packages=['minio_storage'],
setup_requires=['setuptools_scm'],
install_requires=[
"django>=1.8",
"minio>=2.2.2",
],
extras_require={
"test": [
"coverage",
"requests",
],
},
classifiers=[
"Development Status :: 4 - Beta",
"Environment :: Web Environment",
"Intended Audience :: Developers",
"License :: OSI Approved :: BSD License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Framework :: Django",
],
)
|
from setuptools import setup
setup(
name="django-minio-storage-py-pa",
license="MIT",
use_scm_version=True,
description="Django file storage using the minio python client",
author="Tom Houlé",
author_email="tom@kafunsho.be",
url="https://github.com/py-pa/django-minio-storage",
packages=['minio_storage'],
setup_requires=['setuptools_scm'],
install_requires=[
"django>=1.9",
"minio>=2.2.2",
],
extras_require={
"test": [
"coverage",
"requests",
],
},
classifiers=[
"Development Status :: 4 - Beta",
"Environment :: Web Environment",
"Intended Audience :: Developers",
"License :: OSI Approved :: BSD License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Framework :: Django",
],
)
Fix minimum django version spec
from setuptools import setup
setup(
name="django-minio-storage-py-pa",
license="MIT",
use_scm_version=True,
description="Django file storage using the minio python client",
author="Tom Houlé",
author_email="tom@kafunsho.be",
url="https://github.com/py-pa/django-minio-storage",
packages=['minio_storage'],
setup_requires=['setuptools_scm'],
install_requires=[
"django>=1.8",
"minio>=2.2.2",
],
extras_require={
"test": [
"coverage",
"requests",
],
},
classifiers=[
"Development Status :: 4 - Beta",
"Environment :: Web Environment",
"Intended Audience :: Developers",
"License :: OSI Approved :: BSD License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Framework :: Django",
],
)
|
<commit_before>from setuptools import setup
setup(
name="django-minio-storage-py-pa",
license="MIT",
use_scm_version=True,
description="Django file storage using the minio python client",
author="Tom Houlé",
author_email="tom@kafunsho.be",
url="https://github.com/py-pa/django-minio-storage",
packages=['minio_storage'],
setup_requires=['setuptools_scm'],
install_requires=[
"django>=1.9",
"minio>=2.2.2",
],
extras_require={
"test": [
"coverage",
"requests",
],
},
classifiers=[
"Development Status :: 4 - Beta",
"Environment :: Web Environment",
"Intended Audience :: Developers",
"License :: OSI Approved :: BSD License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Framework :: Django",
],
)
<commit_msg>Fix minimum django version spec<commit_after>from setuptools import setup
setup(
name="django-minio-storage-py-pa",
license="MIT",
use_scm_version=True,
description="Django file storage using the minio python client",
author="Tom Houlé",
author_email="tom@kafunsho.be",
url="https://github.com/py-pa/django-minio-storage",
packages=['minio_storage'],
setup_requires=['setuptools_scm'],
install_requires=[
"django>=1.8",
"minio>=2.2.2",
],
extras_require={
"test": [
"coverage",
"requests",
],
},
classifiers=[
"Development Status :: 4 - Beta",
"Environment :: Web Environment",
"Intended Audience :: Developers",
"License :: OSI Approved :: BSD License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Framework :: Django",
],
)
|
16374d0fde1a9e88211090da58fa06dba968501b
|
setup.py
|
setup.py
|
from setuptools import setup
setup(
name='tangled.session',
version='0.1a3.dev0',
description='Tangled session integration',
long_description=open('README.rst').read(),
url='http://tangledframework.org/',
download_url='https://github.com/TangledWeb/tangled.session/tags',
author='Wyatt Baldwin',
author_email='self@wyattbaldwin.com',
packages=[
'tangled',
'tangled.session',
'tangled.session.tests',
],
install_requires=[
'tangled>=0.1a5',
'Beaker>=1.6.4',
],
extras_require={
'dev': [
'tangled[dev]>=0.1a5',
],
},
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
],
)
|
from setuptools import setup
setup(
name='tangled.session',
version='0.1a3.dev0',
description='Tangled session integration',
long_description=open('README.rst').read(),
url='http://tangledframework.org/',
download_url='https://github.com/TangledWeb/tangled.session/tags',
author='Wyatt Baldwin',
author_email='self@wyattbaldwin.com',
packages=[
'tangled',
'tangled.session',
'tangled.session.tests',
],
install_requires=[
'Beaker>=1.6.4',
'tangled>=0.1a9',
],
extras_require={
'dev': [
'tangled[dev]>=0.1a9',
],
},
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
],
)
|
Upgrade tangled 0.1a5 => 0.1a9
|
Upgrade tangled 0.1a5 => 0.1a9
|
Python
|
mit
|
TangledWeb/tangled.session
|
from setuptools import setup
setup(
name='tangled.session',
version='0.1a3.dev0',
description='Tangled session integration',
long_description=open('README.rst').read(),
url='http://tangledframework.org/',
download_url='https://github.com/TangledWeb/tangled.session/tags',
author='Wyatt Baldwin',
author_email='self@wyattbaldwin.com',
packages=[
'tangled',
'tangled.session',
'tangled.session.tests',
],
install_requires=[
'tangled>=0.1a5',
'Beaker>=1.6.4',
],
extras_require={
'dev': [
'tangled[dev]>=0.1a5',
],
},
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
],
)
Upgrade tangled 0.1a5 => 0.1a9
|
from setuptools import setup
setup(
name='tangled.session',
version='0.1a3.dev0',
description='Tangled session integration',
long_description=open('README.rst').read(),
url='http://tangledframework.org/',
download_url='https://github.com/TangledWeb/tangled.session/tags',
author='Wyatt Baldwin',
author_email='self@wyattbaldwin.com',
packages=[
'tangled',
'tangled.session',
'tangled.session.tests',
],
install_requires=[
'Beaker>=1.6.4',
'tangled>=0.1a9',
],
extras_require={
'dev': [
'tangled[dev]>=0.1a9',
],
},
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
],
)
|
<commit_before>from setuptools import setup
setup(
name='tangled.session',
version='0.1a3.dev0',
description='Tangled session integration',
long_description=open('README.rst').read(),
url='http://tangledframework.org/',
download_url='https://github.com/TangledWeb/tangled.session/tags',
author='Wyatt Baldwin',
author_email='self@wyattbaldwin.com',
packages=[
'tangled',
'tangled.session',
'tangled.session.tests',
],
install_requires=[
'tangled>=0.1a5',
'Beaker>=1.6.4',
],
extras_require={
'dev': [
'tangled[dev]>=0.1a5',
],
},
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
],
)
<commit_msg>Upgrade tangled 0.1a5 => 0.1a9<commit_after>
|
from setuptools import setup
setup(
name='tangled.session',
version='0.1a3.dev0',
description='Tangled session integration',
long_description=open('README.rst').read(),
url='http://tangledframework.org/',
download_url='https://github.com/TangledWeb/tangled.session/tags',
author='Wyatt Baldwin',
author_email='self@wyattbaldwin.com',
packages=[
'tangled',
'tangled.session',
'tangled.session.tests',
],
install_requires=[
'Beaker>=1.6.4',
'tangled>=0.1a9',
],
extras_require={
'dev': [
'tangled[dev]>=0.1a9',
],
},
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
],
)
|
from setuptools import setup
setup(
name='tangled.session',
version='0.1a3.dev0',
description='Tangled session integration',
long_description=open('README.rst').read(),
url='http://tangledframework.org/',
download_url='https://github.com/TangledWeb/tangled.session/tags',
author='Wyatt Baldwin',
author_email='self@wyattbaldwin.com',
packages=[
'tangled',
'tangled.session',
'tangled.session.tests',
],
install_requires=[
'tangled>=0.1a5',
'Beaker>=1.6.4',
],
extras_require={
'dev': [
'tangled[dev]>=0.1a5',
],
},
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
],
)
Upgrade tangled 0.1a5 => 0.1a9
from setuptools import setup
setup(
name='tangled.session',
version='0.1a3.dev0',
description='Tangled session integration',
long_description=open('README.rst').read(),
url='http://tangledframework.org/',
download_url='https://github.com/TangledWeb/tangled.session/tags',
author='Wyatt Baldwin',
author_email='self@wyattbaldwin.com',
packages=[
'tangled',
'tangled.session',
'tangled.session.tests',
],
install_requires=[
'Beaker>=1.6.4',
'tangled>=0.1a9',
],
extras_require={
'dev': [
'tangled[dev]>=0.1a9',
],
},
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
],
)
|
<commit_before>from setuptools import setup
setup(
name='tangled.session',
version='0.1a3.dev0',
description='Tangled session integration',
long_description=open('README.rst').read(),
url='http://tangledframework.org/',
download_url='https://github.com/TangledWeb/tangled.session/tags',
author='Wyatt Baldwin',
author_email='self@wyattbaldwin.com',
packages=[
'tangled',
'tangled.session',
'tangled.session.tests',
],
install_requires=[
'tangled>=0.1a5',
'Beaker>=1.6.4',
],
extras_require={
'dev': [
'tangled[dev]>=0.1a5',
],
},
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
],
)
<commit_msg>Upgrade tangled 0.1a5 => 0.1a9<commit_after>from setuptools import setup
setup(
name='tangled.session',
version='0.1a3.dev0',
description='Tangled session integration',
long_description=open('README.rst').read(),
url='http://tangledframework.org/',
download_url='https://github.com/TangledWeb/tangled.session/tags',
author='Wyatt Baldwin',
author_email='self@wyattbaldwin.com',
packages=[
'tangled',
'tangled.session',
'tangled.session.tests',
],
install_requires=[
'Beaker>=1.6.4',
'tangled>=0.1a9',
],
extras_require={
'dev': [
'tangled[dev]>=0.1a9',
],
},
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
],
)
|
0fae9740d62d25469b42e8912bcad7de7d904b26
|
setup.py
|
setup.py
|
import os.path
from setuptools import setup
thisdir = os.path.abspath(os.path.dirname(__file__))
version = open(os.path.join(thisdir, 'asciigraf', 'VERSION')).read().strip()
def readme():
with open("README.rst", 'r') as f:
return f.read()
setup(
name="asciigraf",
version=version,
packages=["asciigraf"],
package_data={
'': ['VERSION']
},
description="A python library for making ascii-art into network graphs.",
classifiers=[
"Development Status :: 3 - Alpha",
"License :: OSI Approved :: MIT License",
"Programming Language :: Python",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Topic :: Software Development :: Libraries :: Python Modules",
],
long_description=readme(),
author="Opus One Solutions",
author_email="rnd@opusonesolutions.com",
url="https://github.com/opusonesolutions/asciigraf",
keywords=["graph", "network", "testing", "parser"],
license="MIT",
install_requires=[
'networkx>=1.11',
],
extras_require={
"test": ["pytest", "pytest-cov"],
},
)
|
import os.path
from setuptools import setup
thisdir = os.path.abspath(os.path.dirname(__file__))
version = open(os.path.join(thisdir, 'asciigraf', 'VERSION')).read().strip()
def readme():
with open("README.rst", 'r') as f:
return f.read()
setup(
name="asciigraf",
version=version,
packages=["asciigraf"],
package_data={
'': ['VERSION']
},
description="A python library for making ascii-art into network graphs.",
classifiers=[
"Development Status :: 3 - Alpha",
"License :: OSI Approved :: MIT License",
"Programming Language :: Python",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Topic :: Software Development :: Libraries :: Python Modules",
],
long_description=readme(),
author="Opus One Solutions",
author_email="rnd@opusonesolutions.com",
url="https://github.com/opusonesolutions/asciigraf",
keywords=["graph", "network", "testing", "parser"],
license="MIT",
install_requires=[
'networkx>=1.11',
],
extras_require={
"test": ["pytest", "pytest-cov"],
},
)
|
Add python3.7 as a classifier
|
Add python3.7 as a classifier
|
Python
|
mit
|
AnjoMan/asciigraf
|
import os.path
from setuptools import setup
thisdir = os.path.abspath(os.path.dirname(__file__))
version = open(os.path.join(thisdir, 'asciigraf', 'VERSION')).read().strip()
def readme():
with open("README.rst", 'r') as f:
return f.read()
setup(
name="asciigraf",
version=version,
packages=["asciigraf"],
package_data={
'': ['VERSION']
},
description="A python library for making ascii-art into network graphs.",
classifiers=[
"Development Status :: 3 - Alpha",
"License :: OSI Approved :: MIT License",
"Programming Language :: Python",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Topic :: Software Development :: Libraries :: Python Modules",
],
long_description=readme(),
author="Opus One Solutions",
author_email="rnd@opusonesolutions.com",
url="https://github.com/opusonesolutions/asciigraf",
keywords=["graph", "network", "testing", "parser"],
license="MIT",
install_requires=[
'networkx>=1.11',
],
extras_require={
"test": ["pytest", "pytest-cov"],
},
)
Add python3.7 as a classifier
|
import os.path
from setuptools import setup
thisdir = os.path.abspath(os.path.dirname(__file__))
version = open(os.path.join(thisdir, 'asciigraf', 'VERSION')).read().strip()
def readme():
with open("README.rst", 'r') as f:
return f.read()
setup(
name="asciigraf",
version=version,
packages=["asciigraf"],
package_data={
'': ['VERSION']
},
description="A python library for making ascii-art into network graphs.",
classifiers=[
"Development Status :: 3 - Alpha",
"License :: OSI Approved :: MIT License",
"Programming Language :: Python",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Topic :: Software Development :: Libraries :: Python Modules",
],
long_description=readme(),
author="Opus One Solutions",
author_email="rnd@opusonesolutions.com",
url="https://github.com/opusonesolutions/asciigraf",
keywords=["graph", "network", "testing", "parser"],
license="MIT",
install_requires=[
'networkx>=1.11',
],
extras_require={
"test": ["pytest", "pytest-cov"],
},
)
|
<commit_before>import os.path
from setuptools import setup
thisdir = os.path.abspath(os.path.dirname(__file__))
version = open(os.path.join(thisdir, 'asciigraf', 'VERSION')).read().strip()
def readme():
with open("README.rst", 'r') as f:
return f.read()
setup(
name="asciigraf",
version=version,
packages=["asciigraf"],
package_data={
'': ['VERSION']
},
description="A python library for making ascii-art into network graphs.",
classifiers=[
"Development Status :: 3 - Alpha",
"License :: OSI Approved :: MIT License",
"Programming Language :: Python",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Topic :: Software Development :: Libraries :: Python Modules",
],
long_description=readme(),
author="Opus One Solutions",
author_email="rnd@opusonesolutions.com",
url="https://github.com/opusonesolutions/asciigraf",
keywords=["graph", "network", "testing", "parser"],
license="MIT",
install_requires=[
'networkx>=1.11',
],
extras_require={
"test": ["pytest", "pytest-cov"],
},
)
<commit_msg>Add python3.7 as a classifier<commit_after>
|
import os.path
from setuptools import setup
thisdir = os.path.abspath(os.path.dirname(__file__))
version = open(os.path.join(thisdir, 'asciigraf', 'VERSION')).read().strip()
def readme():
with open("README.rst", 'r') as f:
return f.read()
setup(
name="asciigraf",
version=version,
packages=["asciigraf"],
package_data={
'': ['VERSION']
},
description="A python library for making ascii-art into network graphs.",
classifiers=[
"Development Status :: 3 - Alpha",
"License :: OSI Approved :: MIT License",
"Programming Language :: Python",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Topic :: Software Development :: Libraries :: Python Modules",
],
long_description=readme(),
author="Opus One Solutions",
author_email="rnd@opusonesolutions.com",
url="https://github.com/opusonesolutions/asciigraf",
keywords=["graph", "network", "testing", "parser"],
license="MIT",
install_requires=[
'networkx>=1.11',
],
extras_require={
"test": ["pytest", "pytest-cov"],
},
)
|
import os.path
from setuptools import setup
thisdir = os.path.abspath(os.path.dirname(__file__))
version = open(os.path.join(thisdir, 'asciigraf', 'VERSION')).read().strip()
def readme():
with open("README.rst", 'r') as f:
return f.read()
setup(
name="asciigraf",
version=version,
packages=["asciigraf"],
package_data={
'': ['VERSION']
},
description="A python library for making ascii-art into network graphs.",
classifiers=[
"Development Status :: 3 - Alpha",
"License :: OSI Approved :: MIT License",
"Programming Language :: Python",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Topic :: Software Development :: Libraries :: Python Modules",
],
long_description=readme(),
author="Opus One Solutions",
author_email="rnd@opusonesolutions.com",
url="https://github.com/opusonesolutions/asciigraf",
keywords=["graph", "network", "testing", "parser"],
license="MIT",
install_requires=[
'networkx>=1.11',
],
extras_require={
"test": ["pytest", "pytest-cov"],
},
)
Add python3.7 as a classifier
import os.path
from setuptools import setup
thisdir = os.path.abspath(os.path.dirname(__file__))
version = open(os.path.join(thisdir, 'asciigraf', 'VERSION')).read().strip()
def readme():
with open("README.rst", 'r') as f:
return f.read()
setup(
name="asciigraf",
version=version,
packages=["asciigraf"],
package_data={
'': ['VERSION']
},
description="A python library for making ascii-art into network graphs.",
classifiers=[
"Development Status :: 3 - Alpha",
"License :: OSI Approved :: MIT License",
"Programming Language :: Python",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Topic :: Software Development :: Libraries :: Python Modules",
],
long_description=readme(),
author="Opus One Solutions",
author_email="rnd@opusonesolutions.com",
url="https://github.com/opusonesolutions/asciigraf",
keywords=["graph", "network", "testing", "parser"],
license="MIT",
install_requires=[
'networkx>=1.11',
],
extras_require={
"test": ["pytest", "pytest-cov"],
},
)
|
<commit_before>import os.path
from setuptools import setup
thisdir = os.path.abspath(os.path.dirname(__file__))
version = open(os.path.join(thisdir, 'asciigraf', 'VERSION')).read().strip()
def readme():
with open("README.rst", 'r') as f:
return f.read()
setup(
name="asciigraf",
version=version,
packages=["asciigraf"],
package_data={
'': ['VERSION']
},
description="A python library for making ascii-art into network graphs.",
classifiers=[
"Development Status :: 3 - Alpha",
"License :: OSI Approved :: MIT License",
"Programming Language :: Python",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Topic :: Software Development :: Libraries :: Python Modules",
],
long_description=readme(),
author="Opus One Solutions",
author_email="rnd@opusonesolutions.com",
url="https://github.com/opusonesolutions/asciigraf",
keywords=["graph", "network", "testing", "parser"],
license="MIT",
install_requires=[
'networkx>=1.11',
],
extras_require={
"test": ["pytest", "pytest-cov"],
},
)
<commit_msg>Add python3.7 as a classifier<commit_after>import os.path
from setuptools import setup
thisdir = os.path.abspath(os.path.dirname(__file__))
version = open(os.path.join(thisdir, 'asciigraf', 'VERSION')).read().strip()
def readme():
with open("README.rst", 'r') as f:
return f.read()
setup(
name="asciigraf",
version=version,
packages=["asciigraf"],
package_data={
'': ['VERSION']
},
description="A python library for making ascii-art into network graphs.",
classifiers=[
"Development Status :: 3 - Alpha",
"License :: OSI Approved :: MIT License",
"Programming Language :: Python",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Topic :: Software Development :: Libraries :: Python Modules",
],
long_description=readme(),
author="Opus One Solutions",
author_email="rnd@opusonesolutions.com",
url="https://github.com/opusonesolutions/asciigraf",
keywords=["graph", "network", "testing", "parser"],
license="MIT",
install_requires=[
'networkx>=1.11',
],
extras_require={
"test": ["pytest", "pytest-cov"],
},
)
|
b42d2239d24bb651f95830899d972e4302a10d77
|
setup.py
|
setup.py
|
#!/usr/bin/env python
import setuptools
import samsungctl
setuptools.setup(
name=samsungctl.__title__,
version=samsungctl.__version__,
description=samsungctl.__doc__,
url=samsungctl.__url__,
author=samsungctl.__author__,
author_email=samsungctl.__author_email__,
license=samsungctl.__license__,
long_description=open("README.md").read(),
entry_points={
"console_scripts": ["samsungctl=samsungctl.__main__:main"]
},
packages=["samsungctl"],
install_requires=[],
classifiers=[
"Development Status :: 4 - Beta",
"Environment :: Console",
"License :: OSI Approved :: MIT License",
"Programming Language :: Python :: 3",
"Topic :: Home Automation"
]
)
|
#!/usr/bin/env python
import setuptools
import samsungctl
setuptools.setup(
name=samsungctl.__title__,
version=samsungctl.__version__,
description=samsungctl.__doc__,
url=samsungctl.__url__,
author=samsungctl.__author__,
author_email=samsungctl.__author_email__,
license=samsungctl.__license__,
long_description=open("README.md").read(),
entry_points={
"console_scripts": ["samsungctl=samsungctl.__main__:main"]
},
packages=["samsungctl"],
install_requires=[],
classifiers=[
"Development Status :: 4 - Beta",
"Environment :: Console",
"License :: OSI Approved :: MIT License",
"Programming Language :: Python :: 3",
"Topic :: Home Automation",
],
)
|
Use comma on every line on multi-line lists
|
Use comma on every line on multi-line lists
|
Python
|
mit
|
dominikkarall/samsungctl,Ape/samsungctl
|
#!/usr/bin/env python
import setuptools
import samsungctl
setuptools.setup(
name=samsungctl.__title__,
version=samsungctl.__version__,
description=samsungctl.__doc__,
url=samsungctl.__url__,
author=samsungctl.__author__,
author_email=samsungctl.__author_email__,
license=samsungctl.__license__,
long_description=open("README.md").read(),
entry_points={
"console_scripts": ["samsungctl=samsungctl.__main__:main"]
},
packages=["samsungctl"],
install_requires=[],
classifiers=[
"Development Status :: 4 - Beta",
"Environment :: Console",
"License :: OSI Approved :: MIT License",
"Programming Language :: Python :: 3",
"Topic :: Home Automation"
]
)
Use comma on every line on multi-line lists
|
#!/usr/bin/env python
import setuptools
import samsungctl
setuptools.setup(
name=samsungctl.__title__,
version=samsungctl.__version__,
description=samsungctl.__doc__,
url=samsungctl.__url__,
author=samsungctl.__author__,
author_email=samsungctl.__author_email__,
license=samsungctl.__license__,
long_description=open("README.md").read(),
entry_points={
"console_scripts": ["samsungctl=samsungctl.__main__:main"]
},
packages=["samsungctl"],
install_requires=[],
classifiers=[
"Development Status :: 4 - Beta",
"Environment :: Console",
"License :: OSI Approved :: MIT License",
"Programming Language :: Python :: 3",
"Topic :: Home Automation",
],
)
|
<commit_before>#!/usr/bin/env python
import setuptools
import samsungctl
setuptools.setup(
name=samsungctl.__title__,
version=samsungctl.__version__,
description=samsungctl.__doc__,
url=samsungctl.__url__,
author=samsungctl.__author__,
author_email=samsungctl.__author_email__,
license=samsungctl.__license__,
long_description=open("README.md").read(),
entry_points={
"console_scripts": ["samsungctl=samsungctl.__main__:main"]
},
packages=["samsungctl"],
install_requires=[],
classifiers=[
"Development Status :: 4 - Beta",
"Environment :: Console",
"License :: OSI Approved :: MIT License",
"Programming Language :: Python :: 3",
"Topic :: Home Automation"
]
)
<commit_msg>Use comma on every line on multi-line lists<commit_after>
|
#!/usr/bin/env python
import setuptools
import samsungctl
setuptools.setup(
name=samsungctl.__title__,
version=samsungctl.__version__,
description=samsungctl.__doc__,
url=samsungctl.__url__,
author=samsungctl.__author__,
author_email=samsungctl.__author_email__,
license=samsungctl.__license__,
long_description=open("README.md").read(),
entry_points={
"console_scripts": ["samsungctl=samsungctl.__main__:main"]
},
packages=["samsungctl"],
install_requires=[],
classifiers=[
"Development Status :: 4 - Beta",
"Environment :: Console",
"License :: OSI Approved :: MIT License",
"Programming Language :: Python :: 3",
"Topic :: Home Automation",
],
)
|
#!/usr/bin/env python
import setuptools
import samsungctl
setuptools.setup(
name=samsungctl.__title__,
version=samsungctl.__version__,
description=samsungctl.__doc__,
url=samsungctl.__url__,
author=samsungctl.__author__,
author_email=samsungctl.__author_email__,
license=samsungctl.__license__,
long_description=open("README.md").read(),
entry_points={
"console_scripts": ["samsungctl=samsungctl.__main__:main"]
},
packages=["samsungctl"],
install_requires=[],
classifiers=[
"Development Status :: 4 - Beta",
"Environment :: Console",
"License :: OSI Approved :: MIT License",
"Programming Language :: Python :: 3",
"Topic :: Home Automation"
]
)
Use comma on every line on multi-line lists
#!/usr/bin/env python
import setuptools
import samsungctl
setuptools.setup(
name=samsungctl.__title__,
version=samsungctl.__version__,
description=samsungctl.__doc__,
url=samsungctl.__url__,
author=samsungctl.__author__,
author_email=samsungctl.__author_email__,
license=samsungctl.__license__,
long_description=open("README.md").read(),
entry_points={
"console_scripts": ["samsungctl=samsungctl.__main__:main"]
},
packages=["samsungctl"],
install_requires=[],
classifiers=[
"Development Status :: 4 - Beta",
"Environment :: Console",
"License :: OSI Approved :: MIT License",
"Programming Language :: Python :: 3",
"Topic :: Home Automation",
],
)
|
<commit_before>#!/usr/bin/env python
import setuptools
import samsungctl
setuptools.setup(
name=samsungctl.__title__,
version=samsungctl.__version__,
description=samsungctl.__doc__,
url=samsungctl.__url__,
author=samsungctl.__author__,
author_email=samsungctl.__author_email__,
license=samsungctl.__license__,
long_description=open("README.md").read(),
entry_points={
"console_scripts": ["samsungctl=samsungctl.__main__:main"]
},
packages=["samsungctl"],
install_requires=[],
classifiers=[
"Development Status :: 4 - Beta",
"Environment :: Console",
"License :: OSI Approved :: MIT License",
"Programming Language :: Python :: 3",
"Topic :: Home Automation"
]
)
<commit_msg>Use comma on every line on multi-line lists<commit_after>#!/usr/bin/env python
import setuptools
import samsungctl
setuptools.setup(
name=samsungctl.__title__,
version=samsungctl.__version__,
description=samsungctl.__doc__,
url=samsungctl.__url__,
author=samsungctl.__author__,
author_email=samsungctl.__author_email__,
license=samsungctl.__license__,
long_description=open("README.md").read(),
entry_points={
"console_scripts": ["samsungctl=samsungctl.__main__:main"]
},
packages=["samsungctl"],
install_requires=[],
classifiers=[
"Development Status :: 4 - Beta",
"Environment :: Console",
"License :: OSI Approved :: MIT License",
"Programming Language :: Python :: 3",
"Topic :: Home Automation",
],
)
|
13c060f47d16860e0c67ace5adbde3c9167f343e
|
setup.py
|
setup.py
|
import setuptools
with open("README.md", "r") as fh:
long_description = fh.read()
setuptools.setup(
name = 'ferretmagic',
packages = ['ferretmagic'],
py_modules = ['ferretmagic'],
version = '20181001',
description = 'ipython extension for pyferret',
author = 'Patrick Brockmann',
author_email = 'Patrick.Brockmann@lsce.ipsl.fr',
long_description=long_description,
long_description_content_type="text/markdown",
url = 'https://github.com/PBrockmann/ipython_ferretmagic',
download_url = 'https://github.com/PBrockmann/ipython_ferretmagic/tarball/master',
keywords = ['jupyter', 'ipython', 'ferret', 'pyferret', 'magic', 'extension'],
classifiers = [
'Programming Language :: Python',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Framework :: Jupyter',
'Framework :: IPython'
],
)
|
import setuptools
with open("README.md", "r") as fh:
long_description = fh.read()
setuptools.setup(
name = 'ferretmagic',
packages = ['ferretmagic'],
py_modules = ['ferretmagic'],
version = '20181001',
description = 'ipython extension for pyferret',
author = 'Patrick Brockmann',
author_email = 'Patrick.Brockmann@lsce.ipsl.fr',
long_description=long_description,
long_description_content_type="text/markdown",
url = 'https://github.com/PBrockmann/ipython_ferretmagic',
download_url = 'https://github.com/PBrockmann/ipython_ferretmagic/tarball/master',
keywords = ['jupyter', 'ipython', 'ferret', 'pyferret', 'magic', 'extension'],
classifiers = [
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 2',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Framework :: Jupyter',
'Framework :: IPython'
],
)
|
Test for compatibility python2 and python3
|
Test for compatibility python2 and python3
|
Python
|
mit
|
PBrockmann/ipython-ferretmagic,PBrockmann/ipython_ferretmagic,PBrockmann/ipython_ferretmagic
|
import setuptools
with open("README.md", "r") as fh:
long_description = fh.read()
setuptools.setup(
name = 'ferretmagic',
packages = ['ferretmagic'],
py_modules = ['ferretmagic'],
version = '20181001',
description = 'ipython extension for pyferret',
author = 'Patrick Brockmann',
author_email = 'Patrick.Brockmann@lsce.ipsl.fr',
long_description=long_description,
long_description_content_type="text/markdown",
url = 'https://github.com/PBrockmann/ipython_ferretmagic',
download_url = 'https://github.com/PBrockmann/ipython_ferretmagic/tarball/master',
keywords = ['jupyter', 'ipython', 'ferret', 'pyferret', 'magic', 'extension'],
classifiers = [
'Programming Language :: Python',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Framework :: Jupyter',
'Framework :: IPython'
],
)
Test for compatibility python2 and python3
|
import setuptools
with open("README.md", "r") as fh:
long_description = fh.read()
setuptools.setup(
name = 'ferretmagic',
packages = ['ferretmagic'],
py_modules = ['ferretmagic'],
version = '20181001',
description = 'ipython extension for pyferret',
author = 'Patrick Brockmann',
author_email = 'Patrick.Brockmann@lsce.ipsl.fr',
long_description=long_description,
long_description_content_type="text/markdown",
url = 'https://github.com/PBrockmann/ipython_ferretmagic',
download_url = 'https://github.com/PBrockmann/ipython_ferretmagic/tarball/master',
keywords = ['jupyter', 'ipython', 'ferret', 'pyferret', 'magic', 'extension'],
classifiers = [
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 2',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Framework :: Jupyter',
'Framework :: IPython'
],
)
|
<commit_before>import setuptools
with open("README.md", "r") as fh:
long_description = fh.read()
setuptools.setup(
name = 'ferretmagic',
packages = ['ferretmagic'],
py_modules = ['ferretmagic'],
version = '20181001',
description = 'ipython extension for pyferret',
author = 'Patrick Brockmann',
author_email = 'Patrick.Brockmann@lsce.ipsl.fr',
long_description=long_description,
long_description_content_type="text/markdown",
url = 'https://github.com/PBrockmann/ipython_ferretmagic',
download_url = 'https://github.com/PBrockmann/ipython_ferretmagic/tarball/master',
keywords = ['jupyter', 'ipython', 'ferret', 'pyferret', 'magic', 'extension'],
classifiers = [
'Programming Language :: Python',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Framework :: Jupyter',
'Framework :: IPython'
],
)
<commit_msg>Test for compatibility python2 and python3<commit_after>
|
import setuptools
with open("README.md", "r") as fh:
long_description = fh.read()
setuptools.setup(
name = 'ferretmagic',
packages = ['ferretmagic'],
py_modules = ['ferretmagic'],
version = '20181001',
description = 'ipython extension for pyferret',
author = 'Patrick Brockmann',
author_email = 'Patrick.Brockmann@lsce.ipsl.fr',
long_description=long_description,
long_description_content_type="text/markdown",
url = 'https://github.com/PBrockmann/ipython_ferretmagic',
download_url = 'https://github.com/PBrockmann/ipython_ferretmagic/tarball/master',
keywords = ['jupyter', 'ipython', 'ferret', 'pyferret', 'magic', 'extension'],
classifiers = [
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 2',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Framework :: Jupyter',
'Framework :: IPython'
],
)
|
import setuptools
with open("README.md", "r") as fh:
long_description = fh.read()
setuptools.setup(
name = 'ferretmagic',
packages = ['ferretmagic'],
py_modules = ['ferretmagic'],
version = '20181001',
description = 'ipython extension for pyferret',
author = 'Patrick Brockmann',
author_email = 'Patrick.Brockmann@lsce.ipsl.fr',
long_description=long_description,
long_description_content_type="text/markdown",
url = 'https://github.com/PBrockmann/ipython_ferretmagic',
download_url = 'https://github.com/PBrockmann/ipython_ferretmagic/tarball/master',
keywords = ['jupyter', 'ipython', 'ferret', 'pyferret', 'magic', 'extension'],
classifiers = [
'Programming Language :: Python',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Framework :: Jupyter',
'Framework :: IPython'
],
)
Test for compatibility python2 and python3
import setuptools
with open("README.md", "r") as fh:
long_description = fh.read()
setuptools.setup(
name = 'ferretmagic',
packages = ['ferretmagic'],
py_modules = ['ferretmagic'],
version = '20181001',
description = 'ipython extension for pyferret',
author = 'Patrick Brockmann',
author_email = 'Patrick.Brockmann@lsce.ipsl.fr',
long_description=long_description,
long_description_content_type="text/markdown",
url = 'https://github.com/PBrockmann/ipython_ferretmagic',
download_url = 'https://github.com/PBrockmann/ipython_ferretmagic/tarball/master',
keywords = ['jupyter', 'ipython', 'ferret', 'pyferret', 'magic', 'extension'],
classifiers = [
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 2',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Framework :: Jupyter',
'Framework :: IPython'
],
)
|
<commit_before>import setuptools
with open("README.md", "r") as fh:
long_description = fh.read()
setuptools.setup(
name = 'ferretmagic',
packages = ['ferretmagic'],
py_modules = ['ferretmagic'],
version = '20181001',
description = 'ipython extension for pyferret',
author = 'Patrick Brockmann',
author_email = 'Patrick.Brockmann@lsce.ipsl.fr',
long_description=long_description,
long_description_content_type="text/markdown",
url = 'https://github.com/PBrockmann/ipython_ferretmagic',
download_url = 'https://github.com/PBrockmann/ipython_ferretmagic/tarball/master',
keywords = ['jupyter', 'ipython', 'ferret', 'pyferret', 'magic', 'extension'],
classifiers = [
'Programming Language :: Python',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Framework :: Jupyter',
'Framework :: IPython'
],
)
<commit_msg>Test for compatibility python2 and python3<commit_after>import setuptools
with open("README.md", "r") as fh:
long_description = fh.read()
setuptools.setup(
name = 'ferretmagic',
packages = ['ferretmagic'],
py_modules = ['ferretmagic'],
version = '20181001',
description = 'ipython extension for pyferret',
author = 'Patrick Brockmann',
author_email = 'Patrick.Brockmann@lsce.ipsl.fr',
long_description=long_description,
long_description_content_type="text/markdown",
url = 'https://github.com/PBrockmann/ipython_ferretmagic',
download_url = 'https://github.com/PBrockmann/ipython_ferretmagic/tarball/master',
keywords = ['jupyter', 'ipython', 'ferret', 'pyferret', 'magic', 'extension'],
classifiers = [
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 2',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Framework :: Jupyter',
'Framework :: IPython'
],
)
|
c259e42ea95fdc43ad9345d702d3cab901d88f93
|
rx/core/__init__.py
|
rx/core/__init__.py
|
# flake8: noqa
from .typing import Observer, Scheduler
from .disposable import Disposable
from .anonymousobserver import AnonymousObserver
from . import observerextensions
from .pipe import pipe
from .observable import Observable
from .observable import AnonymousObservable, ConnectableObservable
from .observable import GroupedObservable, BlockingObservable
from .observerbase import ObserverBase
|
# flake8: noqa
from .typing import Observer, Scheduler
from .disposable import Disposable
from .anonymousobserver import AnonymousObserver
from .pipe import pipe
from .observable import Observable
from .observable import AnonymousObservable, ConnectableObservable
from .observable import GroupedObservable, BlockingObservable
from .observerbase import ObserverBase
|
Remove observer extension from init
|
Remove observer extension from init
|
Python
|
mit
|
ReactiveX/RxPY,ReactiveX/RxPY
|
# flake8: noqa
from .typing import Observer, Scheduler
from .disposable import Disposable
from .anonymousobserver import AnonymousObserver
from . import observerextensions
from .pipe import pipe
from .observable import Observable
from .observable import AnonymousObservable, ConnectableObservable
from .observable import GroupedObservable, BlockingObservable
from .observerbase import ObserverBase
Remove observer extension from init
|
# flake8: noqa
from .typing import Observer, Scheduler
from .disposable import Disposable
from .anonymousobserver import AnonymousObserver
from .pipe import pipe
from .observable import Observable
from .observable import AnonymousObservable, ConnectableObservable
from .observable import GroupedObservable, BlockingObservable
from .observerbase import ObserverBase
|
<commit_before># flake8: noqa
from .typing import Observer, Scheduler
from .disposable import Disposable
from .anonymousobserver import AnonymousObserver
from . import observerextensions
from .pipe import pipe
from .observable import Observable
from .observable import AnonymousObservable, ConnectableObservable
from .observable import GroupedObservable, BlockingObservable
from .observerbase import ObserverBase
<commit_msg>Remove observer extension from init<commit_after>
|
# flake8: noqa
from .typing import Observer, Scheduler
from .disposable import Disposable
from .anonymousobserver import AnonymousObserver
from .pipe import pipe
from .observable import Observable
from .observable import AnonymousObservable, ConnectableObservable
from .observable import GroupedObservable, BlockingObservable
from .observerbase import ObserverBase
|
# flake8: noqa
from .typing import Observer, Scheduler
from .disposable import Disposable
from .anonymousobserver import AnonymousObserver
from . import observerextensions
from .pipe import pipe
from .observable import Observable
from .observable import AnonymousObservable, ConnectableObservable
from .observable import GroupedObservable, BlockingObservable
from .observerbase import ObserverBase
Remove observer extension from init
# flake8: noqa
from .typing import Observer, Scheduler
from .disposable import Disposable
from .anonymousobserver import AnonymousObserver
from .pipe import pipe
from .observable import Observable
from .observable import AnonymousObservable, ConnectableObservable
from .observable import GroupedObservable, BlockingObservable
from .observerbase import ObserverBase
|
<commit_before># flake8: noqa
from .typing import Observer, Scheduler
from .disposable import Disposable
from .anonymousobserver import AnonymousObserver
from . import observerextensions
from .pipe import pipe
from .observable import Observable
from .observable import AnonymousObservable, ConnectableObservable
from .observable import GroupedObservable, BlockingObservable
from .observerbase import ObserverBase
<commit_msg>Remove observer extension from init<commit_after># flake8: noqa
from .typing import Observer, Scheduler
from .disposable import Disposable
from .anonymousobserver import AnonymousObserver
from .pipe import pipe
from .observable import Observable
from .observable import AnonymousObservable, ConnectableObservable
from .observable import GroupedObservable, BlockingObservable
from .observerbase import ObserverBase
|
4068605116cb04b999c66d29056c88cf2c4ab46c
|
scripts/analytics/addon_count.py
|
scripts/analytics/addon_count.py
|
import sys
import logging
from datetime import datetime
from dateutil.parser import parse
from website.settings import ADDONS_AVAILABLE
from website.app import init_app
from website.settings import KEEN as keen_settings
from keen.client import KeenClient
logger = logging.getLogger(__name__)
logging.basicConfig(level=logging.INFO)
def count():
counts = []
for addon in ADDONS_AVAILABLE:
user_count = addon.settings_models['user'].find().count() if addon.settings_models.get('user') else 0
node_count = addon.settings_models['node'].find().count() if addon.settings_models.get('node') else 0
counts.append({
'provider': addon.short_name,
'user_count': user_count,
'node_count': node_count
})
logger.info('{} counted. Users: {}, Nodes: {}'.format(addon.short_name, user_count, node_count))
return counts
def main():
addon_count = count()
keen_project = keen_settings['private']['project_id']
write_key = keen_settings['private']['write_key']
if keen_project and write_key:
client = KeenClient(
project_id=keen_project,
write_key=write_key,
)
client.add_event('addon_count_analytics', addon_count)
else:
print(addon_count)
if __name__ == '__main__':
init_app()
try:
date = parse(sys.argv[1])
except IndexError:
date = datetime.now()
main(date)
|
import logging
from website.settings import ADDONS_AVAILABLE
from website.app import init_app
from website.settings import KEEN as keen_settings
from keen.client import KeenClient
logger = logging.getLogger(__name__)
logging.basicConfig(level=logging.INFO)
def count():
counts = []
for addon in ADDONS_AVAILABLE:
user_count = addon.settings_models['user'].find().count() if addon.settings_models.get('user') else 0
node_count = addon.settings_models['node'].find().count() if addon.settings_models.get('node') else 0
counts.append({
'provider': addon.short_name,
'user_count': user_count,
'node_count': node_count
})
logger.info('{} counted. Users: {}, Nodes: {}'.format(addon.short_name, user_count, node_count))
return counts
def main():
addon_count = count()
keen_project = keen_settings['private']['project_id']
write_key = keen_settings['private']['write_key']
if keen_project and write_key:
client = KeenClient(
project_id=keen_project,
write_key=write_key,
)
client.add_event('addon_count_analytics', addon_count)
else:
print(addon_count)
if __name__ == '__main__':
init_app()
main()
|
Remove date from addon count script
|
Remove date from addon count script
|
Python
|
apache-2.0
|
pattisdr/osf.io,saradbowman/osf.io,binoculars/osf.io,caneruguz/osf.io,HalcyonChimera/osf.io,mattclark/osf.io,cslzchen/osf.io,chrisseto/osf.io,acshi/osf.io,mfraezz/osf.io,acshi/osf.io,CenterForOpenScience/osf.io,monikagrabowska/osf.io,sloria/osf.io,aaxelb/osf.io,caseyrollins/osf.io,monikagrabowska/osf.io,Johnetordoff/osf.io,hmoco/osf.io,acshi/osf.io,alexschiller/osf.io,acshi/osf.io,Johnetordoff/osf.io,aaxelb/osf.io,baylee-d/osf.io,pattisdr/osf.io,caneruguz/osf.io,adlius/osf.io,caseyrollins/osf.io,mattclark/osf.io,erinspace/osf.io,CenterForOpenScience/osf.io,hmoco/osf.io,brianjgeiger/osf.io,Johnetordoff/osf.io,Nesiehr/osf.io,brianjgeiger/osf.io,mluo613/osf.io,cwisecarver/osf.io,rdhyee/osf.io,rdhyee/osf.io,mluo613/osf.io,erinspace/osf.io,rdhyee/osf.io,alexschiller/osf.io,adlius/osf.io,baylee-d/osf.io,saradbowman/osf.io,laurenrevere/osf.io,chennan47/osf.io,alexschiller/osf.io,mluo613/osf.io,monikagrabowska/osf.io,icereval/osf.io,chrisseto/osf.io,pattisdr/osf.io,cslzchen/osf.io,leb2dg/osf.io,mattclark/osf.io,acshi/osf.io,crcresearch/osf.io,cslzchen/osf.io,CenterForOpenScience/osf.io,erinspace/osf.io,felliott/osf.io,icereval/osf.io,hmoco/osf.io,caneruguz/osf.io,felliott/osf.io,aaxelb/osf.io,leb2dg/osf.io,CenterForOpenScience/osf.io,chennan47/osf.io,Johnetordoff/osf.io,brianjgeiger/osf.io,chrisseto/osf.io,TomBaxter/osf.io,TomBaxter/osf.io,Nesiehr/osf.io,alexschiller/osf.io,brianjgeiger/osf.io,cslzchen/osf.io,mfraezz/osf.io,adlius/osf.io,icereval/osf.io,HalcyonChimera/osf.io,leb2dg/osf.io,laurenrevere/osf.io,caseyrollins/osf.io,caneruguz/osf.io,monikagrabowska/osf.io,binoculars/osf.io,baylee-d/osf.io,adlius/osf.io,sloria/osf.io,chrisseto/osf.io,aaxelb/osf.io,TomBaxter/osf.io,sloria/osf.io,alexschiller/osf.io,felliott/osf.io,mluo613/osf.io,Nesiehr/osf.io,mfraezz/osf.io,HalcyonChimera/osf.io,crcresearch/osf.io,chennan47/osf.io,mluo613/osf.io,mfraezz/osf.io,rdhyee/osf.io,cwisecarver/osf.io,crcresearch/osf.io,felliott/osf.io,HalcyonChimera/osf.io,cwisecarver/osf.io,leb2dg/osf.io,hmoco/osf.io,Nesiehr/osf.io,binoculars/osf.io,cwisecarver/osf.io,laurenrevere/osf.io,monikagrabowska/osf.io
|
import sys
import logging
from datetime import datetime
from dateutil.parser import parse
from website.settings import ADDONS_AVAILABLE
from website.app import init_app
from website.settings import KEEN as keen_settings
from keen.client import KeenClient
logger = logging.getLogger(__name__)
logging.basicConfig(level=logging.INFO)
def count():
counts = []
for addon in ADDONS_AVAILABLE:
user_count = addon.settings_models['user'].find().count() if addon.settings_models.get('user') else 0
node_count = addon.settings_models['node'].find().count() if addon.settings_models.get('node') else 0
counts.append({
'provider': addon.short_name,
'user_count': user_count,
'node_count': node_count
})
logger.info('{} counted. Users: {}, Nodes: {}'.format(addon.short_name, user_count, node_count))
return counts
def main():
addon_count = count()
keen_project = keen_settings['private']['project_id']
write_key = keen_settings['private']['write_key']
if keen_project and write_key:
client = KeenClient(
project_id=keen_project,
write_key=write_key,
)
client.add_event('addon_count_analytics', addon_count)
else:
print(addon_count)
if __name__ == '__main__':
init_app()
try:
date = parse(sys.argv[1])
except IndexError:
date = datetime.now()
main(date)
Remove date from addon count script
|
import logging
from website.settings import ADDONS_AVAILABLE
from website.app import init_app
from website.settings import KEEN as keen_settings
from keen.client import KeenClient
logger = logging.getLogger(__name__)
logging.basicConfig(level=logging.INFO)
def count():
counts = []
for addon in ADDONS_AVAILABLE:
user_count = addon.settings_models['user'].find().count() if addon.settings_models.get('user') else 0
node_count = addon.settings_models['node'].find().count() if addon.settings_models.get('node') else 0
counts.append({
'provider': addon.short_name,
'user_count': user_count,
'node_count': node_count
})
logger.info('{} counted. Users: {}, Nodes: {}'.format(addon.short_name, user_count, node_count))
return counts
def main():
addon_count = count()
keen_project = keen_settings['private']['project_id']
write_key = keen_settings['private']['write_key']
if keen_project and write_key:
client = KeenClient(
project_id=keen_project,
write_key=write_key,
)
client.add_event('addon_count_analytics', addon_count)
else:
print(addon_count)
if __name__ == '__main__':
init_app()
main()
|
<commit_before>import sys
import logging
from datetime import datetime
from dateutil.parser import parse
from website.settings import ADDONS_AVAILABLE
from website.app import init_app
from website.settings import KEEN as keen_settings
from keen.client import KeenClient
logger = logging.getLogger(__name__)
logging.basicConfig(level=logging.INFO)
def count():
counts = []
for addon in ADDONS_AVAILABLE:
user_count = addon.settings_models['user'].find().count() if addon.settings_models.get('user') else 0
node_count = addon.settings_models['node'].find().count() if addon.settings_models.get('node') else 0
counts.append({
'provider': addon.short_name,
'user_count': user_count,
'node_count': node_count
})
logger.info('{} counted. Users: {}, Nodes: {}'.format(addon.short_name, user_count, node_count))
return counts
def main():
addon_count = count()
keen_project = keen_settings['private']['project_id']
write_key = keen_settings['private']['write_key']
if keen_project and write_key:
client = KeenClient(
project_id=keen_project,
write_key=write_key,
)
client.add_event('addon_count_analytics', addon_count)
else:
print(addon_count)
if __name__ == '__main__':
init_app()
try:
date = parse(sys.argv[1])
except IndexError:
date = datetime.now()
main(date)
<commit_msg>Remove date from addon count script<commit_after>
|
import logging
from website.settings import ADDONS_AVAILABLE
from website.app import init_app
from website.settings import KEEN as keen_settings
from keen.client import KeenClient
logger = logging.getLogger(__name__)
logging.basicConfig(level=logging.INFO)
def count():
counts = []
for addon in ADDONS_AVAILABLE:
user_count = addon.settings_models['user'].find().count() if addon.settings_models.get('user') else 0
node_count = addon.settings_models['node'].find().count() if addon.settings_models.get('node') else 0
counts.append({
'provider': addon.short_name,
'user_count': user_count,
'node_count': node_count
})
logger.info('{} counted. Users: {}, Nodes: {}'.format(addon.short_name, user_count, node_count))
return counts
def main():
addon_count = count()
keen_project = keen_settings['private']['project_id']
write_key = keen_settings['private']['write_key']
if keen_project and write_key:
client = KeenClient(
project_id=keen_project,
write_key=write_key,
)
client.add_event('addon_count_analytics', addon_count)
else:
print(addon_count)
if __name__ == '__main__':
init_app()
main()
|
import sys
import logging
from datetime import datetime
from dateutil.parser import parse
from website.settings import ADDONS_AVAILABLE
from website.app import init_app
from website.settings import KEEN as keen_settings
from keen.client import KeenClient
logger = logging.getLogger(__name__)
logging.basicConfig(level=logging.INFO)
def count():
counts = []
for addon in ADDONS_AVAILABLE:
user_count = addon.settings_models['user'].find().count() if addon.settings_models.get('user') else 0
node_count = addon.settings_models['node'].find().count() if addon.settings_models.get('node') else 0
counts.append({
'provider': addon.short_name,
'user_count': user_count,
'node_count': node_count
})
logger.info('{} counted. Users: {}, Nodes: {}'.format(addon.short_name, user_count, node_count))
return counts
def main():
addon_count = count()
keen_project = keen_settings['private']['project_id']
write_key = keen_settings['private']['write_key']
if keen_project and write_key:
client = KeenClient(
project_id=keen_project,
write_key=write_key,
)
client.add_event('addon_count_analytics', addon_count)
else:
print(addon_count)
if __name__ == '__main__':
init_app()
try:
date = parse(sys.argv[1])
except IndexError:
date = datetime.now()
main(date)
Remove date from addon count script
import logging
from website.settings import ADDONS_AVAILABLE
from website.app import init_app
from website.settings import KEEN as keen_settings
from keen.client import KeenClient
logger = logging.getLogger(__name__)
logging.basicConfig(level=logging.INFO)
def count():
counts = []
for addon in ADDONS_AVAILABLE:
user_count = addon.settings_models['user'].find().count() if addon.settings_models.get('user') else 0
node_count = addon.settings_models['node'].find().count() if addon.settings_models.get('node') else 0
counts.append({
'provider': addon.short_name,
'user_count': user_count,
'node_count': node_count
})
logger.info('{} counted. Users: {}, Nodes: {}'.format(addon.short_name, user_count, node_count))
return counts
def main():
addon_count = count()
keen_project = keen_settings['private']['project_id']
write_key = keen_settings['private']['write_key']
if keen_project and write_key:
client = KeenClient(
project_id=keen_project,
write_key=write_key,
)
client.add_event('addon_count_analytics', addon_count)
else:
print(addon_count)
if __name__ == '__main__':
init_app()
main()
|
<commit_before>import sys
import logging
from datetime import datetime
from dateutil.parser import parse
from website.settings import ADDONS_AVAILABLE
from website.app import init_app
from website.settings import KEEN as keen_settings
from keen.client import KeenClient
logger = logging.getLogger(__name__)
logging.basicConfig(level=logging.INFO)
def count():
counts = []
for addon in ADDONS_AVAILABLE:
user_count = addon.settings_models['user'].find().count() if addon.settings_models.get('user') else 0
node_count = addon.settings_models['node'].find().count() if addon.settings_models.get('node') else 0
counts.append({
'provider': addon.short_name,
'user_count': user_count,
'node_count': node_count
})
logger.info('{} counted. Users: {}, Nodes: {}'.format(addon.short_name, user_count, node_count))
return counts
def main():
addon_count = count()
keen_project = keen_settings['private']['project_id']
write_key = keen_settings['private']['write_key']
if keen_project and write_key:
client = KeenClient(
project_id=keen_project,
write_key=write_key,
)
client.add_event('addon_count_analytics', addon_count)
else:
print(addon_count)
if __name__ == '__main__':
init_app()
try:
date = parse(sys.argv[1])
except IndexError:
date = datetime.now()
main(date)
<commit_msg>Remove date from addon count script<commit_after>import logging
from website.settings import ADDONS_AVAILABLE
from website.app import init_app
from website.settings import KEEN as keen_settings
from keen.client import KeenClient
logger = logging.getLogger(__name__)
logging.basicConfig(level=logging.INFO)
def count():
counts = []
for addon in ADDONS_AVAILABLE:
user_count = addon.settings_models['user'].find().count() if addon.settings_models.get('user') else 0
node_count = addon.settings_models['node'].find().count() if addon.settings_models.get('node') else 0
counts.append({
'provider': addon.short_name,
'user_count': user_count,
'node_count': node_count
})
logger.info('{} counted. Users: {}, Nodes: {}'.format(addon.short_name, user_count, node_count))
return counts
def main():
addon_count = count()
keen_project = keen_settings['private']['project_id']
write_key = keen_settings['private']['write_key']
if keen_project and write_key:
client = KeenClient(
project_id=keen_project,
write_key=write_key,
)
client.add_event('addon_count_analytics', addon_count)
else:
print(addon_count)
if __name__ == '__main__':
init_app()
main()
|
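A note on the entry point removed in the commit above: the pre-commit script parsed an optional date from argv and called main(date), yet main() is defined with no parameters, so passing the date would have raised a TypeError; dropping the dead argument is the simpler fix. For illustration only, a minimal sketch of what a working optional-date entry point could have looked like (the parameter and its default are assumptions, not part of the original script):

import sys
from datetime import datetime
from dateutil.parser import parse

def main(date=None):
    # hypothetical signature: accept an optional date, defaulting to now,
    # so the argv-parsing block below actually matches the function
    date = date or datetime.now()
    # ... count addons and submit the Keen event here ...

if __name__ == '__main__':
    try:
        date = parse(sys.argv[1])
    except IndexError:
        date = datetime.now()
    main(date)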
cced850df75679de3fbdf92856baf060202d9449
|
setup.py
|
setup.py
|
from setuptools import setup
setup(name='mailosaur',
version='3.0.2',
description='Python client library for Mailosaur',
url='https://mailosaur.com',
author='Mailosaur Ltd',
author_email='support@mailosaur.com',
keywords='email automation testing selenium robot framework',
license='MIT',
packages=['mailosaur'],
install_requires=[
'python-dateutil',
'requests',
'requests[security]'
],
zip_safe=False, requires=['requests'])
|
from setuptools import setup
setup(name='mailosaur',
version='3.0.3',
description='Python client library for Mailosaur',
url='https://mailosaur.com',
author='Mailosaur Ltd',
author_email='support@mailosaur.com',
keywords='email automation testing selenium robot framework',
license='MIT',
packages=['mailosaur'],
install_requires=[
'python-dateutil',
'requests',
'requests[security]'
],
zip_safe=False, requires=['requests'])
|
Increment version to 3.0.3 which now supports Python 2 and 3
|
Increment version to 3.0.3 which now supports Python 2 and 3
|
Python
|
mit
|
mailosaurapp/mailosaur-python,mailosaur/mailosaur-python,mailosaur/mailosaur-python
|
from setuptools import setup
setup(name='mailosaur',
version='3.0.2',
description='Python client library for Mailosaur',
url='https://mailosaur.com',
author='Mailosaur Ltd',
author_email='support@mailosaur.com',
keywords='email automation testing selenium robot framework',
license='MIT',
packages=['mailosaur'],
install_requires=[
'python-dateutil',
'requests',
'requests[security]'
],
zip_safe=False, requires=['requests'])
Increment version to 3.0.3 which now supports Python 2 and 3
|
from setuptools import setup
setup(name='mailosaur',
version='3.0.3',
description='Python client library for Mailosaur',
url='https://mailosaur.com',
author='Mailosaur Ltd',
author_email='support@mailosaur.com',
keywords='email automation testing selenium robot framework',
license='MIT',
packages=['mailosaur'],
install_requires=[
'python-dateutil',
'requests',
'requests[security]'
],
zip_safe=False, requires=['requests'])
|
<commit_before>from setuptools import setup
setup(name='mailosaur',
version='3.0.2',
description='Python client library for Mailosaur',
url='https://mailosaur.com',
author='Mailosaur Ltd',
author_email='support@mailosaur.com',
keywords='email automation testing selenium robot framework',
license='MIT',
packages=['mailosaur'],
install_requires=[
'python-dateutil',
'requests',
'requests[security]'
],
zip_safe=False, requires=['requests'])
<commit_msg>Increment version to 3.0.3 which now supports Python 2 and 3<commit_after>
|
from setuptools import setup
setup(name='mailosaur',
version='3.0.3',
description='Python client library for Mailosaur',
url='https://mailosaur.com',
author='Mailosaur Ltd',
author_email='support@mailosaur.com',
keywords='email automation testing selenium robot framework',
license='MIT',
packages=['mailosaur'],
install_requires=[
'python-dateutil',
'requests',
'requests[security]'
],
zip_safe=False, requires=['requests'])
|
from setuptools import setup
setup(name='mailosaur',
version='3.0.2',
description='Python client library for Mailosaur',
url='https://mailosaur.com',
author='Mailosaur Ltd',
author_email='support@mailosaur.com',
keywords='email automation testing selenium robot framework',
license='MIT',
packages=['mailosaur'],
install_requires=[
'python-dateutil',
'requests',
'requests[security]'
],
zip_safe=False, requires=['requests'])
Increment version to 3.0.3 which now supports Python 2 and 3
from setuptools import setup
setup(name='mailosaur',
version='3.0.3',
description='Python client library for Mailosaur',
url='https://mailosaur.com',
author='Mailosaur Ltd',
author_email='support@mailosaur.com',
keywords='email automation testing selenium robot framework',
license='MIT',
packages=['mailosaur'],
install_requires=[
'python-dateutil',
'requests',
'requests[security]'
],
zip_safe=False, requires=['requests'])
|
<commit_before>from setuptools import setup
setup(name='mailosaur',
version='3.0.2',
description='Python client library for Mailosaur',
url='https://mailosaur.com',
author='Mailosaur Ltd',
author_email='support@mailosaur.com',
keywords='email automation testing selenium robot framework',
license='MIT',
packages=['mailosaur'],
install_requires=[
'python-dateutil',
'requests',
'requests[security]'
],
zip_safe=False, requires=['requests'])
<commit_msg>Increment version to 3.0.3 which now supports Python 2 and 3<commit_after>from setuptools import setup
setup(name='mailosaur',
version='3.0.3',
description='Python client library for Mailosaur',
url='https://mailosaur.com',
author='Mailosaur Ltd',
author_email='support@mailosaur.com',
keywords='email automation testing selenium robot framework',
license='MIT',
packages=['mailosaur'],
install_requires=[
'python-dateutil',
'requests',
'requests[security]'
],
zip_safe=False, requires=['requests'])
|
3faffe4188c33147199a7c189407d10ce3af8ab5
|
setup.py
|
setup.py
|
from setuptools import setup, find_packages
setup(
name='sgevents',
version='0.1-dev',
description='A simplifying Shotgun event daemon',
url='http://github.com/westernx/sgevents',
packages=find_packages(exclude=['build*', 'tests*']),
author='Mike Boers',
author_email='sgevents@mikeboers.com',
license='BSD-3',
install_requires=[
'sgapi',
],
classifiers=[
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
'Topic :: Software Development :: Libraries :: Python Modules',
],
)
|
from setuptools import setup, find_packages
setup(
name='sgevents',
version='0.1-dev',
description='A simplifying Shotgun event daemon',
url='http://github.com/westernx/sgevents',
packages=find_packages(exclude=['build*', 'tests*']),
author='Mike Boers',
author_email='sgevents@mikeboers.com',
license='BSD-3',
install_requires=[
# one of `sgapi` or `shotgun_api3` is required
],
classifiers=[
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
'Topic :: Software Development :: Libraries :: Python Modules',
],
)
|
Remove dependence on sgapi (since we can use shotgun_api3 as well)
|
Remove dependence on sgapi (since we can use shotgun_api3 as well)
|
Python
|
bsd-3-clause
|
westernx/sgevents,westernx/sgevents
|
from setuptools import setup, find_packages
setup(
name='sgevents',
version='0.1-dev',
description='A simplifying Shotgun event daemon',
url='http://github.com/westernx/sgevents',
packages=find_packages(exclude=['build*', 'tests*']),
author='Mike Boers',
author_email='sgevents@mikeboers.com',
license='BSD-3',
install_requires=[
'sgapi',
],
classifiers=[
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
'Topic :: Software Development :: Libraries :: Python Modules',
],
)
Remove dependence on sgapi (since we can use shotgun_api3 as well)
|
from setuptools import setup, find_packages
setup(
name='sgevents',
version='0.1-dev',
description='A simplifying Shotgun event daemon',
url='http://github.com/westernx/sgevents',
packages=find_packages(exclude=['build*', 'tests*']),
author='Mike Boers',
author_email='sgevents@mikeboers.com',
license='BSD-3',
install_requires=[
# one of `sgapi` or `shotgun_api3` is required
],
classifiers=[
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
'Topic :: Software Development :: Libraries :: Python Modules',
],
)
|
<commit_before>from setuptools import setup, find_packages
setup(
name='sgevents',
version='0.1-dev',
description='A simplifying Shotgun event daemon',
url='http://github.com/westernx/sgevents',
packages=find_packages(exclude=['build*', 'tests*']),
author='Mike Boers',
author_email='sgevents@mikeboers.com',
license='BSD-3',
install_requires=[
'sgapi',
],
classifiers=[
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
'Topic :: Software Development :: Libraries :: Python Modules',
],
)
<commit_msg>Remove dependence on sgapi (since we can use shotgun_api3 as well)<commit_after>
|
from setuptools import setup, find_packages
setup(
name='sgevents',
version='0.1-dev',
description='A simplifying Shotgun event daemon',
url='http://github.com/westernx/sgevents',
packages=find_packages(exclude=['build*', 'tests*']),
author='Mike Boers',
author_email='sgevents@mikeboers.com',
license='BSD-3',
install_requires=[
# one of `sgapi` or `shotgun_api3` is required
],
classifiers=[
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
'Topic :: Software Development :: Libraries :: Python Modules',
],
)
|
from setuptools import setup, find_packages
setup(
name='sgevents',
version='0.1-dev',
description='A simplifying Shotgun event daemon',
url='http://github.com/westernx/sgevents',
packages=find_packages(exclude=['build*', 'tests*']),
author='Mike Boers',
author_email='sgevents@mikeboers.com',
license='BSD-3',
install_requires=[
'sgapi',
],
classifiers=[
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
'Topic :: Software Development :: Libraries :: Python Modules',
],
)
Remove dependence on sgapi (since we can use shotgun_api3 as well)
from setuptools import setup, find_packages
setup(
name='sgevents',
version='0.1-dev',
description='A simplifying Shotgun event daemon',
url='http://github.com/westernx/sgevents',
packages=find_packages(exclude=['build*', 'tests*']),
author='Mike Boers',
author_email='sgevents@mikeboers.com',
license='BSD-3',
install_requires=[
# one of `sgapi` or `shotgun_api3` is required
],
classifiers=[
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
'Topic :: Software Development :: Libraries :: Python Modules',
],
)
|
<commit_before>from setuptools import setup, find_packages
setup(
name='sgevents',
version='0.1-dev',
description='A simplifying Shotgun event daemon',
url='http://github.com/westernx/sgevents',
packages=find_packages(exclude=['build*', 'tests*']),
author='Mike Boers',
author_email='sgevents@mikeboers.com',
license='BSD-3',
install_requires=[
'sgapi',
],
classifiers=[
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
'Topic :: Software Development :: Libraries :: Python Modules',
],
)
<commit_msg>Remove dependence on sgapi (since we can use shotgun_api3 as well)<commit_after>from setuptools import setup, find_packages
setup(
name='sgevents',
version='0.1-dev',
description='A simplifying Shotgun event daemon',
url='http://github.com/westernx/sgevents',
packages=find_packages(exclude=['build*', 'tests*']),
author='Mike Boers',
author_email='sgevents@mikeboers.com',
license='BSD-3',
install_requires=[
# one of `sgapi` or `shotgun_api3` is required
],
classifiers=[
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
'Topic :: Software Development :: Libraries :: Python Modules',
],
)
|
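The commit above relaxes the install requirement because sgevents can talk to Shotgun through either client library. One common way to resolve such an either/or dependency is an import-time fallback; the sketch below shows the general pattern only — how sgevents actually selects a backend is not shown in this record, and the sgapi import path is an assumption:

try:
    # assumed import path for the alternative, lightweight client
    from sgapi import Shotgun
except ImportError:
    # fall back to the official Shotgun Python API
    from shotgun_api3 import Shotgun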
6f47a8a0dfa0fa3ad1659848cce1a764b239406a
|
setup.py
|
setup.py
|
from sqlalchemy_mptt import __version__
from setuptools import setup
setup(
name='sqlalchemy_mptt',
version=__version__,
url='http://github.com/ITCase/sqlalchemy_mptt/',
author='Svintsov Dmitry',
author_email='root@uralbash.ru',
packages=['sqlalchemy_mptt', ],
include_package_data=True,
zip_safe=False,
test_suite="nose.collector",
license="MIT",
description='SQLAlchemy MPTT mixins (Nested Sets)',
package_data={
'': ['*.txt', '*.rst', '*.md'],
},
long_description="http://github.com/ITCase/sqlalchemy_mptt/",
install_requires=[
"sqlalchemy",
],
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Console',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'Natural Language :: English',
'Natural Language :: Russian',
'Operating System :: OS Independent',
'Programming Language :: Python',
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.2",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Framework :: Pyramid ",
"Framework :: Flask",
"Topic :: Internet",
"Topic :: Database",
'License :: OSI Approved :: MIT License',
],
)
|
import os
import re
from setuptools import setup
with open(os.path.join('sqlalchemy_mptt', '__init__.py'), 'rb') as fh:
__version__ = (re.search(r'__version__\s*=\s*u?"([^"]+)"', fh.read())
.group(1).strip())
setup(
name='sqlalchemy_mptt',
version=__version__,
url='http://github.com/ITCase/sqlalchemy_mptt/',
author='Svintsov Dmitry',
author_email='root@uralbash.ru',
packages=['sqlalchemy_mptt', ],
include_package_data=True,
zip_safe=False,
test_suite="nose.collector",
license="MIT",
description='SQLAlchemy MPTT mixins (Nested Sets)',
package_data={
'': ['*.txt', '*.rst', '*.md'],
},
long_description="http://github.com/ITCase/sqlalchemy_mptt/",
install_requires=[
"sqlalchemy",
],
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Console',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'Natural Language :: English',
'Natural Language :: Russian',
'Operating System :: OS Independent',
'Programming Language :: Python',
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.2",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Framework :: Pyramid ",
"Framework :: Flask",
"Topic :: Internet",
"Topic :: Database",
'License :: OSI Approved :: MIT License',
],
)
|
Make the package installable at the same time of its requirements.
|
Make the package installable at the same time of its requirements.
`python setup.py --version` now works even if sqlalchemy is not
installed.
|
Python
|
mit
|
uralbash/sqlalchemy_mptt,uralbash/sqlalchemy_mptt,ITCase/sqlalchemy_mptt,ITCase/sqlalchemy_mptt
|
from sqlalchemy_mptt import __version__
from setuptools import setup
setup(
name='sqlalchemy_mptt',
version=__version__,
url='http://github.com/ITCase/sqlalchemy_mptt/',
author='Svintsov Dmitry',
author_email='root@uralbash.ru',
packages=['sqlalchemy_mptt', ],
include_package_data=True,
zip_safe=False,
test_suite="nose.collector",
license="MIT",
description='SQLAlchemy MPTT mixins (Nested Sets)',
package_data={
'': ['*.txt', '*.rst', '*.md'],
},
long_description="http://github.com/ITCase/sqlalchemy_mptt/",
install_requires=[
"sqlalchemy",
],
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Console',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'Natural Language :: English',
'Natural Language :: Russian',
'Operating System :: OS Independent',
'Programming Language :: Python',
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.2",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Framework :: Pyramid ",
"Framework :: Flask",
"Topic :: Internet",
"Topic :: Database",
'License :: OSI Approved :: MIT License',
],
)
Make the package installable at the same time of its requirements.
`python setup.py --version` now works even if sqlalchemy is not
installed.
|
import os
import re
from setuptools import setup
with open(os.path.join('sqlalchemy_mptt', '__init__.py'), 'rb') as fh:
__version__ = (re.search(r'__version__\s*=\s*u?"([^"]+)"', fh.read())
.group(1).strip())
setup(
name='sqlalchemy_mptt',
version=__version__,
url='http://github.com/ITCase/sqlalchemy_mptt/',
author='Svintsov Dmitry',
author_email='root@uralbash.ru',
packages=['sqlalchemy_mptt', ],
include_package_data=True,
zip_safe=False,
test_suite="nose.collector",
license="MIT",
description='SQLAlchemy MPTT mixins (Nested Sets)',
package_data={
'': ['*.txt', '*.rst', '*.md'],
},
long_description="http://github.com/ITCase/sqlalchemy_mptt/",
install_requires=[
"sqlalchemy",
],
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Console',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'Natural Language :: English',
'Natural Language :: Russian',
'Operating System :: OS Independent',
'Programming Language :: Python',
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.2",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Framework :: Pyramid ",
"Framework :: Flask",
"Topic :: Internet",
"Topic :: Database",
'License :: OSI Approved :: MIT License',
],
)
|
<commit_before>from sqlalchemy_mptt import __version__
from setuptools import setup
setup(
name='sqlalchemy_mptt',
version=__version__,
url='http://github.com/ITCase/sqlalchemy_mptt/',
author='Svintsov Dmitry',
author_email='root@uralbash.ru',
packages=['sqlalchemy_mptt', ],
include_package_data=True,
zip_safe=False,
test_suite="nose.collector",
license="MIT",
description='SQLAlchemy MPTT mixins (Nested Sets)',
package_data={
'': ['*.txt', '*.rst', '*.md'],
},
long_description="http://github.com/ITCase/sqlalchemy_mptt/",
install_requires=[
"sqlalchemy",
],
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Console',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'Natural Language :: English',
'Natural Language :: Russian',
'Operating System :: OS Independent',
'Programming Language :: Python',
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.2",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Framework :: Pyramid ",
"Framework :: Flask",
"Topic :: Internet",
"Topic :: Database",
'License :: OSI Approved :: MIT License',
],
)
<commit_msg>Make the package installable at the same time of its requirements.
`python setup.py --version` now works even if sqlalchemy is not
installed.<commit_after>
|
import os
import re
from setuptools import setup
with open(os.path.join('sqlalchemy_mptt', '__init__.py'), 'rb') as fh:
__version__ = (re.search(r'__version__\s*=\s*u?"([^"]+)"', fh.read())
.group(1).strip())
setup(
name='sqlalchemy_mptt',
version=__version__,
url='http://github.com/ITCase/sqlalchemy_mptt/',
author='Svintsov Dmitry',
author_email='root@uralbash.ru',
packages=['sqlalchemy_mptt', ],
include_package_data=True,
zip_safe=False,
test_suite="nose.collector",
license="MIT",
description='SQLAlchemy MPTT mixins (Nested Sets)',
package_data={
'': ['*.txt', '*.rst', '*.md'],
},
long_description="http://github.com/ITCase/sqlalchemy_mptt/",
install_requires=[
"sqlalchemy",
],
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Console',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'Natural Language :: English',
'Natural Language :: Russian',
'Operating System :: OS Independent',
'Programming Language :: Python',
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.2",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Framework :: Pyramid ",
"Framework :: Flask",
"Topic :: Internet",
"Topic :: Database",
'License :: OSI Approved :: MIT License',
],
)
|
from sqlalchemy_mptt import __version__
from setuptools import setup
setup(
name='sqlalchemy_mptt',
version=__version__,
url='http://github.com/ITCase/sqlalchemy_mptt/',
author='Svintsov Dmitry',
author_email='root@uralbash.ru',
packages=['sqlalchemy_mptt', ],
include_package_data=True,
zip_safe=False,
test_suite="nose.collector",
license="MIT",
description='SQLAlchemy MPTT mixins (Nested Sets)',
package_data={
'': ['*.txt', '*.rst', '*.md'],
},
long_description="http://github.com/ITCase/sqlalchemy_mptt/",
install_requires=[
"sqlalchemy",
],
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Console',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'Natural Language :: English',
'Natural Language :: Russian',
'Operating System :: OS Independent',
'Programming Language :: Python',
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.2",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Framework :: Pyramid ",
"Framework :: Flask",
"Topic :: Internet",
"Topic :: Database",
'License :: OSI Approved :: MIT License',
],
)
Make the package installable at the same time of its requirements.
`python setup.py --version` now works even if sqlalchemy is not
installed.
import os
import re
from setuptools import setup
with open(os.path.join('sqlalchemy_mptt', '__init__.py'), 'rb') as fh:
__version__ = (re.search(r'__version__\s*=\s*u?"([^"]+)"', fh.read())
.group(1).strip())
setup(
name='sqlalchemy_mptt',
version=__version__,
url='http://github.com/ITCase/sqlalchemy_mptt/',
author='Svintsov Dmitry',
author_email='root@uralbash.ru',
packages=['sqlalchemy_mptt', ],
include_package_data=True,
zip_safe=False,
test_suite="nose.collector",
license="MIT",
description='SQLAlchemy MPTT mixins (Nested Sets)',
package_data={
'': ['*.txt', '*.rst', '*.md'],
},
long_description="http://github.com/ITCase/sqlalchemy_mptt/",
install_requires=[
"sqlalchemy",
],
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Console',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'Natural Language :: English',
'Natural Language :: Russian',
'Operating System :: OS Independent',
'Programming Language :: Python',
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.2",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Framework :: Pyramid ",
"Framework :: Flask",
"Topic :: Internet",
"Topic :: Database",
'License :: OSI Approved :: MIT License',
],
)
|
<commit_before>from sqlalchemy_mptt import __version__
from setuptools import setup
setup(
name='sqlalchemy_mptt',
version=__version__,
url='http://github.com/ITCase/sqlalchemy_mptt/',
author='Svintsov Dmitry',
author_email='root@uralbash.ru',
packages=['sqlalchemy_mptt', ],
include_package_data=True,
zip_safe=False,
test_suite="nose.collector",
license="MIT",
description='SQLAlchemy MPTT mixins (Nested Sets)',
package_data={
'': ['*.txt', '*.rst', '*.md'],
},
long_description="http://github.com/ITCase/sqlalchemy_mptt/",
install_requires=[
"sqlalchemy",
],
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Console',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'Natural Language :: English',
'Natural Language :: Russian',
'Operating System :: OS Independent',
'Programming Language :: Python',
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.2",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Framework :: Pyramid ",
"Framework :: Flask",
"Topic :: Internet",
"Topic :: Database",
'License :: OSI Approved :: MIT License',
],
)
<commit_msg>Make the package installable at the same time of its requirements.
`python setup.py --version` now works even if sqlalchemy is not
installed.<commit_after>import os
import re
from setuptools import setup
with open(os.path.join('sqlalchemy_mptt', '__init__.py'), 'rb') as fh:
__version__ = (re.search(r'__version__\s*=\s*u?"([^"]+)"', fh.read())
.group(1).strip())
setup(
name='sqlalchemy_mptt',
version=__version__,
url='http://github.com/ITCase/sqlalchemy_mptt/',
author='Svintsov Dmitry',
author_email='root@uralbash.ru',
packages=['sqlalchemy_mptt', ],
include_package_data=True,
zip_safe=False,
test_suite="nose.collector",
license="MIT",
description='SQLAlchemy MPTT mixins (Nested Sets)',
package_data={
'': ['*.txt', '*.rst', '*.md'],
},
long_description="http://github.com/ITCase/sqlalchemy_mptt/",
install_requires=[
"sqlalchemy",
],
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Console',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'Natural Language :: English',
'Natural Language :: Russian',
'Operating System :: OS Independent',
'Programming Language :: Python',
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.2",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Framework :: Pyramid ",
"Framework :: Flask",
"Topic :: Internet",
"Topic :: Database",
'License :: OSI Approved :: MIT License',
],
)
|
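The change above is a common packaging trick worth spelling out: extracting __version__ with a regex means setup.py never imports sqlalchemy_mptt (and therefore never imports sqlalchemy), which is why python setup.py --version now works in a clean environment. A self-contained sketch of the same technique, decoding the bytes before matching so it also behaves on Python 3 (the helper name and path handling are illustrative):

import os
import re

def read_version(package_dir):
    # scan the package's __init__.py for a line like: __version__ = "1.2.3"
    init_path = os.path.join(package_dir, '__init__.py')
    with open(init_path, 'rb') as fh:
        source = fh.read().decode('utf-8')
    return re.search(r'__version__\s*=\s*u?"([^"]+)"', source).group(1).strip()

# e.g.: version = read_version('sqlalchemy_mptt')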
6bb1c15243d04acf8980a21720f6577d73e2f4b0
|
setup.py
|
setup.py
|
from setuptools import setup, find_packages
import sys, os
here = os.path.abspath(os.path.dirname(__file__))
try:
README = open(os.path.join(here, 'README.rst')).read()
except IOError:
README = ''
version = "0.0.1a2"
setup(name='backlash',
version=version,
description="Standalone WebOb port of the Werkzeug Debugger with Python3 support meant to replace WebError in future TurboGears2",
long_description=README,
classifiers=['Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Topic :: Internet :: WWW/HTTP :: WSGI :: Middleware',
'Topic :: Internet :: WWW/HTTP :: WSGI'],
keywords='wsgi',
author='Alessandro Molina',
author_email='amol@turbogears.org',
url='https://github.com/TurboGears/backlash',
license='MIT',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
include_package_data=True,
zip_safe=False,
install_requires=[
"WebOb"
# -*- Extra requirements: -*-
],
entry_points="""
# -*- Entry points: -*-
""",
)
|
from setuptools import setup, find_packages
import sys, os
here = os.path.abspath(os.path.dirname(__file__))
try:
README = open(os.path.join(here, 'README.rst')).read()
except IOError:
README = ''
version = "0.0.2"
setup(name='backlash',
version=version,
description="Standalone WebOb port of the Werkzeug Debugger with Python3 support meant to replace WebError in future TurboGears2",
long_description=README,
classifiers=['Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.2',
'Topic :: Internet :: WWW/HTTP :: WSGI :: Middleware',
'Topic :: Internet :: WWW/HTTP :: WSGI'],
keywords='wsgi',
author='Alessandro Molina',
author_email='amol@turbogears.org',
url='https://github.com/TurboGears/backlash',
license='MIT',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
include_package_data=True,
zip_safe=False,
install_requires=[
"WebOb"
# -*- Extra requirements: -*-
],
entry_points="""
# -*- Entry points: -*-
""",
)
|
Raise version for release due to font licensing issues resolution
|
Raise version for release due to font licensing issues resolution
|
Python
|
mit
|
TurboGears/backlash,TurboGears/backlash
|
from setuptools import setup, find_packages
import sys, os
here = os.path.abspath(os.path.dirname(__file__))
try:
README = open(os.path.join(here, 'README.rst')).read()
except IOError:
README = ''
version = "0.0.1a2"
setup(name='backlash',
version=version,
description="Standalone WebOb port of the Werkzeug Debugger with Python3 support meant to replace WebError in future TurboGears2",
long_description=README,
classifiers=['Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Topic :: Internet :: WWW/HTTP :: WSGI :: Middleware',
'Topic :: Internet :: WWW/HTTP :: WSGI'],
keywords='wsgi',
author='Alessandro Molina',
author_email='amol@turbogears.org',
url='https://github.com/TurboGears/backlash',
license='MIT',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
include_package_data=True,
zip_safe=False,
install_requires=[
"WebOb"
# -*- Extra requirements: -*-
],
entry_points="""
# -*- Entry points: -*-
""",
)
Raise version for release due to font licensing issues resolution
|
from setuptools import setup, find_packages
import sys, os
here = os.path.abspath(os.path.dirname(__file__))
try:
README = open(os.path.join(here, 'README.rst')).read()
except IOError:
README = ''
version = "0.0.2"
setup(name='backlash',
version=version,
description="Standalone WebOb port of the Werkzeug Debugger with Python3 support meant to replace WebError in future TurboGears2",
long_description=README,
classifiers=['Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.2',
'Topic :: Internet :: WWW/HTTP :: WSGI :: Middleware',
'Topic :: Internet :: WWW/HTTP :: WSGI'],
keywords='wsgi',
author='Alessandro Molina',
author_email='amol@turbogears.org',
url='https://github.com/TurboGears/backlash',
license='MIT',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
include_package_data=True,
zip_safe=False,
install_requires=[
"WebOb"
# -*- Extra requirements: -*-
],
entry_points="""
# -*- Entry points: -*-
""",
)
|
<commit_before>from setuptools import setup, find_packages
import sys, os
here = os.path.abspath(os.path.dirname(__file__))
try:
README = open(os.path.join(here, 'README.rst')).read()
except IOError:
README = ''
version = "0.0.1a2"
setup(name='backlash',
version=version,
description="Standalone WebOb port of the Werkzeug Debugger with Python3 support meant to replace WebError in future TurboGears2",
long_description=README,
classifiers=['Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Topic :: Internet :: WWW/HTTP :: WSGI :: Middleware',
'Topic :: Internet :: WWW/HTTP :: WSGI'],
keywords='wsgi',
author='Alessandro Molina',
author_email='amol@turbogears.org',
url='https://github.com/TurboGears/backlash',
license='MIT',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
include_package_data=True,
zip_safe=False,
install_requires=[
"WebOb"
# -*- Extra requirements: -*-
],
entry_points="""
# -*- Entry points: -*-
""",
)
<commit_msg>Raise version for release due to font licensing issues resolution<commit_after>
|
from setuptools import setup, find_packages
import sys, os
here = os.path.abspath(os.path.dirname(__file__))
try:
README = open(os.path.join(here, 'README.rst')).read()
except IOError:
README = ''
version = "0.0.2"
setup(name='backlash',
version=version,
description="Standalone WebOb port of the Werkzeug Debugger with Python3 support meant to replace WebError in future TurboGears2",
long_description=README,
classifiers=['Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.2',
'Topic :: Internet :: WWW/HTTP :: WSGI :: Middleware',
'Topic :: Internet :: WWW/HTTP :: WSGI'],
keywords='wsgi',
author='Alessandro Molina',
author_email='amol@turbogears.org',
url='https://github.com/TurboGears/backlash',
license='MIT',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
include_package_data=True,
zip_safe=False,
install_requires=[
"WebOb"
# -*- Extra requirements: -*-
],
entry_points="""
# -*- Entry points: -*-
""",
)
|
from setuptools import setup, find_packages
import sys, os
here = os.path.abspath(os.path.dirname(__file__))
try:
README = open(os.path.join(here, 'README.rst')).read()
except IOError:
README = ''
version = "0.0.1a2"
setup(name='backlash',
version=version,
description="Standalone WebOb port of the Werkzeug Debugger with Python3 support meant to replace WebError in future TurboGears2",
long_description=README,
classifiers=['Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Topic :: Internet :: WWW/HTTP :: WSGI :: Middleware',
'Topic :: Internet :: WWW/HTTP :: WSGI'],
keywords='wsgi',
author='Alessandro Molina',
author_email='amol@turbogears.org',
url='https://github.com/TurboGears/backlash',
license='MIT',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
include_package_data=True,
zip_safe=False,
install_requires=[
"WebOb"
# -*- Extra requirements: -*-
],
entry_points="""
# -*- Entry points: -*-
""",
)
Raise version for release due to font licensing issues resolution
from setuptools import setup, find_packages
import sys, os
here = os.path.abspath(os.path.dirname(__file__))
try:
README = open(os.path.join(here, 'README.rst')).read()
except IOError:
README = ''
version = "0.0.2"
setup(name='backlash',
version=version,
description="Standalone WebOb port of the Werkzeug Debugger with Python3 support meant to replace WebError in future TurboGears2",
long_description=README,
classifiers=['Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.2',
'Topic :: Internet :: WWW/HTTP :: WSGI :: Middleware',
'Topic :: Internet :: WWW/HTTP :: WSGI'],
keywords='wsgi',
author='Alessandro Molina',
author_email='amol@turbogears.org',
url='https://github.com/TurboGears/backlash',
license='MIT',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
include_package_data=True,
zip_safe=False,
install_requires=[
"WebOb"
# -*- Extra requirements: -*-
],
entry_points="""
# -*- Entry points: -*-
""",
)
|
<commit_before>from setuptools import setup, find_packages
import sys, os
here = os.path.abspath(os.path.dirname(__file__))
try:
README = open(os.path.join(here, 'README.rst')).read()
except IOError:
README = ''
version = "0.0.1a2"
setup(name='backlash',
version=version,
description="Standalone WebOb port of the Werkzeug Debugger with Python3 support meant to replace WebError in future TurboGears2",
long_description=README,
classifiers=['Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Topic :: Internet :: WWW/HTTP :: WSGI :: Middleware',
'Topic :: Internet :: WWW/HTTP :: WSGI'],
keywords='wsgi',
author='Alessandro Molina',
author_email='amol@turbogears.org',
url='https://github.com/TurboGears/backlash',
license='MIT',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
include_package_data=True,
zip_safe=False,
install_requires=[
"WebOb"
# -*- Extra requirements: -*-
],
entry_points="""
# -*- Entry points: -*-
""",
)
<commit_msg>Raise version for release due to font licensing issues resolution<commit_after>from setuptools import setup, find_packages
import sys, os
here = os.path.abspath(os.path.dirname(__file__))
try:
README = open(os.path.join(here, 'README.rst')).read()
except IOError:
README = ''
version = "0.0.2"
setup(name='backlash',
version=version,
description="Standalone WebOb port of the Werkzeug Debugger with Python3 support meant to replace WebError in future TurboGears2",
long_description=README,
classifiers=['Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.2',
'Topic :: Internet :: WWW/HTTP :: WSGI :: Middleware',
'Topic :: Internet :: WWW/HTTP :: WSGI'],
keywords='wsgi',
author='Alessandro Molina',
author_email='amol@turbogears.org',
url='https://github.com/TurboGears/backlash',
license='MIT',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
include_package_data=True,
zip_safe=False,
install_requires=[
"WebOb"
# -*- Extra requirements: -*-
],
entry_points="""
# -*- Entry points: -*-
""",
)
|
df095a5d08a9ffa7b54be946b19f356cc3201819
|
setup.py
|
setup.py
|
#!/usr/bin/env python
import re
from setuptools import find_packages, setup
with open('py_mstr/__init__.py', 'rb') as f:
version = str(re.search('__version__ = "(.+?)"', f.read().decode('utf-8')).group(1))
setup(
name='py-mstr',
version=version,
packages=find_packages(),
description='Python API for Microstrategy Web Tasks',
url='http://github.com/infoscout/py-mstr',
author='InfoScout',
author_email='oss@infoscoutinc.com',
license='MIT',
install_requires=[
'pyquery >= 1.2.8, < 1.3.0',
'requests >= 2.3.0',
],
tests_require=['discover', 'mock'],
test_suite="tests",
classifiers=[
'Intended Audience :: Developers',
'License :: OSI Approved',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Topic :: Software Development :: Libraries',
],
)
|
#!/usr/bin/env python
import re
from setuptools import find_packages, setup
with open('py_mstr/__init__.py', 'rb') as f:
version = str(re.search('__version__ = "(.+?)"', f.read().decode('utf-8')).group(1))
setup(
name='py-mstr',
version=version,
packages=find_packages(),
description='Python API for Microstrategy Web Tasks',
url='http://github.com/infoscout/py-mstr',
author='InfoScout',
author_email='oss@infoscoutinc.com',
license='MIT',
install_requires=[
'pyquery >= 1.2.8, < 1.3.0',
'requests >= 2.3.0',
'six >= 1.9.0'
],
tests_require=['discover', 'mock'],
test_suite="tests",
classifiers=[
'Intended Audience :: Developers',
'License :: OSI Approved',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Topic :: Software Development :: Libraries',
],
)
|
Add six to the requirements.
|
RDL-4689: Add six to the requirements.
|
Python
|
mit
|
infoscout/py-mstr
|
#!/usr/bin/env python
import re
from setuptools import find_packages, setup
with open('py_mstr/__init__.py', 'rb') as f:
version = str(re.search('__version__ = "(.+?)"', f.read().decode('utf-8')).group(1))
setup(
name='py-mstr',
version=version,
packages=find_packages(),
description='Python API for Microstrategy Web Tasks',
url='http://github.com/infoscout/py-mstr',
author='InfoScout',
author_email='oss@infoscoutinc.com',
license='MIT',
install_requires=[
'pyquery >= 1.2.8, < 1.3.0',
'requests >= 2.3.0',
],
tests_require=['discover', 'mock'],
test_suite="tests",
classifiers=[
'Intended Audience :: Developers',
'License :: OSI Approved',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Topic :: Software Development :: Libraries',
],
)
RDL-4689: Add six to the requirements.
|
#!/usr/bin/env python
import re
from setuptools import find_packages, setup
with open('py_mstr/__init__.py', 'rb') as f:
version = str(re.search('__version__ = "(.+?)"', f.read().decode('utf-8')).group(1))
setup(
name='py-mstr',
version=version,
packages=find_packages(),
description='Python API for Microstrategy Web Tasks',
url='http://github.com/infoscout/py-mstr',
author='InfoScout',
author_email='oss@infoscoutinc.com',
license='MIT',
install_requires=[
'pyquery >= 1.2.8, < 1.3.0',
'requests >= 2.3.0',
'six >= 1.9.0'
],
tests_require=['discover', 'mock'],
test_suite="tests",
classifiers=[
'Intended Audience :: Developers',
'License :: OSI Approved',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Topic :: Software Development :: Libraries',
],
)
|
<commit_before>#!/usr/bin/env python
import re
from setuptools import find_packages, setup
with open('py_mstr/__init__.py', 'rb') as f:
version = str(re.search('__version__ = "(.+?)"', f.read().decode('utf-8')).group(1))
setup(
name='py-mstr',
version=version,
packages=find_packages(),
description='Python API for Microstrategy Web Tasks',
url='http://github.com/infoscout/py-mstr',
author='InfoScout',
author_email='oss@infoscoutinc.com',
license='MIT',
install_requires=[
'pyquery >= 1.2.8, < 1.3.0',
'requests >= 2.3.0',
],
tests_require=['discover', 'mock'],
test_suite="tests",
classifiers=[
'Intended Audience :: Developers',
'License :: OSI Approved',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Topic :: Software Development :: Libraries',
],
)
<commit_msg>RDL-4689: Add six to the requirements.<commit_after>
|
#!/usr/bin/env python
import re
from setuptools import find_packages, setup
with open('py_mstr/__init__.py', 'rb') as f:
version = str(re.search('__version__ = "(.+?)"', f.read().decode('utf-8')).group(1))
setup(
name='py-mstr',
version=version,
packages=find_packages(),
description='Python API for Microstrategy Web Tasks',
url='http://github.com/infoscout/py-mstr',
author='InfoScout',
author_email='oss@infoscoutinc.com',
license='MIT',
install_requires=[
'pyquery >= 1.2.8, < 1.3.0',
'requests >= 2.3.0',
'six >= 1.9.0'
],
tests_require=['discover', 'mock'],
test_suite="tests",
classifiers=[
'Intended Audience :: Developers',
'License :: OSI Approved',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Topic :: Software Development :: Libraries',
],
)
|
#!/usr/bin/env python
import re
from setuptools import find_packages, setup
with open('py_mstr/__init__.py', 'rb') as f:
version = str(re.search('__version__ = "(.+?)"', f.read().decode('utf-8')).group(1))
setup(
name='py-mstr',
version=version,
packages=find_packages(),
description='Python API for Microstrategy Web Tasks',
url='http://github.com/infoscout/py-mstr',
author='InfoScout',
author_email='oss@infoscoutinc.com',
license='MIT',
install_requires=[
'pyquery >= 1.2.8, < 1.3.0',
'requests >= 2.3.0',
],
tests_require=['discover', 'mock'],
test_suite="tests",
classifiers=[
'Intended Audience :: Developers',
'License :: OSI Approved',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Topic :: Software Development :: Libraries',
],
)
RDL-4689: Add six to the requirements.
#!/usr/bin/env python
import re
from setuptools import find_packages, setup
with open('py_mstr/__init__.py', 'rb') as f:
version = str(re.search('__version__ = "(.+?)"', f.read().decode('utf-8')).group(1))
setup(
name='py-mstr',
version=version,
packages=find_packages(),
description='Python API for Microstrategy Web Tasks',
url='http://github.com/infoscout/py-mstr',
author='InfoScout',
author_email='oss@infoscoutinc.com',
license='MIT',
install_requires=[
'pyquery >= 1.2.8, < 1.3.0',
'requests >= 2.3.0',
'six >= 1.9.0'
],
tests_require=['discover', 'mock'],
test_suite="tests",
classifiers=[
'Intended Audience :: Developers',
'License :: OSI Approved',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Topic :: Software Development :: Libraries',
],
)
|
<commit_before>#!/usr/bin/env python
import re
from setuptools import find_packages, setup
with open('py_mstr/__init__.py', 'rb') as f:
version = str(re.search('__version__ = "(.+?)"', f.read().decode('utf-8')).group(1))
setup(
name='py-mstr',
version=version,
packages=find_packages(),
description='Python API for Microstrategy Web Tasks',
url='http://github.com/infoscout/py-mstr',
author='InfoScout',
author_email='oss@infoscoutinc.com',
license='MIT',
install_requires=[
'pyquery >= 1.2.8, < 1.3.0',
'requests >= 2.3.0',
],
tests_require=['discover', 'mock'],
test_suite="tests",
classifiers=[
'Intended Audience :: Developers',
'License :: OSI Approved',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Topic :: Software Development :: Libraries',
],
)
<commit_msg>RDL-4689: Add six to the requirements.<commit_after>#!/usr/bin/env python
import re
from setuptools import find_packages, setup
with open('py_mstr/__init__.py', 'rb') as f:
version = str(re.search('__version__ = "(.+?)"', f.read().decode('utf-8')).group(1))
setup(
name='py-mstr',
version=version,
packages=find_packages(),
description='Python API for Microstrategy Web Tasks',
url='http://github.com/infoscout/py-mstr',
author='InfoScout',
author_email='oss@infoscoutinc.com',
license='MIT',
install_requires=[
'pyquery >= 1.2.8, < 1.3.0',
'requests >= 2.3.0',
'six >= 1.9.0'
],
tests_require=['discover', 'mock'],
test_suite="tests",
classifiers=[
'Intended Audience :: Developers',
'License :: OSI Approved',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Topic :: Software Development :: Libraries',
],
)
|
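For context on the dependency added above: six is the standard Python 2/3 compatibility shim, letting one codebase avoid duplicated interpreter checks. A tiny illustration of typical six usage, not taken from py-mstr itself:

import six

def is_text(value):
    # six.string_types is (str,) on Python 3 and (basestring,) on Python 2
    return isinstance(value, six.string_types)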
9995a3bb8b95caddc6319e68f405c70fd2a15d09
|
aldryn_faq/search_indexes.py
|
aldryn_faq/search_indexes.py
|
from aldryn_search.base import AldrynIndexBase
from aldryn_search.utils import strip_tags
from django.template import RequestContext
from haystack import indexes
from .models import Question
class QuestionIndex(AldrynIndexBase, indexes.Indexable):
INDEX_TITLE = True
def get_title(self, obj):
return obj.title
def get_index_kwargs(self, language):
return {'language': language}
def get_index_queryset(self, language):
return self.get_model().objects.all()
def get_model(self):
return Question
def get_search_data(self, obj, language, request):
text = strip_tags(obj.title)
text += u' ' + strip_tags(obj.answer_text)
plugins = obj.answer.cmsplugin_set.filter(language=language)
for base_plugin in plugins:
instance, plugin_type = base_plugin.get_plugin_instance()
if instance is None:
# this is an empty plugin
continue
else:
text += strip_tags(instance.render_plugin(context=RequestContext(request))) + u' '
return text
|
from aldryn_search.base import AldrynIndexBase
from aldryn_search.utils import strip_tags
from django.template import RequestContext
from haystack import indexes
from .models import Question, Category
class QuestionIndex(AldrynIndexBase, indexes.Indexable):
INDEX_TITLE = True
def get_title(self, obj):
return obj.title
def get_index_kwargs(self, language):
return {'language': language}
def get_index_queryset(self, language):
return self.get_model().objects.all()
def get_model(self):
return Question
def get_search_data(self, obj, language, request):
text = strip_tags(obj.title)
text += u' ' + strip_tags(obj.answer_text)
plugins = obj.answer.cmsplugin_set.filter(language=language)
for base_plugin in plugins:
instance, plugin_type = base_plugin.get_plugin_instance()
if instance is None:
# this is an empty plugin
continue
else:
text += strip_tags(instance.render_plugin(context=RequestContext(request))) + u' '
return text
class CategoryIndex(AldrynIndexBase, indexes.Indexable):
INDEX_TITLE = True
def get_title(self, obj):
return ''
def get_index_kwargs(self, language):
return {'translations__language_code': language}
def get_index_queryset(self, language):
return self.get_model().objects.all()
def get_model(self):
return Category
def get_search_data(self, obj, language, request):
return strip_tags(obj.name)
|
Add search index for faq categories
|
Add search index for faq categories
|
Python
|
bsd-3-clause
|
czpython/aldryn-faq,czpython/aldryn-faq,mkoistinen/aldryn-faq,czpython/aldryn-faq,czpython/aldryn-faq
|
from aldryn_search.base import AldrynIndexBase
from aldryn_search.utils import strip_tags
from django.template import RequestContext
from haystack import indexes
from .models import Question
class QuestionIndex(AldrynIndexBase, indexes.Indexable):
INDEX_TITLE = True
def get_title(self, obj):
return obj.title
def get_index_kwargs(self, language):
return {'language': language}
def get_index_queryset(self, language):
return self.get_model().objects.all()
def get_model(self):
return Question
def get_search_data(self, obj, language, request):
text = strip_tags(obj.title)
text += u' ' + strip_tags(obj.answer_text)
plugins = obj.answer.cmsplugin_set.filter(language=language)
for base_plugin in plugins:
instance, plugin_type = base_plugin.get_plugin_instance()
if instance is None:
# this is an empty plugin
continue
else:
text += strip_tags(instance.render_plugin(context=RequestContext(request))) + u' '
return text
Add search index for faq categories
|
from aldryn_search.base import AldrynIndexBase
from aldryn_search.utils import strip_tags
from django.template import RequestContext
from haystack import indexes
from .models import Question, Category
class QuestionIndex(AldrynIndexBase, indexes.Indexable):
INDEX_TITLE = True
def get_title(self, obj):
return obj.title
def get_index_kwargs(self, language):
return {'language': language}
def get_index_queryset(self, language):
return self.get_model().objects.all()
def get_model(self):
return Question
def get_search_data(self, obj, language, request):
text = strip_tags(obj.title)
text += u' ' + strip_tags(obj.answer_text)
plugins = obj.answer.cmsplugin_set.filter(language=language)
for base_plugin in plugins:
instance, plugin_type = base_plugin.get_plugin_instance()
if instance is None:
# this is an empty plugin
continue
else:
text += strip_tags(instance.render_plugin(context=RequestContext(request))) + u' '
return text
class CategoryIndex(AldrynIndexBase, indexes.Indexable):
INDEX_TITLE = True
def get_title(self, obj):
return ''
def get_index_kwargs(self, language):
return {'translations__language_code': language}
def get_index_queryset(self, language):
return self.get_model().objects.all()
def get_model(self):
return Category
def get_search_data(self, obj, language, request):
return strip_tags(obj.name)
|
<commit_before>from aldryn_search.base import AldrynIndexBase
from aldryn_search.utils import strip_tags
from django.template import RequestContext
from haystack import indexes
from .models import Question
class QuestionIndex(AldrynIndexBase, indexes.Indexable):
INDEX_TITLE = True
def get_title(self, obj):
return obj.title
def get_index_kwargs(self, language):
return {'language': language}
def get_index_queryset(self, language):
return self.get_model().objects.all()
def get_model(self):
return Question
def get_search_data(self, obj, language, request):
text = strip_tags(obj.title)
text += u' ' + strip_tags(obj.answer_text)
plugins = obj.answer.cmsplugin_set.filter(language=language)
for base_plugin in plugins:
instance, plugin_type = base_plugin.get_plugin_instance()
if instance is None:
# this is an empty plugin
continue
else:
text += strip_tags(instance.render_plugin(context=RequestContext(request))) + u' '
return text
<commit_msg>Add search index for faq categories<commit_after>
|
from aldryn_search.base import AldrynIndexBase
from aldryn_search.utils import strip_tags
from django.template import RequestContext
from haystack import indexes
from .models import Question, Category
class QuestionIndex(AldrynIndexBase, indexes.Indexable):
INDEX_TITLE = True
def get_title(self, obj):
return obj.title
def get_index_kwargs(self, language):
return {'language': language}
def get_index_queryset(self, language):
return self.get_model().objects.all()
def get_model(self):
return Question
def get_search_data(self, obj, language, request):
text = strip_tags(obj.title)
text += u' ' + strip_tags(obj.answer_text)
plugins = obj.answer.cmsplugin_set.filter(language=language)
for base_plugin in plugins:
instance, plugin_type = base_plugin.get_plugin_instance()
if instance is None:
# this is an empty plugin
continue
else:
text += strip_tags(instance.render_plugin(context=RequestContext(request))) + u' '
return text
class CategoryIndex(AldrynIndexBase, indexes.Indexable):
INDEX_TITLE = True
def get_title(self, obj):
return ''
def get_index_kwargs(self, language):
return {'translations__language_code': language}
def get_index_queryset(self, language):
return self.get_model().objects.all()
def get_model(self):
return Category
def get_search_data(self, obj, language, request):
return strip_tags(obj.name)
|
from aldryn_search.base import AldrynIndexBase
from aldryn_search.utils import strip_tags
from django.template import RequestContext
from haystack import indexes
from .models import Question
class QuestionIndex(AldrynIndexBase, indexes.Indexable):
INDEX_TITLE = True
def get_title(self, obj):
return obj.title
def get_index_kwargs(self, language):
return {'language': language}
def get_index_queryset(self, language):
return self.get_model().objects.all()
def get_model(self):
return Question
def get_search_data(self, obj, language, request):
text = strip_tags(obj.title)
text += u' ' + strip_tags(obj.answer_text)
plugins = obj.answer.cmsplugin_set.filter(language=language)
for base_plugin in plugins:
instance, plugin_type = base_plugin.get_plugin_instance()
if instance is None:
# this is an empty plugin
continue
else:
text += strip_tags(instance.render_plugin(context=RequestContext(request))) + u' '
return text
Add search index for faq categories
from aldryn_search.base import AldrynIndexBase
from aldryn_search.utils import strip_tags
from django.template import RequestContext
from haystack import indexes
from .models import Question, Category
class QuestionIndex(AldrynIndexBase, indexes.Indexable):
INDEX_TITLE = True
def get_title(self, obj):
return obj.title
def get_index_kwargs(self, language):
return {'language': language}
def get_index_queryset(self, language):
return self.get_model().objects.all()
def get_model(self):
return Question
def get_search_data(self, obj, language, request):
text = strip_tags(obj.title)
text += u' ' + strip_tags(obj.answer_text)
plugins = obj.answer.cmsplugin_set.filter(language=language)
for base_plugin in plugins:
instance, plugin_type = base_plugin.get_plugin_instance()
if instance is None:
# this is an empty plugin
continue
else:
text += strip_tags(instance.render_plugin(context=RequestContext(request))) + u' '
return text
class CategoryIndex(AldrynIndexBase, indexes.Indexable):
INDEX_TITLE = True
def get_title(self, obj):
return ''
def get_index_kwargs(self, language):
return {'translations__language_code': language}
def get_index_queryset(self, language):
return self.get_model().objects.all()
def get_model(self):
return Category
def get_search_data(self, obj, language, request):
return strip_tags(obj.name)
|
<commit_before>from aldryn_search.base import AldrynIndexBase
from aldryn_search.utils import strip_tags
from django.template import RequestContext
from haystack import indexes
from .models import Question
class QuestionIndex(AldrynIndexBase, indexes.Indexable):
INDEX_TITLE = True
def get_title(self, obj):
return obj.title
def get_index_kwargs(self, language):
return {'language': language}
def get_index_queryset(self, language):
return self.get_model().objects.all()
def get_model(self):
return Question
def get_search_data(self, obj, language, request):
text = strip_tags(obj.title)
text += u' ' + strip_tags(obj.answer_text)
plugins = obj.answer.cmsplugin_set.filter(language=language)
for base_plugin in plugins:
instance, plugin_type = base_plugin.get_plugin_instance()
if instance is None:
# this is an empty plugin
continue
else:
text += strip_tags(instance.render_plugin(context=RequestContext(request))) + u' '
return text
<commit_msg>Add search index for faq categories<commit_after>from aldryn_search.base import AldrynIndexBase
from aldryn_search.utils import strip_tags
from django.template import RequestContext
from haystack import indexes
from .models import Question, Category
class QuestionIndex(AldrynIndexBase, indexes.Indexable):
INDEX_TITLE = True
def get_title(self, obj):
return obj.title
def get_index_kwargs(self, language):
return {'language': language}
def get_index_queryset(self, language):
return self.get_model().objects.all()
def get_model(self):
return Question
def get_search_data(self, obj, language, request):
text = strip_tags(obj.title)
text += u' ' + strip_tags(obj.answer_text)
plugins = obj.answer.cmsplugin_set.filter(language=language)
for base_plugin in plugins:
instance, plugin_type = base_plugin.get_plugin_instance()
if instance is None:
# this is an empty plugin
continue
else:
text += strip_tags(instance.render_plugin(context=RequestContext(request))) + u' '
return text
class CategoryIndex(AldrynIndexBase, indexes.Indexable):
INDEX_TITLE = True
def get_title(self, obj):
return ''
def get_index_kwargs(self, language):
return {'translations__language_code': language}
def get_index_queryset(self, language):
return self.get_model().objects.all()
def get_model(self):
return Category
def get_search_data(self, obj, language, request):
return strip_tags(obj.name)
|
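For context on the record above: get_search_data builds a single plain-text search blob by stripping markup from the title and from each rendered plugin, joined with spaces. Below is a minimal standalone sketch of that pattern; the strip_tags stand-in uses the standard library's html.parser and is an assumption for illustration, not the actual aldryn_search.utils.strip_tags.

from html.parser import HTMLParser

class TagStripper(HTMLParser):
    """Accumulates text content while discarding HTML tags."""
    def __init__(self):
        super().__init__()
        self.chunks = []

    def handle_data(self, data):
        self.chunks.append(data)

def strip_tags(markup):
    """Illustrative stand-in for aldryn_search.utils.strip_tags."""
    stripper = TagStripper()
    stripper.feed(markup)
    return ''.join(stripper.chunks)

# Mirrors QuestionIndex.get_search_data: the title plus each rendered
# plugin fragment, stripped of markup and joined with single spaces.
title = '<h2>How do I reset my password?</h2>'
rendered_plugins = ['<p>Open your profile.</p>', '<p>Click <b>reset</b>.</p>']
text = strip_tags(title)
for fragment in rendered_plugins:
    text += u' ' + strip_tags(fragment)
print(text)  # How do I reset my password? Open your profile. Click reset.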
082bcfefddb4ba566e35e827d9e726aacdfb80d6
|
collection_pipelines/core.py
|
collection_pipelines/core.py
|
import functools
def coroutine(fn):
def wrapper(*args, **kwargs):
generator = fn(*args, **kwargs)
next(generator)
return generator
return wrapper
class CollectionPipelineProcessor:
sink = None
start_source = None
receiver = None
def process(self, item):
raise NotImplementedError
def on_done(self):
if self.receiver:
self.receiver.close()
def source(self, start_source):
self.start_source = start_source
@coroutine
def make_generator(self):
while True:
try:
item = yield
self.process(item)
except GeneratorExit:
self.on_done()
break
def __or__(self, other):
self.sink = other
def exec():
self.receiver = self.sink.make_generator()
self.start_source()
other.source(exec)
return other
class CollectionPipelineOutput(CollectionPipelineProcessor):
"""Pipeline processor that ends the chain and starts outputing stream.
Output processor immediately starts consuming from the source.
Thus triggering the whole pipeline start.
"""
def source(self, start_source):
start_source()
|
import functools
def coroutine(fn):
def wrapper(*args, **kwargs):
generator = fn(*args, **kwargs)
next(generator)
return generator
return wrapper
class CollectionPipelineProcessor:
sink = None
start_source = None
receiver = None
def process(self, item):
raise NotImplementedError
def on_done(self):
if self.receiver:
self.receiver.close()
def source(self, start_source):
self.start_source = start_source
def return_value(self):
"""Processor return value when used with __or__ operator.
Returns:
CollectionPipelineProcessor: when processor is to be chained
with other processors.
any: any other value when the processor is used as an output and is
meant to return a value. In this way the output result can be
assigned to a Python variable.
"""
return self
@coroutine
def make_generator(self):
while True:
try:
item = yield
self.process(item)
except GeneratorExit:
self.on_done()
break
def __or__(self, other):
"""Overwrites the '|' operator.
Args:
other (CollectionPipelineProcessor)
Returns:
whatever other.return_value() returns.
"""
self.sink = other
def exec():
self.receiver = self.sink.make_generator()
self.start_source()
other.source(exec)
return other.return_value()
class CollectionPipelineOutput(CollectionPipelineProcessor):
"""Pipeline processor that ends the chain and starts outputing stream.
Output processor immediately starts consuming from the source.
Thus triggering the whole pipeline start.
"""
def source(self, start_source):
start_source()
|
Allow to overwrite the pipeline processor return value
|
Allow to overwrite the pipeline processor return value
|
Python
|
mit
|
povilasb/pycollection-pipelines
|
import functools
def coroutine(fn):
def wrapper(*args, **kwargs):
generator = fn(*args, **kwargs)
next(generator)
return generator
return wrapper
class CollectionPipelineProcessor:
sink = None
start_source = None
receiver = None
def process(self, item):
raise NotImplementedError
def on_done(self):
if self.receiver:
self.receiver.close()
def source(self, start_source):
self.start_source = start_source
@coroutine
def make_generator(self):
while True:
try:
item = yield
self.process(item)
except GeneratorExit:
self.on_done()
break
def __or__(self, other):
self.sink = other
def exec():
self.receiver = self.sink.make_generator()
self.start_source()
other.source(exec)
return other
class CollectionPipelineOutput(CollectionPipelineProcessor):
"""Pipeline processor that ends the chain and starts outputing stream.
Output processor immediately starts consuming from the source.
Thus triggering the whole pipeline start.
"""
def source(self, start_source):
start_source()
Allow to overwrite the pipeline processor return value
|
import functools
def coroutine(fn):
def wrapper(*args, **kwargs):
generator = fn(*args, **kwargs)
next(generator)
return generator
return wrapper
class CollectionPipelineProcessor:
sink = None
start_source = None
receiver = None
def process(self, item):
raise NotImplementedError
def on_done(self):
if self.receiver:
self.receiver.close()
def source(self, start_source):
self.start_source = start_source
def return_value(self):
"""Processor return value when used with __or__ operator.
Returns:
CollectionPipelineProcessor: when processor is to be chained
with other processors.
any: any other value when the processor is used as an output and is
meant to return a value. In this way the output result can be
assigned to a Python variable.
"""
return self
@coroutine
def make_generator(self):
while True:
try:
item = yield
self.process(item)
except GeneratorExit:
self.on_done()
break
def __or__(self, other):
"""Overwrites the '|' operator.
Args:
other (CollectionPipelineProcessor)
Returns:
whatever other.return_value() returns.
"""
self.sink = other
def exec():
self.receiver = self.sink.make_generator()
self.start_source()
other.source(exec)
return other.return_value()
class CollectionPipelineOutput(CollectionPipelineProcessor):
"""Pipeline processor that ends the chain and starts outputing stream.
Output processor immediately starts consuming from the source.
Thus triggering the whole pipeline start.
"""
def source(self, start_source):
start_source()
|
<commit_before>import functools
def coroutine(fn):
def wrapper(*args, **kwargs):
generator = fn(*args, **kwargs)
next(generator)
return generator
return wrapper
class CollectionPipelineProcessor:
sink = None
start_source = None
receiver = None
def process(self, item):
raise NotImplementedError
def on_done(self):
if self.receiver:
self.receiver.close()
def source(self, start_source):
self.start_source = start_source
@coroutine
def make_generator(self):
while True:
try:
item = yield
self.process(item)
except GeneratorExit:
self.on_done()
break
def __or__(self, other):
self.sink = other
def exec():
self.receiver = self.sink.make_generator()
self.start_source()
other.source(exec)
return other
class CollectionPipelineOutput(CollectionPipelineProcessor):
"""Pipeline processor that ends the chain and starts outputing stream.
Output processor immediately starts consuming from the source.
Thus triggering the whole pipeline start.
"""
def source(self, start_source):
start_source()
<commit_msg>Allow to overwrite the pipeline processor return value<commit_after>
|
import functools
def coroutine(fn):
def wrapper(*args, **kwargs):
generator = fn(*args, **kwargs)
next(generator)
return generator
return wrapper
class CollectionPipelineProcessor:
sink = None
start_source = None
receiver = None
def process(self, item):
raise NotImplementedError
def on_done(self):
if self.receiver:
self.receiver.close()
def source(self, start_source):
self.start_source = start_source
def return_value(self):
"""Processor return value when used with __or__ operator.
Returns:
CollectionPipelineProcessor: when processor is to be chained
with other processors.
any: any other value when the processor is used as an output and is
meant to return a value. In this way the output result can be
assigned to a Python variable.
"""
return self
@coroutine
def make_generator(self):
while True:
try:
item = yield
self.process(item)
except GeneratorExit:
self.on_done()
break
def __or__(self, other):
"""Overwrites the '|' operator.
Args:
other (CollectionPipelineProcessor)
Returns:
whatever other.return_value() returns.
"""
self.sink = other
def exec():
self.receiver = self.sink.make_generator()
self.start_source()
other.source(exec)
return other.return_value()
class CollectionPipelineOutput(CollectionPipelineProcessor):
"""Pipeline processor that ends the chain and starts outputing stream.
Output processor immediately starts consuming from the source.
Thus triggering the whole pipeline start.
"""
def source(self, start_source):
start_source()
|
import functools
def coroutine(fn):
def wrapper(*args, **kwargs):
generator = fn(*args, **kwargs)
next(generator)
return generator
return wrapper
class CollectionPipelineProcessor:
sink = None
start_source = None
receiver = None
def process(self, item):
raise NotImplementedError
def on_done(self):
if self.receiver:
self.receiver.close()
def source(self, start_source):
self.start_source = start_source
@coroutine
def make_generator(self):
while True:
try:
item = yield
self.process(item)
except GeneratorExit:
self.on_done()
break
def __or__(self, other):
self.sink = other
def exec():
self.receiver = self.sink.make_generator()
self.start_source()
other.source(exec)
return other
class CollectionPipelineOutput(CollectionPipelineProcessor):
"""Pipeline processor that ends the chain and starts outputing stream.
Output processor immediately starts consuming from the source.
Thus triggering the whole pipeline start.
"""
def source(self, start_source):
start_source()
Allow to overwrite the pipeline processor return value
import functools
def coroutine(fn):
def wrapper(*args, **kwargs):
generator = fn(*args, **kwargs)
next(generator)
return generator
return wrapper
class CollectionPipelineProcessor:
sink = None
start_source = None
receiver = None
def process(self, item):
raise NotImplementedError
def on_done(self):
if self.receiver:
self.receiver.close()
def source(self, start_source):
self.start_source = start_source
def return_value(self):
"""Processor return value when used with __or__ operator.
Returns:
CollectionPipelineProcessor: when processor is to be chained
with other processors.
any: any other value when the processor is used as an output and is
meant to return a value. In this way the output result can be
assigned to a Python variable.
"""
return self
@coroutine
def make_generator(self):
while True:
try:
item = yield
self.process(item)
except GeneratorExit:
self.on_done()
break
def __or__(self, other):
"""Overwrites the '|' operator.
Args:
other (CollectionPipelineProcessor)
Returns:
whatever other.return_value() returns.
"""
self.sink = other
def exec():
self.receiver = self.sink.make_generator()
self.start_source()
other.source(exec)
return other.return_value()
class CollectionPipelineOutput(CollectionPipelineProcessor):
"""Pipeline processor that ends the chain and starts outputing stream.
Output processor immediately starts consuming from the source.
Thus triggering the whole pipeline start.
"""
def source(self, start_source):
start_source()
|
<commit_before>import functools
def coroutine(fn):
def wrapper(*args, **kwargs):
generator = fn(*args, **kwargs)
next(generator)
return generator
return wrapper
class CollectionPipelineProcessor:
sink = None
start_source = None
receiver = None
def process(self, item):
raise NotImplementedError
def on_done(self):
if self.receiver:
self.receiver.close()
def source(self, start_source):
self.start_source = start_source
@coroutine
def make_generator(self):
while True:
try:
item = yield
self.process(item)
except GeneratorExit:
self.on_done()
break
def __or__(self, other):
self.sink = other
def exec():
self.receiver = self.sink.make_generator()
self.start_source()
other.source(exec)
return other
class CollectionPipelineOutput(CollectionPipelineProcessor):
"""Pipeline processor that ends the chain and starts outputing stream.
Output processor immediately starts consuming from the source.
Thus triggering the whole pipeline start.
"""
def source(self, start_source):
start_source()
<commit_msg>Allow to overwrite the pipeline processor return value<commit_after>import functools
def coroutine(fn):
def wrapper(*args, **kwargs):
generator = fn(*args, **kwargs)
next(generator)
return generator
return wrapper
class CollectionPipelineProcessor:
sink = None
start_source = None
receiver = None
def process(self, item):
raise NotImplementedError
def on_done(self):
if self.receiver:
self.receiver.close()
def source(self, start_source):
self.start_source = start_source
def return_value(self):
"""Processor return value when used with __or__ operator.
Returns:
CollectionPipelineProcessor: when processor is to be chained
with other processors.
any: any other value when the processor is used as an output and is
meant to return a value. In this way the output result can be
assigned to a Python variable.
"""
return self
@coroutine
def make_generator(self):
while True:
try:
item = yield
self.process(item)
except GeneratorExit:
self.on_done()
break
def __or__(self, other):
"""Overwrites the '|' operator.
Args:
other (CollectionPipelineProcessor)
Returns:
whatever other.return_value() returns.
"""
self.sink = other
def exec():
self.receiver = self.sink.make_generator()
self.start_source()
other.source(exec)
return other.return_value()
class CollectionPipelineOutput(CollectionPipelineProcessor):
"""Pipeline processor that ends the chain and starts outputing stream.
Output processor immediately starts consuming from the source.
Thus triggering the whole pipeline start.
"""
def source(self, start_source):
start_source()
|
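For context on the record above: the new return_value hook lets an output processor decide what the whole pipeline expression evaluates to, so a result can land in a plain Python variable. Below is a minimal usage sketch, assuming the classes shown above are importable from collection_pipelines.core (the file this commit touches); FromList and ToList are hypothetical helpers written here for illustration.

from collection_pipelines.core import (
    CollectionPipelineOutput,
    CollectionPipelineProcessor,
)

class FromList(CollectionPipelineProcessor):
    """Hypothetical source processor: pushes items from a list downstream."""
    def __init__(self, items):
        self.items = items
        # A source drives itself: starting the pipeline means emitting.
        self.source(self._emit)

    def _emit(self):
        for item in self.items:
            self.receiver.send(item)
        self.receiver.close()

class ToList(CollectionPipelineOutput):
    """Hypothetical output processor: collects items into a list."""
    def __init__(self):
        self.collected = []

    def process(self, item):
        self.collected.append(item)

    def return_value(self):
        # The hook this commit adds: the pipeline expression itself
        # now evaluates to the collected list instead of the processor.
        return self.collected

result = FromList([1, 2, 3]) | ToList()
print(result)  # [1, 2, 3]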